Merge branch 'fix/chore-fix' into dev/plugin-deploy

Yeuoly committed on 2024-12-31 17:43:16 +08:00
97 changed files with 2006 additions and 909 deletions

View File

@ -85,11 +85,11 @@ ignore = [
]
"tests/*" = [
"F811", # redefined-while-unused
"F401", # unused-import
]
[lint.pyflakes]
extend-generics = [
allowed-unused-imports = [
"_pytest.monkeypatch",
"tests.integration_tests",
"tests.unit_tests",
]

View File

@ -1,12 +1,8 @@
from libs import version_utils
# preparation before creating app
version_utils.check_supported_python_version()
import os
import sys
def is_db_command():
import sys
if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
return True
return False
@ -18,10 +14,22 @@ if is_db_command():
app = create_migrations_app()
else:
from app_factory import create_app
from libs import threadings_utils
if os.environ.get("FLASK_DEBUG", "False") != "True":
from gevent import monkey # type: ignore
threadings_utils.apply_gevent_threading_patch()
# gevent
monkey.patch_all()
from grpc.experimental import gevent as grpc_gevent # type: ignore
# grpc gevent
grpc_gevent.init_gevent()
import psycogreen.gevent # type: ignore
psycogreen.gevent.patch_psycopg()
from app_factory import create_app
app = create_app()
celery = app.extensions["celery"]
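Note: the entrypoint now inlines the gevent/grpc/psycopg2 patching that previously lived in libs/threadings_utils (deleted further down), because monkey.patch_all() must run before app_factory, or anything it imports, creates sockets or threads. A minimal sanity check of the ordering, assuming gevent is installed:

from gevent import monkey

monkey.patch_all()

import socket  # resolves to the already-patched module

print(monkey.is_module_patched("socket"))  # True
print(socket.socket)  # gevent's cooperative socket class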

View File

@ -677,3 +677,42 @@ def extract_plugins(output_file: str, workers: int):
PluginMigration.extract_plugins(output_file, workers)
click.echo(click.style("Extract plugins completed.", fg="green"))
@click.command("extract-unique-identifiers", help="Extract unique identifiers.")
@click.option(
"--output_file",
prompt=True,
help="The file to store the extracted unique identifiers.",
default="unique_identifiers.json",
)
@click.option(
"--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
)
def extract_unique_plugins(output_file: str, input_file: str):
"""
Extract unique plugins.
"""
click.echo(click.style("Starting extract unique plugins.", fg="white"))
PluginMigration.extract_unique_plugins(input_file, output_file)
click.echo(click.style("Extract unique plugins completed.", fg="green"))
@click.command("install-plugins", help="Install plugins.")
@click.option(
"--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
)
@click.option(
"--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
)
def install_plugins(input_file: str, output_file: str):
"""
Install plugins.
"""
click.echo(click.style("Starting install plugins.", fg="white"))
PluginMigration.install_plugins(input_file, output_file)
click.echo(click.style("Install plugins completed.", fg="green"))

View File

@ -823,6 +823,13 @@ class LoginConfig(BaseSettings):
)
class AccountConfig(BaseSettings):
ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a account deletion token remains valid",
default=5,
)
class FeatureConfig(
# place the configs in alphabet order
AppExecutionConfig,
@ -852,6 +859,7 @@ class FeatureConfig(
WorkflowNodeExecutionConfig,
WorkspaceConfig,
LoginConfig,
AccountConfig,
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,
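AccountConfig follows the same pydantic-settings pattern as the other config classes, so the new expiry should be overridable through the environment. A sketch, assuming the usual configs import path:

import os

os.environ["ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES"] = "10"  # set before configs is imported

from configs import dify_config  # assumed import path

print(dify_config.ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES)  # -> 10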

View File

@ -2,7 +2,7 @@ import json
import logging
from flask import abort, request
from flask_restful import Resource, marshal_with, reqparse # type: ignore
from flask_restful import Resource, inputs, marshal_with, reqparse # type: ignore
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
@ -14,7 +14,7 @@ from controllers.console.wraps import account_initialization_required, setup_req
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from factories import variable_factory
from fields.workflow_fields import workflow_fields
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
from libs.helper import TimestampField, uuid_value
@ -474,6 +474,31 @@ class WorkflowConfigApi(Resource):
}
class PublishedAllWorkflowApi(Resource):
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_pagination_fields)
def get(self, app_model: App):
"""
Get published workflows
"""
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
page = args.get("page")
limit = args.get("limit")
workflow_service = WorkflowService()
workflows, has_more = workflow_service.get_all_published_workflow(app_model=app_model, page=page, limit=limit)
return {"items": workflows, "page": page, "limit": limit, "has_more": has_more}
api.add_resource(DraftWorkflowApi, "/apps/<uuid:app_id>/workflows/draft")
api.add_resource(WorkflowConfigApi, "/apps/<uuid:app_id>/workflows/draft/config")
api.add_resource(AdvancedChatDraftWorkflowRunApi, "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
@ -488,6 +513,7 @@ api.add_resource(
WorkflowDraftRunIterationNodeApi, "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run"
)
api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish")
api.add_resource(PublishedAllWorkflowApi, "/apps/<uuid:app_id>/workflows")
api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
api.add_resource(
DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>"
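A hypothetical call against the new listing route (the host, /console/api prefix, and auth header are assumptions, not shown in this diff):

import requests

resp = requests.get(
    "http://localhost:5001/console/api/apps/<app_id>/workflows",
    params={"page": 1, "limit": 20},
    headers={"Authorization": "Bearer <access-token>"},
)
print(resp.json())  # {"items": [...], "page": 1, "limit": 20, "has_more": false}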

View File

@ -53,3 +53,9 @@ class EmailCodeLoginRateLimitExceededError(BaseHTTPException):
error_code = "email_code_login_rate_limit_exceeded"
description = "Too many login emails have been sent. Please try again in 5 minutes."
code = 429
class EmailCodeAccountDeletionRateLimitExceededError(BaseHTTPException):
error_code = "email_code_account_deletion_rate_limit_exceeded"
description = "Too many account deletion emails have been sent. Please try again in 5 minutes."
code = 429

View File

@ -8,13 +8,8 @@ from sqlalchemy.orm import Session
from constants.languages import languages
from controllers.console import api
from controllers.console.auth.error import (
EmailCodeError,
InvalidEmailError,
InvalidTokenError,
PasswordMismatchError,
)
from controllers.console.error import AccountNotFound, EmailSendIpLimitError
from controllers.console.auth.error import EmailCodeError, InvalidEmailError, InvalidTokenError, PasswordMismatchError
from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
from controllers.console.wraps import setup_required
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
@ -22,6 +17,7 @@ from libs.helper import email, extract_remote_ip
from libs.password import hash_password, valid_password
from models.account import Account
from services.account_service import AccountService, TenantService
from services.errors.account import AccountRegisterError
from services.errors.workspace import WorkSpaceNotAllowedCreateError
from services.feature_service import FeatureService
@ -133,6 +129,8 @@ class ForgotPasswordResetApi(Resource):
)
except WorkSpaceNotAllowedCreateError:
pass
except AccountRegisterError as are:
raise AccountInFreezeError()
return {"result": "success"}

View File

@ -5,6 +5,7 @@ from flask import request
from flask_restful import Resource, reqparse # type: ignore
import services
from configs import dify_config
from constants.languages import languages
from controllers.console import api
from controllers.console.auth.error import (
@ -16,6 +17,7 @@ from controllers.console.auth.error import (
)
from controllers.console.error import (
AccountBannedError,
AccountInFreezeError,
AccountNotFound,
EmailSendIpLimitError,
NotAllowedCreateWorkspace,
@ -26,6 +28,8 @@ from libs.helper import email, extract_remote_ip
from libs.password import valid_password
from models.account import Account
from services.account_service import AccountService, RegisterService, TenantService
from services.billing_service import BillingService
from services.errors.account import AccountRegisterError
from services.errors.workspace import WorkSpaceNotAllowedCreateError
from services.feature_service import FeatureService
@ -44,6 +48,9 @@ class LoginApi(Resource):
parser.add_argument("language", type=str, required=False, default="en-US", location="json")
args = parser.parse_args()
if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]):
raise AccountInFreezeError()
is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"])
if is_login_error_rate_limit:
raise EmailPasswordLoginLimitError()
@ -113,8 +120,10 @@ class ResetPasswordSendEmailApi(Resource):
language = "zh-Hans"
else:
language = "en-US"
account = AccountService.get_user_through_email(args["email"])
try:
account = AccountService.get_user_through_email(args["email"])
except AccountRegisterError as are:
raise AccountInFreezeError()
if account is None:
if FeatureService.get_system_features().is_allow_register:
token = AccountService.send_reset_password_email(email=args["email"], language=language)
@ -142,8 +151,11 @@ class EmailCodeLoginSendEmailApi(Resource):
language = "zh-Hans"
else:
language = "en-US"
try:
account = AccountService.get_user_through_email(args["email"])
except AccountRegisterError as are:
raise AccountInFreezeError()
account = AccountService.get_user_through_email(args["email"])
if account is None:
if FeatureService.get_system_features().is_allow_register:
token = AccountService.send_email_code_login_email(email=args["email"], language=language)
@ -177,7 +189,10 @@ class EmailCodeLoginApi(Resource):
raise EmailCodeError()
AccountService.revoke_email_code_login_token(args["token"])
account = AccountService.get_user_through_email(user_email)
try:
account = AccountService.get_user_through_email(user_email)
except AccountRegisterError as are:
raise AccountInFreezeError()
if account:
tenant = TenantService.get_join_tenants(account)
if not tenant:
@ -196,6 +211,8 @@ class EmailCodeLoginApi(Resource):
)
except WorkSpaceNotAllowedCreateError:
return NotAllowedCreateWorkspace()
except AccountRegisterError as are:
raise AccountInFreezeError()
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
AccountService.reset_login_error_rate_limit(args["email"])
return {"result": "success", "data": token_pair.model_dump()}

View File

@ -18,7 +18,7 @@ from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
from models import Account
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService, TenantService
from services.errors.account import AccountNotFoundError
from services.errors.account import AccountNotFoundError, AccountRegisterError
from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError
from services.feature_service import FeatureService
@ -101,6 +101,8 @@ class OAuthCallback(Resource):
f"{dify_config.CONSOLE_WEB_URL}/signin"
"?message=Workspace not found, please contact system admin to invite you to join in a workspace."
)
except AccountRegisterError as e:
return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}")
# Check account status
if account.status == AccountStatus.BANNED.value:

View File

@ -92,3 +92,12 @@ class UnauthorizedAndForceLogout(BaseHTTPException):
error_code = "unauthorized_and_force_logout"
description = "Unauthorized and force logout."
code = 401
class AccountInFreezeError(BaseHTTPException):
error_code = "account_in_freeze"
code = 400
description = (
"This email account has been deleted within the past 30 days"
"and is temporarily unavailable for new account registration."
)

View File

@ -11,6 +11,7 @@ from controllers.console import api
from controllers.console.workspace.error import (
AccountAlreadyInitedError,
CurrentPasswordIncorrectError,
InvalidAccountDeletionCodeError,
InvalidInvitationCodeError,
RepeatPasswordNotMatchError,
)
@ -21,6 +22,7 @@ from libs.helper import TimestampField, timezone
from libs.login import login_required
from models import AccountIntegrate, InvitationCode
from services.account_service import AccountService
from services.billing_service import BillingService
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
@ -242,6 +244,54 @@ class AccountIntegrateApi(Resource):
return {"data": integrate_data}
class AccountDeleteVerifyApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
account = current_user
token, code = AccountService.generate_account_deletion_verification_code(account)
AccountService.send_account_deletion_verification_email(account, code)
return {"result": "success", "data": token}
class AccountDeleteApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
account = current_user
parser = reqparse.RequestParser()
parser.add_argument("token", type=str, required=True, location="json")
parser.add_argument("code", type=str, required=True, location="json")
args = parser.parse_args()
if not AccountService.verify_account_deletion_code(args["token"], args["code"]):
raise InvalidAccountDeletionCodeError()
AccountService.delete_account(account)
return {"result": "success"}
class AccountDeleteUpdateFeedbackApi(Resource):
@setup_required
def post(self):
account = current_user
parser = reqparse.RequestParser()
parser.add_argument("email", type=str, required=True, location="json")
parser.add_argument("feedback", type=str, required=True, location="json")
args = parser.parse_args()
BillingService.update_account_deletion_feedback(args["email"], args["feedback"])
return {"result": "success"}
# Register API resources
api.add_resource(AccountInitApi, "/account/init")
api.add_resource(AccountProfileApi, "/account/profile")
@ -252,5 +302,8 @@ api.add_resource(AccountInterfaceThemeApi, "/account/interface-theme")
api.add_resource(AccountTimezoneApi, "/account/timezone")
api.add_resource(AccountPasswordApi, "/account/password")
api.add_resource(AccountIntegrateApi, "/account/integrates")
api.add_resource(AccountDeleteVerifyApi, "/account/delete/verify")
api.add_resource(AccountDeleteApi, "/account/delete")
api.add_resource(AccountDeleteUpdateFeedbackApi, "/account/delete/feedback")
# api.add_resource(AccountEmailApi, '/account/email')
# api.add_resource(AccountEmailVerifyApi, '/account/email-verify')
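A hypothetical client flow across the three new routes (base URL and auth are assumptions):

import requests

base = "http://localhost:5001/console/api"
headers = {"Authorization": "Bearer <access-token>"}

# step 1: trigger the verification email; the response carries the token
token = requests.get(f"{base}/account/delete/verify", headers=headers).json()["data"]

# step 2: confirm with the 6-digit code received by email
requests.post(f"{base}/account/delete", json={"token": token, "code": "123456"}, headers=headers)

# optionally: leave feedback afterwards
requests.post(f"{base}/account/delete/feedback", json={"email": "user@example.com", "feedback": "..."}, headers=headers)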

View File

@ -35,3 +35,9 @@ class AccountNotInitializedError(BaseHTTPException):
error_code = "account_not_initialized"
description = "The account has not been initialized yet. Please proceed with the initialization process first."
code = 400
class InvalidAccountDeletionCodeError(BaseHTTPException):
error_code = "invalid_account_deletion_code"
description = "Invalid account deletion code."
code = 400

View File

@ -122,7 +122,7 @@ class MemberUpdateRoleApi(Resource):
return {"code": "invalid-role", "message": "Invalid role"}, 400
member = db.session.get(Account, str(member_id))
if member:
if not member:
abort(404)
try:

View File

@ -330,13 +330,13 @@ class BaseAgentRunner(AppRunner):
if not updated_agent_thought:
raise ValueError("agent thought not found")
if thought is not None:
updated_agent_thought.thought = thought
if thought:
agent_thought.thought = thought
if tool_name is not None:
updated_agent_thought.tool = tool_name
if tool_name:
agent_thought.tool = tool_name
if tool_input is not None:
if tool_input:
if isinstance(tool_input, dict):
try:
tool_input = json.dumps(tool_input, ensure_ascii=False)
@ -345,7 +345,7 @@ class BaseAgentRunner(AppRunner):
updated_agent_thought.tool_input = tool_input
if observation is not None:
if observation:
if isinstance(observation, dict):
try:
observation = json.dumps(observation, ensure_ascii=False)
@ -354,8 +354,8 @@ class BaseAgentRunner(AppRunner):
updated_agent_thought.observation = observation
if answer is not None:
updated_agent_thought.answer = answer
if answer:
agent_thought.answer = answer
if messages_ids is not None and len(messages_ids) > 0:
updated_agent_thought.message_files = json.dumps(messages_ids)

View File

@ -276,7 +276,7 @@ class WorkflowCycleManage:
self, *, session: Session, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
) -> WorkflowNodeExecution:
workflow_node_execution = WorkflowNodeExecution()
workflow_node_execution.id = event.node_execution_id
workflow_node_execution.id = str(uuid4())
workflow_node_execution.tenant_id = workflow_run.tenant_id
workflow_node_execution.app_id = workflow_run.app_id
workflow_node_execution.workflow_id = workflow_run.workflow_id
@ -392,7 +392,7 @@ class WorkflowCycleManage:
execution_metadata = json.dumps(merged_metadata)
workflow_node_execution = WorkflowNodeExecution()
workflow_node_execution.id = event.node_execution_id
workflow_node_execution.id = str(uuid4())
workflow_node_execution.tenant_id = workflow_run.tenant_id
workflow_node_execution.app_id = workflow_run.app_id
workflow_node_execution.workflow_id = workflow_run.workflow_id
@ -825,7 +825,7 @@ class WorkflowCycleManage:
return workflow_run
def _get_workflow_node_execution(self, session: Session, node_execution_id: str) -> WorkflowNodeExecution:
stmt = select(WorkflowNodeExecution).where(WorkflowNodeExecution.id == node_execution_id)
stmt = select(WorkflowNodeExecution).where(WorkflowNodeExecution.node_execution_id == node_execution_id)
workflow_node_execution = session.scalar(stmt)
if not workflow_node_execution:
raise WorkflowNodeExecutionNotFoundError(node_execution_id)
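The hunks above switch WorkflowNodeExecution to a freshly minted primary key, keeping the queue event's id in a separate node_execution_id column that lookups now use. A minimal sketch of the resulting pattern (the model is assumed from models.workflow):

import uuid

from sqlalchemy import select
from sqlalchemy.orm import Session

from models.workflow import WorkflowNodeExecution


def start_node_execution(session: Session, event) -> WorkflowNodeExecution:
    execution = WorkflowNodeExecution()
    execution.id = str(uuid.uuid4())  # fresh primary key, never the event id
    execution.node_execution_id = event.node_execution_id  # correlation column
    session.add(execution)
    return execution


def find_node_execution(session: Session, node_execution_id: str):
    # look up by the correlation column, not the primary key
    stmt = select(WorkflowNodeExecution).where(
        WorkflowNodeExecution.node_execution_id == node_execution_id
    )
    return session.scalar(stmt)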

View File

@ -2,7 +2,7 @@ from collections.abc import Sequence
from enum import Enum
from typing import Optional
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, ConfigDict, Field, field_validator
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
@ -134,6 +134,14 @@ class ProviderEntity(BaseModel):
# pydantic configs
model_config = ConfigDict(protected_namespaces=())
@field_validator("models", mode="before")
@classmethod
def validate_models(cls, v):
# normalize a falsy value (e.g. an explicit null in YAML) to an empty list
if not v:
return []
return v
def to_simple_provider(self) -> SimpleProviderEntity:
"""
Convert to simple provider.
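The validator only normalizes a missing models list. A self-contained stub demonstrating the behavior (ProviderStub is illustrative, not the real entity):

from pydantic import BaseModel, field_validator


class ProviderStub(BaseModel):
    models: list[str] = []

    @field_validator("models", mode="before")
    @classmethod
    def validate_models(cls, v):
        return v or []  # None becomes []


assert ProviderStub(models=None).models == []
assert ProviderStub(models=["a"]).models == ["a"]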

View File

@ -0,0 +1,42 @@
model: ernie-lite-pro-128k
label:
en_US: Ernie-Lite-Pro-128K
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 128000
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.8
- name: top_p
use_template: top_p
- name: min_output_tokens
label:
en_US: "Min Output Tokens"
zh_Hans: "最小输出Token数"
use_template: max_tokens
min: 2
max: 2048
help:
zh_Hans: 指定模型最小输出token数
en_US: Specifies the lower limit on the length of generated results.
- name: max_output_tokens
label:
en_US: "Max Output Tokens"
zh_Hans: "最大输出Token数"
use_template: max_tokens
min: 2
max: 2048
default: 2048
help:
zh_Hans: 指定模型最大输出token数
en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter.
- name: presence_penalty
use_template: presence_penalty
- name: frequency_penalty
use_template: frequency_penalty

View File

@ -76,7 +76,11 @@ class PluginInstallationManager(BasePluginManager):
)
def install_from_identifiers(
self, tenant_id: str, identifiers: Sequence[str], source: PluginInstallationSource, meta: dict
self,
tenant_id: str,
identifiers: Sequence[str],
source: PluginInstallationSource,
metas: list[dict],
) -> PluginInstallTaskStartResponse:
"""
Install plugins from the given identifiers.
@ -89,7 +93,7 @@ class PluginInstallationManager(BasePluginManager):
data={
"plugin_unique_identifiers": identifiers,
"source": source,
"meta": meta,
"metas": metas,
},
headers={"Content-Type": "application/json"},
)

View File

@ -138,17 +138,24 @@ class NotionExtractor(BaseExtractor):
block_url = BLOCK_CHILD_URL_TMPL.format(block_id=page_id)
while True:
query_dict: dict[str, Any] = {} if not start_cursor else {"start_cursor": start_cursor}
res = requests.request(
"GET",
block_url,
headers={
"Authorization": "Bearer " + self._notion_access_token,
"Content-Type": "application/json",
"Notion-Version": "2022-06-28",
},
params=query_dict,
)
data = res.json()
try:
res = requests.request(
"GET",
block_url,
headers={
"Authorization": "Bearer " + self._notion_access_token,
"Content-Type": "application/json",
"Notion-Version": "2022-06-28",
},
params=query_dict,
)
if res.status_code != 200:
raise ValueError(f"Error fetching Notion block data: {res.text}")
data = res.json()
except requests.RequestException as e:
raise ValueError("Error fetching Notion block data") from e
if "results" not in data or not isinstance(data["results"], list):
raise ValueError("Error fetching Notion block data")
for result in data["results"]:
result_type = result["type"]
result_obj = result[result_type]
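requests.RequestException is the base class for connection, timeout, and protocol errors, so the new try/except folds every transport failure into a single ValueError. The same pattern in isolation (a generic sketch, not the extractor itself):

import requests


def fetch_json(url: str, **kwargs) -> dict:
    try:
        res = requests.get(url, **kwargs)
        if res.status_code != 200:
            raise ValueError(f"Error fetching data: {res.text}")
        return res.json()
    except requests.RequestException as e:
        raise ValueError("Error fetching data") from e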

View File

@ -31,3 +31,7 @@ class ToolApiSchemaError(ValueError):
class ToolEngineInvokeError(Exception):
meta: ToolInvokeMeta
def __init__(self, meta, **kwargs):
self.meta = meta
super().__init__(**kwargs)
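With the new constructor, the invocation metadata travels with the exception. A hypothetical raise/handle flow (the ToolInvokeMeta construction is illustrative):

from core.tools.entities.tool_entities import ToolInvokeMeta

try:
    raise ToolEngineInvokeError(meta=ToolInvokeMeta.error_instance("tool crashed"))
except ToolEngineInvokeError as err:
    print(err.meta)  # metadata recovered at the call site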

View File

@ -1,13 +1,14 @@
import json
import logging
from collections.abc import Generator
from typing import Any, Optional, Union
from typing import Any, Optional, Union, cast
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType
from extensions.ext_database import db
from factories.file_factory import build_from_mapping
from models.account import Account
from models.model import App, EndUser
from models.workflow import Workflow
@ -222,10 +223,18 @@ class WorkflowTool(Tool):
if isinstance(value, list):
for item in value:
if isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY:
file = File.model_validate(item)
item["tool_file_id"] = item.get("related_id")
file = build_from_mapping(
mapping=item,
tenant_id=str(cast(ToolRuntime, self.runtime).tenant_id),
)
files.append(file)
elif isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
file = File.model_validate(value)
value["tool_file_id"] = value.get("related_id")
file = build_from_mapping(
mapping=value,
tenant_id=str(cast(ToolRuntime, self.runtime).tenant_id),
)
files.append(file)
result[key] = value

View File

@ -15,11 +15,11 @@ def handle(sender, **kwargs):
app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()
removed_dataset_ids: set[int] = set()
removed_dataset_ids: set[str] = set()
if not app_dataset_joins:
added_dataset_ids = dataset_ids
else:
old_dataset_ids: set[int] = set()
old_dataset_ids: set[str] = set()
old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins)
added_dataset_ids = dataset_ids - old_dataset_ids
@ -39,8 +39,8 @@ def handle(sender, **kwargs):
db.session.commit()
def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[int]:
dataset_ids: set[int] = set()
def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[str]:
dataset_ids: set[str] = set()
if not app_model_config:
return dataset_ids

View File

@ -17,11 +17,11 @@ def handle(sender, **kwargs):
dataset_ids = get_dataset_ids_from_workflow(published_workflow)
app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()
removed_dataset_ids: set[int] = set()
removed_dataset_ids: set[str] = set()
if not app_dataset_joins:
added_dataset_ids = dataset_ids
else:
old_dataset_ids: set[int] = set()
old_dataset_ids: set[str] = set()
old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins)
added_dataset_ids = dataset_ids - old_dataset_ids
@ -41,8 +41,8 @@ def handle(sender, **kwargs):
db.session.commit()
def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]:
dataset_ids: set[int] = set()
def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]:
dataset_ids: set[str] = set()
graph = published_workflow.graph_dict
if not graph:
return dataset_ids
@ -60,7 +60,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]:
for node in knowledge_retrieval_nodes:
try:
node_data = KnowledgeRetrievalNodeData(**node.get("data", {}))
dataset_ids.update(int(dataset_id) for dataset_id in node_data.dataset_ids)
dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids)
except Exception as e:
continue

View File

@ -7,7 +7,9 @@ def init_app(app: DifyApp):
convert_to_agent_apps,
create_tenant,
extract_plugins,
extract_unique_plugins,
fix_app_site_missing,
install_plugins,
migrate_data_for_plugin,
reset_email,
reset_encrypt_key_pair,
@ -28,6 +30,8 @@ def init_app(app: DifyApp):
fix_app_site_missing,
migrate_data_for_plugin,
extract_plugins,
extract_unique_plugins,
install_plugins,
]
for cmd in cmds_to_register:
app.cli.add_command(cmd)

View File

@ -45,6 +45,7 @@ workflow_fields = {
"graph": fields.Raw(attribute="graph_dict"),
"features": fields.Raw(attribute="features_dict"),
"hash": fields.String(attribute="unique_hash"),
"version": fields.String(attribute="version"),
"created_by": fields.Nested(simple_account_fields, attribute="created_by_account"),
"created_at": TimestampField,
"updated_by": fields.Nested(simple_account_fields, attribute="updated_by_account", allow_null=True),
@ -61,3 +62,10 @@ workflow_partial_fields = {
"updated_by": fields.String,
"updated_at": TimestampField,
}
workflow_pagination_fields = {
"items": fields.List(fields.Nested(workflow_fields), attribute="items"),
"page": fields.Integer,
"limit": fields.Integer(attribute="limit"),
"has_more": fields.Boolean(attribute="has_more"),
}
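With marshal_with(workflow_pagination_fields), the new endpoint's response is shaped roughly like this (values illustrative):

{
  "items": [{"version": "2024-12-31 09:00:00", "hash": "...", "graph": {}, "features": {}}],
  "page": 1,
  "limit": 20,
  "has_more": false
}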

View File

@ -1,19 +0,0 @@
from configs import dify_config
def apply_gevent_threading_patch():
"""
Run threading patch by gevent
to make standard library threading compatible.
Patching should be done as early as possible in the lifecycle of the program.
:return:
"""
if not dify_config.DEBUG:
from gevent import monkey # type: ignore
from grpc.experimental import gevent as grpc_gevent # type: ignore
# gevent
monkey.patch_all()
# grpc gevent
grpc_gevent.init_gevent()

View File

@ -1,12 +0,0 @@
import sys
def check_supported_python_version():
python_version = sys.version_info
if not ((3, 11) <= python_version < (3, 13)):
print(
"Aborted to launch the service "
f" with unsupported Python version {python_version.major}.{python_version.minor}."
" Please ensure Python 3.11 or 3.12."
)
raise SystemExit(1)

View File

@ -1,2 +1 @@
Single-database configuration for Flask.

api/poetry.lock generated
View File

@ -11,17 +11,6 @@ files = [
{file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
]
[[package]]
name = "aiofiles"
version = "24.1.0"
description = "File support for asyncio."
optional = false
python-versions = ">=3.8"
files = [
{file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
{file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
]
[[package]]
name = "aiohappyeyeballs"
version = "2.4.4"
@ -966,10 +955,6 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@ -982,14 +967,8 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@ -1000,24 +979,8 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
{file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
{file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@ -1027,10 +990,6 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@ -1042,10 +1001,6 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@ -1058,10 +1013,6 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@ -1074,10 +1025,6 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@ -7011,6 +6958,16 @@ files = [
dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"]
test = ["pytest", "pytest-xdist", "setuptools"]
[[package]]
name = "psycogreen"
version = "1.0.2"
description = "psycopg2 integration with coroutine libraries"
optional = false
python-versions = "*"
files = [
{file = "psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d"},
]
[[package]]
name = "psycopg2-binary"
version = "2.9.10"
@ -11184,4 +11141,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.11,<3.13"
content-hash = "f4accd01805cbf080c4c5295f97a06c8e4faec7365d2c43d0435e56b46461732"
content-hash = "8b2b1bbc4d9c1d47f126775ea587ee116956df29f500534cb87f512402856e05"

View File

@ -61,6 +61,7 @@ openai = "~1.52.0"
openpyxl = "~3.1.5"
pandas = { version = "~2.2.2", extras = ["performance", "excel"] }
pandas-stubs = "~2.2.3.241009"
psycogreen = "~1.0.2"
psycopg2-binary = "~2.9.6"
pycryptodome = "3.19.1"
pydantic = "~2.9.2"

View File

@ -34,6 +34,7 @@ from models.account import (
)
from models.model import DifySetup
from models.staging import StagingAccountWhitelist
from services.billing_service import BillingService
from services.errors.account import (
AccountAlreadyInTenantError,
AccountLoginError,
@ -52,6 +53,8 @@ from services.errors.account import (
)
from services.errors.workspace import WorkSpaceNotAllowedCreateError
from services.feature_service import FeatureService
from tasks.delete_account_task import delete_account_task
from tasks.mail_account_deletion_task import send_account_deletion_verification_code
from tasks.mail_email_code_login import send_email_code_login_mail_task
from tasks.mail_invite_member_task import send_invite_member_mail_task
from tasks.mail_reset_password_task import send_reset_password_mail_task
@ -72,6 +75,9 @@ class AccountService:
email_code_login_rate_limiter = RateLimiter(
prefix="email_code_login_rate_limit", max_attempts=1, time_window=60 * 1
)
email_code_account_deletion_rate_limiter = RateLimiter(
prefix="email_code_account_deletion_rate_limit", max_attempts=1, time_window=60 * 1
)
LOGIN_MAX_ERROR_LIMITS = 5
@staticmethod
@ -203,6 +209,15 @@ class AccountService:
from controllers.console.error import AccountNotFound
raise AccountNotFound()
if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(email):
raise AccountRegisterError(
description=(
"This email account has been deleted within the past "
"30 days and is temporarily unavailable for new account registration"
)
)
account = Account()
account.email = email
account.name = name
@ -242,6 +257,42 @@ class AccountService:
return account
@staticmethod
def generate_account_deletion_verification_code(account: Account) -> tuple[str, str]:
code = "".join([str(random.randint(0, 9)) for _ in range(6)])
token = TokenManager.generate_token(
account=account, token_type="account_deletion", additional_data={"code": code}
)
return token, code
@classmethod
def send_account_deletion_verification_email(cls, account: Account, code: str):
email = account.email
if cls.email_code_account_deletion_rate_limiter.is_rate_limited(email):
from controllers.console.auth.error import EmailCodeAccountDeletionRateLimitExceededError
raise EmailCodeAccountDeletionRateLimitExceededError()
send_account_deletion_verification_code.delay(to=email, code=code)
cls.email_code_account_deletion_rate_limiter.increment_rate_limit(email)
@staticmethod
def verify_account_deletion_code(token: str, code: str) -> bool:
token_data = TokenManager.get_token_data(token, "account_deletion")
if token_data is None:
return False
if token_data["code"] != code:
return False
return True
@staticmethod
def delete_account(account: Account) -> None:
"""Delete account. This method only adds a task to the queue for deletion."""
delete_account_task.delay(account.id)
@staticmethod
def link_account_integrate(provider: str, open_id: str, account: Account) -> None:
"""Link account integrate"""
@ -431,6 +482,14 @@ class AccountService:
@classmethod
def get_user_through_email(cls, email: str):
if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(email):
raise AccountRegisterError(
description=(
"This email account has been deleted within the past "
"30 days and is temporarily unavailable for new account registration"
)
)
account = db.session.query(Account).filter(Account.email == email).first()
if not account:
return None
@ -847,6 +906,10 @@ class RegisterService:
db.session.commit()
except WorkSpaceNotAllowedCreateError:
db.session.rollback()
except AccountRegisterError as are:
db.session.rollback()
logging.exception("Register failed")
raise are
except Exception as e:
db.session.rollback()
logging.exception("Register failed")

View File

@ -139,7 +139,7 @@ class AudioService:
return Response(stream_with_context(response), content_type="audio/mpeg")
return response
else:
if not text:
if text is None:
raise ValueError("Text is required")
response = invoke_tts(text, app_model, voice)
if isinstance(response, Generator):

View File

@ -70,3 +70,24 @@ class BillingService:
if not TenantAccountRole.is_privileged_role(join.role):
raise ValueError("Only team owner or team admin can perform this action")
@classmethod
def delete_account(cls, account_id: str):
"""Delete account."""
params = {"account_id": account_id}
return cls._send_request("DELETE", "/account/", params=params)
@classmethod
def is_email_in_freeze(cls, email: str) -> bool:
params = {"email": email}
try:
response = cls._send_request("GET", "/account/in-freeze", params=params)
return bool(response.get("data", False))
except Exception:
return False
@classmethod
def update_account_deletion_feedback(cls, email: str, feedback: str):
"""Update account deletion feedback."""
json = {"email": email, "feedback": feedback}
return cls._send_request("POST", "/account/delete-feedback", json=json)

View File

@ -86,25 +86,30 @@ class DatasetService:
else:
return [], 0
else:
# show all datasets that the user has permission to access
if permitted_dataset_ids:
query = query.filter(
db.or_(
Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
db.and_(Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id),
db.and_(
Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM,
Dataset.id.in_(permitted_dataset_ids),
),
if user.current_role not in (TenantAccountRole.OWNER, TenantAccountRole.ADMIN):
# show all datasets that the user has permission to access
if permitted_dataset_ids:
query = query.filter(
db.or_(
Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
db.and_(
Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id
),
db.and_(
Dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM,
Dataset.id.in_(permitted_dataset_ids),
),
)
)
)
else:
query = query.filter(
db.or_(
Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
db.and_(Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id),
else:
query = query.filter(
db.or_(
Dataset.permission == DatasetPermissionEnum.ALL_TEAM,
db.and_(
Dataset.permission == DatasetPermissionEnum.ONLY_ME, Dataset.created_by == user.id
),
)
)
)
else:
# if no user, only show datasets that are shared with all team members
query = query.filter(Dataset.permission == DatasetPermissionEnum.ALL_TEAM)
@ -377,14 +382,19 @@ class DatasetService:
if dataset.tenant_id != user.current_tenant_id:
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.")
if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id:
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.")
if dataset.permission == "partial_members":
user_permission = DatasetPermission.query.filter_by(dataset_id=dataset.id, account_id=user.id).first()
if not user_permission and dataset.tenant_id != user.current_tenant_id and dataset.created_by != user.id:
if user.current_role not in (TenantAccountRole.OWNER, TenantAccountRole.ADMIN):
if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id:
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.")
if dataset.permission == "partial_members":
user_permission = DatasetPermission.query.filter_by(dataset_id=dataset.id, account_id=user.id).first()
if (
not user_permission
and dataset.tenant_id != user.current_tenant_id
and dataset.created_by != user.id
):
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.")
@staticmethod
def check_dataset_operator_permission(user: Optional[Account] = None, dataset: Optional[Dataset] = None):
@ -394,15 +404,16 @@ class DatasetService:
if not user:
raise ValueError("User not found")
if dataset.permission == DatasetPermissionEnum.ONLY_ME:
if dataset.created_by != user.id:
raise NoPermissionError("You do not have permission to access this dataset.")
if user.current_role not in (TenantAccountRole.OWNER, TenantAccountRole.ADMIN):
if dataset.permission == DatasetPermissionEnum.ONLY_ME:
if dataset.created_by != user.id:
raise NoPermissionError("You do not have permission to access this dataset.")
elif dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
if not any(
dp.dataset_id == dataset.id for dp in DatasetPermission.query.filter_by(account_id=user.id).all()
):
raise NoPermissionError("You do not have permission to access this dataset.")
elif dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
if not any(
dp.dataset_id == dataset.id for dp in DatasetPermission.query.filter_by(account_id=user.id).all()
):
raise NoPermissionError("You do not have permission to access this dataset.")
@staticmethod
def get_dataset_queries(dataset_id: str, page: int, per_page: int):
@ -441,7 +452,7 @@ class DatasetService:
class DocumentService:
DEFAULT_RULES = {
DEFAULT_RULES: dict[str, Any] = {
"mode": "custom",
"rules": {
"pre_processing_rules": [
@ -455,7 +466,7 @@ class DocumentService:
},
}
DOCUMENT_METADATA_SCHEMA = {
DOCUMENT_METADATA_SCHEMA: dict[str, Any] = {
"book": {
"title": str,
"language": str,

View File

@ -1,14 +1,25 @@
import datetime
import json
import logging
from collections.abc import Sequence
import sys
import time
from collections.abc import Mapping, Sequence
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Any, Optional
from uuid import uuid4
import click
import tqdm
from flask import Flask, current_app
from sqlalchemy.orm import Session
from core.agent.entities import AgentToolEntity
from core.entities import DEFAULT_PLUGIN_ID
from core.helper import marketplace
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.entities.plugin_daemon import PluginInstallTaskStatus
from core.plugin.manager.plugin import PluginInstallationManager
from core.tools.entities.tool_entities import ToolProviderType
from models.account import Tenant
from models.engine import db
@ -199,7 +210,7 @@ class PluginMigration:
if provider_name == "google":
provider_name = "gemini"
result.append(DEFAULT_PLUGIN_ID + "/" + provider_name + "/" + provider_name)
result.append(DEFAULT_PLUGIN_ID + "/" + provider_name)
elif provider_name:
result.append(provider_name)
@ -215,7 +226,7 @@ class PluginMigration:
result = []
for row in rs:
if "/" not in row.provider:
result.append(DEFAULT_PLUGIN_ID + "/" + row.provider + "/" + row.provider)
result.append(DEFAULT_PLUGIN_ID + "/" + row.provider)
else:
result.append(row.provider)
@ -234,7 +245,7 @@ class PluginMigration:
provider_name = "stepfun_tool"
if "/" not in provider_name:
return DEFAULT_PLUGIN_ID + "/" + provider_name + "/" + provider_name
return DEFAULT_PLUGIN_ID + "/" + provider_name
else:
return provider_name
@ -297,3 +308,216 @@ class PluginMigration:
continue
return result
@classmethod
def _fetch_plugin_unique_identifier(cls, plugin_id: str) -> Optional[str]:
"""
Fetch plugin unique identifier using plugin id.
"""
plugin_manifest = marketplace.batch_fetch_plugin_manifests([plugin_id])
if not plugin_manifest:
return None
return plugin_manifest[0].latest_package_identifier
@classmethod
def extract_unique_plugins_to_file(cls, extracted_plugins: str, output_file: str) -> None:
"""
Extract unique plugins.
"""
Path(output_file).write_text(json.dumps(cls.extract_unique_plugins(extracted_plugins)))
@classmethod
def extract_unique_plugins(cls, extracted_plugins: str) -> Mapping[str, Any]:
plugins: dict[str, str] = {}
plugin_ids = []
plugin_not_exist = []
logger.info(f"Extracting unique plugins from {extracted_plugins}")
with open(extracted_plugins) as f:
for line in f:
data = json.loads(line)
new_plugin_ids = data.get("plugins", [])
for plugin_id in new_plugin_ids:
if plugin_id not in plugin_ids:
plugin_ids.append(plugin_id)
def fetch_plugin(plugin_id):
unique_identifier = cls._fetch_plugin_unique_identifier(plugin_id)
if unique_identifier:
plugins[plugin_id] = unique_identifier
else:
plugin_not_exist.append(plugin_id)
with ThreadPoolExecutor(max_workers=10) as executor:
list(tqdm.tqdm(executor.map(fetch_plugin, plugin_ids), total=len(plugin_ids)))
return {"plugins": plugins, "plugin_not_exist": plugin_not_exist}
@classmethod
def install_plugins(cls, extracted_plugins: str, output_file: str) -> None:
"""
Install plugins.
"""
manager = PluginInstallationManager()
plugins = cls.extract_unique_plugins(extracted_plugins)
not_installed = []
plugin_install_failed = []
# use a fake tenant id to install all the plugins
fake_tenant_id = uuid4().hex
logger.info(f"Installing {len(plugins['plugins'])} plugin instances for fake tenant {fake_tenant_id}")
thread_pool = ThreadPoolExecutor(max_workers=40)
response = cls.handle_plugin_instance_install(fake_tenant_id, plugins["plugins"])
if response.get("failed"):
plugin_install_failed.extend(response.get("failed", []))
def install(tenant_id: str, plugin_ids: list[str]) -> None:
logger.info(f"Installing {len(plugin_ids)} plugins for tenant {tenant_id}")
# at most 64 plugins one batch
for i in range(0, len(plugin_ids), 64):
batch_plugin_ids = plugin_ids[i : i + 64]
batch_plugin_identifiers = [plugins["plugins"][plugin_id] for plugin_id in batch_plugin_ids]
manager.install_from_identifiers(
tenant_id,
batch_plugin_identifiers,
PluginInstallationSource.Marketplace,
metas=[
{
"plugin_unique_identifier": identifier,
}
for identifier in batch_plugin_identifiers
],
)
with open(extracted_plugins) as f:
"""
Read line by line, and install plugins for each tenant.
"""
for line in f:
data = json.loads(line)
tenant_id = data.get("tenant_id")
plugin_ids = data.get("plugins", [])
current_not_installed = {
"tenant_id": tenant_id,
"plugin_not_exist": [],
}
# get plugin unique identifier
for plugin_id in plugin_ids:
unique_identifier = plugins["plugins"].get(plugin_id)
if not unique_identifier:
current_not_installed["plugin_not_exist"].append(plugin_id)
if current_not_installed["plugin_not_exist"]:
not_installed.append(current_not_installed)
thread_pool.submit(install, tenant_id, plugin_ids)
thread_pool.shutdown(wait=True)
logger.info("Uninstall plugins")
sys.exit(-1)
# get installation
try:
installation = manager.list_plugins(fake_tenant_id)
while installation:
for plugin in installation:
manager.uninstall(fake_tenant_id, plugin.installation_id)
installation = manager.list_plugins(fake_tenant_id)
except Exception:
logger.exception(f"Failed to get installation for tenant {fake_tenant_id}")
Path(output_file).write_text(
json.dumps(
{
"not_installed": not_installed,
"plugin_install_failed": plugin_install_failed,
}
)
)
@classmethod
def handle_plugin_instance_install(
cls, tenant_id: str, plugin_identifiers_map: Mapping[str, str]
) -> Mapping[str, Any]:
"""
Install plugins for a tenant.
"""
manager = PluginInstallationManager()
# download all the plugins and upload
thread_pool = ThreadPoolExecutor(max_workers=10)
futures = []
        def download_and_upload(tenant_id, plugin_id, plugin_identifier):
            plugin_package = marketplace.download_plugin_pkg(plugin_identifier)
            if not plugin_package:
                raise Exception(f"Failed to download plugin {plugin_identifier}")

            # upload
            manager.upload_pkg(tenant_id, plugin_package, verify_signature=True)

        # the closure takes everything it needs as arguments, so define it once
        # outside the loop rather than rebuilding it on every iteration
        for plugin_id, plugin_identifier in plugin_identifiers_map.items():
            futures.append(thread_pool.submit(download_and_upload, tenant_id, plugin_id, plugin_identifier))
# Wait for all downloads to complete
for future in futures:
future.result() # This will raise any exceptions that occurred
thread_pool.shutdown(wait=True)
success = []
failed = []
reverse_map = {v: k for k, v in plugin_identifiers_map.items()}
# at most 64 plugins one batch
for i in range(0, len(plugin_identifiers_map), 64):
batch_plugin_ids = list(plugin_identifiers_map.keys())[i : i + 64]
batch_plugin_identifiers = [plugin_identifiers_map[plugin_id] for plugin_id in batch_plugin_ids]
try:
response = manager.install_from_identifiers(
tenant_id=tenant_id,
identifiers=batch_plugin_identifiers,
source=PluginInstallationSource.Marketplace,
metas=[
{
"plugin_unique_identifier": identifier,
}
for identifier in batch_plugin_identifiers
],
)
except Exception:
# add to failed
failed.extend(batch_plugin_identifiers)
continue
if response.all_installed:
success.extend(batch_plugin_identifiers)
continue
task_id = response.task_id
done = False
while not done:
status = manager.fetch_plugin_installation_task(tenant_id, task_id)
if status.status in [PluginInstallTaskStatus.Failed, PluginInstallTaskStatus.Success]:
for plugin in status.plugins:
if plugin.status == PluginInstallTaskStatus.Success:
success.append(reverse_map[plugin.plugin_unique_identifier])
else:
failed.append(reverse_map[plugin.plugin_unique_identifier])
logger.error(
f"Failed to install plugin {plugin.plugin_unique_identifier}, error: {plugin.message}"
)
done = True
else:
time.sleep(1)
return {"success": success, "failed": failed}

View File

@ -232,7 +232,7 @@ class PluginService:
tenant_id,
plugin_unique_identifiers,
PluginInstallationSource.Package,
-                {},
+                [{}],
)
@staticmethod
@ -246,11 +246,13 @@ class PluginService:
tenant_id,
[plugin_unique_identifier],
PluginInstallationSource.Github,
-                {
-                    "repo": repo,
-                    "version": version,
-                    "package": package,
-                },
+                [
+                    {
+                        "repo": repo,
+                        "version": version,
+                        "package": package,
+                    }
+                ],
)
@staticmethod
@ -277,9 +279,12 @@ class PluginService:
tenant_id,
plugin_unique_identifiers,
PluginInstallationSource.Marketplace,
-                {
-                    "plugin_unique_identifier": plugin_unique_identifier,
-                },
+                [
+                    {
+                        "plugin_unique_identifier": plugin_unique_identifier,
+                    }
+                    for plugin_unique_identifier in plugin_unique_identifiers
+                ],
)
@staticmethod
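
The three call sites above now pass `metas` as a list aligned one-to-one with `identifiers`. A tiny sketch of the assumed contract (names illustrative):

    identifiers = ["idA", "idB"]
    metas = [{"plugin_unique_identifier": i} for i in identifiers]  # metas[i] describes identifiers[i]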

View File

@ -439,7 +439,7 @@ class ApiToolManageService:
tenant_id=tenant_id,
)
)
-            result = tool.validate_credentials(credentials, parameters)
+            result = runtime_tool.validate_credentials(credentials, parameters)
except Exception as e:
return {"error": str(e)}

View File

@ -5,6 +5,8 @@ from datetime import UTC, datetime
from typing import Any, Optional
from uuid import uuid4
from sqlalchemy import desc
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.model_runtime.utils.encoders import jsonable_encoder
@ -77,6 +79,28 @@ class WorkflowService:
return workflow
def get_all_published_workflow(self, app_model: App, page: int, limit: int) -> tuple[list[Workflow], bool]:
"""
        Get published workflows with pagination
"""
if not app_model.workflow_id:
return [], False
workflows = (
db.session.query(Workflow)
.filter(Workflow.app_id == app_model.id)
.order_by(desc(Workflow.version))
.offset((page - 1) * limit)
.limit(limit + 1)
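            # fetch one extra row so has_more can be derived without a COUNT query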
.all()
)
has_more = len(workflows) > limit
if has_more:
workflows = workflows[:-1]
return workflows, has_more
def sync_draft_workflow(
self,
*,

View File

@ -38,7 +38,11 @@ def add_document_to_index_task(dataset_document_id: str):
try:
segments = (
db.session.query(DocumentSegment)
-            .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
+            .filter(
+                DocumentSegment.document_id == dataset_document.id,
+                DocumentSegment.enabled == False,
+                DocumentSegment.status == "completed",
+            )
.order_by(DocumentSegment.position.asc())
.all()
)
@ -85,6 +89,16 @@ def add_document_to_index_task(dataset_document_id: str):
db.session.query(DatasetAutoDisableLog).filter(
DatasetAutoDisableLog.document_id == dataset_document.id
).delete()
# update segment to enable
db.session.query(DocumentSegment).filter(DocumentSegment.document_id == dataset_document.id).update(
{
DocumentSegment.enabled: True,
DocumentSegment.disabled_at: None,
DocumentSegment.disabled_by: None,
DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
}
)
db.session.commit()
end_at = time.perf_counter()
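
Read together, the two hunks above flip the task's contract: it now selects segments that are disabled but fully processed ("completed"), indexes them, and then re-enables them. This pairs with remove_document_from_index_task below, which disables segments when a document leaves the index.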

View File

@ -0,0 +1,26 @@
import logging
from celery import shared_task # type: ignore
from extensions.ext_database import db
from models.account import Account
from services.billing_service import BillingService
from tasks.mail_account_deletion_task import send_deletion_success_task
logger = logging.getLogger(__name__)
@shared_task(queue="dataset")
def delete_account_task(account_id):
    account = db.session.query(Account).filter(Account.id == account_id).first()
    if not account:
        logger.error(f"Account {account_id} not found.")
        return

    try:
        BillingService.delete_account(account_id)
    except Exception:
        logger.exception(f"Failed to delete account {account_id} from billing service.")
        raise
# send success email
send_deletion_success_task.delay(account.email)
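
A plausible invocation from the account service (hedged; the caller is not shown in this diff, and `account` is assumed to be in scope):

    delete_account_task.delay(account.id)  # runs asynchronously on the "dataset" queue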

View File

@ -0,0 +1,70 @@
import logging
import time
import click
from celery import shared_task # type: ignore
from flask import render_template
from extensions.ext_mail import mail
@shared_task(queue="mail")
def send_deletion_success_task(to):
"""Send email to user regarding account deletion.
Args:
log (AccountDeletionLog): Account deletion log object
"""
if not mail.is_inited():
return
logging.info(click.style(f"Start send account deletion success email to {to}", fg="green"))
start_at = time.perf_counter()
try:
html_content = render_template(
"delete_account_success_template_en-US.html",
to=to,
email=to,
)
mail.send(to=to, subject="Your Dify.AI Account Has Been Successfully Deleted", html=html_content)
end_at = time.perf_counter()
logging.info(
click.style(
"Send account deletion success email to {}: latency: {}".format(to, end_at - start_at), fg="green"
)
)
except Exception:
logging.exception("Send account deletion success email to {} failed".format(to))
@shared_task(queue="mail")
def send_account_deletion_verification_code(to, code):
"""Send email to user regarding account deletion verification code.
Args:
to (str): Recipient email address
code (str): Verification code
"""
if not mail.is_inited():
return
logging.info(click.style(f"Start send account deletion verification code email to {to}", fg="green"))
start_at = time.perf_counter()
try:
html_content = render_template("delete_account_code_email_template_en-US.html", to=to, code=code)
mail.send(to=to, subject="Dify.AI Account Deletion and Verification", html=html_content)
end_at = time.perf_counter()
logging.info(
click.style(
"Send account deletion verification code email to {} succeeded: latency: {}".format(
to, end_at - start_at
),
fg="green",
)
)
except Exception:
logging.exception("Send account deletion verification code email to {} failed".format(to))

View File

@ -1,3 +1,4 @@
import datetime
import logging
import time
@ -46,6 +47,16 @@ def remove_document_from_index_task(document_id: str):
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
except Exception:
logging.exception(f"clean dataset {dataset.id} from index failed")
# update segment to disable
db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).update(
{
DocumentSegment.enabled: False,
DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
DocumentSegment.disabled_by: document.disabled_by,
DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
}
)
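    # mirror of add_document_to_index_task, which re-enables these segments once
    # the document is added back to the index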
db.session.commit()
end_at = time.perf_counter()
logging.info(

View File

@ -0,0 +1,125 @@
<!DOCTYPE html>
<html>
<head>
<style>
body {
font-family: 'Arial', sans-serif;
line-height: 16pt;
color: #101828;
background-color: #e9ebf0;
margin: 0;
padding: 0;
}
.container {
width: 600px;
min-height: 605px;
margin: 40px auto;
padding: 36px 48px;
background-color: #fcfcfd;
border-radius: 16px;
border: 1px solid #ffffff;
box-shadow: 0 2px 4px -2px rgba(9, 9, 11, 0.08);
}
.header {
margin-bottom: 24px;
}
.header img {
max-width: 100px;
height: auto;
}
.title {
font-weight: 600;
font-size: 24px;
line-height: 28.8px;
}
.description {
font-size: 13px;
line-height: 16px;
color: #676f83;
margin-top: 12px;
}
.code-content {
padding: 16px 32px;
text-align: center;
border-radius: 16px;
background-color: #f2f4f7;
margin: 16px auto;
}
.code {
line-height: 36px;
font-weight: 700;
font-size: 30px;
}
.tips {
line-height: 16px;
color: #676f83;
font-size: 13px;
}
.typography {
letter-spacing: -0.07px;
font-weight: 400;
font-style: normal;
font-size: 14px;
line-height: 20px;
color: #354052;
margin-top: 12px;
margin-bottom: 12px;
}
.typography p{
margin: 0 auto;
}
.typography-title {
color: #101828;
font-size: 14px;
font-style: normal;
font-weight: 600;
line-height: 20px;
margin-top: 12px;
margin-bottom: 4px;
}
.tip-list{
margin: 0;
padding-left: 10px;
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<!-- Optional: Add a logo or a header image here -->
<img src="https://cloud.dify.ai/logo/logo-site.png" alt="Dify Logo" />
</div>
<p class="title">Dify.AI Account Deletion and Verification</p>
<p class="typography">We received a request to delete your Dify account. To ensure the security of your account and
confirm this action, please use the verification code below:</p>
<div class="code-content">
<span class="code">{{code}}</span>
</div>
<div class="typography">
<p style="margin-bottom:4px">To complete the account deletion process:</p>
<p>1. Return to the account deletion page on our website</p>
<p>2. Enter the verification code above</p>
<p>3. Click "Confirm Deletion"</p>
</div>
<p class="typography-title">Please note:</p>
<ul class="typography tip-list">
<li>This code is valid for 5 minutes</li>
      <li>Any workspaces you own will be queued for permanent deletion.</li>
<li>All your user data will be queued for permanent deletion.</li>
</ul>
</div>
</body>
</html>

View File

@ -0,0 +1,105 @@
<!DOCTYPE html>
<html>
<head>
<style>
body {
font-family: 'Arial', sans-serif;
line-height: 16pt;
color: #101828;
background-color: #e9ebf0;
margin: 0;
padding: 0;
}
.container {
width: 600px;
min-height: 380px;
margin: 40px auto;
padding: 36px 48px;
background-color: #fcfcfd;
border-radius: 16px;
border: 1px solid #ffffff;
box-shadow: 0 2px 4px -2px rgba(9, 9, 11, 0.08);
}
.header {
margin-bottom: 24px;
}
.header img {
max-width: 100px;
height: auto;
}
.title {
font-weight: 600;
font-size: 24px;
line-height: 28.8px;
margin-bottom: 12px;
}
.description {
color: #354052;
font-weight: 400;
line-height: 20px;
font-size: 14px;
}
.code-content {
padding: 16px 32px;
text-align: center;
border-radius: 16px;
background-color: #f2f4f7;
margin: 16px auto;
}
.code {
line-height: 36px;
font-weight: 700;
font-size: 30px;
}
.tips {
line-height: 16px;
color: #676f83;
font-size: 13px;
}
.email {
color: #354052;
font-weight: 600;
line-height: 20px;
font-size: 14px;
}
.typography{
font-weight: 400;
font-style: normal;
font-size: 14px;
line-height: 20px;
color: #354052;
margin-top: 4px;
margin-bottom: 0;
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<!-- Optional: Add a logo or a header image here -->
<img src="https://cloud.dify.ai/logo/logo-site.png" alt="Dify Logo" />
</div>
<p class="title">Your Dify.AI Account Has Been Successfully Deleted</p>
<p class="typography">We're writing to confirm that your Dify.AI account has been successfully deleted as per your request. Your
account is no longer accessible, and you can't log in using your previous credentials. If you decide to use
Dify.AI services in the future, you'll need to create a new account after 30 days. We appreciate the time you
spent with Dify.AI and are sorry to see you go. If you have any questions or concerns about the deletion process,
please don't hesitate to reach out to our support team.</p>
<p class="typography">Thank you for being a part of the Dify.AI community.</p>
<p class="typography">Best regards,</p>
<p class="typography">Dify.AI Team</p>
</div>
</body>
</html>

View File

@ -1,5 +1,3 @@
-from unittest.mock import MagicMock
from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector
from tests.integration_tests.vdb.__mock.baiduvectordb import setup_baiduvectordb_mock
from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis

View File

@ -1,5 +1,3 @@
-from unittest.mock import MagicMock, patch
import pytest
from core.rag.datasource.vdb.tidb_vector.tidb_vector import TiDBVector, TiDBVectorConfig

View File

@ -4,7 +4,7 @@ import pytest
from configs import dify_config
from core.app.app_config.entities import ModelConfigEntity
-from core.file import File, FileTransferMethod, FileType, FileUploadConfig, ImageConfig
+from core.file import File, FileTransferMethod, FileType
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_runtime.entities.message_entities import (
AssistantPromptMessage,

View File

@ -1,6 +1,6 @@
import uuid
from collections.abc import Generator
-from datetime import UTC, datetime, timezone
+from datetime import UTC, datetime
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.enums import SystemVariableKey

View File

@ -21,8 +21,7 @@ from core.model_runtime.entities.message_entities import (
from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelFeature, ModelType
from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
from core.prompt.entities.advanced_prompt_entities import MemoryConfig
-from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment, StringSegment
-from core.workflow.entities.variable_entities import VariableSelector
+from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState
from core.workflow.nodes.answer import AnswerStreamGenerateRoute

View File

@ -1,7 +1,6 @@
from core.workflow.graph_engine.entities.event import (
GraphRunFailedEvent,
GraphRunPartialSucceededEvent,
GraphRunSucceededEvent,
NodeRunRetryEvent,
)
from tests.unit_tests.core.workflow.nodes.test_continue_on_error import ContinueOnErrorTestHelper

View File

@ -1,5 +1,3 @@
-import pytest
from core.variables import SegmentType
from core.workflow.nodes.variable_assigner.v2.enums import Operation
from core.workflow.nodes.variable_assigner.v2.helpers import is_input_value_valid

View File

@ -1,4 +1,4 @@
-from unittest.mock import MagicMock, patch
+from unittest.mock import patch
import pytest
from oss2 import Auth # type: ignore

View File

@ -1,5 +1,3 @@
-from textwrap import dedent
import pytest
from core.tools.utils.text_processing_utils import remove_leading_symbols