mirror of
https://github.com/langgenius/dify.git
synced 2026-05-04 09:28:04 +08:00
fix: merge main
This commit is contained in:
@ -427,8 +427,8 @@ CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
|
||||
CODE_MAX_NUMBER=9223372036854775807
|
||||
CODE_MIN_NUMBER=-9223372036854775808
|
||||
CODE_MAX_STRING_LENGTH=80000
|
||||
TEMPLATE_TRANSFORM_MAX_LENGTH=80000
|
||||
CODE_MAX_STRING_LENGTH=400000
|
||||
TEMPLATE_TRANSFORM_MAX_LENGTH=400000
|
||||
CODE_MAX_STRING_ARRAY_LENGTH=30
|
||||
CODE_MAX_OBJECT_ARRAY_LENGTH=30
|
||||
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
|
||||
|
||||
@ -150,7 +150,7 @@ class CodeExecutionSandboxConfig(BaseSettings):
|
||||
|
||||
CODE_MAX_STRING_LENGTH: PositiveInt = Field(
|
||||
description="Maximum allowed length for strings in code execution",
|
||||
default=80000,
|
||||
default=400_000,
|
||||
)
|
||||
|
||||
CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
|
||||
@ -582,6 +582,11 @@ class WorkflowConfig(BaseSettings):
|
||||
default=200 * 1024,
|
||||
)
|
||||
|
||||
TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field(
|
||||
description="Maximum number of characters allowed in Template Transform node output",
|
||||
default=400_000,
|
||||
)
|
||||
|
||||
# GraphEngine Worker Pool Configuration
|
||||
GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(
|
||||
description="Minimum number of workers per GraphEngine instance",
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
from configs import dify_config
|
||||
from libs.collection_utils import convert_to_lower_and_upper_set
|
||||
|
||||
HIDDEN_VALUE = "[__HIDDEN__]"
|
||||
UNKNOWN_VALUE = "[__UNKNOWN__]"
|
||||
@ -6,24 +7,39 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"
|
||||
|
||||
DEFAULT_FILE_NUMBER_LIMITS = 3
|
||||
|
||||
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
|
||||
IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
|
||||
IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
|
||||
|
||||
VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"]
|
||||
VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
|
||||
VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
|
||||
|
||||
AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
|
||||
AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
|
||||
AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})
|
||||
|
||||
|
||||
_doc_extensions: list[str]
|
||||
_doc_extensions: set[str]
|
||||
if dify_config.ETL_TYPE == "Unstructured":
|
||||
_doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
|
||||
_doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
|
||||
_doc_extensions = {
|
||||
"txt",
|
||||
"markdown",
|
||||
"md",
|
||||
"mdx",
|
||||
"pdf",
|
||||
"html",
|
||||
"htm",
|
||||
"xlsx",
|
||||
"xls",
|
||||
"vtt",
|
||||
"properties",
|
||||
"doc",
|
||||
"docx",
|
||||
"csv",
|
||||
"eml",
|
||||
"msg",
|
||||
"pptx",
|
||||
"xml",
|
||||
"epub",
|
||||
}
|
||||
if dify_config.UNSTRUCTURED_API_URL:
|
||||
_doc_extensions.append("ppt")
|
||||
_doc_extensions.add("ppt")
|
||||
else:
|
||||
_doc_extensions = [
|
||||
_doc_extensions = {
|
||||
"txt",
|
||||
"markdown",
|
||||
"md",
|
||||
@ -37,5 +53,5 @@ else:
|
||||
"csv",
|
||||
"vtt",
|
||||
"properties",
|
||||
]
|
||||
DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]
|
||||
}
|
||||
DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
|
||||
|
||||
@ -1,4 +1,3 @@
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from flask import make_response, redirect, request
|
||||
from flask_login import current_user
|
||||
from flask_restx import Resource, reqparse
|
||||
@ -11,6 +10,7 @@ from controllers.console.wraps import (
|
||||
setup_required,
|
||||
)
|
||||
from core.model_runtime.errors.validate import CredentialsValidateFailedError
|
||||
from core.model_runtime.utils.encoders import jsonable_encoder
|
||||
from core.plugin.impl.oauth import OAuthHandler
|
||||
from libs.helper import StrLen
|
||||
from libs.login import login_required
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
import uuid
|
||||
from typing import Literal, cast
|
||||
|
||||
from core.app.app_config.entities import (
|
||||
DatasetEntity,
|
||||
@ -74,6 +75,9 @@ class DatasetConfigManager:
|
||||
return None
|
||||
query_variable = config.get("dataset_query_variable")
|
||||
|
||||
metadata_model_config_dict = dataset_configs.get("metadata_model_config")
|
||||
metadata_filtering_conditions_dict = dataset_configs.get("metadata_filtering_conditions")
|
||||
|
||||
if dataset_configs["retrieval_model"] == "single":
|
||||
return DatasetEntity(
|
||||
dataset_ids=dataset_ids,
|
||||
@ -82,18 +86,23 @@ class DatasetConfigManager:
|
||||
retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of(
|
||||
dataset_configs["retrieval_model"]
|
||||
),
|
||||
metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"),
|
||||
metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config"))
|
||||
if dataset_configs.get("metadata_model_config")
|
||||
metadata_filtering_mode=cast(
|
||||
Literal["disabled", "automatic", "manual"],
|
||||
dataset_configs.get("metadata_filtering_mode", "disabled"),
|
||||
),
|
||||
metadata_model_config=ModelConfig(**metadata_model_config_dict)
|
||||
if isinstance(metadata_model_config_dict, dict)
|
||||
else None,
|
||||
metadata_filtering_conditions=MetadataFilteringCondition(
|
||||
**dataset_configs.get("metadata_filtering_conditions", {})
|
||||
)
|
||||
if dataset_configs.get("metadata_filtering_conditions")
|
||||
metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict)
|
||||
if isinstance(metadata_filtering_conditions_dict, dict)
|
||||
else None,
|
||||
),
|
||||
)
|
||||
else:
|
||||
score_threshold_val = dataset_configs.get("score_threshold")
|
||||
reranking_model_val = dataset_configs.get("reranking_model")
|
||||
weights_val = dataset_configs.get("weights")
|
||||
|
||||
return DatasetEntity(
|
||||
dataset_ids=dataset_ids,
|
||||
retrieve_config=DatasetRetrieveConfigEntity(
|
||||
@ -101,22 +110,23 @@ class DatasetConfigManager:
|
||||
retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of(
|
||||
dataset_configs["retrieval_model"]
|
||||
),
|
||||
top_k=dataset_configs.get("top_k", 4),
|
||||
score_threshold=dataset_configs.get("score_threshold")
|
||||
if dataset_configs.get("score_threshold_enabled", False)
|
||||
top_k=int(dataset_configs.get("top_k", 4)),
|
||||
score_threshold=float(score_threshold_val)
|
||||
if dataset_configs.get("score_threshold_enabled", False) and score_threshold_val is not None
|
||||
else None,
|
||||
reranking_model=dataset_configs.get("reranking_model"),
|
||||
weights=dataset_configs.get("weights"),
|
||||
reranking_enabled=dataset_configs.get("reranking_enabled", True),
|
||||
reranking_model=reranking_model_val if isinstance(reranking_model_val, dict) else None,
|
||||
weights=weights_val if isinstance(weights_val, dict) else None,
|
||||
reranking_enabled=bool(dataset_configs.get("reranking_enabled", True)),
|
||||
rerank_mode=dataset_configs.get("reranking_mode", "reranking_model"),
|
||||
metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"),
|
||||
metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config"))
|
||||
if dataset_configs.get("metadata_model_config")
|
||||
metadata_filtering_mode=cast(
|
||||
Literal["disabled", "automatic", "manual"],
|
||||
dataset_configs.get("metadata_filtering_mode", "disabled"),
|
||||
),
|
||||
metadata_model_config=ModelConfig(**metadata_model_config_dict)
|
||||
if isinstance(metadata_model_config_dict, dict)
|
||||
else None,
|
||||
metadata_filtering_conditions=MetadataFilteringCondition(
|
||||
**dataset_configs.get("metadata_filtering_conditions", {})
|
||||
)
|
||||
if dataset_configs.get("metadata_filtering_conditions")
|
||||
metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict)
|
||||
if isinstance(metadata_filtering_conditions_dict, dict)
|
||||
else None,
|
||||
),
|
||||
)
|
||||
@ -134,18 +144,17 @@ class DatasetConfigManager:
|
||||
config = cls.extract_dataset_config_for_legacy_compatibility(tenant_id, app_mode, config)
|
||||
|
||||
# dataset_configs
|
||||
if not config.get("dataset_configs"):
|
||||
config["dataset_configs"] = {"retrieval_model": "single"}
|
||||
if "dataset_configs" not in config or not config.get("dataset_configs"):
|
||||
config["dataset_configs"] = {}
|
||||
config["dataset_configs"]["retrieval_model"] = config["dataset_configs"].get("retrieval_model", "single")
|
||||
|
||||
if not isinstance(config["dataset_configs"], dict):
|
||||
raise ValueError("dataset_configs must be of object type")
|
||||
|
||||
if not config["dataset_configs"].get("datasets"):
|
||||
if "datasets" not in config["dataset_configs"] or not config["dataset_configs"].get("datasets"):
|
||||
config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []}
|
||||
|
||||
need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get(
|
||||
"datasets", {}
|
||||
).get("datasets")
|
||||
need_manual_query_datasets = config.get("dataset_configs", {}).get("datasets", {}).get("datasets")
|
||||
|
||||
if need_manual_query_datasets and app_mode == AppMode.COMPLETION:
|
||||
# Only check when mode is completion
|
||||
@ -166,8 +175,8 @@ class DatasetConfigManager:
|
||||
:param config: app model config args
|
||||
"""
|
||||
# Extract dataset config for legacy compatibility
|
||||
if not config.get("agent_mode"):
|
||||
config["agent_mode"] = {"enabled": False, "tools": []}
|
||||
if "agent_mode" not in config or not config.get("agent_mode"):
|
||||
config["agent_mode"] = {}
|
||||
|
||||
if not isinstance(config["agent_mode"], dict):
|
||||
raise ValueError("agent_mode must be of object type")
|
||||
@ -180,19 +189,22 @@ class DatasetConfigManager:
|
||||
raise ValueError("enabled in agent_mode must be of boolean type")
|
||||
|
||||
# tools
|
||||
if not config["agent_mode"].get("tools"):
|
||||
if "tools" not in config["agent_mode"] or not config["agent_mode"].get("tools"):
|
||||
config["agent_mode"]["tools"] = []
|
||||
|
||||
if not isinstance(config["agent_mode"]["tools"], list):
|
||||
raise ValueError("tools in agent_mode must be a list of objects")
|
||||
|
||||
# strategy
|
||||
if not config["agent_mode"].get("strategy"):
|
||||
if "strategy" not in config["agent_mode"] or not config["agent_mode"].get("strategy"):
|
||||
config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER.value
|
||||
|
||||
has_datasets = False
|
||||
if config["agent_mode"]["strategy"] in {PlanningStrategy.ROUTER.value, PlanningStrategy.REACT_ROUTER.value}:
|
||||
for tool in config["agent_mode"]["tools"]:
|
||||
if config.get("agent_mode", {}).get("strategy") in {
|
||||
PlanningStrategy.ROUTER.value,
|
||||
PlanningStrategy.REACT_ROUTER.value,
|
||||
}:
|
||||
for tool in config.get("agent_mode", {}).get("tools", []):
|
||||
key = list(tool.keys())[0]
|
||||
if key == "dataset":
|
||||
# old style, use tool name as key
|
||||
@ -217,7 +229,7 @@ class DatasetConfigManager:
|
||||
|
||||
has_datasets = True
|
||||
|
||||
need_manual_query_datasets = has_datasets and config["agent_mode"]["enabled"]
|
||||
need_manual_query_datasets = has_datasets and config.get("agent_mode", {}).get("enabled")
|
||||
|
||||
if need_manual_query_datasets and app_mode == AppMode.COMPLETION:
|
||||
# Only check when mode is completion
|
||||
|
||||
@ -107,7 +107,6 @@ class MessageCycleManager:
|
||||
if dify_config.DEBUG:
|
||||
logger.exception("generate conversation name failed, conversation_id: %s", conversation_id)
|
||||
|
||||
db.session.merge(conversation)
|
||||
db.session.commit()
|
||||
db.session.close()
|
||||
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
from typing import TYPE_CHECKING, Any, Optional
|
||||
|
||||
from openai import BaseModel
|
||||
from pydantic import Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Import InvokeFrom locally to avoid circular import
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
from typing import Any
|
||||
|
||||
from openai import BaseModel
|
||||
from pydantic import Field
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.tools.entities.tool_entities import CredentialType, ToolInvokeFrom
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import os
|
||||
from collections.abc import Mapping, Sequence
|
||||
from typing import Any
|
||||
|
||||
from configs import dify_config
|
||||
from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage
|
||||
from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionStatus
|
||||
from core.workflow.node_events import NodeRunResult
|
||||
@ -9,7 +9,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig
|
||||
from core.workflow.nodes.base.node import Node
|
||||
from core.workflow.nodes.template_transform.entities import TemplateTransformNodeData
|
||||
|
||||
MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = int(os.environ.get("TEMPLATE_TRANSFORM_MAX_LENGTH", "80000"))
|
||||
MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
|
||||
|
||||
|
||||
class TemplateTransformNode(Node):
|
||||
|
||||
@ -10,14 +10,14 @@ from dify_app import DifyApp
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
@app.after_request
|
||||
def after_request(response):
|
||||
def after_request(response): # pyright: ignore[reportUnusedFunction]
|
||||
"""Add Version headers to the response."""
|
||||
response.headers.add("X-Version", dify_config.project.version)
|
||||
response.headers.add("X-Env", dify_config.DEPLOY_ENV)
|
||||
return response
|
||||
|
||||
@app.route("/health")
|
||||
def health():
|
||||
def health(): # pyright: ignore[reportUnusedFunction]
|
||||
return Response(
|
||||
json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}),
|
||||
status=200,
|
||||
@ -25,7 +25,7 @@ def init_app(app: DifyApp):
|
||||
)
|
||||
|
||||
@app.route("/threads")
|
||||
def threads():
|
||||
def threads(): # pyright: ignore[reportUnusedFunction]
|
||||
num_threads = threading.active_count()
|
||||
threads = threading.enumerate()
|
||||
|
||||
@ -50,7 +50,7 @@ def init_app(app: DifyApp):
|
||||
}
|
||||
|
||||
@app.route("/db-pool-stat")
|
||||
def pool_stat():
|
||||
def pool_stat(): # pyright: ignore[reportUnusedFunction]
|
||||
from extensions.ext_database import db
|
||||
|
||||
engine = db.engine
|
||||
|
||||
@ -10,7 +10,7 @@ from models.engine import db
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Global flag to avoid duplicate registration of event listener
|
||||
_GEVENT_COMPATIBILITY_SETUP: bool = False
|
||||
_gevent_compatibility_setup: bool = False
|
||||
|
||||
|
||||
def _safe_rollback(connection):
|
||||
@ -26,14 +26,14 @@ def _safe_rollback(connection):
|
||||
|
||||
|
||||
def _setup_gevent_compatibility():
|
||||
global _GEVENT_COMPATIBILITY_SETUP # pylint: disable=global-statement
|
||||
global _gevent_compatibility_setup # pylint: disable=global-statement
|
||||
|
||||
# Avoid duplicate registration
|
||||
if _GEVENT_COMPATIBILITY_SETUP:
|
||||
if _gevent_compatibility_setup:
|
||||
return
|
||||
|
||||
@event.listens_for(Pool, "reset")
|
||||
def _safe_reset(dbapi_connection, connection_record, reset_state): # pylint: disable=unused-argument
|
||||
def _safe_reset(dbapi_connection, connection_record, reset_state): # pyright: ignore[reportUnusedFunction]
|
||||
if reset_state.terminate_only:
|
||||
return
|
||||
|
||||
@ -47,7 +47,7 @@ def _setup_gevent_compatibility():
|
||||
except (AttributeError, ImportError):
|
||||
_safe_rollback(dbapi_connection)
|
||||
|
||||
_GEVENT_COMPATIBILITY_SETUP = True
|
||||
_gevent_compatibility_setup = True
|
||||
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
|
||||
@ -2,4 +2,4 @@ from dify_app import DifyApp
|
||||
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
from events import event_handlers # noqa: F401
|
||||
from events import event_handlers # noqa: F401 # pyright: ignore[reportUnusedImport]
|
||||
|
||||
@ -136,6 +136,7 @@ def init_app(app: DifyApp):
|
||||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPSpanExporter
|
||||
from opentelemetry.instrumentation.celery import CeleryInstrumentor
|
||||
from opentelemetry.instrumentation.flask import FlaskInstrumentor
|
||||
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
|
||||
from opentelemetry.instrumentation.redis import RedisInstrumentor
|
||||
from opentelemetry.instrumentation.requests import RequestsInstrumentor
|
||||
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
|
||||
@ -238,6 +239,7 @@ def init_app(app: DifyApp):
|
||||
init_sqlalchemy_instrumentor(app)
|
||||
RedisInstrumentor().instrument()
|
||||
RequestsInstrumentor().instrument()
|
||||
HTTPXClientInstrumentor().instrument()
|
||||
atexit.register(shutdown_tracer)
|
||||
|
||||
|
||||
|
||||
@ -4,7 +4,6 @@ from dify_app import DifyApp
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
if dify_config.SENTRY_DSN:
|
||||
import openai
|
||||
import sentry_sdk
|
||||
from langfuse import parse_error # type: ignore
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
@ -28,7 +27,6 @@ def init_app(app: DifyApp):
|
||||
HTTPException,
|
||||
ValueError,
|
||||
FileNotFoundError,
|
||||
openai.APIStatusError,
|
||||
InvokeRateLimitError,
|
||||
parse_error.defaultErrorResponse,
|
||||
],
|
||||
|
||||
@ -33,7 +33,9 @@ class AliyunOssStorage(BaseStorage):
|
||||
|
||||
def load_once(self, filename: str) -> bytes:
|
||||
obj = self.client.get_object(self.__wrapper_folder_filename(filename))
|
||||
data: bytes = obj.read()
|
||||
data = obj.read()
|
||||
if not isinstance(data, bytes):
|
||||
return b""
|
||||
return data
|
||||
|
||||
def load_stream(self, filename: str) -> Generator:
|
||||
|
||||
@ -39,10 +39,10 @@ class AwsS3Storage(BaseStorage):
|
||||
self.client.head_bucket(Bucket=self.bucket_name)
|
||||
except ClientError as e:
|
||||
# if bucket not exists, create it
|
||||
if e.response["Error"]["Code"] == "404":
|
||||
if e.response.get("Error", {}).get("Code") == "404":
|
||||
self.client.create_bucket(Bucket=self.bucket_name)
|
||||
# if bucket is not accessible, pass, maybe the bucket is existing but not accessible
|
||||
elif e.response["Error"]["Code"] == "403":
|
||||
elif e.response.get("Error", {}).get("Code") == "403":
|
||||
pass
|
||||
else:
|
||||
# other error, raise exception
|
||||
@ -55,7 +55,7 @@ class AwsS3Storage(BaseStorage):
|
||||
try:
|
||||
data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
|
||||
except ClientError as ex:
|
||||
if ex.response["Error"]["Code"] == "NoSuchKey":
|
||||
if ex.response.get("Error", {}).get("Code") == "NoSuchKey":
|
||||
raise FileNotFoundError("File not found")
|
||||
else:
|
||||
raise
|
||||
@ -66,7 +66,7 @@ class AwsS3Storage(BaseStorage):
|
||||
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
|
||||
yield from response["Body"].iter_chunks()
|
||||
except ClientError as ex:
|
||||
if ex.response["Error"]["Code"] == "NoSuchKey":
|
||||
if ex.response.get("Error", {}).get("Code") == "NoSuchKey":
|
||||
raise FileNotFoundError("file not found")
|
||||
elif "reached max retries" in str(ex):
|
||||
raise ValueError("please do not request the same file too frequently")
|
||||
|
||||
@ -27,24 +27,38 @@ class AzureBlobStorage(BaseStorage):
|
||||
self.credential = None
|
||||
|
||||
def save(self, filename, data):
|
||||
if not self.bucket_name:
|
||||
return
|
||||
|
||||
client = self._sync_client()
|
||||
blob_container = client.get_container_client(container=self.bucket_name)
|
||||
blob_container.upload_blob(filename, data)
|
||||
|
||||
def load_once(self, filename: str) -> bytes:
|
||||
if not self.bucket_name:
|
||||
raise FileNotFoundError("Azure bucket name is not configured.")
|
||||
|
||||
client = self._sync_client()
|
||||
blob = client.get_container_client(container=self.bucket_name)
|
||||
blob = blob.get_blob_client(blob=filename)
|
||||
data: bytes = blob.download_blob().readall()
|
||||
data = blob.download_blob().readall()
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError(f"Expected bytes from blob.readall(), got {type(data).__name__}")
|
||||
return data
|
||||
|
||||
def load_stream(self, filename: str) -> Generator:
|
||||
if not self.bucket_name:
|
||||
raise FileNotFoundError("Azure bucket name is not configured.")
|
||||
|
||||
client = self._sync_client()
|
||||
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
|
||||
blob_data = blob.download_blob()
|
||||
yield from blob_data.chunks()
|
||||
|
||||
def download(self, filename, target_filepath):
|
||||
if not self.bucket_name:
|
||||
return
|
||||
|
||||
client = self._sync_client()
|
||||
|
||||
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
|
||||
@ -53,12 +67,18 @@ class AzureBlobStorage(BaseStorage):
|
||||
blob_data.readinto(my_blob)
|
||||
|
||||
def exists(self, filename):
|
||||
if not self.bucket_name:
|
||||
return False
|
||||
|
||||
client = self._sync_client()
|
||||
|
||||
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
|
||||
return blob.exists()
|
||||
|
||||
def delete(self, filename):
|
||||
if not self.bucket_name:
|
||||
return
|
||||
|
||||
client = self._sync_client()
|
||||
|
||||
blob_container = client.get_container_client(container=self.bucket_name)
|
||||
|
||||
@ -430,7 +430,7 @@ class ClickZettaVolumeStorage(BaseStorage):
|
||||
|
||||
rows = self._execute_sql(sql, fetch=True)
|
||||
|
||||
exists = len(rows) > 0
|
||||
exists = len(rows) > 0 if rows else False
|
||||
logger.debug("File %s exists check: %s", filename, exists)
|
||||
return exists
|
||||
except Exception as e:
|
||||
@ -509,16 +509,17 @@ class ClickZettaVolumeStorage(BaseStorage):
|
||||
rows = self._execute_sql(sql, fetch=True)
|
||||
|
||||
result = []
|
||||
for row in rows:
|
||||
file_path = row[0] # relative_path column
|
||||
if rows:
|
||||
for row in rows:
|
||||
file_path = row[0] # relative_path column
|
||||
|
||||
# For User Volume, remove dify prefix from results
|
||||
dify_prefix_with_slash = f"{self._config.dify_prefix}/"
|
||||
if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash):
|
||||
file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix
|
||||
# For User Volume, remove dify prefix from results
|
||||
dify_prefix_with_slash = f"{self._config.dify_prefix}/"
|
||||
if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash):
|
||||
file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix
|
||||
|
||||
if files and not file_path.endswith("/") or directories and file_path.endswith("/"):
|
||||
result.append(file_path)
|
||||
if files and not file_path.endswith("/") or directories and file_path.endswith("/"):
|
||||
result.append(file_path)
|
||||
|
||||
logger.debug("Scanned %d items in path %s", len(result), path)
|
||||
return result
|
||||
|
||||
@ -439,6 +439,11 @@ class VolumePermissionManager:
|
||||
self._permission_cache.clear()
|
||||
logger.debug("Permission cache cleared")
|
||||
|
||||
@property
|
||||
def volume_type(self) -> str | None:
|
||||
"""Get the volume type."""
|
||||
return self._volume_type
|
||||
|
||||
def get_permission_summary(self, dataset_id: str | None = None) -> dict[str, bool]:
|
||||
"""Get permission summary
|
||||
|
||||
@ -632,13 +637,13 @@ def check_volume_permission(permission_manager: VolumePermissionManager, operati
|
||||
VolumePermissionError: If no permission
|
||||
"""
|
||||
if not permission_manager.validate_operation(operation, dataset_id):
|
||||
error_message = f"Permission denied for operation '{operation}' on {permission_manager._volume_type} volume"
|
||||
error_message = f"Permission denied for operation '{operation}' on {permission_manager.volume_type} volume"
|
||||
if dataset_id:
|
||||
error_message += f" (dataset: {dataset_id})"
|
||||
|
||||
raise VolumePermissionError(
|
||||
error_message,
|
||||
operation=operation,
|
||||
volume_type=permission_manager._volume_type or "unknown",
|
||||
volume_type=permission_manager.volume_type or "unknown",
|
||||
dataset_id=dataset_id,
|
||||
)
|
||||
|
||||
@ -35,12 +35,16 @@ class GoogleCloudStorage(BaseStorage):
|
||||
def load_once(self, filename: str) -> bytes:
|
||||
bucket = self.client.get_bucket(self.bucket_name)
|
||||
blob = bucket.get_blob(filename)
|
||||
if blob is None:
|
||||
raise FileNotFoundError("File not found")
|
||||
data: bytes = blob.download_as_bytes()
|
||||
return data
|
||||
|
||||
def load_stream(self, filename: str) -> Generator:
|
||||
bucket = self.client.get_bucket(self.bucket_name)
|
||||
blob = bucket.get_blob(filename)
|
||||
if blob is None:
|
||||
raise FileNotFoundError("File not found")
|
||||
with blob.open(mode="rb") as blob_stream:
|
||||
while chunk := blob_stream.read(4096):
|
||||
yield chunk
|
||||
@ -48,6 +52,8 @@ class GoogleCloudStorage(BaseStorage):
|
||||
def download(self, filename, target_filepath):
|
||||
bucket = self.client.get_bucket(self.bucket_name)
|
||||
blob = bucket.get_blob(filename)
|
||||
if blob is None:
|
||||
raise FileNotFoundError("File not found")
|
||||
blob.download_to_filename(target_filepath)
|
||||
|
||||
def exists(self, filename):
|
||||
|
||||
@ -45,7 +45,7 @@ class HuaweiObsStorage(BaseStorage):
|
||||
|
||||
def _get_meta(self, filename):
|
||||
res = self.client.getObjectMetadata(bucketName=self.bucket_name, objectKey=filename)
|
||||
if res.status < 300:
|
||||
if res and res.status and res.status < 300:
|
||||
return res
|
||||
else:
|
||||
return None
|
||||
|
||||
@ -3,9 +3,9 @@ import os
|
||||
from collections.abc import Generator
|
||||
from pathlib import Path
|
||||
|
||||
import opendal
|
||||
from dotenv import dotenv_values
|
||||
from opendal import Operator
|
||||
from opendal.layers import RetryLayer
|
||||
|
||||
from extensions.storage.base_storage import BaseStorage
|
||||
|
||||
@ -35,7 +35,7 @@ class OpenDALStorage(BaseStorage):
|
||||
root = kwargs.get("root", "storage")
|
||||
Path(root).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
retry_layer = RetryLayer(max_times=3, factor=2.0, jitter=True)
|
||||
retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True)
|
||||
self.op = Operator(scheme=scheme, **kwargs).layer(retry_layer)
|
||||
logger.debug("opendal operator created with scheme %s", scheme)
|
||||
logger.debug("added retry layer to opendal operator")
|
||||
|
||||
@ -29,7 +29,7 @@ class OracleOCIStorage(BaseStorage):
|
||||
try:
|
||||
data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
|
||||
except ClientError as ex:
|
||||
if ex.response["Error"]["Code"] == "NoSuchKey":
|
||||
if ex.response.get("Error", {}).get("Code") == "NoSuchKey":
|
||||
raise FileNotFoundError("File not found")
|
||||
else:
|
||||
raise
|
||||
@ -40,7 +40,7 @@ class OracleOCIStorage(BaseStorage):
|
||||
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
|
||||
yield from response["Body"].iter_chunks()
|
||||
except ClientError as ex:
|
||||
if ex.response["Error"]["Code"] == "NoSuchKey":
|
||||
if ex.response.get("Error", {}).get("Code") == "NoSuchKey":
|
||||
raise FileNotFoundError("File not found")
|
||||
else:
|
||||
raise
|
||||
|
||||
@ -46,13 +46,13 @@ class SupabaseStorage(BaseStorage):
|
||||
Path(target_filepath).write_bytes(result)
|
||||
|
||||
def exists(self, filename):
|
||||
result = self.client.storage.from_(self.bucket_name).list(filename)
|
||||
if result.count() > 0:
|
||||
result = self.client.storage.from_(self.bucket_name).list(path=filename)
|
||||
if len(result) > 0:
|
||||
return True
|
||||
return False
|
||||
|
||||
def delete(self, filename):
|
||||
self.client.storage.from_(self.bucket_name).remove(filename)
|
||||
self.client.storage.from_(self.bucket_name).remove([filename])
|
||||
|
||||
def bucket_exists(self):
|
||||
buckets = self.client.storage.list_buckets()
|
||||
|
||||
@ -11,6 +11,14 @@ class VolcengineTosStorage(BaseStorage):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
if not dify_config.VOLCENGINE_TOS_ACCESS_KEY:
|
||||
raise ValueError("VOLCENGINE_TOS_ACCESS_KEY is not set")
|
||||
if not dify_config.VOLCENGINE_TOS_SECRET_KEY:
|
||||
raise ValueError("VOLCENGINE_TOS_SECRET_KEY is not set")
|
||||
if not dify_config.VOLCENGINE_TOS_ENDPOINT:
|
||||
raise ValueError("VOLCENGINE_TOS_ENDPOINT is not set")
|
||||
if not dify_config.VOLCENGINE_TOS_REGION:
|
||||
raise ValueError("VOLCENGINE_TOS_REGION is not set")
|
||||
self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME
|
||||
self.client = tos.TosClientV2(
|
||||
ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY,
|
||||
@ -20,27 +28,39 @@ class VolcengineTosStorage(BaseStorage):
|
||||
)
|
||||
|
||||
def save(self, filename, data):
|
||||
if not self.bucket_name:
|
||||
raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set")
|
||||
self.client.put_object(bucket=self.bucket_name, key=filename, content=data)
|
||||
|
||||
def load_once(self, filename: str) -> bytes:
|
||||
if not self.bucket_name:
|
||||
raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set")
|
||||
data = self.client.get_object(bucket=self.bucket_name, key=filename).read()
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError(f"Expected bytes, got {type(data).__name__}")
|
||||
return data
|
||||
|
||||
def load_stream(self, filename: str) -> Generator:
|
||||
if not self.bucket_name:
|
||||
raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set")
|
||||
response = self.client.get_object(bucket=self.bucket_name, key=filename)
|
||||
while chunk := response.read(4096):
|
||||
yield chunk
|
||||
|
||||
def download(self, filename, target_filepath):
|
||||
if not self.bucket_name:
|
||||
raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set")
|
||||
self.client.get_object_to_file(bucket=self.bucket_name, key=filename, file_path=target_filepath)
|
||||
|
||||
def exists(self, filename):
|
||||
if not self.bucket_name:
|
||||
return False
|
||||
res = self.client.head_object(bucket=self.bucket_name, key=filename)
|
||||
if res.status_code != 200:
|
||||
return False
|
||||
return True
|
||||
|
||||
def delete(self, filename):
|
||||
if not self.bucket_name:
|
||||
return
|
||||
self.client.delete_object(bucket=self.bucket_name, key=filename)
|
||||
|
||||
14
api/libs/collection_utils.py
Normal file
14
api/libs/collection_utils.py
Normal file
@ -0,0 +1,14 @@
|
||||
def convert_to_lower_and_upper_set(inputs: list[str] | set[str]) -> set[str]:
|
||||
"""
|
||||
Convert a list or set of strings to a set containing both lower and upper case versions of each string.
|
||||
|
||||
Args:
|
||||
inputs (list[str] | set[str]): A list or set of strings to be converted.
|
||||
|
||||
Returns:
|
||||
set[str]: A set containing both lower and upper case versions of each string.
|
||||
"""
|
||||
if not inputs:
|
||||
return set()
|
||||
else:
|
||||
return {case for s in inputs if s for case in (s.lower(), s.upper())}
|
||||
@ -5,7 +5,6 @@ requires-python = ">=3.11,<3.13"
|
||||
|
||||
dependencies = [
|
||||
"arize-phoenix-otel~=0.9.2",
|
||||
"authlib==1.6.4",
|
||||
"azure-identity==1.16.1",
|
||||
"beautifulsoup4==4.12.2",
|
||||
"boto3==1.35.99",
|
||||
@ -35,10 +34,8 @@ dependencies = [
|
||||
"json-repair>=0.41.1",
|
||||
"langfuse~=2.51.3",
|
||||
"langsmith~=0.1.77",
|
||||
"mailchimp-transactional~=1.0.50",
|
||||
"markdown~=3.5.1",
|
||||
"numpy~=1.26.4",
|
||||
"openai~=1.61.0",
|
||||
"openpyxl~=3.1.5",
|
||||
"opik~=1.7.25",
|
||||
"opentelemetry-api==1.27.0",
|
||||
@ -50,6 +47,7 @@ dependencies = [
|
||||
"opentelemetry-instrumentation==0.48b0",
|
||||
"opentelemetry-instrumentation-celery==0.48b0",
|
||||
"opentelemetry-instrumentation-flask==0.48b0",
|
||||
"opentelemetry-instrumentation-httpx==0.48b0",
|
||||
"opentelemetry-instrumentation-redis==0.48b0",
|
||||
"opentelemetry-instrumentation-requests==0.48b0",
|
||||
"opentelemetry-instrumentation-sqlalchemy==0.48b0",
|
||||
@ -61,7 +59,6 @@ dependencies = [
|
||||
"opentelemetry-semantic-conventions==0.48b0",
|
||||
"opentelemetry-util-http==0.48b0",
|
||||
"pandas[excel,output-formatting,performance]~=2.2.2",
|
||||
"pandoc~=2.4",
|
||||
"psycogreen~=1.0.2",
|
||||
"psycopg2-binary~=2.9.6",
|
||||
"pycryptodome==3.19.1",
|
||||
@ -181,10 +178,10 @@ dev = [
|
||||
# Required for storage clients
|
||||
############################################################
|
||||
storage = [
|
||||
"azure-storage-blob==12.13.0",
|
||||
"azure-storage-blob==12.26.0",
|
||||
"bce-python-sdk~=0.9.23",
|
||||
"cos-python-sdk-v5==1.9.38",
|
||||
"esdk-obs-python==3.24.6.1",
|
||||
"esdk-obs-python==3.25.8",
|
||||
"google-cloud-storage==2.16.0",
|
||||
"opendal~=0.46.0",
|
||||
"oss2==2.18.5",
|
||||
|
||||
@ -4,9 +4,7 @@
|
||||
"tests/",
|
||||
".venv",
|
||||
"migrations/",
|
||||
"core/rag",
|
||||
"extensions",
|
||||
"core/app/app_config/easy_ui_based_app/dataset"
|
||||
"core/rag"
|
||||
],
|
||||
"typeCheckingMode": "strict",
|
||||
"allowedUntypedLibraries": [
|
||||
@ -14,6 +12,7 @@
|
||||
"flask_login",
|
||||
"opentelemetry.instrumentation.celery",
|
||||
"opentelemetry.instrumentation.flask",
|
||||
"opentelemetry.instrumentation.httpx",
|
||||
"opentelemetry.instrumentation.requests",
|
||||
"opentelemetry.instrumentation.sqlalchemy",
|
||||
"opentelemetry.instrumentation.redis"
|
||||
@ -25,7 +24,6 @@
|
||||
"reportUnknownLambdaType": "hint",
|
||||
"reportMissingParameterType": "hint",
|
||||
"reportMissingTypeArgument": "hint",
|
||||
"reportUnnecessaryContains": "hint",
|
||||
"reportUnnecessaryComparison": "hint",
|
||||
"reportUnnecessaryCast": "hint",
|
||||
"reportUnnecessaryIsInstance": "hint",
|
||||
|
||||
@ -7,7 +7,7 @@ env =
|
||||
CHATGLM_API_BASE = http://a.abc.com:11451
|
||||
CODE_EXECUTION_API_KEY = dify-sandbox
|
||||
CODE_EXECUTION_ENDPOINT = http://127.0.0.1:8194
|
||||
CODE_MAX_STRING_LENGTH = 80000
|
||||
CODE_MAX_STRING_LENGTH = 400000
|
||||
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
|
||||
PLUGIN_DAEMON_URL=http://127.0.0.1:5002
|
||||
PLUGIN_MAX_PACKAGE_SIZE=15728640
|
||||
|
||||
@ -2,8 +2,6 @@ import uuid
|
||||
from collections.abc import Generator, Mapping
|
||||
from typing import Any, Union
|
||||
|
||||
from openai._exceptions import RateLimitError
|
||||
|
||||
from configs import dify_config
|
||||
from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator
|
||||
from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator
|
||||
@ -122,8 +120,6 @@ class AppGenerateService:
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Invalid app mode {app_model.mode}")
|
||||
except RateLimitError as e:
|
||||
raise InvokeRateLimitError(str(e))
|
||||
except Exception:
|
||||
rate_limit.exit(request_id)
|
||||
raise
|
||||
|
||||
@ -149,8 +149,7 @@ class RagPipelineTransformService:
|
||||
file_extensions = node.get("data", {}).get("fileExtensions", [])
|
||||
if not file_extensions:
|
||||
return node
|
||||
file_extensions = [file_extension.lower() for file_extension in file_extensions]
|
||||
node["data"]["fileExtensions"] = DOCUMENT_EXTENSIONS
|
||||
node["data"]["fileExtensions"] = [ext.lower() for ext in file_extensions if ext in DOCUMENT_EXTENSIONS]
|
||||
return node
|
||||
|
||||
def _deal_knowledge_index(
|
||||
|
||||
@ -349,14 +349,10 @@ class BuiltinToolManageService:
|
||||
provider_controller = ToolManager.get_builtin_provider(default_provider.provider, tenant_id)
|
||||
|
||||
credentials: list[ToolProviderCredentialApiEntity] = []
|
||||
encrypters = {}
|
||||
for provider in providers:
|
||||
credential_type = provider.credential_type
|
||||
if credential_type not in encrypters:
|
||||
encrypters[credential_type] = BuiltinToolManageService.create_tool_encrypter(
|
||||
tenant_id, provider, provider.provider, provider_controller
|
||||
)[0]
|
||||
encrypter = encrypters[credential_type]
|
||||
encrypter, _ = BuiltinToolManageService.create_tool_encrypter(
|
||||
tenant_id, provider, provider.provider, provider_controller
|
||||
)
|
||||
decrypt_credential = encrypter.mask_tool_credentials(encrypter.decrypt(provider.credentials))
|
||||
credential_entity = ToolTransformService.convert_builtin_provider_to_credential_entity(
|
||||
provider=provider,
|
||||
|
||||
@ -79,7 +79,6 @@ class WorkflowConverter:
|
||||
new_app.updated_by = account.id
|
||||
db.session.add(new_app)
|
||||
db.session.flush()
|
||||
db.session.commit()
|
||||
|
||||
workflow.app_id = new_app.id
|
||||
db.session.commit()
|
||||
|
||||
@ -29,23 +29,10 @@ def priority_rag_pipeline_run_task(
|
||||
tenant_id: str,
|
||||
):
|
||||
"""
|
||||
Async Run rag pipeline
|
||||
:param rag_pipeline_invoke_entities: Rag pipeline invoke entities
|
||||
rag_pipeline_invoke_entities include:
|
||||
:param pipeline_id: Pipeline ID
|
||||
:param user_id: User ID
|
||||
:param tenant_id: Tenant ID
|
||||
:param workflow_id: Workflow ID
|
||||
:param invoke_from: Invoke source (debugger, published, etc.)
|
||||
:param streaming: Whether to stream results
|
||||
:param datasource_type: Type of datasource
|
||||
:param datasource_info: Datasource information dict
|
||||
:param batch: Batch identifier
|
||||
:param document_id: Document ID (optional)
|
||||
:param start_node_id: Starting node ID
|
||||
:param inputs: Input parameters dict
|
||||
:param workflow_execution_id: Workflow execution ID
|
||||
:param workflow_thread_pool_id: Thread pool ID for workflow execution
|
||||
Async Run rag pipeline task using high priority queue.
|
||||
|
||||
:param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities
|
||||
:param tenant_id: Tenant ID for the pipeline execution
|
||||
"""
|
||||
# run with threading, thread pool size is 10
|
||||
|
||||
|
||||
@ -30,23 +30,10 @@ def rag_pipeline_run_task(
|
||||
tenant_id: str,
|
||||
):
|
||||
"""
|
||||
Async Run rag pipeline
|
||||
:param rag_pipeline_invoke_entities: Rag pipeline invoke entities
|
||||
rag_pipeline_invoke_entities include:
|
||||
:param pipeline_id: Pipeline ID
|
||||
:param user_id: User ID
|
||||
:param tenant_id: Tenant ID
|
||||
:param workflow_id: Workflow ID
|
||||
:param invoke_from: Invoke source (debugger, published, etc.)
|
||||
:param streaming: Whether to stream results
|
||||
:param datasource_type: Type of datasource
|
||||
:param datasource_info: Datasource information dict
|
||||
:param batch: Batch identifier
|
||||
:param document_id: Document ID (optional)
|
||||
:param start_node_id: Starting node ID
|
||||
:param inputs: Input parameters dict
|
||||
:param workflow_execution_id: Workflow execution ID
|
||||
:param workflow_thread_pool_id: Thread pool ID for workflow execution
|
||||
Async Run rag pipeline task using regular priority queue.
|
||||
|
||||
:param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities
|
||||
:param tenant_id: Tenant ID for the pipeline execution
|
||||
"""
|
||||
# run with threading, thread pool size is 10
|
||||
|
||||
|
||||
@ -5,15 +5,10 @@ These tasks provide asynchronous storage capabilities for workflow execution dat
|
||||
improving performance by offloading storage operations to background workers.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from celery import shared_task # type: ignore[import-untyped]
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
from services.workflow_draft_variable_service import DraftVarFileDeletion, WorkflowDraftVariableService
|
||||
|
||||
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
import time
|
||||
import uuid
|
||||
from os import getenv
|
||||
|
||||
import pytest
|
||||
|
||||
from configs import dify_config
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool
|
||||
from core.workflow.enums import WorkflowNodeExecutionStatus
|
||||
@ -15,7 +15,7 @@ from core.workflow.system_variable import SystemVariable
|
||||
from models.enums import UserFrom
|
||||
from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock
|
||||
|
||||
CODE_MAX_STRING_LENGTH = int(getenv("CODE_MAX_STRING_LENGTH", "10000"))
|
||||
CODE_MAX_STRING_LENGTH = dify_config.CODE_MAX_STRING_LENGTH
|
||||
|
||||
|
||||
def init_code_node(code_config: dict):
|
||||
|
||||
@ -18,6 +18,7 @@ from flask.testing import FlaskClient
|
||||
from sqlalchemy import Engine, text
|
||||
from sqlalchemy.orm import Session
|
||||
from testcontainers.core.container import DockerContainer
|
||||
from testcontainers.core.network import Network
|
||||
from testcontainers.core.waiting_utils import wait_for_logs
|
||||
from testcontainers.postgres import PostgresContainer
|
||||
from testcontainers.redis import RedisContainer
|
||||
@ -41,6 +42,7 @@ class DifyTestContainers:
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize container management with default configurations."""
|
||||
self.network: Network | None = None
|
||||
self.postgres: PostgresContainer | None = None
|
||||
self.redis: RedisContainer | None = None
|
||||
self.dify_sandbox: DockerContainer | None = None
|
||||
@ -62,12 +64,18 @@ class DifyTestContainers:
|
||||
|
||||
logger.info("Starting test containers for Dify integration tests...")
|
||||
|
||||
# Create Docker network for container communication
|
||||
logger.info("Creating Docker network for container communication...")
|
||||
self.network = Network()
|
||||
self.network.create()
|
||||
logger.info("Docker network created successfully with name: %s", self.network.name)
|
||||
|
||||
# Start PostgreSQL container for main application database
|
||||
# PostgreSQL is used for storing user data, workflows, and application state
|
||||
logger.info("Initializing PostgreSQL container...")
|
||||
self.postgres = PostgresContainer(
|
||||
image="postgres:14-alpine",
|
||||
)
|
||||
).with_network(self.network)
|
||||
self.postgres.start()
|
||||
db_host = self.postgres.get_container_host_ip()
|
||||
db_port = self.postgres.get_exposed_port(5432)
|
||||
@ -137,7 +145,7 @@ class DifyTestContainers:
|
||||
# Start Redis container for caching and session management
|
||||
# Redis is used for storing session data, cache entries, and temporary data
|
||||
logger.info("Initializing Redis container...")
|
||||
self.redis = RedisContainer(image="redis:6-alpine", port=6379)
|
||||
self.redis = RedisContainer(image="redis:6-alpine", port=6379).with_network(self.network)
|
||||
self.redis.start()
|
||||
redis_host = self.redis.get_container_host_ip()
|
||||
redis_port = self.redis.get_exposed_port(6379)
|
||||
@ -153,7 +161,7 @@ class DifyTestContainers:
|
||||
# Start Dify Sandbox container for code execution environment
|
||||
# Dify Sandbox provides a secure environment for executing user code
|
||||
logger.info("Initializing Dify Sandbox container...")
|
||||
self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest")
|
||||
self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest").with_network(self.network)
|
||||
self.dify_sandbox.with_exposed_ports(8194)
|
||||
self.dify_sandbox.env = {
|
||||
"API_KEY": "test_api_key",
|
||||
@ -173,22 +181,28 @@ class DifyTestContainers:
|
||||
# Start Dify Plugin Daemon container for plugin management
|
||||
# Dify Plugin Daemon provides plugin lifecycle management and execution
|
||||
logger.info("Initializing Dify Plugin Daemon container...")
|
||||
self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local")
|
||||
self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local").with_network(
|
||||
self.network
|
||||
)
|
||||
self.dify_plugin_daemon.with_exposed_ports(5002)
|
||||
# Get container internal network addresses
|
||||
postgres_container_name = self.postgres.get_wrapped_container().name
|
||||
redis_container_name = self.redis.get_wrapped_container().name
|
||||
|
||||
self.dify_plugin_daemon.env = {
|
||||
"DB_HOST": db_host,
|
||||
"DB_PORT": str(db_port),
|
||||
"DB_HOST": postgres_container_name, # Use container name for internal network communication
|
||||
"DB_PORT": "5432", # Use internal port
|
||||
"DB_USERNAME": self.postgres.username,
|
||||
"DB_PASSWORD": self.postgres.password,
|
||||
"DB_DATABASE": "dify_plugin",
|
||||
"REDIS_HOST": redis_host,
|
||||
"REDIS_PORT": str(redis_port),
|
||||
"REDIS_HOST": redis_container_name, # Use container name for internal network communication
|
||||
"REDIS_PORT": "6379", # Use internal port
|
||||
"REDIS_PASSWORD": "",
|
||||
"SERVER_PORT": "5002",
|
||||
"SERVER_KEY": "test_plugin_daemon_key",
|
||||
"MAX_PLUGIN_PACKAGE_SIZE": "52428800",
|
||||
"PPROF_ENABLED": "false",
|
||||
"DIFY_INNER_API_URL": f"http://{db_host}:5001",
|
||||
"DIFY_INNER_API_URL": f"http://{postgres_container_name}:5001",
|
||||
"DIFY_INNER_API_KEY": "test_inner_api_key",
|
||||
"PLUGIN_REMOTE_INSTALLING_HOST": "0.0.0.0",
|
||||
"PLUGIN_REMOTE_INSTALLING_PORT": "5003",
|
||||
@ -253,6 +267,15 @@ class DifyTestContainers:
|
||||
# Log error but don't fail the test cleanup
|
||||
logger.warning("Failed to stop container %s: %s", container, e)
|
||||
|
||||
# Stop and remove the network
|
||||
if self.network:
|
||||
try:
|
||||
logger.info("Removing Docker network...")
|
||||
self.network.remove()
|
||||
logger.info("Successfully removed Docker network")
|
||||
except Exception as e:
|
||||
logger.warning("Failed to remove Docker network: %s", e)
|
||||
|
||||
self._containers_started = False
|
||||
logger.info("All test containers stopped and cleaned up successfully")
|
||||
|
||||
|
||||
@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from faker import Faker
|
||||
from openai._exceptions import RateLimitError
|
||||
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from models.model import EndUser
|
||||
@ -484,36 +483,6 @@ class TestAppGenerateService:
|
||||
# Verify error message
|
||||
assert "Rate limit exceeded" in str(exc_info.value)
|
||||
|
||||
def test_generate_with_rate_limit_error_from_openai(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test generation when OpenAI rate limit error occurs.
|
||||
"""
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(
|
||||
db_session_with_containers, mock_external_service_dependencies, mode="completion"
|
||||
)
|
||||
|
||||
# Setup completion generator to raise RateLimitError
|
||||
mock_response = MagicMock()
|
||||
mock_response.request = MagicMock()
|
||||
mock_external_service_dependencies["completion_generator"].return_value.generate.side_effect = RateLimitError(
|
||||
"Rate limit exceeded", response=mock_response, body=None
|
||||
)
|
||||
|
||||
# Setup test arguments
|
||||
args = {"inputs": {"query": fake.text(max_nb_chars=50)}, "response_mode": "streaming"}
|
||||
|
||||
# Execute the method under test and expect rate limit error
|
||||
with pytest.raises(InvokeRateLimitError) as exc_info:
|
||||
AppGenerateService.generate(
|
||||
app_model=app, user=account, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=True
|
||||
)
|
||||
|
||||
# Verify error message
|
||||
assert "Rate limit exceeded" in str(exc_info.value)
|
||||
|
||||
def test_generate_with_invalid_app_mode(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test generation with invalid app mode.
|
||||
|
||||
@ -784,133 +784,6 @@ class TestCleanDatasetTask:
|
||||
print(f"Total cleanup time: {cleanup_duration:.3f} seconds")
|
||||
print(f"Average time per document: {cleanup_duration / len(documents):.3f} seconds")
|
||||
|
||||
def test_clean_dataset_task_concurrent_cleanup_scenarios(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test dataset cleanup with concurrent cleanup scenarios and race conditions.
|
||||
|
||||
This test verifies that the task can properly:
|
||||
1. Handle multiple cleanup operations on the same dataset
|
||||
2. Prevent data corruption during concurrent access
|
||||
3. Maintain data consistency across multiple cleanup attempts
|
||||
4. Handle race conditions gracefully
|
||||
5. Ensure idempotent cleanup operations
|
||||
"""
|
||||
# Create test data
|
||||
account, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
dataset = self._create_test_dataset(db_session_with_containers, account, tenant)
|
||||
document = self._create_test_document(db_session_with_containers, account, tenant, dataset)
|
||||
segment = self._create_test_segment(db_session_with_containers, account, tenant, dataset, document)
|
||||
upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant)
|
||||
|
||||
# Update document with file reference
|
||||
import json
|
||||
|
||||
document.data_source_info = json.dumps({"upload_file_id": upload_file.id})
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.commit()
|
||||
|
||||
# Save IDs for verification
|
||||
dataset_id = dataset.id
|
||||
tenant_id = tenant.id
|
||||
upload_file_id = upload_file.id
|
||||
|
||||
# Mock storage to simulate slow operations
|
||||
mock_storage = mock_external_service_dependencies["storage"]
|
||||
original_delete = mock_storage.delete
|
||||
|
||||
def slow_delete(key):
|
||||
import time
|
||||
|
||||
time.sleep(0.1) # Simulate slow storage operation
|
||||
return original_delete(key)
|
||||
|
||||
mock_storage.delete.side_effect = slow_delete
|
||||
|
||||
# Execute multiple cleanup operations concurrently
|
||||
import threading
|
||||
|
||||
cleanup_results = []
|
||||
cleanup_errors = []
|
||||
|
||||
def run_cleanup():
|
||||
try:
|
||||
clean_dataset_task(
|
||||
dataset_id=dataset_id,
|
||||
tenant_id=tenant_id,
|
||||
indexing_technique="high_quality",
|
||||
index_struct='{"type": "paragraph"}',
|
||||
collection_binding_id=str(uuid.uuid4()),
|
||||
doc_form="paragraph_index",
|
||||
)
|
||||
cleanup_results.append("success")
|
||||
except Exception as e:
|
||||
cleanup_errors.append(str(e))
|
||||
|
||||
# Start multiple cleanup threads
|
||||
threads = []
|
||||
for i in range(3):
|
||||
thread = threading.Thread(target=run_cleanup)
|
||||
threads.append(thread)
|
||||
thread.start()
|
||||
|
||||
# Wait for all threads to complete
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
|
||||
# Verify results
|
||||
# Check that all documents were deleted (only once)
|
||||
remaining_documents = db.session.query(Document).filter_by(dataset_id=dataset_id).all()
|
||||
assert len(remaining_documents) == 0
|
||||
|
||||
# Check that all segments were deleted (only once)
|
||||
remaining_segments = db.session.query(DocumentSegment).filter_by(dataset_id=dataset_id).all()
|
||||
assert len(remaining_segments) == 0
|
||||
|
||||
# Check that upload file was deleted (only once)
|
||||
# Note: In concurrent scenarios, the first thread deletes documents and segments,
|
||||
# subsequent threads may not find the related data to clean up upload files
|
||||
# This demonstrates the idempotent nature of the cleanup process
|
||||
remaining_files = db.session.query(UploadFile).filter_by(id=upload_file_id).all()
|
||||
# The upload file should be deleted by the first successful cleanup operation
|
||||
# However, in concurrent scenarios, this may not always happen due to race conditions
|
||||
# This test demonstrates the idempotent nature of the cleanup process
|
||||
if len(remaining_files) > 0:
|
||||
print(f"Warning: Upload file {upload_file_id} was not deleted in concurrent scenario")
|
||||
print("This is expected behavior demonstrating the idempotent nature of cleanup")
|
||||
# We don't assert here as the behavior depends on timing and race conditions
|
||||
|
||||
# Verify that storage.delete was called (may be called multiple times in concurrent scenarios)
|
||||
# In concurrent scenarios, storage operations may be called multiple times due to race conditions
|
||||
assert mock_storage.delete.call_count > 0
|
||||
|
||||
# Verify that index processor was called (may be called multiple times in concurrent scenarios)
|
||||
mock_index_processor = mock_external_service_dependencies["index_processor"]
|
||||
assert mock_index_processor.clean.call_count > 0
|
||||
|
||||
# Check cleanup results
|
||||
        assert len(cleanup_results) == 3, "All cleanup operations should complete"
        assert len(cleanup_errors) == 0, "No cleanup errors should occur"

        # Verify idempotency by running cleanup again on the same dataset
        # This should not perform any additional operations since data is already cleaned
        clean_dataset_task(
            dataset_id=dataset_id,
            tenant_id=tenant_id,
            indexing_technique="high_quality",
            index_struct='{"type": "paragraph"}',
            collection_binding_id=str(uuid.uuid4()),
            doc_form="paragraph_index",
        )

        # Verify that no additional storage operations were performed
        # Note: In concurrent scenarios, the exact count may vary due to race conditions
        print(f"Final storage delete calls: {mock_storage.delete.call_count}")
        print(f"Final index processor calls: {mock_index_processor.clean.call_count}")
        print("Note: Multiple calls in concurrent scenarios are expected due to race conditions")

    def test_clean_dataset_task_storage_exception_handling(
        self, db_session_with_containers, mock_external_service_dependencies
    ):

@ -148,61 +148,6 @@ class TestEnableSegmentsToIndexTask:
        db.session.commit()
        return segments

    def test_enable_segments_to_index_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful segments indexing with paragraph index type.

        This test verifies:
        - Proper dataset and document retrieval from database
        - Correct segment processing and document creation
        - Index processor integration
        - Database state updates
        - Redis cache key deletion
        """
        # Arrange: Create test data
        dataset, document = self._create_test_dataset_and_document(
            db_session_with_containers, mock_external_service_dependencies
        )
        segments = self._create_test_segments(db_session_with_containers, document, dataset)

        # Set up Redis cache keys to simulate indexing in progress
        segment_ids = [segment.id for segment in segments]
        for segment in segments:
            indexing_cache_key = f"segment_{segment.id}_indexing"
            redis_client.set(indexing_cache_key, "processing", ex=300)  # 5 minutes expiry

        # Verify cache keys exist
        for segment in segments:
            indexing_cache_key = f"segment_{segment.id}_indexing"
            assert redis_client.exists(indexing_cache_key) == 1

        # Act: Execute the task
        enable_segments_to_index_task(segment_ids, dataset.id, document.id)

        # Assert: Verify the expected outcomes
        # Verify index processor was called correctly
        mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
        mock_external_service_dependencies["index_processor"].load.assert_called_once()

        # Verify the load method was called with correct parameters
        call_args = mock_external_service_dependencies["index_processor"].load.call_args
        assert call_args is not None
        documents = call_args[0][1]  # Second argument should be documents list
        assert len(documents) == 3

        # Verify document structure
        for i, doc in enumerate(documents):
            assert doc.page_content == segments[i].content
            assert doc.metadata["doc_id"] == segments[i].index_node_id
            assert doc.metadata["doc_hash"] == segments[i].index_node_hash
            assert doc.metadata["document_id"] == document.id
            assert doc.metadata["dataset_id"] == dataset.id

        # Verify Redis cache keys were deleted
        for segment in segments:
            indexing_cache_key = f"segment_{segment.id}_indexing"
            assert redis_client.exists(indexing_cache_key) == 0

    def test_enable_segments_to_index_with_different_index_type(
        self, db_session_with_containers, mock_external_service_dependencies
    ):

@ -0,0 +1,242 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from extensions.ext_database import db
from libs.email_i18n import EmailType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from tasks.mail_account_deletion_task import send_account_deletion_verification_code, send_deletion_success_task


class TestMailAccountDeletionTask:
    """Integration tests for mail account deletion tasks using testcontainers."""

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_account_deletion_task.mail") as mock_mail,
            patch("tasks.mail_account_deletion_task.get_email_i18n_service") as mock_get_email_service,
        ):
            # Setup mock mail service
            mock_mail.is_inited.return_value = True

            # Setup mock email service
            mock_email_service = MagicMock()
            mock_get_email_service.return_value = mock_email_service

            yield {
                "mail": mock_mail,
                "get_email_service": mock_get_email_service,
                "email_service": mock_email_service,
            }

    def _create_test_account(self, db_session_with_containers):
        """
        Helper method to create a test account for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure

        Returns:
            Account: Created account instance
        """
        fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )
        db.session.add(account)
        db.session.commit()

        # Create tenant
        tenant = Tenant(
            name=fake.company(),
            status="normal",
        )
        db.session.add(tenant)
        db.session.commit()

        # Create tenant-account join
        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER.value,
            current=True,
        )
        db.session.add(join)
        db.session.commit()

        return account

    def test_send_deletion_success_task_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful account deletion success email sending.

        This test verifies:
        - Proper email service initialization check
        - Correct email service method calls
        - Template context is properly formatted
        - Email type is correctly specified
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_email = account.email
        test_language = "en-US"

        # Act: Execute the task
        send_deletion_success_task(test_email, test_language)

        # Assert: Verify the expected outcomes
        # Verify mail service was checked
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()

        # Verify email service was retrieved
        mock_external_service_dependencies["get_email_service"].assert_called_once()

        # Verify email was sent with correct parameters
        mock_external_service_dependencies["email_service"].send_email.assert_called_once_with(
            email_type=EmailType.ACCOUNT_DELETION_SUCCESS,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "email": test_email,
            },
        )

    def test_send_deletion_success_task_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test account deletion success email when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls are made
        - No exceptions are raised
        """
        # Arrange: Setup mail service to return not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False
        account = self._create_test_account(db_session_with_containers)
        test_email = account.email

        # Act: Execute the task
        send_deletion_success_task(test_email)

        # Assert: Verify no email service calls were made
        mock_external_service_dependencies["get_email_service"].assert_not_called()
        mock_external_service_dependencies["email_service"].send_email.assert_not_called()

    def test_send_deletion_success_task_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test account deletion success email when email service raises exception.

        This test verifies:
        - Exception is properly caught and logged
        - Task completes without raising exception
        - Error logging is recorded
        """
        # Arrange: Setup email service to raise exception
        mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed")
        account = self._create_test_account(db_session_with_containers)
        test_email = account.email

        # Act: Execute the task (should not raise exception)
        send_deletion_success_task(test_email)

        # Assert: Verify email service was called but exception was handled
        mock_external_service_dependencies["email_service"].send_email.assert_called_once()

    def test_send_account_deletion_verification_code_success(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful account deletion verification code email sending.

        This test verifies:
        - Proper email service initialization check
        - Correct email service method calls
        - Template context includes verification code
        - Email type is correctly specified
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_email = account.email
        test_code = "123456"
        test_language = "en-US"

        # Act: Execute the task
        send_account_deletion_verification_code(test_email, test_code, test_language)

        # Assert: Verify the expected outcomes
        # Verify mail service was checked
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()

        # Verify email service was retrieved
        mock_external_service_dependencies["get_email_service"].assert_called_once()

        # Verify email was sent with correct parameters
        mock_external_service_dependencies["email_service"].send_email.assert_called_once_with(
            email_type=EmailType.ACCOUNT_DELETION_VERIFICATION,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "code": test_code,
            },
        )

    def test_send_account_deletion_verification_code_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test account deletion verification code email when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls are made
        - No exceptions are raised
        """
        # Arrange: Setup mail service to return not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False
        account = self._create_test_account(db_session_with_containers)
        test_email = account.email
        test_code = "123456"

        # Act: Execute the task
        send_account_deletion_verification_code(test_email, test_code)

        # Assert: Verify no email service calls were made
        mock_external_service_dependencies["get_email_service"].assert_not_called()
        mock_external_service_dependencies["email_service"].send_email.assert_not_called()

    def test_send_account_deletion_verification_code_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test account deletion verification code email when email service raises exception.

        This test verifies:
        - Exception is properly caught and logged
        - Task completes without raising exception
        - Error logging is recorded
        """
        # Arrange: Setup email service to raise exception
        mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed")
        account = self._create_test_account(db_session_with_containers)
        test_email = account.email
        test_code = "123456"

        # Act: Execute the task (should not raise exception)
        send_account_deletion_verification_code(test_email, test_code)

        # Assert: Verify email service was called but exception was handled
        mock_external_service_dependencies["email_service"].send_email.assert_called_once()
@ -0,0 +1,282 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from libs.email_i18n import EmailType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from tasks.mail_change_mail_task import send_change_mail_completed_notification_task, send_change_mail_task


class TestMailChangeMailTask:
    """Integration tests for mail_change_mail_task using testcontainers."""

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_change_mail_task.mail") as mock_mail,
            patch("tasks.mail_change_mail_task.get_email_i18n_service") as mock_get_email_i18n_service,
        ):
            # Setup mock mail service
            mock_mail.is_inited.return_value = True

            # Setup mock email i18n service
            mock_email_service = MagicMock()
            mock_get_email_i18n_service.return_value = mock_email_service

            yield {
                "mail": mock_mail,
                "email_i18n_service": mock_email_service,
                "get_email_i18n_service": mock_get_email_i18n_service,
            }

    def _create_test_account(self, db_session_with_containers):
        """
        Helper method to create a test account for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure

        Returns:
            Account: Created account instance
        """
        fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        # Create tenant
        tenant = Tenant(
            name=fake.company(),
            status="normal",
        )
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        # Create tenant-account join
        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER.value,
            current=True,
        )
        db_session_with_containers.add(join)
        db_session_with_containers.commit()

        return account

    def test_send_change_mail_task_success_old_email_phase(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful change email task execution for old_email phase.

        This test verifies:
        - Proper mail service initialization check
        - Correct email service method call with old_email phase
        - Successful task completion
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_language = "en-US"
        test_email = account.email
        test_code = "123456"
        test_phase = "old_email"

        # Act: Execute the task
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with(
            language_code=test_language,
            to=test_email,
            code=test_code,
            phase=test_phase,
        )

    def test_send_change_mail_task_success_new_email_phase(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful change email task execution for new_email phase.

        This test verifies:
        - Proper mail service initialization check
        - Correct email service method call with new_email phase
        - Successful task completion
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_language = "zh-Hans"
        test_email = "new@example.com"
        test_code = "789012"
        test_phase = "new_email"

        # Act: Execute the task
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with(
            language_code=test_language,
            to=test_email,
            code=test_code,
            phase=test_phase,
        )

    def test_send_change_mail_task_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email task when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls when mail is not available
        """
        # Arrange: Setup mail service as not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False
        test_language = "en-US"
        test_email = "test@example.com"
        test_code = "123456"
        test_phase = "old_email"

        # Act: Execute the task
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify no email service calls
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_not_called()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_not_called()

    def test_send_change_mail_task_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email task when email service raises an exception.

        This test verifies:
        - Exception is properly caught and logged
        - Task completes without raising exception
        """
        # Arrange: Setup email service to raise exception
        mock_external_service_dependencies["email_i18n_service"].send_change_email.side_effect = Exception(
            "Email service failed"
        )
        test_language = "en-US"
        test_email = "test@example.com"
        test_code = "123456"
        test_phase = "old_email"

        # Act: Execute the task (should not raise exception)
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify email service was called despite exception
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with(
            language_code=test_language,
            to=test_email,
            code=test_code,
            phase=test_phase,
        )

    def test_send_change_mail_completed_notification_task_success(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful change email completed notification task execution.

        This test verifies:
        - Proper mail service initialization check
        - Correct email service method call with CHANGE_EMAIL_COMPLETED type
        - Template context is properly constructed
        - Successful task completion
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_language = "en-US"
        test_email = account.email

        # Act: Execute the task
        send_change_mail_completed_notification_task(test_language, test_email)

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with(
            email_type=EmailType.CHANGE_EMAIL_COMPLETED,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "email": test_email,
            },
        )

    def test_send_change_mail_completed_notification_task_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email completed notification task when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls when mail is not available
        """
        # Arrange: Setup mail service as not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False
        test_language = "en-US"
        test_email = "test@example.com"

        # Act: Execute the task
        send_change_mail_completed_notification_task(test_language, test_email)

        # Assert: Verify no email service calls
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_not_called()
        mock_external_service_dependencies["email_i18n_service"].send_email.assert_not_called()

    def test_send_change_mail_completed_notification_task_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email completed notification task when email service raises an exception.

        This test verifies:
        - Exception is properly caught and logged
        - Task completes without raising exception
        """
        # Arrange: Setup email service to raise exception
        mock_external_service_dependencies["email_i18n_service"].send_email.side_effect = Exception(
            "Email service failed"
        )
        test_language = "en-US"
        test_email = "test@example.com"

        # Act: Execute the task (should not raise exception)
        send_change_mail_completed_notification_task(test_language, test_email)

        # Assert: Verify email service was called despite exception
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with(
            email_type=EmailType.CHANGE_EMAIL_COMPLETED,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "email": test_email,
            },
        )
@ -0,0 +1,598 @@
"""
TestContainers-based integration tests for send_email_code_login_mail_task.

This module provides comprehensive integration tests for the email code login mail task
using TestContainers infrastructure. The tests ensure that the task properly sends
email verification codes for login with internationalization support and handles
various error scenarios in a real database environment.

All tests use the testcontainers infrastructure to ensure proper database isolation
and realistic testing scenarios with actual PostgreSQL and Redis instances.
"""

from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from libs.email_i18n import EmailType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from tasks.mail_email_code_login import send_email_code_login_mail_task


class TestSendEmailCodeLoginMailTask:
    """
    Comprehensive integration tests for send_email_code_login_mail_task using testcontainers.

    This test class covers all major functionality of the email code login mail task:
    - Successful email sending with different languages
    - Email service integration and template rendering
    - Error handling for various failure scenarios
    - Performance metrics and logging verification
    - Edge cases and boundary conditions

    All tests use the testcontainers infrastructure to ensure proper database isolation
    and realistic testing environment with actual database interactions.
    """

    @pytest.fixture(autouse=True)
    def cleanup_database(self, db_session_with_containers):
        """Clean up database before each test to ensure isolation."""
        from extensions.ext_redis import redis_client

        # Clear all test data
        db_session_with_containers.query(TenantAccountJoin).delete()
        db_session_with_containers.query(Tenant).delete()
        db_session_with_containers.query(Account).delete()
        db_session_with_containers.commit()

        # Clear Redis cache
        redis_client.flushdb()

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_email_code_login.mail") as mock_mail,
            patch("tasks.mail_email_code_login.get_email_i18n_service") as mock_email_service,
        ):
            # Setup default mock returns
            mock_mail.is_inited.return_value = True

            # Mock email service
            mock_email_service_instance = MagicMock()
            mock_email_service_instance.send_email.return_value = None
            mock_email_service.return_value = mock_email_service_instance

            yield {
                "mail": mock_mail,
                "email_service": mock_email_service,
                "email_service_instance": mock_email_service_instance,
            }

    def _create_test_account(self, db_session_with_containers, fake=None):
        """
        Helper method to create a test account for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure
            fake: Faker instance for generating test data

        Returns:
            Account: Created account instance
        """
        if fake is None:
            fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )

        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        return account

    def _create_test_tenant_and_account(self, db_session_with_containers, fake=None):
        """
        Helper method to create a test tenant and account for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure
            fake: Faker instance for generating test data

        Returns:
            tuple: (Account, Tenant) created instances
        """
        if fake is None:
            fake = Faker()

        # Create account using the existing helper method
        account = self._create_test_account(db_session_with_containers, fake)

        # Create tenant
        tenant = Tenant(
            name=fake.company(),
            plan="basic",
            status="active",
        )

        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        # Create tenant-account relationship
        tenant_account_join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER,
        )

        db_session_with_containers.add(tenant_account_join)
        db_session_with_containers.commit()

        return account, tenant

    def test_send_email_code_login_mail_task_success_english(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful email code login mail sending in English.

        This test verifies that the task can successfully:
        1. Send email code login mail with English language
        2. Use proper email service integration
        3. Pass correct template context to email service
        4. Log performance metrics correctly
        5. Complete task execution without errors
        """
        # Arrange: Setup test data
        fake = Faker()
        test_email = fake.email()
        test_code = "123456"
        test_language = "en-US"

        # Act: Execute the task
        send_email_code_login_mail_task(
            language=test_language,
            to=test_email,
            code=test_code,
        )

        # Assert: Verify expected outcomes
        mock_mail = mock_external_service_dependencies["mail"]
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify mail service was checked for initialization
        mock_mail.is_inited.assert_called_once()

        # Verify email service was called with correct parameters
        mock_email_service_instance.send_email.assert_called_once_with(
            email_type=EmailType.EMAIL_CODE_LOGIN,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "code": test_code,
            },
        )

    def test_send_email_code_login_mail_task_success_chinese(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful email code login mail sending in Chinese.

        This test verifies that the task can successfully:
        1. Send email code login mail with Chinese language
        2. Handle different language codes properly
        3. Use correct template context for Chinese emails
        4. Complete task execution without errors
        """
        # Arrange: Setup test data
        fake = Faker()
        test_email = fake.email()
        test_code = "789012"
        test_language = "zh-Hans"

        # Act: Execute the task
        send_email_code_login_mail_task(
            language=test_language,
            to=test_email,
            code=test_code,
        )

        # Assert: Verify expected outcomes
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify email service was called with Chinese language
        mock_email_service_instance.send_email.assert_called_once_with(
            email_type=EmailType.EMAIL_CODE_LOGIN,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "code": test_code,
            },
        )

    def test_send_email_code_login_mail_task_success_multiple_languages(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful email code login mail sending with multiple languages.

        This test verifies that the task can successfully:
        1. Handle various language codes correctly
        2. Send emails with different language configurations
        3. Maintain proper template context for each language
        4. Complete multiple task executions without conflicts
        """
        # Arrange: Setup test data
        fake = Faker()
        test_languages = ["en-US", "zh-Hans", "zh-CN", "ja-JP", "ko-KR"]
        test_emails = [fake.email() for _ in test_languages]
        test_codes = [fake.numerify("######") for _ in test_languages]

        # Act: Execute the task for each language
        for i, language in enumerate(test_languages):
            send_email_code_login_mail_task(
                language=language,
                to=test_emails[i],
                code=test_codes[i],
            )

        # Assert: Verify expected outcomes
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify email service was called for each language
        assert mock_email_service_instance.send_email.call_count == len(test_languages)

        # Verify each call had correct parameters
        for i, language in enumerate(test_languages):
            call_args = mock_email_service_instance.send_email.call_args_list[i]
            assert call_args[1]["email_type"] == EmailType.EMAIL_CODE_LOGIN
            assert call_args[1]["language_code"] == language
            assert call_args[1]["to"] == test_emails[i]
            assert call_args[1]["template_context"]["code"] == test_codes[i]

    def test_send_email_code_login_mail_task_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email code login mail task when mail service is not initialized.

        This test verifies that the task can properly:
        1. Check mail service initialization status
        2. Return early when mail is not initialized
        3. Not attempt to send email when service is unavailable
        4. Handle gracefully without errors
        """
        # Arrange: Setup test data
        fake = Faker()
        test_email = fake.email()
        test_code = "123456"
        test_language = "en-US"

        # Mock mail service as not initialized
        mock_mail = mock_external_service_dependencies["mail"]
        mock_mail.is_inited.return_value = False

        # Act: Execute the task
        send_email_code_login_mail_task(
            language=test_language,
            to=test_email,
            code=test_code,
        )

        # Assert: Verify expected outcomes
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify mail service was checked for initialization
        mock_mail.is_inited.assert_called_once()

        # Verify email service was not called
        mock_email_service_instance.send_email.assert_not_called()

    def test_send_email_code_login_mail_task_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email code login mail task when email service raises an exception.

        This test verifies that the task can properly:
        1. Handle email service exceptions gracefully
        2. Log appropriate error messages
        3. Continue execution without crashing
        4. Maintain proper error handling
        """
        # Arrange: Setup test data
        fake = Faker()
        test_email = fake.email()
        test_code = "123456"
        test_language = "en-US"

        # Mock email service to raise an exception
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]
        mock_email_service_instance.send_email.side_effect = Exception("Email service unavailable")

        # Act: Execute the task - it should handle the exception gracefully
        send_email_code_login_mail_task(
            language=test_language,
            to=test_email,
            code=test_code,
        )

        # Assert: Verify expected outcomes
        mock_mail = mock_external_service_dependencies["mail"]
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify mail service was checked for initialization
        mock_mail.is_inited.assert_called_once()

        # Verify email service was called (and failed)
        mock_email_service_instance.send_email.assert_called_once_with(
            email_type=EmailType.EMAIL_CODE_LOGIN,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "code": test_code,
            },
        )

    def test_send_email_code_login_mail_task_invalid_parameters(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email code login mail task with invalid parameters.

        This test verifies that the task can properly:
        1. Handle empty or None email addresses
        2. Process empty or None verification codes
        3. Handle invalid language codes
        4. Maintain proper error handling for invalid inputs
        """
        # Arrange: Setup test data
        fake = Faker()
        test_language = "en-US"

        # Test cases for invalid parameters
        invalid_test_cases = [
            {"email": "", "code": "123456", "description": "empty email"},
            {"email": None, "code": "123456", "description": "None email"},
            {"email": fake.email(), "code": "", "description": "empty code"},
            {"email": fake.email(), "code": None, "description": "None code"},
            {"email": "invalid-email", "code": "123456", "description": "invalid email format"},
        ]

        for test_case in invalid_test_cases:
            # Reset mocks for each test case
            mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]
            mock_email_service_instance.reset_mock()

            # Act: Execute the task with invalid parameters
            send_email_code_login_mail_task(
                language=test_language,
                to=test_case["email"],
                code=test_case["code"],
            )

            # Assert: Verify that email service was still called
            # The task should pass parameters to email service as-is
            # and let the email service handle validation
            mock_email_service_instance.send_email.assert_called_once()

    def test_send_email_code_login_mail_task_edge_cases(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email code login mail task with edge cases and boundary conditions.

        This test verifies that the task can properly:
        1. Handle very long email addresses
        2. Process very long verification codes
        3. Handle special characters in parameters
        4. Process extreme language codes
        """
        # Arrange: Setup test data
        fake = Faker()
        test_language = "en-US"

        # Edge case test data
        edge_cases = [
            {
                "email": "a" * 100 + "@example.com",  # Very long email
                "code": "1" * 20,  # Very long code
                "description": "very long email and code",
            },
            {
                "email": "test+tag@example.com",  # Email with special characters
                "code": "123-456",  # Code with special characters
                "description": "special characters",
            },
            {
                "email": "test@sub.domain.example.com",  # Complex domain
                "code": "000000",  # All zeros
                "description": "complex domain and all zeros code",
            },
            {
                "email": "test@example.co.uk",  # International domain
                "code": "999999",  # All nines
                "description": "international domain and all nines code",
            },
        ]

        for test_case in edge_cases:
            # Reset mocks for each test case
            mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]
            mock_email_service_instance.reset_mock()

            # Act: Execute the task with edge case data
            send_email_code_login_mail_task(
                language=test_language,
                to=test_case["email"],
                code=test_case["code"],
            )

            # Assert: Verify that email service was called with edge case data
            mock_email_service_instance.send_email.assert_called_once_with(
                email_type=EmailType.EMAIL_CODE_LOGIN,
                language_code=test_language,
                to=test_case["email"],
                template_context={
                    "to": test_case["email"],
                    "code": test_case["code"],
                },
            )

    def test_send_email_code_login_mail_task_database_integration(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email code login mail task with database integration.

        This test verifies that the task can properly:
        1. Work with real database connections
        2. Handle database session management
        3. Maintain proper database state
        4. Complete without database-related errors
        """
        # Arrange: Setup test data with database
        fake = Faker()
        account, tenant = self._create_test_tenant_and_account(db_session_with_containers, fake)

        test_email = account.email
        test_code = "123456"
        test_language = "en-US"

        # Act: Execute the task
        send_email_code_login_mail_task(
            language=test_language,
            to=test_email,
            code=test_code,
        )

        # Assert: Verify expected outcomes
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify email service was called with database account email
        mock_email_service_instance.send_email.assert_called_once_with(
            email_type=EmailType.EMAIL_CODE_LOGIN,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "code": test_code,
            },
        )

        # Verify database state is maintained
        db_session_with_containers.refresh(account)
        assert account.email == test_email
        assert account.status == "active"

    def test_send_email_code_login_mail_task_redis_integration(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email code login mail task with Redis integration.

        This test verifies that the task can properly:
        1. Work with Redis cache connections
        2. Handle Redis operations without errors
        3. Maintain proper cache state
        4. Complete without Redis-related errors
        """
        # Arrange: Setup test data
        fake = Faker()
        test_email = fake.email()
        test_code = "123456"
        test_language = "en-US"

        # Setup Redis cache data
        from extensions.ext_redis import redis_client

        cache_key = f"email_code_login_test_{test_email}"
        redis_client.set(cache_key, "test_value", ex=300)

        # Act: Execute the task
        send_email_code_login_mail_task(
            language=test_language,
            to=test_email,
            code=test_code,
        )

        # Assert: Verify expected outcomes
        mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]

        # Verify email service was called
        mock_email_service_instance.send_email.assert_called_once()

        # Verify Redis cache is still accessible
        assert redis_client.exists(cache_key) == 1
        assert redis_client.get(cache_key) == b"test_value"

        # Clean up Redis cache
        redis_client.delete(cache_key)

    def test_send_email_code_login_mail_task_error_handling_comprehensive(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test comprehensive error handling for email code login mail task.

        This test verifies that the task can properly:
        1. Handle various types of exceptions
        2. Log appropriate error messages
        3. Continue execution despite errors
        4. Maintain proper error reporting
        """
        # Arrange: Setup test data
        fake = Faker()
        test_email = fake.email()
        test_code = "123456"
        test_language = "en-US"

        # Test different exception types
        exception_types = [
            ("ValueError", ValueError("Invalid email format")),
            ("RuntimeError", RuntimeError("Service unavailable")),
            ("ConnectionError", ConnectionError("Network error")),
            ("TimeoutError", TimeoutError("Request timeout")),
            ("Exception", Exception("Generic error")),
        ]

        for error_name, exception in exception_types:
            # Reset mocks for each test case
            mock_email_service_instance = mock_external_service_dependencies["email_service_instance"]
            mock_email_service_instance.reset_mock()
            mock_email_service_instance.send_email.side_effect = exception

            # Mock logging to capture error messages
            with patch("tasks.mail_email_code_login.logger") as mock_logger:
                # Act: Execute the task - it should handle the exception gracefully
                send_email_code_login_mail_task(
                    language=test_language,
                    to=test_email,
                    code=test_code,
                )

                # Assert: Verify error handling
                # Verify email service was called (and failed)
                mock_email_service_instance.send_email.assert_called_once()

                # Verify error was logged
                error_calls = [
                    call
                    for call in mock_logger.exception.call_args_list
                    if f"Send email code login mail to {test_email} failed" in str(call)
                ]
                # Check if any exception call was made (the exact message format may vary)
                assert mock_logger.exception.call_count >= 1, f"Error should be logged for {error_name}"

                # Reset side effect for next iteration
                mock_email_service_instance.send_email.side_effect = None
@ -0,0 +1,261 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from tasks.mail_inner_task import send_inner_email_task


class TestMailInnerTask:
    """Integration tests for send_inner_email_task using testcontainers."""

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_inner_task.mail") as mock_mail,
            patch("tasks.mail_inner_task.get_email_i18n_service") as mock_get_email_i18n_service,
            patch("tasks.mail_inner_task._render_template_with_strategy") as mock_render_template,
        ):
            # Setup mock mail service
            mock_mail.is_inited.return_value = True

            # Setup mock email i18n service
            mock_email_service = MagicMock()
            mock_get_email_i18n_service.return_value = mock_email_service

            # Setup mock template rendering
            mock_render_template.return_value = "<html>Test email content</html>"

            yield {
                "mail": mock_mail,
                "email_service": mock_email_service,
                "render_template": mock_render_template,
            }

    def _create_test_email_data(self, fake: Faker) -> dict:
        """
        Helper method to create test email data for testing.

        Args:
            fake: Faker instance for generating test data

        Returns:
            dict: Test email data including recipients, subject, body, and substitutions
        """
        return {
            "to": [fake.email() for _ in range(3)],
            "subject": fake.sentence(nb_words=4),
            "body": "Hello {{name}}, this is a test email from {{company}}.",
            "substitutions": {
                "name": fake.name(),
                "company": fake.company(),
                "date": fake.date(),
            },
        }

    def test_send_inner_email_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful email sending with valid data.

        This test verifies:
        - Proper email service initialization check
        - Template rendering with substitutions
        - Email service integration
        - Multiple recipient handling
        """
        # Arrange: Create test data
        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify the expected outcomes
        # Verify mail service was checked for initialization
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()

        # Verify template rendering was called with correct parameters
        mock_external_service_dependencies["render_template"].assert_called_once_with(
            email_data["body"], email_data["substitutions"]
        )

        # Verify email service was called once with the full recipient list
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )

    def test_send_inner_email_single_recipient(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test email sending with single recipient.

        This test verifies:
        - Single recipient handling
        - Template rendering
        - Email service integration
        """
        # Arrange: Create test data with single recipient
        fake = Faker()
        email_data = {
            "to": [fake.email()],
            "subject": fake.sentence(nb_words=3),
            "body": "Welcome {{user_name}}!",
            "substitutions": {
                "user_name": fake.name(),
            },
        }

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify the expected outcomes
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )

    def test_send_inner_email_empty_substitutions(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test email sending with empty substitutions.

        This test verifies:
        - Template rendering with empty substitutions
        - Email service integration
        - Handling of minimal template context
        """
        # Arrange: Create test data with empty substitutions
        fake = Faker()
        email_data = {
            "to": [fake.email()],
            "subject": fake.sentence(nb_words=3),
            "body": "This is a simple email without variables.",
            "substitutions": {},
        }

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["render_template"].assert_called_once_with(email_data["body"], {})

        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )

    def test_send_inner_email_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email sending when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No template rendering occurs
        - No email service calls
        - No exceptions raised
        """
        # Arrange: Setup mail service as not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False

        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify no processing occurred
        mock_external_service_dependencies["render_template"].assert_not_called()
        mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called()

    def test_send_inner_email_template_rendering_error(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email sending when template rendering fails.

        This test verifies:
        - Exception handling during template rendering
        - No email service calls when template fails
        """
        # Arrange: Setup template rendering to raise an exception
        mock_external_service_dependencies["render_template"].side_effect = Exception("Template rendering failed")

        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify template rendering was attempted
        mock_external_service_dependencies["render_template"].assert_called_once()

        # Verify no email service calls due to exception
        mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called()

    def test_send_inner_email_service_error(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test email sending when email service fails.

        This test verifies:
        - Exception handling during email sending
        - Graceful error handling
        """
        # Arrange: Setup email service to raise an exception
        mock_external_service_dependencies["email_service"].send_raw_email.side_effect = Exception(
            "Email service failed"
        )

        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify template rendering occurred
        mock_external_service_dependencies["render_template"].assert_called_once()

        # Verify email service was called (and failed)
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )
@ -0,0 +1,543 @@
"""
Integration tests for mail_invite_member_task using testcontainers.

This module provides integration tests for the invite member email task
using TestContainers infrastructure. The tests ensure that the task properly sends
invitation emails with internationalization support, handles error scenarios,
and integrates correctly with the database and Redis for token management.

All tests use the testcontainers infrastructure to ensure proper database isolation
and realistic testing scenarios with actual PostgreSQL and Redis instances.
"""

import json
import uuid
from datetime import UTC, datetime
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from extensions.ext_redis import redis_client
from libs.email_i18n import EmailType
from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole
from tasks.mail_invite_member_task import send_invite_member_mail_task


class TestMailInviteMemberTask:
    """
    Integration tests for send_invite_member_mail_task using testcontainers.

    This test class covers the core functionality of the invite member email task:
    - Email sending with proper internationalization
    - Template context generation and URL construction
    - Error handling for failure scenarios
    - Integration with Redis for token validation
    - Mail service initialization checks
    - Real database integration with actual invitation flow

    All tests use the testcontainers infrastructure to ensure proper database isolation
    and realistic testing environment with actual database and Redis interactions.
    """

    @pytest.fixture(autouse=True)
    def cleanup_database(self, db_session_with_containers):
        """Clean up database before each test to ensure isolation."""
        # Clear all test data
        db_session_with_containers.query(TenantAccountJoin).delete()
        db_session_with_containers.query(Tenant).delete()
        db_session_with_containers.query(Account).delete()
        db_session_with_containers.commit()

        # Clear Redis cache
        redis_client.flushdb()

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_invite_member_task.mail") as mock_mail,
            patch("tasks.mail_invite_member_task.get_email_i18n_service") as mock_email_service,
            patch("tasks.mail_invite_member_task.dify_config") as mock_config,
        ):
            # Setup mail service mock
            mock_mail.is_inited.return_value = True

            # Setup email service mock
            mock_email_service_instance = MagicMock()
            mock_email_service_instance.send_email.return_value = None
            mock_email_service.return_value = mock_email_service_instance

            # Setup config mock
            mock_config.CONSOLE_WEB_URL = "https://console.dify.ai"

            yield {
                "mail": mock_mail,
                "email_service": mock_email_service_instance,
                "config": mock_config,
            }

    def _create_test_account_and_tenant(self, db_session_with_containers):
        """
        Helper method to create a test account and tenant for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure

        Returns:
            tuple: (Account, Tenant) created instances
        """
        fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            password=fake.password(),
            interface_language="en-US",
            status=AccountStatus.ACTIVE.value,
            created_at=datetime.now(UTC),
            updated_at=datetime.now(UTC),
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()
        db_session_with_containers.refresh(account)

        # Create tenant
        tenant = Tenant(
            name=fake.company(),
            created_at=datetime.now(UTC),
            updated_at=datetime.now(UTC),
        )
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()
        db_session_with_containers.refresh(tenant)

        # Create tenant member relationship
        tenant_join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER.value,
            created_at=datetime.now(UTC),
        )
        db_session_with_containers.add(tenant_join)
        db_session_with_containers.commit()

        return account, tenant

    def _create_invitation_token(self, tenant, account):
        """
        Helper method to create a valid invitation token in Redis.

        Args:
            tenant: Tenant instance
            account: Account instance

        Returns:
            str: Generated invitation token
        """
        token = str(uuid.uuid4())
        invitation_data = {
            "account_id": account.id,
            "email": account.email,
            "workspace_id": tenant.id,
        }
        cache_key = f"member_invite:token:{token}"
        redis_client.setex(cache_key, 24 * 60 * 60, json.dumps(invitation_data))  # 24 hours
        return token

    def _create_pending_account_for_invitation(self, db_session_with_containers, email, tenant):
        """
        Helper method to create a pending account for invitation testing.

        Args:
            db_session_with_containers: Database session
            email: Email address for the account
            tenant: Tenant instance

        Returns:
            Account: Created pending account
        """
        account = Account(
            email=email,
            name=email.split("@")[0],
            password="",
            interface_language="en-US",
            status=AccountStatus.PENDING.value,
            created_at=datetime.now(UTC),
            updated_at=datetime.now(UTC),
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()
        db_session_with_containers.refresh(account)

        # Create tenant member relationship
        tenant_join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.NORMAL.value,
            created_at=datetime.now(UTC),
        )
        db_session_with_containers.add(tenant_join)
        db_session_with_containers.commit()

        return account

    def test_send_invite_member_mail_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful invitation email sending with all parameters.

        This test verifies:
        - Email service is called with correct parameters
        - Template context includes all required fields
        - URL is constructed correctly with token
        - Performance logging is recorded
        - No exceptions are raised
        """
        # Arrange: Create test data
        inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
        invitee_email = "test@example.com"
        language = "en-US"
        token = self._create_invitation_token(tenant, inviter)
        inviter_name = inviter.name
        workspace_name = tenant.name

        # Act: Execute the task
        send_invite_member_mail_task(
            language=language,
            to=invitee_email,
            token=token,
            inviter_name=inviter_name,
            workspace_name=workspace_name,
        )

        # Assert: Verify email service was called correctly
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_email.assert_called_once()

        # Verify call arguments
        call_args = mock_email_service.send_email.call_args
|
||||
assert call_args[1]["email_type"] == EmailType.INVITE_MEMBER
|
||||
assert call_args[1]["language_code"] == language
|
||||
assert call_args[1]["to"] == invitee_email
|
||||
|
||||
# Verify template context
|
||||
template_context = call_args[1]["template_context"]
|
||||
assert template_context["to"] == invitee_email
|
||||
assert template_context["inviter_name"] == inviter_name
|
||||
assert template_context["workspace_name"] == workspace_name
|
||||
assert template_context["url"] == f"https://console.dify.ai/activate?token={token}"
|
||||
|
||||
def test_send_invite_member_mail_different_languages(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test invitation email sending with different language codes.
|
||||
|
||||
This test verifies:
|
||||
- Email service handles different language codes correctly
|
||||
- Template context is passed correctly for each language
|
||||
- No language-specific errors occur
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
token = self._create_invitation_token(tenant, inviter)
|
||||
|
||||
test_languages = ["en-US", "zh-CN", "ja-JP", "fr-FR", "de-DE", "es-ES"]
|
||||
|
||||
for language in test_languages:
|
||||
# Act: Execute the task with different language
|
||||
send_invite_member_mail_task(
|
||||
language=language,
|
||||
to="test@example.com",
|
||||
token=token,
|
||||
inviter_name=inviter.name,
|
||||
workspace_name=tenant.name,
|
||||
)
|
||||
|
||||
# Assert: Verify language code was passed correctly
|
||||
mock_email_service = mock_external_service_dependencies["email_service"]
|
||||
call_args = mock_email_service.send_email.call_args
|
||||
assert call_args[1]["language_code"] == language
|
||||
|
||||
def test_send_invite_member_mail_mail_not_initialized(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test behavior when mail service is not initialized.
|
||||
|
||||
This test verifies:
|
||||
- Task returns early when mail is not initialized
|
||||
- Email service is not called
|
||||
- No exceptions are raised
|
||||
"""
|
||||
# Arrange: Setup mail service as not initialized
|
||||
mock_mail = mock_external_service_dependencies["mail"]
|
||||
mock_mail.is_inited.return_value = False
|
||||
|
||||
# Act: Execute the task
|
||||
result = send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to="test@example.com",
|
||||
token="test-token",
|
||||
inviter_name="Test User",
|
||||
workspace_name="Test Workspace",
|
||||
)
|
||||
|
||||
# Assert: Verify early return
|
||||
assert result is None
|
||||
mock_email_service = mock_external_service_dependencies["email_service"]
|
||||
mock_email_service.send_email.assert_not_called()
|
||||
|
||||
def test_send_invite_member_mail_email_service_exception(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test error handling when email service raises an exception.
|
||||
|
||||
This test verifies:
|
||||
- Exception is caught and logged
|
||||
- Task completes without raising exception
|
||||
- Error logging is performed
|
||||
"""
|
||||
# Arrange: Setup email service to raise exception
|
||||
mock_email_service = mock_external_service_dependencies["email_service"]
|
||||
mock_email_service.send_email.side_effect = Exception("Email service failed")
|
||||
|
||||
# Act & Assert: Execute task and verify exception is handled
|
||||
with patch("tasks.mail_invite_member_task.logger") as mock_logger:
|
||||
send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to="test@example.com",
|
||||
token="test-token",
|
||||
inviter_name="Test User",
|
||||
workspace_name="Test Workspace",
|
||||
)
|
||||
|
||||
# Verify error was logged
|
||||
mock_logger.exception.assert_called_once()
|
||||
error_call = mock_logger.exception.call_args[0][0]
|
||||
assert "Send invite member mail to %s failed" in error_call
|
||||
|
||||
def test_send_invite_member_mail_template_context_validation(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test template context contains all required fields for email rendering.
|
||||
|
||||
This test verifies:
|
||||
- All required template context fields are present
|
||||
- Field values match expected data
|
||||
- URL construction is correct
|
||||
- No missing or None values in context
|
||||
"""
|
||||
# Arrange: Create test data with specific values
|
||||
inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
token = "test-token-123"
|
||||
invitee_email = "invitee@example.com"
|
||||
inviter_name = "John Doe"
|
||||
workspace_name = "Acme Corp"
|
||||
|
||||
# Act: Execute the task
|
||||
send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to=invitee_email,
|
||||
token=token,
|
||||
inviter_name=inviter_name,
|
||||
workspace_name=workspace_name,
|
||||
)
|
||||
|
||||
# Assert: Verify template context
|
||||
mock_email_service = mock_external_service_dependencies["email_service"]
|
||||
call_args = mock_email_service.send_email.call_args
|
||||
template_context = call_args[1]["template_context"]
|
||||
|
||||
# Verify all required fields are present
|
||||
required_fields = ["to", "inviter_name", "workspace_name", "url"]
|
||||
for field in required_fields:
|
||||
assert field in template_context
|
||||
assert template_context[field] is not None
|
||||
assert template_context[field] != ""
|
||||
|
||||
# Verify specific values
|
||||
assert template_context["to"] == invitee_email
|
||||
assert template_context["inviter_name"] == inviter_name
|
||||
assert template_context["workspace_name"] == workspace_name
|
||||
assert template_context["url"] == f"https://console.dify.ai/activate?token={token}"
|
||||
|
||||
def test_send_invite_member_mail_integration_with_redis_token(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test integration with Redis token validation.
|
||||
|
||||
This test verifies:
|
||||
- Task works with real Redis token data
|
||||
- Token validation can be performed after email sending
|
||||
- Redis data integrity is maintained
|
||||
"""
|
||||
# Arrange: Create test data and store token in Redis
|
||||
inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
token = self._create_invitation_token(tenant, inviter)
|
||||
|
||||
# Verify token exists in Redis before sending email
|
||||
cache_key = f"member_invite:token:{token}"
|
||||
assert redis_client.exists(cache_key) == 1
|
||||
|
||||
# Act: Execute the task
|
||||
send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to=inviter.email,
|
||||
token=token,
|
||||
inviter_name=inviter.name,
|
||||
workspace_name=tenant.name,
|
||||
)
|
||||
|
||||
# Assert: Verify token still exists after email sending
|
||||
assert redis_client.exists(cache_key) == 1
|
||||
|
||||
# Verify token data integrity
|
||||
token_data = redis_client.get(cache_key)
|
||||
assert token_data is not None
|
||||
invitation_data = json.loads(token_data)
|
||||
assert invitation_data["account_id"] == inviter.id
|
||||
assert invitation_data["email"] == inviter.email
|
||||
assert invitation_data["workspace_id"] == tenant.id
|
||||
|
||||
def test_send_invite_member_mail_with_special_characters(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test email sending with special characters in names and workspace names.
|
||||
|
||||
This test verifies:
|
||||
- Special characters are handled correctly in template context
|
||||
- Email service receives properly formatted data
|
||||
- No encoding issues occur
|
||||
"""
|
||||
# Arrange: Create test data with special characters
|
||||
inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
token = self._create_invitation_token(tenant, inviter)
|
||||
|
||||
special_cases = [
|
||||
("John O'Connor", "Acme & Co."),
|
||||
("José María", "Café & Restaurant"),
|
||||
("李小明", "北京科技有限公司"),
|
||||
("François & Marie", "L'École Internationale"),
|
||||
("Александр", "ООО Технологии"),
|
||||
("محمد أحمد", "شركة التقنية المتقدمة"),
|
||||
]
|
||||
|
||||
for inviter_name, workspace_name in special_cases:
|
||||
# Act: Execute the task
|
||||
send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to="test@example.com",
|
||||
token=token,
|
||||
inviter_name=inviter_name,
|
||||
workspace_name=workspace_name,
|
||||
)
|
||||
|
||||
# Assert: Verify special characters are preserved
|
||||
mock_email_service = mock_external_service_dependencies["email_service"]
|
||||
call_args = mock_email_service.send_email.call_args
|
||||
template_context = call_args[1]["template_context"]
|
||||
|
||||
assert template_context["inviter_name"] == inviter_name
|
||||
assert template_context["workspace_name"] == workspace_name
|
||||
|
||||
def test_send_invite_member_mail_real_database_integration(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test real database integration with actual invitation flow.
|
||||
|
||||
This test verifies:
|
||||
- Task works with real database entities
|
||||
- Account and tenant relationships are properly maintained
|
||||
- Database state is consistent after email sending
|
||||
- Real invitation data flow is tested
|
||||
"""
|
||||
# Arrange: Create real database entities
|
||||
inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
invitee_email = "newmember@example.com"
|
||||
|
||||
# Create a pending account for invitation (simulating real invitation flow)
|
||||
pending_account = self._create_pending_account_for_invitation(db_session_with_containers, invitee_email, tenant)
|
||||
|
||||
# Create invitation token with real account data
|
||||
token = self._create_invitation_token(tenant, pending_account)
|
||||
|
||||
# Act: Execute the task with real data
|
||||
send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to=invitee_email,
|
||||
token=token,
|
||||
inviter_name=inviter.name,
|
||||
workspace_name=tenant.name,
|
||||
)
|
||||
|
||||
# Assert: Verify email service was called with real data
|
||||
mock_email_service = mock_external_service_dependencies["email_service"]
|
||||
mock_email_service.send_email.assert_called_once()
|
||||
|
||||
# Verify database state is maintained
|
||||
db_session_with_containers.refresh(pending_account)
|
||||
db_session_with_containers.refresh(tenant)
|
||||
|
||||
assert pending_account.status == AccountStatus.PENDING.value
|
||||
assert pending_account.email == invitee_email
|
||||
assert tenant.name is not None
|
||||
|
||||
# Verify tenant relationship exists
|
||||
tenant_join = (
|
||||
db_session_with_containers.query(TenantAccountJoin)
|
||||
.filter_by(tenant_id=tenant.id, account_id=pending_account.id)
|
||||
.first()
|
||||
)
|
||||
assert tenant_join is not None
|
||||
assert tenant_join.role == TenantAccountRole.NORMAL.value
|
||||
|
||||
def test_send_invite_member_mail_token_lifecycle_management(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test token lifecycle management and validation.
|
||||
|
||||
This test verifies:
|
||||
- Token is properly stored in Redis with correct TTL
|
||||
- Token data structure is correct
|
||||
- Token can be retrieved and validated after email sending
|
||||
- Token expiration is handled correctly
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers)
|
||||
token = self._create_invitation_token(tenant, inviter)
|
||||
|
||||
# Act: Execute the task
|
||||
send_invite_member_mail_task(
|
||||
language="en-US",
|
||||
to=inviter.email,
|
||||
token=token,
|
||||
inviter_name=inviter.name,
|
||||
workspace_name=tenant.name,
|
||||
)
|
||||
|
||||
# Assert: Verify token lifecycle
|
||||
cache_key = f"member_invite:token:{token}"
|
||||
|
||||
# Token should still exist
|
||||
assert redis_client.exists(cache_key) == 1
|
||||
|
||||
# Token should have correct TTL (approximately 24 hours)
|
||||
ttl = redis_client.ttl(cache_key)
|
||||
assert 23 * 60 * 60 <= ttl <= 24 * 60 * 60 # Allow some tolerance
|
||||
|
||||
# Token data should be valid
|
||||
token_data = redis_client.get(cache_key)
|
||||
assert token_data is not None
|
||||
|
||||
invitation_data = json.loads(token_data)
|
||||
assert invitation_data["account_id"] == inviter.id
|
||||
assert invitation_data["email"] == inviter.email
|
||||
assert invitation_data["workspace_id"] == tenant.id
|
||||
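
Note: the following is a minimal sketch of the task behaviour these tests pin down, reconstructed only from the assertions and patch targets above (mail, dify_config, get_email_i18n_service, logger). The import paths and the Celery wiring of the real tasks/mail_invite_member_task.py are assumptions and may differ.

import logging

from configs import dify_config  # assumed import paths; the tests only show that
from extensions.ext_mail import mail  # these names exist on the task module
from libs.email_i18n import EmailType, get_email_i18n_service

logger = logging.getLogger(__name__)


def send_invite_member_mail_task(language: str, to: str, token: str, inviter_name: str, workspace_name: str):
    # Early return when the mail extension is not configured
    # (asserted by test_send_invite_member_mail_mail_not_initialized).
    if not mail.is_inited():
        return

    try:
        # URL shape asserted by the success and template-context tests.
        url = f"{dify_config.CONSOLE_WEB_URL}/activate?token={token}"
        get_email_i18n_service().send_email(
            email_type=EmailType.INVITE_MEMBER,
            language_code=language,
            to=to,
            template_context={
                "to": to,
                "inviter_name": inviter_name,
                "workspace_name": workspace_name,
                "url": url,
            },
        )
    except Exception:
        # Message format asserted by test_send_invite_member_mail_email_service_exception.
        logger.exception("Send invite member mail to %s failed", to)
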
@@ -33,6 +33,7 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch):
    assert config.EDITION == "SELF_HOSTED"
    assert config.API_COMPRESSION_ENABLED is False
    assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0
    assert config.TEMPLATE_TRANSFORM_MAX_LENGTH == 400_000

    # annotated field with default value
    assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600

@@ -172,73 +172,31 @@ class TestSupabaseStorage:
        assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]]
        mock_client.storage.from_().download.assert_called_with("test.txt")

    def test_exists_with_list_containing_items(self, storage_with_mock_client):
        """Test exists returns True when list() returns items (using len() > 0)."""
    def test_exists_returns_true_when_file_found(self, storage_with_mock_client):
        """Test exists returns True when list() returns items."""
        storage, mock_client = storage_with_mock_client

        # Mock list return with special object that has count() method
        mock_list_result = Mock()
        mock_list_result.count.return_value = 1
        mock_client.storage.from_().list.return_value = mock_list_result
        mock_client.storage.from_().list.return_value = [{"name": "test.txt"}]

        result = storage.exists("test.txt")

        assert result is True
        # from_ gets called during init too, so just check it was called with the right bucket
        assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]]
        mock_client.storage.from_().list.assert_called_with("test.txt")
        mock_client.storage.from_().list.assert_called_with(path="test.txt")

    def test_exists_with_count_method_greater_than_zero(self, storage_with_mock_client):
        """Test exists returns True when list result has count() > 0."""
    def test_exists_returns_false_when_file_not_found(self, storage_with_mock_client):
        """Test exists returns False when list() returns an empty list."""
        storage, mock_client = storage_with_mock_client

        # Mock list return with count() method
        mock_list_result = Mock()
        mock_list_result.count.return_value = 1
        mock_client.storage.from_().list.return_value = mock_list_result

        result = storage.exists("test.txt")

        assert result is True
        # Verify the correct calls were made
        assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]]
        mock_client.storage.from_().list.assert_called_with("test.txt")
        mock_list_result.count.assert_called()

    def test_exists_with_count_method_zero(self, storage_with_mock_client):
        """Test exists returns False when list result has count() == 0."""
        storage, mock_client = storage_with_mock_client

        # Mock list return with count() method returning 0
        mock_list_result = Mock()
        mock_list_result.count.return_value = 0
        mock_client.storage.from_().list.return_value = mock_list_result
        mock_client.storage.from_().list.return_value = []

        result = storage.exists("test.txt")

        assert result is False
        # Verify the correct calls were made
        assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]]
        mock_client.storage.from_().list.assert_called_with("test.txt")
        mock_list_result.count.assert_called()
        mock_client.storage.from_().list.assert_called_with(path="test.txt")

    def test_exists_with_empty_list(self, storage_with_mock_client):
        """Test exists returns False when list() returns empty list."""
        storage, mock_client = storage_with_mock_client

        # Mock list return with special object that has count() method returning 0
        mock_list_result = Mock()
        mock_list_result.count.return_value = 0
        mock_client.storage.from_().list.return_value = mock_list_result

        result = storage.exists("test.txt")

        assert result is False
        # Verify the correct calls were made
        assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]]
        mock_client.storage.from_().list.assert_called_with("test.txt")

    def test_delete_calls_remove_with_filename(self, storage_with_mock_client):
    def test_delete_calls_remove_with_filename_in_list(self, storage_with_mock_client):
        """Test delete calls remove([...]) (some client versions require a list)."""
        storage, mock_client = storage_with_mock_client

@@ -247,7 +205,7 @@ class TestSupabaseStorage:
        storage.delete(filename)

        mock_client.storage.from_.assert_called_once_with("test-bucket")
        mock_client.storage.from_().remove.assert_called_once_with(filename)
        mock_client.storage.from_().remove.assert_called_once_with([filename])

    def test_bucket_exists_returns_true_when_bucket_found(self):
        """Test bucket_exists returns True when bucket is found in list."""

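Note: a quick sketch of the exists() behaviour the rewritten assertions imply. The attribute names bucket_name and client are assumptions; current supabase-py list() returns a plain list of metadata dicts, so presence reduces to a non-empty result, and the keyword path= matches the new assertion.

    # Sketch only -- not the verified SupabaseStorage implementation.
    def exists(self, filename: str) -> bool:
        result = self.client.storage.from_(self.bucket_name).list(path=filename)
        return len(result) > 0
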
@@ -1,3 +1,5 @@
from unittest.mock import patch

import pytest
from tos import TosClientV2  # type: ignore

@@ -13,7 +15,13 @@ class TestVolcengineTos(BaseStorageTest):
    @pytest.fixture(autouse=True)
    def setup_method(self, setup_volcengine_tos_mock):
        """Executed before each test method."""
        self.storage = VolcengineTosStorage()
        with patch("extensions.storage.volcengine_tos_storage.dify_config") as mock_config:
            mock_config.VOLCENGINE_TOS_ACCESS_KEY = "test_access_key"
            mock_config.VOLCENGINE_TOS_SECRET_KEY = "test_secret_key"
            mock_config.VOLCENGINE_TOS_ENDPOINT = "test_endpoint"
            mock_config.VOLCENGINE_TOS_REGION = "test_region"
            self.storage = VolcengineTosStorage()

        self.storage.bucket_name = get_example_bucket()
        self.storage.client = TosClientV2(
            ak="dify",

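Note: the reworked fixture patches dify_config before constructing the storage, which implies the constructor now reads its credentials from configuration. A rough sketch under that assumption follows; the import paths and the exact constructor body are not verified against extensions/storage/volcengine_tos_storage.py.

import tos
from configs import dify_config


class VolcengineTosStorage:
    def __init__(self):
        # Assumed: credentials come from the four config keys the fixture patches;
        # bucket_name is still assigned separately, as the test does afterwards.
        self.client = tos.TosClientV2(
            ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY,
            sk=dify_config.VOLCENGINE_TOS_SECRET_KEY,
            endpoint=dify_config.VOLCENGINE_TOS_ENDPOINT,
            region=dify_config.VOLCENGINE_TOS_REGION,
        )
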
44 api/uv.lock generated

@@ -445,16 +445,17 @@ wheels = [

[[package]]
name = "azure-storage-blob"
version = "12.13.0"
version = "12.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "azure-core" },
    { name = "cryptography" },
    { name = "msrest" },
    { name = "isodate" },
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838 }
sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309 },
    { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" },
]

[[package]]
@@ -1297,7 +1298,6 @@ version = "1.9.1"
source = { virtual = "." }
dependencies = [
    { name = "arize-phoenix-otel" },
    { name = "authlib" },
    { name = "azure-identity" },
    { name = "beautifulsoup4" },
    { name = "boto3" },
@@ -1329,10 +1329,8 @@ dependencies = [
    { name = "json-repair" },
    { name = "langfuse" },
    { name = "langsmith" },
    { name = "mailchimp-transactional" },
    { name = "markdown" },
    { name = "numpy" },
    { name = "openai" },
    { name = "openpyxl" },
    { name = "opentelemetry-api" },
    { name = "opentelemetry-distro" },
@@ -1343,6 +1341,7 @@ dependencies = [
    { name = "opentelemetry-instrumentation" },
    { name = "opentelemetry-instrumentation-celery" },
    { name = "opentelemetry-instrumentation-flask" },
    { name = "opentelemetry-instrumentation-httpx" },
    { name = "opentelemetry-instrumentation-redis" },
    { name = "opentelemetry-instrumentation-requests" },
    { name = "opentelemetry-instrumentation-sqlalchemy" },
@@ -1354,7 +1353,6 @@ dependencies = [
    { name = "opik" },
    { name = "packaging" },
    { name = "pandas", extra = ["excel", "output-formatting", "performance"] },
    { name = "pandoc" },
    { name = "psycogreen" },
    { name = "psycopg2-binary" },
    { name = "pycryptodome" },
@@ -1493,7 +1491,6 @@ vdb = [

[package.metadata]
requires-dist = [
    { name = "arize-phoenix-otel", specifier = "~=0.9.2" },
    { name = "authlib", specifier = "==1.6.4" },
    { name = "azure-identity", specifier = "==1.16.1" },
    { name = "beautifulsoup4", specifier = "==4.12.2" },
    { name = "boto3", specifier = "==1.35.99" },
@@ -1526,10 +1523,8 @@ requires-dist = [
    { name = "json-repair", specifier = ">=0.41.1" },
    { name = "langfuse", specifier = "~=2.51.3" },
    { name = "langsmith", specifier = "~=0.1.77" },
    { name = "mailchimp-transactional", specifier = "~=1.0.50" },
    { name = "markdown", specifier = "~=3.5.1" },
    { name = "numpy", specifier = "~=1.26.4" },
    { name = "openai", specifier = "~=1.61.0" },
    { name = "openpyxl", specifier = "~=3.1.5" },
    { name = "opentelemetry-api", specifier = "==1.27.0" },
    { name = "opentelemetry-distro", specifier = "==0.48b0" },
@@ -1540,6 +1535,7 @@ requires-dist = [
    { name = "opentelemetry-instrumentation", specifier = "==0.48b0" },
    { name = "opentelemetry-instrumentation-celery", specifier = "==0.48b0" },
    { name = "opentelemetry-instrumentation-flask", specifier = "==0.48b0" },
    { name = "opentelemetry-instrumentation-httpx", specifier = "==0.48b0" },
    { name = "opentelemetry-instrumentation-redis", specifier = "==0.48b0" },
    { name = "opentelemetry-instrumentation-requests", specifier = "==0.48b0" },
    { name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.48b0" },
@@ -1551,7 +1547,6 @@ requires-dist = [
    { name = "opik", specifier = "~=1.7.25" },
    { name = "packaging", specifier = "~=23.2" },
    { name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" },
    { name = "pandoc", specifier = "~=2.4" },
    { name = "psycogreen", specifier = "~=1.0.2" },
    { name = "psycopg2-binary", specifier = "~=2.9.6" },
    { name = "pycryptodome", specifier = "==3.19.1" },
@@ -1647,10 +1642,10 @@ dev = [
    { name = "types-ujson", specifier = ">=5.10.0" },
]
storage = [
    { name = "azure-storage-blob", specifier = "==12.13.0" },
    { name = "azure-storage-blob", specifier = "==12.26.0" },
    { name = "bce-python-sdk", specifier = "~=0.9.23" },
    { name = "cos-python-sdk-v5", specifier = "==1.9.38" },
    { name = "esdk-obs-python", specifier = "==3.24.6.1" },
    { name = "esdk-obs-python", specifier = "==3.25.8" },
    { name = "google-cloud-storage", specifier = "==2.16.0" },
    { name = "opendal", specifier = "~=0.46.0" },
    { name = "oss2", specifier = "==2.18.5" },
@@ -1789,12 +1784,14 @@ wheels = [

[[package]]
name = "esdk-obs-python"
version = "3.24.6.1"
version = "3.25.8"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "crcmod" },
    { name = "pycryptodome" },
    { name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798 }
sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302, upload-time = "2025-09-01T11:35:20.432Z" }

[[package]]
name = "et-xmlfile"
@@ -3945,6 +3942,21 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588 },
]

[[package]]
name = "opentelemetry-instrumentation-httpx"
version = "0.48b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "opentelemetry-api" },
    { name = "opentelemetry-instrumentation" },
    { name = "opentelemetry-semantic-conventions" },
    { name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d3/d9/c65d818607c16d1b7ea8d2de6111c6cecadf8d2fd38c1885a72733a7c6d3/opentelemetry_instrumentation_httpx-0.48b0.tar.gz", hash = "sha256:ee977479e10398931921fb995ac27ccdeea2e14e392cb27ef012fc549089b60a", size = 16931, upload-time = "2024-08-28T21:28:03.794Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c2/fe/f2daa9d6d988c093b8c7b1d35df675761a8ece0b600b035dc04982746c9d/opentelemetry_instrumentation_httpx-0.48b0-py3-none-any.whl", hash = "sha256:d94f9d612c82d09fe22944d1904a30a464c19bea2ba76be656c99a28ad8be8e5", size = 13900, upload-time = "2024-08-28T21:27:01.566Z" },
]

[[package]]
name = "opentelemetry-instrumentation-redis"
version = "0.48b0"