Merge branch 'fix/chore-fix' into dev/plugin-deploy

Yeuoly committed 2024-11-25 17:19:51 +08:00
250 changed files with 7636 additions and 1975 deletions

View File

@ -4,7 +4,7 @@ import logging
import random
import secrets
import uuid
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta
from hashlib import sha256
from typing import Any, Optional
@ -115,15 +115,15 @@ class AccountService:
available_ta.current = True
db.session.commit()
if datetime.now(timezone.utc).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
account.last_active_at = datetime.now(timezone.utc).replace(tzinfo=None)
if datetime.now(UTC).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
account.last_active_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return account
@staticmethod
def get_account_jwt_token(account: Account) -> str:
exp_dt = datetime.now(timezone.utc) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
exp = int(exp_dt.timestamp())
payload = {
"user_id": account.id,
@ -160,7 +160,7 @@ class AccountService:
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
@ -253,7 +253,7 @@ class AccountService:
# If it exists, update the record
account_integrate.open_id = open_id
account_integrate.encrypted_token = "" # todo
account_integrate.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
account_integrate.updated_at = datetime.now(UTC).replace(tzinfo=None)
else:
# If it does not exist, create a new record
account_integrate = AccountIntegrate(
@ -288,7 +288,7 @@ class AccountService:
@staticmethod
def update_login_info(account: Account, *, ip_address: str) -> None:
"""Update last login time and ip"""
account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None)
account.last_login_at = datetime.now(UTC).replace(tzinfo=None)
account.last_login_ip = ip_address
db.session.add(account)
db.session.commit()
@ -765,7 +765,7 @@ class RegisterService:
)
account.last_login_ip = ip_address
account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)
@ -805,7 +805,7 @@ class RegisterService:
is_setup=is_setup,
)
account.status = AccountStatus.ACTIVE.value if not status else status.value
account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
if open_id is not None or provider is not None:
AccountService.link_account_integrate(provider, open_id, account)

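The only functional change in the hunks above is the spelling of the UTC constant: datetime.UTC, added in Python 3.11 as an alias for timezone.utc, replaces the longer form while keeping the existing "aware now, then strip tzinfo" storage pattern. A minimal sketch of that pattern, assuming Python 3.11+ and columns that hold naive UTC timestamps:

from datetime import UTC, datetime, timedelta, timezone

assert UTC is timezone.utc  # same singleton, shorter spelling (Python 3.11+)

# Aware "now" in UTC, stripped to a naive value for storage, as with last_active_at above.
now_naive_utc = datetime.now(UTC).replace(tzinfo=None)

# last_active_at is only refreshed when more than 10 minutes have passed.
last_active_at = now_naive_utc - timedelta(minutes=15)
if now_naive_utc - last_active_at > timedelta(minutes=10):
    last_active_at = now_naive_utc
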
View File

@ -429,7 +429,7 @@ class AppAnnotationService:
raise NotFound("App annotation not found")
annotation_setting.score_threshold = args["score_threshold"]
annotation_setting.updated_user_id = current_user.id
annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(annotation_setting)
db.session.commit()

View File

@ -0,0 +1,663 @@
import logging
import uuid
from enum import StrEnum
from typing import Optional
from uuid import uuid4
import yaml
from packaging import version
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import Session
from core.helper import ssrf_proxy
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import PluginDependency
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.knowledge_retrieval.entities import KnowledgeRetrievalNodeData
from core.workflow.nodes.llm.entities import LLMNodeData
from core.workflow.nodes.parameter_extractor.entities import ParameterExtractorNodeData
from core.workflow.nodes.question_classifier.entities import QuestionClassifierNodeData
from core.workflow.nodes.tool.entities import ToolNodeData
from events.app_event import app_model_config_was_updated, app_was_created
from extensions.ext_redis import redis_client
from factories import variable_factory
from models import Account, App, AppMode
from models.model import AppModelConfig
from models.workflow import Workflow
from services.plugin.dependencies_analysis import DependenciesAnalysisService
from services.workflow_service import WorkflowService
logger = logging.getLogger(__name__)
IMPORT_INFO_REDIS_KEY_PREFIX = "app_import_info:"
IMPORT_INFO_REDIS_EXPIRY = 2 * 60 * 60 # 2 hours
CURRENT_DSL_VERSION = "0.1.3"
DSL_MAX_SIZE = 10 * 1024 * 1024 # 10MB
class ImportMode(StrEnum):
YAML_CONTENT = "yaml-content"
YAML_URL = "yaml-url"
class ImportStatus(StrEnum):
COMPLETED = "completed"
COMPLETED_WITH_WARNINGS = "completed-with-warnings"
PENDING = "pending"
FAILED = "failed"
class Import(BaseModel):
id: str
status: ImportStatus
app_id: Optional[str] = None
current_dsl_version: str = CURRENT_DSL_VERSION
imported_dsl_version: str = ""
leaked_dependencies: list[PluginDependency] = Field(default_factory=list)
error: str = ""
def _check_version_compatibility(imported_version: str) -> ImportStatus:
"""Determine import status based on version comparison"""
try:
current_ver = version.parse(CURRENT_DSL_VERSION)
imported_ver = version.parse(imported_version)
except version.InvalidVersion:
return ImportStatus.FAILED
# Compare major version and minor version
if current_ver.major != imported_ver.major or current_ver.minor != imported_ver.minor:
return ImportStatus.PENDING
if current_ver.micro != imported_ver.micro:
return ImportStatus.COMPLETED_WITH_WARNINGS
return ImportStatus.COMPLETED
class PendingData(BaseModel):
import_mode: str
yaml_content: str
name: str | None
description: str | None
icon_type: str | None
icon: str | None
icon_background: str | None
app_id: str | None
class AppDslService:
def __init__(self, session: Session):
self._session = session
def import_app(
self,
*,
account: Account,
import_mode: str,
yaml_content: Optional[str] = None,
yaml_url: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
icon_type: Optional[str] = None,
icon: Optional[str] = None,
icon_background: Optional[str] = None,
app_id: Optional[str] = None,
) -> Import:
"""Import an app from YAML content or URL."""
import_id = str(uuid.uuid4())
# Validate import mode
try:
mode = ImportMode(import_mode)
except ValueError:
raise ValueError(f"Invalid import_mode: {import_mode}")
# Get YAML content
content = ""
if mode == ImportMode.YAML_URL:
if not yaml_url:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="yaml_url is required when import_mode is yaml-url",
)
try:
response = ssrf_proxy.get(yaml_url.strip(), follow_redirects=True, timeout=(10, 10))
response.raise_for_status()
content = response.content
if len(content) > DSL_MAX_SIZE:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="File size exceeds the limit of 10MB",
)
if not content:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="Empty content from url",
)
try:
content = content.decode("utf-8")
except UnicodeDecodeError as e:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error=f"Error decoding content: {e}",
)
except Exception as e:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error=f"Error fetching YAML from URL: {str(e)}",
)
elif mode == ImportMode.YAML_CONTENT:
if not yaml_content:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="yaml_content is required when import_mode is yaml-content",
)
content = yaml_content
# Process YAML content
try:
# Parse YAML to validate format
data = yaml.safe_load(content)
if not isinstance(data, dict):
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="Invalid YAML format: content must be a mapping",
)
# Validate and fix DSL version
if not data.get("version"):
data["version"] = "0.1.0"
if not data.get("kind") or data.get("kind") != "app":
data["kind"] = "app"
imported_version = data.get("version", "0.1.0")
status = _check_version_compatibility(imported_version)
# Extract app data
app_data = data.get("app")
if not app_data:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="Missing app data in YAML content",
)
# If app_id is provided, check if it exists
app = None
if app_id:
stmt = select(App).where(App.id == app_id, App.tenant_id == account.current_tenant_id)
app = self._session.scalar(stmt)
if not app:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="App not found",
)
if app.mode not in [AppMode.WORKFLOW.value, AppMode.ADVANCED_CHAT.value]:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="Only workflow or advanced chat apps can be overwritten",
)
# If major version mismatch, store import info in Redis
if status == ImportStatus.PENDING:
pending_data = PendingData(
import_mode=import_mode,
yaml_content=content,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
app_id=app_id,
)
redis_client.setex(
f"{IMPORT_INFO_REDIS_KEY_PREFIX}{import_id}",
IMPORT_INFO_REDIS_EXPIRY,
pending_data.model_dump_json(),
)
return Import(
id=import_id,
status=status,
app_id=app_id,
imported_dsl_version=imported_version,
)
try:
dependencies = self.get_leaked_dependencies(account.current_tenant_id, data.get("dependencies", []))
except Exception as e:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error=str(e),
)
if len(dependencies) > 0:
return Import(
id=import_id,
status=ImportStatus.PENDING,
app_id=app_id,
imported_dsl_version=imported_version,
leaked_dependencies=dependencies,
)
# Create or update app
app = self._create_or_update_app(
app=app,
data=data,
account=account,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
)
return Import(
id=import_id,
status=status,
app_id=app.id,
imported_dsl_version=imported_version,
)
except yaml.YAMLError as e:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error=f"Invalid YAML format: {str(e)}",
)
except Exception as e:
logger.exception("Failed to import app")
return Import(
id=import_id,
status=ImportStatus.FAILED,
error=str(e),
)
def confirm_import(self, *, import_id: str, account: Account) -> Import:
"""
Confirm an import that requires confirmation
"""
redis_key = f"{IMPORT_INFO_REDIS_KEY_PREFIX}{import_id}"
pending_data = redis_client.get(redis_key)
if not pending_data:
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="Import information expired or does not exist",
)
try:
if not isinstance(pending_data, str | bytes):
return Import(
id=import_id,
status=ImportStatus.FAILED,
error="Invalid import information",
)
pending_data = PendingData.model_validate_json(pending_data)
data = yaml.safe_load(pending_data.yaml_content)
app = None
if pending_data.app_id:
stmt = select(App).where(App.id == pending_data.app_id, App.tenant_id == account.current_tenant_id)
app = self._session.scalar(stmt)
# Create or update app
app = self._create_or_update_app(
app=app,
data=data,
account=account,
name=pending_data.name,
description=pending_data.description,
icon_type=pending_data.icon_type,
icon=pending_data.icon,
icon_background=pending_data.icon_background,
)
# Delete import info from Redis
redis_client.delete(redis_key)
return Import(
id=import_id,
status=ImportStatus.COMPLETED,
app_id=app.id,
current_dsl_version=CURRENT_DSL_VERSION,
imported_dsl_version=data.get("version", "0.1.0"),
)
except Exception as e:
logger.exception("Error confirming import")
return Import(
id=import_id,
status=ImportStatus.FAILED,
error=str(e),
)
def _create_or_update_app(
self,
*,
app: Optional[App],
data: dict,
account: Account,
name: Optional[str] = None,
description: Optional[str] = None,
icon_type: Optional[str] = None,
icon: Optional[str] = None,
icon_background: Optional[str] = None,
) -> App:
"""Create a new app or update an existing one."""
app_data = data.get("app", {})
app_mode = AppMode(app_data["mode"])
# Set icon type
icon_type_value = icon_type or app_data.get("icon_type")
if icon_type_value in ["emoji", "link"]:
icon_type = icon_type_value
else:
icon_type = "emoji"
icon = icon or str(app_data.get("icon", ""))
if app:
# Update existing app
app.name = name or app_data.get("name", app.name)
app.description = description or app_data.get("description", app.description)
app.icon_type = icon_type
app.icon = icon
app.icon_background = icon_background or app_data.get("icon_background", app.icon_background)
app.updated_by = account.id
else:
# Create new app
app = App()
app.id = str(uuid4())
app.tenant_id = account.current_tenant_id
app.mode = app_mode.value
app.name = name or app_data.get("name", "")
app.description = description or app_data.get("description", "")
app.icon_type = icon_type
app.icon = icon
app.icon_background = icon_background or app_data.get("icon_background", "#FFFFFF")
app.enable_site = True
app.enable_api = True
app.use_icon_as_answer_icon = app_data.get("use_icon_as_answer_icon", False)
app.created_by = account.id
app.updated_by = account.id
self._session.add(app)
self._session.commit()
app_was_created.send(app, account=account)
# Initialize app based on mode
if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
workflow_data = data.get("workflow")
if not workflow_data or not isinstance(workflow_data, dict):
raise ValueError("Missing workflow data for workflow/advanced chat app")
environment_variables_list = workflow_data.get("environment_variables", [])
environment_variables = [
variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list
]
conversation_variables_list = workflow_data.get("conversation_variables", [])
conversation_variables = [
variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list
]
workflow_service = WorkflowService()
current_draft_workflow = workflow_service.get_draft_workflow(app_model=app)
if current_draft_workflow:
unique_hash = current_draft_workflow.unique_hash
else:
unique_hash = None
workflow_service.sync_draft_workflow(
app_model=app,
graph=workflow_data.get("graph", {}),
features=workflow_data.get("features", {}),
unique_hash=unique_hash,
account=account,
environment_variables=environment_variables,
conversation_variables=conversation_variables,
)
elif app_mode in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.COMPLETION}:
# Initialize model config
model_config = data.get("model_config")
if not model_config or not isinstance(model_config, dict):
raise ValueError("Missing model_config for chat/agent-chat/completion app")
# Initialize or update model config
if not app.app_model_config:
app_model_config = AppModelConfig().from_model_config_dict(model_config)
app_model_config.id = str(uuid4())
app_model_config.app_id = app.id
app_model_config.created_by = account.id
app_model_config.updated_by = account.id
app.app_model_config_id = app_model_config.id
self._session.add(app_model_config)
app_model_config_was_updated.send(app, app_model_config=app_model_config)
else:
raise ValueError("Invalid app mode")
return app
@classmethod
def export_dsl(cls, app_model: App, include_secret: bool = False) -> str:
"""
Export app
:param app_model: App instance
:return:
"""
app_mode = AppMode.value_of(app_model.mode)
export_data = {
"version": CURRENT_DSL_VERSION,
"kind": "app",
"app": {
"name": app_model.name,
"mode": app_model.mode,
"icon": "🤖" if app_model.icon_type == "image" else app_model.icon,
"icon_background": "#FFEAD5" if app_model.icon_type == "image" else app_model.icon_background,
"description": app_model.description,
"use_icon_as_answer_icon": app_model.use_icon_as_answer_icon,
},
}
if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
cls._append_workflow_export_data(
export_data=export_data, app_model=app_model, include_secret=include_secret
)
else:
cls._append_model_config_export_data(export_data, app_model)
return yaml.dump(export_data, allow_unicode=True)
@classmethod
def _append_workflow_export_data(cls, *, export_data: dict, app_model: App, include_secret: bool) -> None:
"""
Append workflow export data
:param export_data: export data
:param app_model: App instance
"""
workflow_service = WorkflowService()
workflow = workflow_service.get_draft_workflow(app_model)
if not workflow:
raise ValueError("Missing draft workflow configuration, please check.")
export_data["workflow"] = workflow.to_dict(include_secret=include_secret)
dependencies = cls._extract_dependencies_from_workflow(workflow)
export_data["dependencies"] = [
jsonable_encoder(d.model_dump())
for d in DependenciesAnalysisService.generate_dependencies(
tenant_id=app_model.tenant_id, dependencies=dependencies
)
]
@classmethod
def _append_model_config_export_data(cls, export_data: dict, app_model: App) -> None:
"""
Append model config export data
:param export_data: export data
:param app_model: App instance
"""
app_model_config = app_model.app_model_config
if not app_model_config:
raise ValueError("Missing app configuration, please check.")
export_data["model_config"] = app_model_config.to_dict()
dependencies = cls._extract_dependencies_from_model_config(app_model_config)
export_data["dependencies"] = [
jsonable_encoder(d.model_dump())
for d in DependenciesAnalysisService.generate_dependencies(
tenant_id=app_model.tenant_id, dependencies=dependencies
)
]
@classmethod
def _extract_dependencies_from_workflow(cls, workflow: Workflow) -> list[str]:
"""
Extract dependencies from workflow
:param workflow: Workflow instance
:return: dependencies list format like ["langgenius/google"]
"""
graph = workflow.graph_dict
dependencies = []
for node in graph.get("nodes", []):
try:
typ = node.get("data", {}).get("type")
match typ:
case NodeType.TOOL.value:
tool_entity = ToolNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id),
)
case NodeType.LLM.value:
llm_entity = LLMNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider),
)
case NodeType.QUESTION_CLASSIFIER.value:
question_classifier_entity = QuestionClassifierNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
question_classifier_entity.model.provider
),
)
case NodeType.PARAMETER_EXTRACTOR.value:
parameter_extractor_entity = ParameterExtractorNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
parameter_extractor_entity.model.provider
),
)
case NodeType.KNOWLEDGE_RETRIEVAL.value:
knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"])
if knowledge_retrieval_entity.retrieval_mode == "multiple":
if knowledge_retrieval_entity.multiple_retrieval_config:
if (
knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
== "reranking_model"
):
if knowledge_retrieval_entity.multiple_retrieval_config.reranking_model:
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
knowledge_retrieval_entity.multiple_retrieval_config.reranking_model.provider
),
)
elif (
knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
== "weighted_score"
):
if knowledge_retrieval_entity.multiple_retrieval_config.weights:
vector_setting = (
knowledge_retrieval_entity.multiple_retrieval_config.weights.vector_setting
)
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
vector_setting.embedding_provider_name
),
)
elif knowledge_retrieval_entity.retrieval_mode == "single":
model_config = knowledge_retrieval_entity.single_retrieval_config
if model_config:
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
model_config.model.provider
),
)
case _:
# TODO: Handle default case or unknown node types
pass
except Exception as e:
logger.exception("Error extracting node dependency", exc_info=e)
return dependencies
@classmethod
def _extract_dependencies_from_model_config(cls, model_config: AppModelConfig) -> list[str]:
"""
Extract dependencies from model config
:param model_config: AppModelConfig instance
:return: dependencies list format like ["langgenius/google:1.0.0@abcdef1234567890"]
"""
dependencies = []
try:
# completion model
model_dict = model_config.model_dict
if model_dict:
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(model_dict.get("provider", ""))
)
# reranking model
dataset_configs = model_config.dataset_configs_dict
if dataset_configs:
for dataset_config in dataset_configs:
if dataset_config.get("reranking_model"):
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
dataset_config.get("reranking_model", {})
.get("reranking_provider_name", {})
.get("provider")
)
)
# tools
agent_configs = model_config.agent_mode_dict
if agent_configs:
for agent_config in agent_configs:
if agent_config.get("tools"):
for tool in agent_config.get("tools", []):
dependencies.append(
DependenciesAnalysisService.analyze_tool_dependency(tool.get("provider_id"))
)
except Exception as e:
logger.exception("Error extracting model config dependency", exc_info=e)
return dependencies
@classmethod
def get_leaked_dependencies(cls, tenant_id: str, dsl_dependencies: list[dict]) -> list[PluginDependency]:
"""
Returns the leaked dependencies in current workspace
"""
dependencies = [PluginDependency(**dep) for dep in dsl_dependencies]
if not dependencies:
return []
return DependenciesAnalysisService.get_leaked_dependencies(tenant_id=tenant_id, dependencies=dependencies)

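The new import flow classifies the DSL version before doing any work: an exact match imports cleanly, a micro-version drift imports with warnings, and a major/minor mismatch is parked as pending until the caller confirms it. A condensed sketch of that mapping (not the service code itself), assuming a recent packaging release where parse() raises InvalidVersion on garbage input and CURRENT_DSL_VERSION is "0.1.3":

from packaging import version

def classify(imported: str, current: str = "0.1.3") -> str:
    try:
        cur, imp = version.parse(current), version.parse(imported)
    except version.InvalidVersion:
        return "failed"
    if (cur.major, cur.minor) != (imp.major, imp.minor):
        return "pending"  # caller must go through confirm_import
    if cur.micro != imp.micro:
        return "completed-with-warnings"
    return "completed"

assert classify("0.1.3") == "completed"
assert classify("0.1.1") == "completed-with-warnings"
assert classify("0.2.0") == "pending"
assert classify("not a version") == "failed"

When the version check yields pending, import_app stashes the request as a PendingData JSON blob in Redis (setex, two-hour expiry) and confirm_import later replays it from that key; a pending result caused by leaked plugin dependencies is returned directly, with the offending dependencies listed on the Import object.
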
View File

@ -1,3 +0,0 @@
from .service import AppDslService
__all__ = ["AppDslService"]

View File

@ -1,34 +0,0 @@
class DSLVersionNotSupportedError(ValueError):
"""Raised when the imported DSL version is not supported by the current Dify version."""
class InvalidYAMLFormatError(ValueError):
"""Raised when the provided YAML format is invalid."""
class MissingAppDataError(ValueError):
"""Raised when the app data is missing in the provided DSL."""
class InvalidAppModeError(ValueError):
"""Raised when the app mode is invalid."""
class MissingWorkflowDataError(ValueError):
"""Raised when the workflow data is missing in the provided DSL."""
class MissingModelConfigError(ValueError):
"""Raised when the model config data is missing in the provided DSL."""
class FileSizeLimitExceededError(ValueError):
"""Raised when the file size exceeds the allowed limit."""
class EmptyContentError(ValueError):
"""Raised when the content fetched from the URL is empty."""
class ContentDecodingError(ValueError):
"""Raised when there is an error decoding the content."""

View File

@ -1,666 +0,0 @@
import logging
from collections.abc import Mapping
from typing import Any
import yaml
from packaging import version
from core.helper import ssrf_proxy
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import PluginDependency
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.knowledge_retrieval.entities import KnowledgeRetrievalNodeData
from core.workflow.nodes.llm.entities import LLMNodeData
from core.workflow.nodes.parameter_extractor.entities import ParameterExtractorNodeData
from core.workflow.nodes.question_classifier.entities import QuestionClassifierNodeData
from core.workflow.nodes.tool.entities import ToolNodeData
from events.app_event import app_model_config_was_updated, app_was_created
from extensions.ext_database import db
from factories import variable_factory
from models.account import Account
from models.model import App, AppMode, AppModelConfig
from models.workflow import Workflow
from services.plugin.dependencies_analysis import DependenciesAnalysisService
from services.workflow_service import WorkflowService
from .exc import (
ContentDecodingError,
EmptyContentError,
FileSizeLimitExceededError,
InvalidAppModeError,
InvalidYAMLFormatError,
MissingAppDataError,
MissingModelConfigError,
MissingWorkflowDataError,
)
logger = logging.getLogger(__name__)
current_dsl_version = "0.1.3"
dsl_max_size = 10 * 1024 * 1024 # 10MB
class AppDslService:
@classmethod
def import_and_create_new_app_from_url(cls, tenant_id: str, url: str, args: dict, account: Account) -> App:
"""
Import app dsl from url and create new app
:param tenant_id: tenant id
:param url: import url
:param args: request args
:param account: Account instance
"""
response = ssrf_proxy.get(url.strip(), follow_redirects=True, timeout=(10, 10))
response.raise_for_status()
content = response.content
if len(content) > dsl_max_size:
raise FileSizeLimitExceededError("File size exceeds the limit of 10MB")
if not content:
raise EmptyContentError("Empty content from url")
try:
data = content.decode("utf-8")
except UnicodeDecodeError as e:
raise ContentDecodingError(f"Error decoding content: {e}")
return cls.import_and_create_new_app(tenant_id, data, args, account)
@classmethod
def check_dependencies_from_url(cls, tenant_id: str, url: str, account: Account) -> list[PluginDependency]:
"""
Check dependencies from url
"""
response = ssrf_proxy.get(url.strip(), follow_redirects=True, timeout=(10, 10))
response.raise_for_status()
content = response.content
if len(content) > dsl_max_size:
raise FileSizeLimitExceededError("File size exceeds the limit of 10MB")
try:
data = content.decode("utf-8")
except UnicodeDecodeError as e:
raise ContentDecodingError(f"Error decoding content: {e}")
return cls.check_dependencies(tenant_id, data, account)
@classmethod
def check_dependencies(cls, tenant_id: str, data: str, account: Account) -> list[PluginDependency]:
"""
Returns the leaked dependencies in current workspace
"""
try:
import_data = yaml.safe_load(data) or {}
except yaml.YAMLError:
raise InvalidYAMLFormatError("Invalid YAML format in data argument.")
dependencies = [PluginDependency(**dep) for dep in import_data.get("dependencies", [])]
if not dependencies:
return []
return DependenciesAnalysisService.check_dependencies(tenant_id=tenant_id, dependencies=dependencies)
@classmethod
def import_and_create_new_app(cls, tenant_id: str, data: str, args: dict, account: Account) -> App:
"""
Import app dsl and create new app
:param tenant_id: tenant id
:param data: import data
:param args: request args
:param account: Account instance
"""
try:
import_data = yaml.safe_load(data)
except yaml.YAMLError:
raise InvalidYAMLFormatError("Invalid YAML format in data argument.")
# check or repair dsl version
import_data = _check_or_fix_dsl(import_data)
app_data = import_data.get("app")
if not app_data:
raise MissingAppDataError("Missing app in data argument")
# get app basic info
name = args.get("name") or app_data.get("name")
description = args.get("description") or app_data.get("description", "")
icon_type = args.get("icon_type") or app_data.get("icon_type")
icon = args.get("icon") or app_data.get("icon")
icon_background = args.get("icon_background") or app_data.get("icon_background")
use_icon_as_answer_icon = app_data.get("use_icon_as_answer_icon", False)
# import dsl and create app
app_mode = AppMode.value_of(app_data.get("mode"))
if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
workflow_data = import_data.get("workflow")
if not workflow_data or not isinstance(workflow_data, dict):
raise MissingWorkflowDataError(
"Missing workflow in data argument when app mode is advanced-chat or workflow"
)
app = cls._import_and_create_new_workflow_based_app(
tenant_id=tenant_id,
app_mode=app_mode,
workflow_data=workflow_data,
account=account,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
use_icon_as_answer_icon=use_icon_as_answer_icon,
)
elif app_mode in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.COMPLETION}:
model_config = import_data.get("model_config")
if not model_config or not isinstance(model_config, dict):
raise MissingModelConfigError(
"Missing model_config in data argument when app mode is chat, agent-chat or completion"
)
app = cls._import_and_create_new_model_config_based_app(
tenant_id=tenant_id,
app_mode=app_mode,
model_config_data=model_config,
account=account,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
use_icon_as_answer_icon=use_icon_as_answer_icon,
)
else:
raise InvalidAppModeError("Invalid app mode")
return app
@classmethod
def import_and_overwrite_workflow(cls, app_model: App, data: str, account: Account) -> Workflow:
"""
Import app dsl and overwrite workflow
:param app_model: App instance
:param data: import data
:param account: Account instance
"""
try:
import_data = yaml.safe_load(data)
except yaml.YAMLError:
raise InvalidYAMLFormatError("Invalid YAML format in data argument.")
# check or repair dsl version
import_data = _check_or_fix_dsl(import_data)
app_data = import_data.get("app")
if not app_data:
raise MissingAppDataError("Missing app in data argument")
# import dsl and overwrite app
app_mode = AppMode.value_of(app_data.get("mode"))
if app_mode not in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
raise InvalidAppModeError("Only support import workflow in advanced-chat or workflow app.")
if app_data.get("mode") != app_model.mode:
raise ValueError(f"App mode {app_data.get('mode')} is not matched with current app mode {app_mode.value}")
workflow_data = import_data.get("workflow")
if not workflow_data or not isinstance(workflow_data, dict):
raise MissingWorkflowDataError(
"Missing workflow in data argument when app mode is advanced-chat or workflow"
)
return cls._import_and_overwrite_workflow_based_app(
app_model=app_model,
workflow_data=workflow_data,
account=account,
)
@classmethod
def export_dsl(cls, app_model: App, include_secret: bool = False) -> str:
"""
Export app
:param app_model: App instance
:return:
"""
app_mode = AppMode.value_of(app_model.mode)
export_data = {
"version": current_dsl_version,
"kind": "app",
"app": {
"name": app_model.name,
"mode": app_model.mode,
"icon": "🤖" if app_model.icon_type == "image" else app_model.icon,
"icon_background": "#FFEAD5" if app_model.icon_type == "image" else app_model.icon_background,
"description": app_model.description,
"use_icon_as_answer_icon": app_model.use_icon_as_answer_icon,
},
}
if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
cls._append_workflow_export_data(
export_data=export_data, app_model=app_model, include_secret=include_secret
)
else:
cls._append_model_config_export_data(export_data, app_model)
return yaml.dump(export_data, allow_unicode=True)
@classmethod
def _import_and_create_new_workflow_based_app(
cls,
tenant_id: str,
app_mode: AppMode,
workflow_data: Mapping[str, Any],
account: Account,
name: str,
description: str,
icon_type: str,
icon: str,
icon_background: str,
use_icon_as_answer_icon: bool,
) -> App:
"""
Import app dsl and create new workflow based app
:param tenant_id: tenant id
:param app_mode: app mode
:param workflow_data: workflow data
:param account: Account instance
:param name: app name
:param description: app description
:param icon_type: app icon type, "emoji" or "image"
:param icon: app icon
:param icon_background: app icon background
:param use_icon_as_answer_icon: use app icon as answer icon
"""
if not workflow_data:
raise MissingWorkflowDataError(
"Missing workflow in data argument when app mode is advanced-chat or workflow"
)
app = cls._create_app(
tenant_id=tenant_id,
app_mode=app_mode,
account=account,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
use_icon_as_answer_icon=use_icon_as_answer_icon,
)
# init draft workflow
environment_variables_list = workflow_data.get("environment_variables") or []
environment_variables = [
variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list
]
conversation_variables_list = workflow_data.get("conversation_variables") or []
conversation_variables = [
variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list
]
workflow_service = WorkflowService()
draft_workflow = workflow_service.sync_draft_workflow(
app_model=app,
graph=workflow_data.get("graph", {}),
features=workflow_data.get("features", {}),
unique_hash=None,
account=account,
environment_variables=environment_variables,
conversation_variables=conversation_variables,
)
workflow_service.publish_workflow(app_model=app, account=account, draft_workflow=draft_workflow)
return app
@classmethod
def _import_and_overwrite_workflow_based_app(
cls, app_model: App, workflow_data: Mapping[str, Any], account: Account
) -> Workflow:
"""
Import app dsl and overwrite workflow based app
:param app_model: App instance
:param workflow_data: workflow data
:param account: Account instance
"""
if not workflow_data:
raise MissingWorkflowDataError(
"Missing workflow in data argument when app mode is advanced-chat or workflow"
)
# fetch draft workflow by app_model
workflow_service = WorkflowService()
current_draft_workflow = workflow_service.get_draft_workflow(app_model=app_model)
if current_draft_workflow:
unique_hash = current_draft_workflow.unique_hash
else:
unique_hash = None
# sync draft workflow
environment_variables_list = workflow_data.get("environment_variables") or []
environment_variables = [
variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list
]
conversation_variables_list = workflow_data.get("conversation_variables") or []
conversation_variables = [
variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list
]
draft_workflow = workflow_service.sync_draft_workflow(
app_model=app_model,
graph=workflow_data.get("graph", {}),
features=workflow_data.get("features", {}),
unique_hash=unique_hash,
account=account,
environment_variables=environment_variables,
conversation_variables=conversation_variables,
)
return draft_workflow
@classmethod
def _import_and_create_new_model_config_based_app(
cls,
tenant_id: str,
app_mode: AppMode,
model_config_data: Mapping[str, Any],
account: Account,
name: str,
description: str,
icon_type: str,
icon: str,
icon_background: str,
use_icon_as_answer_icon: bool,
) -> App:
"""
Import app dsl and create new model config based app
:param tenant_id: tenant id
:param app_mode: app mode
:param model_config_data: model config data
:param account: Account instance
:param name: app name
:param description: app description
:param icon: app icon
:param icon_background: app icon background
"""
if not model_config_data:
raise MissingModelConfigError(
"Missing model_config in data argument when app mode is chat, agent-chat or completion"
)
app = cls._create_app(
tenant_id=tenant_id,
app_mode=app_mode,
account=account,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
use_icon_as_answer_icon=use_icon_as_answer_icon,
)
app_model_config = AppModelConfig()
app_model_config = app_model_config.from_model_config_dict(model_config_data)
app_model_config.app_id = app.id
app_model_config.created_by = account.id
app_model_config.updated_by = account.id
db.session.add(app_model_config)
db.session.commit()
app.app_model_config_id = app_model_config.id
app_model_config_was_updated.send(app, app_model_config=app_model_config)
return app
@classmethod
def _create_app(
cls,
tenant_id: str,
app_mode: AppMode,
account: Account,
name: str,
description: str,
icon_type: str,
icon: str,
icon_background: str,
use_icon_as_answer_icon: bool,
) -> App:
"""
Create new app
:param tenant_id: tenant id
:param app_mode: app mode
:param account: Account instance
:param name: app name
:param description: app description
:param icon_type: app icon type, "emoji" or "image"
:param icon: app icon
:param icon_background: app icon background
:param use_icon_as_answer_icon: use app icon as answer icon
"""
app = App(
tenant_id=tenant_id,
mode=app_mode.value,
name=name,
description=description,
icon_type=icon_type,
icon=icon,
icon_background=icon_background,
enable_site=True,
enable_api=True,
use_icon_as_answer_icon=use_icon_as_answer_icon,
created_by=account.id,
updated_by=account.id,
)
db.session.add(app)
db.session.commit()
app_was_created.send(app, account=account)
return app
@classmethod
def _append_workflow_export_data(cls, *, export_data: dict, app_model: App, include_secret: bool) -> None:
"""
Append workflow export data
:param export_data: export data
:param app_model: App instance
"""
workflow_service = WorkflowService()
workflow = workflow_service.get_draft_workflow(app_model)
if not workflow:
raise ValueError("Missing draft workflow configuration, please check.")
export_data["workflow"] = workflow.to_dict(include_secret=include_secret)
dependencies = cls._extract_dependencies_from_workflow(workflow)
export_data["dependencies"] = [
jsonable_encoder(d.model_dump())
for d in DependenciesAnalysisService.generate_dependencies(
tenant_id=app_model.tenant_id, dependencies=dependencies
)
]
@classmethod
def _append_model_config_export_data(cls, export_data: dict, app_model: App) -> None:
"""
Append model config export data
:param export_data: export data
:param app_model: App instance
"""
app_model_config = app_model.app_model_config
if not app_model_config:
raise ValueError("Missing app configuration, please check.")
export_data["model_config"] = app_model_config.to_dict()
dependencies = cls._extract_dependencies_from_model_config(app_model_config)
export_data["dependencies"] = [
jsonable_encoder(d.model_dump())
for d in DependenciesAnalysisService.generate_dependencies(
tenant_id=app_model.tenant_id, dependencies=dependencies
)
]
@classmethod
def _extract_dependencies_from_workflow(cls, workflow: Workflow) -> list[str]:
"""
Extract dependencies from workflow
:param workflow: Workflow instance
:return: dependencies list format like ["langgenius/google"]
"""
graph = workflow.graph_dict
dependencies = []
for node in graph.get("nodes", []):
try:
typ = node.get("data", {}).get("type")
match typ:
case NodeType.TOOL.value:
tool_entity = ToolNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_tool_dependency(tool_entity.provider_id),
)
case NodeType.LLM.value:
llm_entity = LLMNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(llm_entity.model.provider),
)
case NodeType.QUESTION_CLASSIFIER.value:
question_classifier_entity = QuestionClassifierNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
question_classifier_entity.model.provider
),
)
case NodeType.PARAMETER_EXTRACTOR.value:
parameter_extractor_entity = ParameterExtractorNodeData(**node["data"])
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
parameter_extractor_entity.model.provider
),
)
case NodeType.KNOWLEDGE_RETRIEVAL.value:
knowledge_retrieval_entity = KnowledgeRetrievalNodeData(**node["data"])
if knowledge_retrieval_entity.retrieval_mode == "multiple":
if knowledge_retrieval_entity.multiple_retrieval_config:
if (
knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
== "reranking_model"
):
if knowledge_retrieval_entity.multiple_retrieval_config.reranking_model:
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
knowledge_retrieval_entity.multiple_retrieval_config.reranking_model.provider
),
)
elif (
knowledge_retrieval_entity.multiple_retrieval_config.reranking_mode
== "weighted_score"
):
if knowledge_retrieval_entity.multiple_retrieval_config.weights:
vector_setting = (
knowledge_retrieval_entity.multiple_retrieval_config.weights.vector_setting
)
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
vector_setting.embedding_provider_name
),
)
elif knowledge_retrieval_entity.retrieval_mode == "single":
model_config = knowledge_retrieval_entity.single_retrieval_config
if model_config:
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
model_config.model.provider
),
)
case _:
# Handle default case or unknown node types
pass
except Exception as e:
logger.exception("Error extracting node dependency", exc_info=e)
return dependencies
@classmethod
def _extract_dependencies_from_model_config(cls, model_config: AppModelConfig) -> list[str]:
"""
Extract dependencies from model config
:param model_config: AppModelConfig instance
:return: dependencies list format like ["langgenius/google:1.0.0@abcdef1234567890"]
"""
dependencies = []
try:
# completion model
model_dict = model_config.model_dict
if model_dict:
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(model_dict.get("provider"))
)
# reranking model
dataset_configs = model_config.dataset_configs_dict
if dataset_configs:
for dataset_config in dataset_configs:
if dataset_config.get("reranking_model"):
dependencies.append(
DependenciesAnalysisService.analyze_model_provider_dependency(
dataset_config.get("reranking_model", {})
.get("reranking_provider_name", {})
.get("provider")
)
)
# tools
agent_configs = model_config.agent_mode_dict
if agent_configs:
for agent_config in agent_configs:
if agent_config.get("tools"):
for tool in agent_config.get("tools", []):
dependencies.append(
DependenciesAnalysisService.analyze_tool_dependency(tool.get("provider_id"))
)
except Exception as e:
logger.exception("Error extracting model config dependency", exc_info=e)
return dependencies
def _check_or_fix_dsl(import_data: dict[str, Any]) -> Mapping[str, Any]:
"""
Check or fix dsl
:param import_data: import data
:raises DSLVersionNotSupportedError: if the imported DSL version is newer than the current version
"""
if not import_data.get("version"):
import_data["version"] = "0.1.0"
if not import_data.get("kind") or import_data.get("kind") != "app":
import_data["kind"] = "app"
imported_version = import_data.get("version")
if imported_version != current_dsl_version:
if imported_version and version.parse(imported_version) > version.parse(current_dsl_version):
errmsg = (
f"The imported DSL version {imported_version} is newer than "
f"the current supported version {current_dsl_version}. "
f"Please upgrade your Dify instance to import this configuration."
)
logger.warning(errmsg)
# raise DSLVersionNotSupportedError(errmsg)
else:
logger.warning(
f"DSL version {imported_version} is older than "
f"the current version {current_dsl_version}. "
f"This may cause compatibility issues."
)
return import_data

View File

@ -1,6 +1,6 @@
import json
import logging
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import cast
from flask_login import current_user
@ -155,10 +155,7 @@ class AppService:
"""
# get original app model config
if app.mode == AppMode.AGENT_CHAT.value or app.is_agent:
model_config: AppModelConfig | None = app.app_model_config
if not model_config:
return app
model_config = app.app_model_config
agent_mode = model_config.agent_mode_dict
# decrypt agent tool parameters if it's secret-input
for tool in agent_mode.get("tools") or []:
@ -226,7 +223,7 @@ class AppService:
app.icon_background = args.get("icon_background")
app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
app.updated_by = current_user.id
app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
if app.max_active_requests is not None:
@ -243,7 +240,7 @@ class AppService:
"""
app.name = name
app.updated_by = current_user.id
app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return app
@ -259,7 +256,7 @@ class AppService:
app.icon = icon
app.icon_background = icon_background
app.updated_by = current_user.id
app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return app
@ -276,7 +273,7 @@ class AppService:
app.enable_site = enable_site
app.updated_by = current_user.id
app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return app
@ -293,7 +290,7 @@ class AppService:
app.enable_api = enable_api
app.updated_by = current_user.id
app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return app

View File

@ -1,6 +1,6 @@
from enum import Enum
from enum import StrEnum
class AuthType(str, Enum):
class AuthType(StrEnum):
FIRECRAWL = "firecrawl"
JINA = "jinareader"

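Several enums in this commit (AuthType here, LicenseStatus and RecommendAppType below) move from the class X(str, Enum) mixin to enum.StrEnum, which is available from Python 3.11. A small sketch of the practical difference, using hypothetical stand-in classes under that assumption:

from enum import Enum, StrEnum

class OldAuthType(str, Enum):   # stand-in for the previous definition
    FIRECRAWL = "firecrawl"

class NewAuthType(StrEnum):     # stand-in for the new definition
    FIRECRAWL = "firecrawl"

# Both compare equal to the raw string value...
assert OldAuthType.FIRECRAWL == "firecrawl"
assert NewAuthType.FIRECRAWL == "firecrawl"

# ...but only the StrEnum member stringifies to its value, which matters anywhere
# the member goes through str(), e.g. log messages and string templates.
assert str(OldAuthType.FIRECRAWL) == "OldAuthType.FIRECRAWL"
assert str(NewAuthType.FIRECRAWL) == "firecrawl"
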
View File

@ -1,4 +1,4 @@
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Optional, Union
from sqlalchemy import asc, desc, or_
@ -104,7 +104,7 @@ class ConversationService:
return cls.auto_generate_name(app_model, conversation)
else:
conversation.name = name
conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return conversation

View File

@ -600,7 +600,7 @@ class DocumentService:
# update document to be paused
document.is_paused = True
document.paused_by = current_user.id
document.paused_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
document.paused_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
@ -1072,7 +1072,7 @@ class DocumentService:
document.parsing_completed_at = None
document.cleaning_completed_at = None
document.splitting_completed_at = None
document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.created_from = created_from
document.doc_form = document_data["doc_form"]
db.session.add(document)
@ -1409,8 +1409,8 @@ class SegmentService:
word_count=len(content),
tokens=tokens,
status="completed",
indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
@ -1429,7 +1429,7 @@ class SegmentService:
except Exception as e:
logging.exception("create segment index failed")
segment_document.enabled = False
segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
@ -1481,8 +1481,8 @@ class SegmentService:
word_count=len(content),
tokens=tokens,
status="completed",
indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
created_by=current_user.id,
)
if document.doc_form == "qa_model":
@ -1508,7 +1508,7 @@ class SegmentService:
logging.exception("create segment index failed")
for segment_document in segment_data_list:
segment_document.enabled = False
segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment_document.status = "error"
segment_document.error = str(e)
db.session.commit()
@ -1526,7 +1526,7 @@ class SegmentService:
if segment.enabled != action:
if not action:
segment.enabled = action
segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.disabled_by = current_user.id
db.session.add(segment)
db.session.commit()
@ -1585,10 +1585,10 @@ class SegmentService:
segment.word_count = len(content)
segment.tokens = tokens
segment.status = "completed"
segment.indexing_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment.completed_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment.indexing_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.completed_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.updated_by = current_user.id
segment.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.enabled = True
segment.disabled_at = None
segment.disabled_by = None
@ -1608,7 +1608,7 @@ class SegmentService:
except Exception as e:
logging.exception("update segment index failed")
segment.enabled = False
segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.status = "error"
segment.error = str(e)
db.session.commit()

View File

@ -1,6 +1,6 @@
import json
from copy import deepcopy
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Any, Optional, Union
import httpx
@ -99,7 +99,7 @@ class ExternalDatasetService:
external_knowledge_api.description = args.get("description", "")
external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False)
external_knowledge_api.updated_by = user_id
external_knowledge_api.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
external_knowledge_api.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
return external_knowledge_api

View File

@ -1,4 +1,4 @@
from enum import Enum
from enum import StrEnum
from pydantic import BaseModel, ConfigDict
@ -22,7 +22,7 @@ class LimitationModel(BaseModel):
limit: int = 0
class LicenseStatus(str, Enum):
class LicenseStatus(StrEnum):
NONE = "none"
INACTIVE = "inactive"
ACTIVE = "active"

View File

@ -77,7 +77,7 @@ class FileService:
mime_type=mimetype,
created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
created_by=user.id,
created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
used=False,
hash=hashlib.sha3_256(content).hexdigest(),
source_url=source_url,
@ -123,10 +123,10 @@ class FileService:
mime_type="text/plain",
created_by=current_user.id,
created_by_role=CreatedByRole.ACCOUNT,
created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
used=True,
used_by=current_user.id,
used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
)
db.session.add(upload_file)

View File

@ -371,7 +371,7 @@ class ModelLoadBalancingService:
load_balancing_config.name = name
load_balancing_config.enabled = enabled
load_balancing_config.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
load_balancing_config.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.commit()
self._clear_credentials_cache(tenant_id, config_id)

View File

@ -30,7 +30,7 @@ class DependenciesAnalysisService:
raise e
@classmethod
def check_dependencies(cls, tenant_id: str, dependencies: list[PluginDependency]) -> list[PluginDependency]:
def get_leaked_dependencies(cls, tenant_id: str, dependencies: list[PluginDependency]) -> list[PluginDependency]:
"""
Check dependencies, returns the leaked dependencies in current workspace
"""

View File

@ -105,6 +105,16 @@ class PluginService:
manager = PluginInstallationManager()
return manager.delete_plugin_installation_task(tenant_id, task_id)
@staticmethod
def delete_all_install_task_items(
tenant_id: str,
) -> bool:
"""
Delete all plugin installation task items
"""
manager = PluginInstallationManager()
return manager.delete_all_plugin_installation_task_items(tenant_id)
@staticmethod
def delete_install_task_item(tenant_id: str, task_id: str, identifier: str) -> bool:
"""

View File

@ -1,7 +1,7 @@
from enum import Enum
from enum import StrEnum
class RecommendAppType(str, Enum):
class RecommendAppType(StrEnum):
REMOTE = "remote"
BUILDIN = "builtin"
DATABASE = "db"

View File

@ -1,7 +1,7 @@
import json
import time
from collections.abc import Callable, Generator, Sequence
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Any, Optional
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
@ -118,7 +118,7 @@ class WorkflowService:
workflow.graph = json.dumps(graph)
workflow.features = json.dumps(features)
workflow.updated_by = account.id
workflow.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
workflow.environment_variables = environment_variables
workflow.conversation_variables = conversation_variables
@ -151,7 +151,7 @@ class WorkflowService:
tenant_id=app_model.tenant_id,
app_id=app_model.id,
type=draft_workflow.type,
version=str(datetime.now(timezone.utc).replace(tzinfo=None)),
version=str(datetime.now(UTC).replace(tzinfo=None)),
graph=draft_workflow.graph,
features=draft_workflow.features,
created_by=account.id,
@ -312,18 +312,22 @@ class WorkflowService:
workflow_node_execution.title = node_instance.node_data.title
workflow_node_execution.elapsed_time = time.perf_counter() - start_at
workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value
workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
if run_succeeded and node_run_result:
# create workflow node execution
workflow_node_execution.inputs = json.dumps(node_run_result.inputs) if node_run_result.inputs else None
workflow_node_execution.process_data = (
json.dumps(node_run_result.process_data) if node_run_result.process_data else None
)
workflow_node_execution.outputs = (
json.dumps(jsonable_encoder(node_run_result.outputs)) if node_run_result.outputs else None
inputs = WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None
process_data = (
WorkflowEntry.handle_special_values(node_run_result.process_data)
if node_run_result.process_data
else None
)
outputs = WorkflowEntry.handle_special_values(node_run_result.outputs) if node_run_result.outputs else None
workflow_node_execution.inputs = json.dumps(inputs)
workflow_node_execution.process_data = json.dumps(process_data)
workflow_node_execution.outputs = json.dumps(outputs)
workflow_node_execution.execution_metadata = (
json.dumps(jsonable_encoder(node_run_result.metadata)) if node_run_result.metadata else None
)
@ -355,10 +359,10 @@ class WorkflowService:
new_app = workflow_converter.convert_to_workflow(
app_model=app_model,
account=account,
name=args.get("name", ""),
icon_type=args.get("icon_type", ""),
icon=args.get("icon", ""),
icon_background=args.get("icon_background", ""),
name=args.get("name", "Default Name"),
icon_type=args.get("icon_type", "emoji"),
icon=args.get("icon", "🤖"),
icon_background=args.get("icon_background", "#FFEAD5"),
)
return new_app