Mirror of https://github.com/langgenius/dify.git (synced 2026-01-23 13:35:35 +08:00)

Compare commits: test/perfo... → feat/tool-
52 commits (SHA1 only; author and date columns were not captured):
c238b88641, 5db388716b, f9f3c207f4, 2bafcd596f, d162905bb5, fd2651f5aa, 9cdbf30238, 6a085fab26, 22297d0326, 37be099442, f68201af0b, 06802afc94, 458e44133e, f9c4897ff3, 6cb4a6f692, 7de3436e6b, 8fc5ccab35, 5090f63df5, 31e1261ae2, ace6e11a6f, 7ba09dfa06, ab6ae1f209, 0532135a9c, adc39f7b0d, fb9e4a4227, 545c21b196, f3bbab0eed, f35b8d6245, ef330fec2c, 0dc5bfb2c7, eaefa1b7e6, 9f053f3bbc, 26b46b88c9, b316867bab, 988a76066d, 6ef1e017df, 7951a1c4df, daec82bd44, 8a954c0b19, f4f6e41074, ba843c2691, 6c9e99b0c6, ce4cc54cc9, 1a2dfd950e, a58e99c671, 8bd05aee4b, fcfaa7ce13, 7979e05ade, 5e7c5863ef, 7f292dc261, b3a8dbe2f5, 12c20ec7f6

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 16 changed lines)
@@ -8,13 +8,13 @@ body:
        label: Self Checks
        description: "To make sure we get to you in time, please check the following :)"
        options:
          - label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
            required: true
          - label: This is only for bug report, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
            required: true
          - label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
            required: true
-         - label: I confirm that I am using English to submit this report, otherwise it will be closed.
+         - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
            required: true
          - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
            required: true
          - label: "Please do not modify this template :) and fill in all the required fields."
            required: true
@@ -42,22 +42,20 @@ body:
      attributes:
        label: Steps to reproduce
        description: We highly suggest including screenshots and a bug report log. Please use the right markdown syntax for code blocks.
-       placeholder: Having detailed steps helps us reproduce the bug. If you have logs, please use fenced code blocks (triple backticks ```) to format them.
+       placeholder: Having detailed steps helps us reproduce the bug.
      validations:
        required: true

    - type: textarea
      attributes:
        label: ✔️ Expected Behavior
        description: Describe what you expected to happen.
-       placeholder: What were you expecting? Please do not copy and paste the steps to reproduce here.
+       placeholder: What were you expecting?
      validations:
-       required: true
+       required: false

    - type: textarea
      attributes:
        label: ❌ Actual Behavior
        description: Describe what actually happened.
-       placeholder: What happened instead? Please do not copy and paste the steps to reproduce here.
+       placeholder: What happened instead?
      validations:
        required: false

.github/ISSUE_TEMPLATE/config.yml (vendored, 8 changed lines)
@@ -1,11 +1,5 @@
  blank_issues_enabled: false
  contact_links:
-   - name: "\U0001F4A1 Model Providers & Plugins"
-     url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
-     about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details.
-   - name: "\U0001F4AC Documentation Issues"
-     url: "https://github.com/langgenius/dify-docs/issues/new"
-     about: Report issues with the documentation, such as typos, outdated information, or missing content. Please provide the specific section and details of the issue.
    - name: "\U0001F4E7 Discussions"
      url: https://github.com/langgenius/dify/discussions/categories/general
-     about: General discussions and seek help from the community
+     about: General discussions and request help from the community

.github/ISSUE_TEMPLATE/document_issue.yml (vendored, new file, 24 lines)
@@ -0,0 +1,24 @@
+ name: "📚 Documentation Issue"
+ description: Report issues in our documentation
+ labels:
+   - documentation
+ body:
+   - type: checkboxes
+     attributes:
+       label: Self Checks
+       description: "To make sure we get to you in time, please check the following :)"
+       options:
+         - label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
+           required: true
+         - label: I confirm that I am using English to submit report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
+           required: true
+         - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
+           required: true
+         - label: "Please do not modify this template :) and fill in all the required fields."
+           required: true
+   - type: textarea
+     attributes:
+       label: Provide a description of requested docs changes
+       placeholder: Briefly describe which document needs to be corrected and why.
+     validations:
+       required: true

.github/ISSUE_TEMPLATE/feature_request.yml (vendored, 6 changed lines)
@@ -8,11 +8,11 @@ body:
        label: Self Checks
        description: "To make sure we get to you in time, please check the following :)"
        options:
          - label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
            required: true
          - label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
            required: true
-         - label: I confirm that I am using English to submit this report, otherwise it will be closed.
+         - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
            required: true
          - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
            required: true
          - label: "Please do not modify this template :) and fill in all the required fields."
            required: true

.github/ISSUE_TEMPLATE/translation_issue.yml (vendored, new file, 55 lines)
@@ -0,0 +1,55 @@
+ name: "🌐 Localization/Translation issue"
+ description: Report incorrect translations. [please use English :)]
+ labels:
+   - translation
+ body:
+   - type: checkboxes
+     attributes:
+       label: Self Checks
+       description: "To make sure we get to you in time, please check the following :)"
+       options:
+         - label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
+           required: true
+         - label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
+           required: true
+         - label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue,否则会被关闭。谢谢!:)"
+           required: true
+         - label: "Please do not modify this template :) and fill in all the required fields."
+           required: true
+   - type: input
+     attributes:
+       label: Dify version
+       description: Hover over system tray icon or look at Settings
+     validations:
+       required: true
+   - type: input
+     attributes:
+       label: Utility with translation issue
+       placeholder: Some area
+       description: Please input here the utility with the translation issue
+     validations:
+       required: true
+   - type: input
+     attributes:
+       label: 🌐 Language affected
+       placeholder: "German"
+     validations:
+       required: true
+   - type: textarea
+     attributes:
+       label: ❌ Actual phrase(s)
+       placeholder: What is there? Please include a screenshot as that is extremely helpful.
+     validations:
+       required: true
+   - type: textarea
+     attributes:
+       label: ✔️ Expected phrase(s)
+       placeholder: What was expected?
+     validations:
+       required: true
+   - type: textarea
+     attributes:
+       label: ℹ Why is the current translation wrong
+       placeholder: Why do you feel this is incorrect?
+     validations:
+       required: true

@@ -5,17 +5,17 @@
  SECRET_KEY=

  # Console API base URL
- CONSOLE_API_URL=http://127.0.0.1:5001
- CONSOLE_WEB_URL=http://127.0.0.1:3000
+ CONSOLE_API_URL=http://localhost:5001
+ CONSOLE_WEB_URL=http://localhost:3000

  # Service API base URL
- SERVICE_API_URL=http://127.0.0.1:5001
+ SERVICE_API_URL=http://localhost:5001

  # Web APP base URL
- APP_WEB_URL=http://127.0.0.1:3000
+ APP_WEB_URL=http://localhost:3000

  # Files URL
- FILES_URL=http://127.0.0.1:5001
+ FILES_URL=http://localhost:5001

  # INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
  # Set this to the internal Docker service URL for proper plugin file access.
@@ -138,8 +138,8 @@ SUPABASE_API_KEY=your-access-key
  SUPABASE_URL=your-server-url

  # CORS configuration
- WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
- CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
+ WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
+ CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*

  # Vector database configuration
  # support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore, matrixone

@@ -11,10 +11,12 @@ from werkzeug.exceptions import NotFound

  from configs import dify_config
  from constants.languages import languages
+ from core.plugin.entities.plugin import ToolProviderID
  from core.rag.datasource.vdb.vector_factory import Vector
  from core.rag.datasource.vdb.vector_type import VectorType
  from core.rag.index_processor.constant.built_in_field import BuiltInField
  from core.rag.models.document import Document
+ from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
  from events.app_event import app_was_created
  from extensions.ext_database import db
  from extensions.ext_redis import redis_client
@@ -27,6 +29,7 @@ from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, D
  from models.dataset import Document as DatasetDocument
  from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
  from models.provider import Provider, ProviderModel
+ from models.tools import ToolOAuthSystemClient
  from services.account_service import AccountService, RegisterService, TenantService
  from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
  from services.plugin.data_migration import PluginDataMigration
@@ -1155,3 +1158,49 @@ def remove_orphaned_files_on_storage(force: bool):
          click.echo(click.style(f"Removed {removed_files} orphaned files without errors.", fg="green"))
      else:
          click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow"))


+ @click.command("setup-system-tool-oauth-client", help="Setup system tool oauth client.")
+ @click.option("--provider", prompt=True, help="Provider name")
+ @click.option("--client-params", prompt=True, help="Client Params")
+ def setup_system_tool_oauth_client(provider, client_params):
+     """
+     Setup system tool oauth client
+     """
+     provider_id = ToolProviderID(provider)
+     provider_name = provider_id.provider_name
+     plugin_id = provider_id.plugin_id
+
+     try:
+         # json validate
+         click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
+         json.loads(client_params)
+         click.echo(click.style("Client params validated successfully.", fg="green"))
+
+         click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow"))
+         click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow"))
+         oauth_client_params = encrypt_system_oauth_params(client_params)
+         click.echo(click.style("Client params encrypted successfully.", fg="green"))
+     except Exception as e:
+         click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
+         return
+
+     deleted_count = (
+         db.session.query(ToolOAuthSystemClient)
+         .filter_by(
+             provider=provider_name,
+             plugin_id=plugin_id,
+         )
+         .delete()
+     )
+     if deleted_count > 0:
+         click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
+
+     oauth_client = ToolOAuthSystemClient(
+         provider=provider_name,
+         plugin_id=plugin_id,
+         encrypted_oauth_params=oauth_client_params,
+     )
+     db.session.add(oauth_client)
+     db.session.commit()
+     click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
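
A quick way to see the command end to end is click's test runner. A minimal sketch, assuming the Dify api app context that the CLI normally provides; the provider id and the client_id/client_secret keys are illustrative placeholders, not a documented schema:

import json
from click.testing import CliRunner

from commands import setup_system_tool_oauth_client  # assumed import path

runner = CliRunner()
result = runner.invoke(
    setup_system_tool_oauth_client,
    [
        "--provider", "langgenius/google/google",  # hypothetical plugin-scoped provider id
        "--client-params", json.dumps({"client_id": "...", "client_secret": "..."}),
    ],
)
print(result.output)  # echoes validation, encryption, and the new row id

Note that the command deletes any existing ToolOAuthSystemClient row for the same provider/plugin before inserting the new one, so re-running it rotates the system-wide OAuth client params rather than accumulating duplicates.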

@@ -162,11 +162,6 @@ class DatabaseConfig(BaseSettings):
          default=3600,
      )

-     SQLALCHEMY_POOL_USE_LIFO: bool = Field(
-         description="If True, SQLAlchemy will use last-in-first-out way to retrieve connections from pool.",
-         default=False,
-     )
-
      SQLALCHEMY_POOL_PRE_PING: bool = Field(
          description="If True, enables connection pool pre-ping feature to check connections.",
          default=False,
@@ -204,7 +199,6 @@ class DatabaseConfig(BaseSettings):
          "pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
          "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
          "connect_args": connect_args,
-         "pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
      }

@@ -1,6 +1,7 @@
  from configs import dify_config

  HIDDEN_VALUE = "[__HIDDEN__]"
+ UNKNOWN_VALUE = "[__UNKNOWN__]"
  UUID_NIL = "00000000-0000-0000-0000-000000000000"

  DEFAULT_FILE_NUMBER_LIMITS = 3

@@ -151,7 +151,6 @@ class AppApi(Resource):
          parser.add_argument("icon", type=str, location="json")
          parser.add_argument("icon_background", type=str, location="json")
          parser.add_argument("use_icon_as_answer_icon", type=bool, location="json")
-         parser.add_argument("max_active_requests", type=int, location="json")
          args = parser.parse_args()

          app_service = AppService()

@@ -35,20 +35,16 @@ class AppMCPServerController(Resource):
      @get_app_model
      @marshal_with(app_server_fields)
      def post(self, app_model):
          # The role of the current user in the ta table must be editor, admin, or owner
          if not current_user.is_editor:
              raise NotFound()
          parser = reqparse.RequestParser()
-         parser.add_argument("description", type=str, required=False, location="json")
+         parser.add_argument("description", type=str, required=True, location="json")
          parser.add_argument("parameters", type=dict, required=True, location="json")
          args = parser.parse_args()

-         description = args.get("description")
-         if not description:
-             description = app_model.description or ""
-
          server = AppMCPServer(
              name=app_model.name,
-             description=description,
+             description=args["description"],
              parameters=json.dumps(args["parameters"], ensure_ascii=False),
              status=AppMCPServerStatus.ACTIVE,
              app_id=app_model.id,
@@ -69,22 +65,14 @@ class AppMCPServerController(Resource):
              raise NotFound()
          parser = reqparse.RequestParser()
          parser.add_argument("id", type=str, required=True, location="json")
-         parser.add_argument("description", type=str, required=False, location="json")
+         parser.add_argument("description", type=str, required=True, location="json")
          parser.add_argument("parameters", type=dict, required=True, location="json")
          parser.add_argument("status", type=str, required=False, location="json")
          args = parser.parse_args()
          server = db.session.query(AppMCPServer).filter(AppMCPServer.id == args["id"]).first()
          if not server:
              raise NotFound()

-         description = args.get("description")
-         if description is None:
-             pass
-         elif not description:
-             server.description = app_model.description or ""
-         else:
-             server.description = description
-
+         server.description = args["description"]
          server.parameters = json.dumps(args["parameters"], ensure_ascii=False)
          if args["status"]:
              if args["status"] not in [status.value for status in AppMCPServerStatus]:
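
After this change, `description` must always be sent; the fallback to the app's own description is gone. A hypothetical client call for the create endpoint (the URL path is not shown in this diff and is assumed, as is the parameters schema):

import requests

payload = {
    "description": "Expose this app as an MCP server",  # now required=True
    "parameters": {"query": {"type": "string"}},        # illustrative schema
}
resp = requests.post(
    "http://localhost:5001/console/api/apps/<app_id>/server",  # assumed path
    json=payload,
    headers={"Authorization": "Bearer <console-token>"},
)
resp.raise_for_status()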

@@ -68,18 +68,13 @@ def _create_pagination_parser():
      return parser


- def _serialize_variable_type(workflow_draft_var: WorkflowDraftVariable) -> str:
-     value_type = workflow_draft_var.value_type
-     return value_type.exposed_type().value
-
-
  _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
      "id": fields.String,
      "type": fields.String(attribute=lambda model: model.get_variable_type()),
      "name": fields.String,
      "description": fields.String,
      "selector": fields.List(fields.String, attribute=lambda model: model.get_selector()),
-     "value_type": fields.String(attribute=_serialize_variable_type),
+     "value_type": fields.String,
      "edited": fields.Boolean(attribute=lambda model: model.edited),
      "visible": fields.Boolean,
  }
@@ -95,7 +90,7 @@ _WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS = {
      "name": fields.String,
      "description": fields.String,
      "selector": fields.List(fields.String, attribute=lambda model: model.get_selector()),
-     "value_type": fields.String(attribute=_serialize_variable_type),
+     "value_type": fields.String,
      "edited": fields.Boolean(attribute=lambda model: model.edited),
      "visible": fields.Boolean,
  }
@@ -401,7 +396,7 @@ class EnvironmentVariableCollectionApi(Resource):
                  "name": v.name,
                  "description": v.description,
                  "selector": v.selector,
-                 "value_type": v.value_type.exposed_type().value,
+                 "value_type": v.value_type.value,
                  "value": v.value,
                  # Do not track edited for env vars.
                  "edited": False,
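
The field dicts above use flask_restful's marshalling, where a field may take an `attribute` callable that derives its value from the model at serialize time; that is exactly what the removed `_serialize_variable_type` hook plugged into. A self-contained sketch of the pattern (`DraftVar` is a stand-in, not the real WorkflowDraftVariable model):

from dataclasses import dataclass

from flask_restful import fields, marshal


@dataclass
class DraftVar:
    name: str
    raw_type: str


FIELDS = {
    "name": fields.String,
    # equivalent in spirit to fields.String(attribute=_serialize_variable_type)
    "value_type": fields.String(attribute=lambda m: m.raw_type.upper()),
}

print(marshal(DraftVar(name="q", raw_type="string"), FIELDS))
# -> {'name': 'q', 'value_type': 'STRING'}

Dropping the callable, as this diff does, means `value_type` is read as a plain attribute again, so the raw enum value is serialized instead of its "exposed" form.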

@@ -1,23 +1,32 @@
  import io
  from urllib.parse import urlparse

- from flask import redirect, send_file
+ from flask import make_response, redirect, request, send_file
  from flask_login import current_user
- from flask_restful import Resource, reqparse
- from sqlalchemy.orm import Session
+ from flask_restful import (
+     Resource,
+     reqparse,
+ )
  from werkzeug.exceptions import Forbidden

  from configs import dify_config
  from controllers.console import api
- from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
+ from controllers.console.wraps import (
+     account_initialization_required,
+     enterprise_license_required,
+     setup_required,
+ )
  from core.mcp.auth.auth_flow import auth, handle_callback
  from core.mcp.auth.auth_provider import OAuthClientProvider
  from core.mcp.error import MCPAuthError, MCPError
  from core.mcp.mcp_client import MCPClient
  from core.model_runtime.utils.encoders import jsonable_encoder
  from extensions.ext_database import db
- from libs.helper import alphanumeric, uuid_value
+ from core.plugin.entities.plugin import ToolProviderID
+ from core.plugin.impl.oauth import OAuthHandler
+ from core.tools.entities.tool_entities import CredentialType
+ from libs.helper import StrLen, alphanumeric, uuid_value
  from libs.login import login_required
+ from services.plugin.oauth_service import OAuthProxyService
  from services.tools.api_tools_manage_service import ApiToolManageService
  from services.tools.builtin_tools_manage_service import BuiltinToolManageService
  from services.tools.mcp_tools_mange_service import MCPToolManageService
@@ -89,7 +98,7 @@ class ToolBuiltinProviderInfoApi(Resource):
          user_id = user.id
          tenant_id = user.current_tenant_id

-         return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(user_id, tenant_id, provider))
+         return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, provider))


  class ToolBuiltinProviderDeleteApi(Resource):
@@ -98,17 +107,47 @@ class ToolBuiltinProviderDeleteApi(Resource):
      @account_initialization_required
      def post(self, provider):
          user = current_user

          if not user.is_admin_or_owner:
              raise Forbidden()

          tenant_id = user.current_tenant_id
+         req = reqparse.RequestParser()
+         req.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
+         args = req.parse_args()
+
+         return BuiltinToolManageService.delete_builtin_tool_provider(
+             tenant_id,
+             provider,
+             args["credential_id"],
+         )
+
+
+ class ToolBuiltinProviderAddApi(Resource):
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def post(self, provider):
+         user = current_user
+
          user_id = user.id
          tenant_id = user.current_tenant_id

-         return BuiltinToolManageService.delete_builtin_tool_provider(
-             user_id,
-             tenant_id,
-             provider,
-         )
+         parser = reqparse.RequestParser()
+         parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+         parser.add_argument("name", type=StrLen(30), required=False, nullable=False, location="json")
+         parser.add_argument("type", type=str, required=True, nullable=False, location="json")
+         args = parser.parse_args()
+
+         if args["type"] not in CredentialType.values():
+             raise ValueError(f"Invalid credential type: {args['type']}")
+
+         return BuiltinToolManageService.add_builtin_tool_provider(
+             user_id=user_id,
+             tenant_id=tenant_id,
+             provider=provider,
+             credentials=args["credentials"],
+             name=args["name"],
+             api_type=CredentialType.of(args["type"]),
+         )

@@ -126,19 +165,20 @@ class ToolBuiltinProviderUpdateApi(Resource):
          tenant_id = user.current_tenant_id

          parser = reqparse.RequestParser()
-         parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+         parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
+         parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
+         parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")

          args = parser.parse_args()

-         with Session(db.engine) as session:
-             result = BuiltinToolManageService.update_builtin_tool_provider(
-                 session=session,
-                 user_id=user_id,
-                 tenant_id=tenant_id,
-                 provider_name=provider,
-                 credentials=args["credentials"],
-             )
-             session.commit()
+         result = BuiltinToolManageService.update_builtin_tool_provider(
+             user_id=user_id,
+             tenant_id=tenant_id,
+             provider=provider,
+             credential_id=args["credential_id"],
+             credentials=args.get("credentials", None),
+             name=args.get("name", ""),
+         )
          return result

@@ -149,9 +189,11 @@ class ToolBuiltinProviderGetCredentialsApi(Resource):
      def get(self, provider):
          tenant_id = current_user.current_tenant_id

-         return BuiltinToolManageService.get_builtin_tool_provider_credentials(
-             tenant_id=tenant_id,
-             provider_name=provider,
+         return jsonable_encoder(
+             BuiltinToolManageService.get_builtin_tool_provider_credentials(
+                 tenant_id=tenant_id,
+                 provider_name=provider,
+             )
          )

@@ -344,12 +386,15 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource):
      @setup_required
      @login_required
      @account_initialization_required
-     def get(self, provider):
+     def get(self, provider, credential_type):
          user = current_user

          tenant_id = user.current_tenant_id

-         return BuiltinToolManageService.list_builtin_provider_credentials_schema(provider, tenant_id)
+         return jsonable_encoder(
+             BuiltinToolManageService.list_builtin_provider_credentials_schema(
+                 provider, CredentialType.of(credential_type), tenant_id
+             )
+         )


  class ToolApiProviderSchemaApi(Resource):
@@ -586,15 +631,12 @@ class ToolApiListApi(Resource):
      @account_initialization_required
      def get(self):
          user = current_user

-         user_id = user.id
          tenant_id = user.current_tenant_id

          return jsonable_encoder(
              [
                  provider.to_dict()
                  for provider in ApiToolManageService.list_api_tools(
-                     user_id,
                      tenant_id,
                  )
              ]
@@ -631,6 +673,178 @@ class ToolLabelsApi(Resource):
          return jsonable_encoder(ToolLabelsService.list_tool_labels())


+ class ToolPluginOAuthApi(Resource):
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def get(self, provider):
+         tool_provider = ToolProviderID(provider)
+         plugin_id = tool_provider.plugin_id
+         provider_name = tool_provider.provider_name
+
+         # todo check permission
+         user = current_user
+
+         if not user.is_admin_or_owner:
+             raise Forbidden()
+
+         tenant_id = user.current_tenant_id
+         oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id=tenant_id, provider=provider)
+         if oauth_client_params is None:
+             raise Forbidden("no oauth available client config found for this tool provider")
+
+         oauth_handler = OAuthHandler()
+         context_id = OAuthProxyService.create_proxy_context(
+             user_id=current_user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name
+         )
+         redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback"
+         authorization_url_response = oauth_handler.get_authorization_url(
+             tenant_id=tenant_id,
+             user_id=user.id,
+             plugin_id=plugin_id,
+             provider=provider_name,
+             redirect_uri=redirect_uri,
+             system_credentials=oauth_client_params,
+         )
+         response = make_response(jsonable_encoder(authorization_url_response))
+         response.set_cookie(
+             "context_id",
+             context_id,
+             httponly=True,
+             samesite="Lax",
+             max_age=OAuthProxyService.__MAX_AGE__,
+         )
+         return response
+
+
+ class ToolOAuthCallback(Resource):
+     @setup_required
+     def get(self, provider):
+         context_id = request.cookies.get("context_id")
+         if not context_id:
+             raise Forbidden("context_id not found")
+
+         context = OAuthProxyService.use_proxy_context(context_id)
+         if context is None:
+             raise Forbidden("Invalid context_id")
+
+         tool_provider = ToolProviderID(provider)
+         plugin_id = tool_provider.plugin_id
+         provider_name = tool_provider.provider_name
+         user_id, tenant_id = context.get("user_id"), context.get("tenant_id")
+
+         oauth_handler = OAuthHandler()
+         oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id, provider)
+         if oauth_client_params is None:
+             raise Forbidden("no oauth available client config found for this tool provider")
+
+         redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback"
+         credentials = oauth_handler.get_credentials(
+             tenant_id=tenant_id,
+             user_id=user_id,
+             plugin_id=plugin_id,
+             provider=provider_name,
+             redirect_uri=redirect_uri,
+             system_credentials=oauth_client_params,
+             request=request,
+         ).credentials
+
+         if not credentials:
+             raise Exception("the plugin credentials failed")
+
+         # add credentials to database
+         BuiltinToolManageService.add_builtin_tool_provider(
+             user_id=user_id,
+             tenant_id=tenant_id,
+             provider=provider,
+             credentials=dict(credentials),
+             api_type=CredentialType.OAUTH2,
+         )
+         return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
+
+
+ class ToolBuiltinProviderSetDefaultApi(Resource):
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def post(self, provider):
+         parser = reqparse.RequestParser()
+         parser.add_argument("id", type=str, required=True, nullable=False, location="json")
+         args = parser.parse_args()
+         return BuiltinToolManageService.set_default_provider(
+             tenant_id=current_user.current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"]
+         )
+
+
+ class ToolOAuthCustomClient(Resource):
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def post(self, provider):
+         parser = reqparse.RequestParser()
+         parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
+         parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
+         args = parser.parse_args()
+
+         user = current_user
+
+         if not user.is_admin_or_owner:
+             raise Forbidden()
+
+         return BuiltinToolManageService.save_custom_oauth_client_params(
+             tenant_id=user.current_tenant_id,
+             provider=provider,
+             client_params=args.get("client_params", {}),
+             enable_oauth_custom_client=args.get("enable_oauth_custom_client", True),
+         )
+
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def get(self, provider):
+         return jsonable_encoder(
+             BuiltinToolManageService.get_custom_oauth_client_params(
+                 tenant_id=current_user.current_tenant_id, provider=provider
+             )
+         )
+
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def delete(self, provider):
+         return jsonable_encoder(
+             BuiltinToolManageService.delete_custom_oauth_client_params(
+                 tenant_id=current_user.current_tenant_id, provider=provider
+             )
+         )
+
+
+ class ToolBuiltinProviderGetOauthClientSchemaApi(Resource):
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def get(self, provider):
+         return jsonable_encoder(
+             BuiltinToolManageService.get_builtin_tool_provider_oauth_client_schema(
+                 tenant_id=current_user.current_tenant_id, provider_name=provider
+             )
+         )
+
+
+ class ToolBuiltinProviderGetCredentialInfoApi(Resource):
+     @setup_required
+     @login_required
+     @account_initialization_required
+     def get(self, provider):
+         tenant_id = current_user.current_tenant_id
+
+         return jsonable_encoder(
+             BuiltinToolManageService.get_builtin_tool_provider_credential_info(
+                 tenant_id=tenant_id,
+                 provider=provider,
+             )
+         )


  class ToolProviderMCPApi(Resource):
      @setup_required
      @login_required
@@ -794,17 +1008,33 @@ class ToolMCPCallbackApi(Resource):
  # tool provider
  api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers")

+ # tool oauth
+ api.add_resource(ToolPluginOAuthApi, "/oauth/plugin/<path:provider>/tool/authorization-url")
+ api.add_resource(ToolOAuthCallback, "/oauth/plugin/<path:provider>/tool/callback")
+ api.add_resource(ToolOAuthCustomClient, "/workspaces/current/tool-provider/builtin/<path:provider>/oauth/custom-client")
+
  # builtin tool provider
  api.add_resource(ToolBuiltinProviderListToolsApi, "/workspaces/current/tool-provider/builtin/<path:provider>/tools")
  api.add_resource(ToolBuiltinProviderInfoApi, "/workspaces/current/tool-provider/builtin/<path:provider>/info")
+ api.add_resource(ToolBuiltinProviderAddApi, "/workspaces/current/tool-provider/builtin/<path:provider>/add")
  api.add_resource(ToolBuiltinProviderDeleteApi, "/workspaces/current/tool-provider/builtin/<path:provider>/delete")
  api.add_resource(ToolBuiltinProviderUpdateApi, "/workspaces/current/tool-provider/builtin/<path:provider>/update")
+ api.add_resource(
+     ToolBuiltinProviderSetDefaultApi, "/workspaces/current/tool-provider/builtin/<path:provider>/default-credential"
+ )
+ api.add_resource(
+     ToolBuiltinProviderGetCredentialInfoApi, "/workspaces/current/tool-provider/builtin/<path:provider>/credential/info"
+ )
+ api.add_resource(
+     ToolBuiltinProviderGetCredentialsApi, "/workspaces/current/tool-provider/builtin/<path:provider>/credentials"
+ )
  api.add_resource(
      ToolBuiltinProviderCredentialsSchemaApi,
-     "/workspaces/current/tool-provider/builtin/<path:provider>/credentials_schema",
+     "/workspaces/current/tool-provider/builtin/<path:provider>/credential/schema/<path:credential_type>",
  )
+ api.add_resource(
+     ToolBuiltinProviderGetOauthClientSchemaApi,
+     "/workspaces/current/tool-provider/builtin/<path:provider>/oauth/client-schema",
+ )
  api.add_resource(ToolBuiltinProviderIconApi, "/workspaces/current/tool-provider/builtin/<path:provider>/icon")

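The authorization-url endpoint above stores a short-lived "proxy context" and hands its id to the browser in an httponly cookie; the callback later resolves that cookie back into (user_id, tenant_id) before exchanging the authorization code for credentials. A minimal Redis-backed sketch of that pattern, with the caveat that the key names, TTL, and internals of the real OAuthProxyService are assumptions, not the actual implementation:

import json
import secrets

import redis

r = redis.Redis()
MAX_AGE = 300  # seconds; the real OAuthProxyService.__MAX_AGE__ may differ


def create_proxy_context(user_id: str, tenant_id: str, plugin_id: str, provider: str) -> str:
    """Persist who started the OAuth dance so the callback can be attributed."""
    context_id = secrets.token_urlsafe(32)
    payload = {"user_id": user_id, "tenant_id": tenant_id, "plugin_id": plugin_id, "provider": provider}
    r.setex(f"oauth_proxy_context:{context_id}", MAX_AGE, json.dumps(payload))
    return context_id


def use_proxy_context(context_id: str) -> dict | None:
    """One-shot lookup: consume and delete so a context id cannot be replayed."""
    key = f"oauth_proxy_context:{context_id}"
    raw = r.get(key)
    if raw is None:
        return None
    r.delete(key)
    return json.loads(raw)

Keeping the context server-side and sending only an opaque id in a Lax, httponly cookie means the OAuth state survives the round trip through the external provider without trusting anything the browser sends back beyond that id.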

@@ -175,6 +175,7 @@ class PluginInvokeToolApi(Resource):
                  provider=payload.provider,
                  tool_name=payload.tool,
                  tool_parameters=payload.tool_parameters,
+                 credential_id=payload.credential_id
              ),
          )

@@ -16,6 +16,7 @@ class AgentToolEntity(BaseModel):
      tool_name: str
      tool_parameters: dict[str, Any] = Field(default_factory=dict)
      plugin_unique_identifier: str | None = None
+     credential_id: str | None = None


  class AgentPromptEntity(BaseModel):

@@ -4,6 +4,7 @@ from typing import Any, Optional

  from core.agent.entities import AgentInvokeMessage
  from core.agent.plugin_entities import AgentStrategyParameter
+ from core.plugin.entities.request import InvokeCredentials


  class BaseAgentStrategy(ABC):
@@ -18,11 +19,12 @@ class BaseAgentStrategy(ABC):
          conversation_id: Optional[str] = None,
          app_id: Optional[str] = None,
          message_id: Optional[str] = None,
+         credentials: Optional[InvokeCredentials] = None,
      ) -> Generator[AgentInvokeMessage, None, None]:
          """
          Invoke the agent strategy.
          """
-         yield from self._invoke(params, user_id, conversation_id, app_id, message_id)
+         yield from self._invoke(params, user_id, conversation_id, app_id, message_id, credentials)

      def get_parameters(self) -> Sequence[AgentStrategyParameter]:
          """
@@ -38,5 +40,6 @@ class BaseAgentStrategy(ABC):
          conversation_id: Optional[str] = None,
          app_id: Optional[str] = None,
          message_id: Optional[str] = None,
+         credentials: Optional[InvokeCredentials] = None,
      ) -> Generator[AgentInvokeMessage, None, None]:
          pass
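
Concrete strategies now receive the optional `credentials` object through `invoke` and must accept it in `_invoke`. A toy subclass showing the pass-through contract; it presumes the `BaseAgentStrategy`, `InvokeCredentials`, and `AgentInvokeMessage` names from the hunks above and yields nothing, so the body is purely schematic:

from collections.abc import Generator
from typing import Optional


class EchoStrategy(BaseAgentStrategy):
    """Toy strategy: shows where `credentials` arrives in _invoke."""

    def _invoke(
        self,
        params: dict,
        user_id: str,
        conversation_id: Optional[str] = None,
        app_id: Optional[str] = None,
        message_id: Optional[str] = None,
        credentials: Optional[InvokeCredentials] = None,
    ) -> Generator[AgentInvokeMessage, None, None]:
        # A real strategy forwards `credentials` to the plugin daemon,
        # as PluginAgentStrategy does via PluginInvokeContext (next hunk).
        creds = credentials or InvokeCredentials()
        yield from ()  # no messages emitted in this sketch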

@@ -4,6 +4,7 @@ from typing import Any, Optional
  from core.agent.entities import AgentInvokeMessage
  from core.agent.plugin_entities import AgentStrategyEntity, AgentStrategyParameter
  from core.agent.strategy.base import BaseAgentStrategy
+ from core.plugin.entities.request import InvokeCredentials, PluginInvokeContext
  from core.plugin.impl.agent import PluginAgentClient
  from core.plugin.utils.converter import convert_parameters_to_plugin_format
@@ -40,6 +41,7 @@ class PluginAgentStrategy(BaseAgentStrategy):
          conversation_id: Optional[str] = None,
          app_id: Optional[str] = None,
          message_id: Optional[str] = None,
+         credentials: Optional[InvokeCredentials] = None,
      ) -> Generator[AgentInvokeMessage, None, None]:
          """
          Invoke the agent strategy.
@@ -58,4 +60,7 @@ class PluginAgentStrategy(BaseAgentStrategy):
              conversation_id=conversation_id,
              app_id=app_id,
              message_id=message_id,
+             context=PluginInvokeContext(
+                 credentials=credentials or InvokeCredentials()
+             ),
          )

@@ -39,6 +39,7 @@ class AgentConfigManager:
                  "provider_id": tool["provider_id"],
                  "tool_name": tool["tool_name"],
                  "tool_parameters": tool.get("tool_parameters", {}),
+                 "credential_id": tool.get("credential_id", None),
              }

              agent_tools.append(AgentToolEntity(**agent_tool_properties))

@@ -16,10 +16,9 @@ from core.app.entities.queue_entities import (
      QueueTextChunkEvent,
  )
  from core.moderation.base import ModerationError
- from core.variables.variables import VariableUnion
  from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
  from core.workflow.entities.variable_pool import VariablePool
- from core.workflow.system_variable import SystemVariable
+ from core.workflow.enums import SystemVariableKey
  from core.workflow.variable_loader import VariableLoader
  from core.workflow.workflow_entry import WorkflowEntry
  from extensions.ext_database import db
@@ -65,7 +64,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
          if not workflow:
              raise ValueError("Workflow not initialized")

-         user_id: str | None = None
+         user_id = None
          if self.application_generate_entity.invoke_from in {InvokeFrom.WEB_APP, InvokeFrom.SERVICE_API}:
              end_user = db.session.query(EndUser).filter(EndUser.id == self.application_generate_entity.user_id).first()
              if end_user:
@@ -137,25 +136,23 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
              session.commit()

          # Create a variable pool.
-         system_inputs = SystemVariable(
-             query=query,
-             files=files,
-             conversation_id=self.conversation.id,
-             user_id=user_id,
-             dialogue_count=self._dialogue_count,
-             app_id=app_config.app_id,
-             workflow_id=app_config.workflow_id,
-             workflow_execution_id=self.application_generate_entity.workflow_run_id,
-         )
+         system_inputs = {
+             SystemVariableKey.QUERY: query,
+             SystemVariableKey.FILES: files,
+             SystemVariableKey.CONVERSATION_ID: self.conversation.id,
+             SystemVariableKey.USER_ID: user_id,
+             SystemVariableKey.DIALOGUE_COUNT: self._dialogue_count,
+             SystemVariableKey.APP_ID: app_config.app_id,
+             SystemVariableKey.WORKFLOW_ID: app_config.workflow_id,
+             SystemVariableKey.WORKFLOW_EXECUTION_ID: self.application_generate_entity.workflow_run_id,
+         }

          # init variable pool
          variable_pool = VariablePool(
              system_variables=system_inputs,
              user_inputs=inputs,
              environment_variables=workflow.environment_variables,
-             # Based on the definition of `VariableUnion`,
-             # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
-             conversation_variables=cast(list[VariableUnion], conversation_variables),
+             conversation_variables=conversation_variables,
          )

          # init graph
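
This hunk (and the parallel ones below) swaps the typed `SystemVariable` container for a plain dict keyed by `SystemVariableKey` members. A stripped-down sketch of that shape; the enum values here are assumptions taken from the key names in the hunk, not necessarily Dify's exact definitions:

from enum import StrEnum


class SystemVariableKey(StrEnum):
    # subset of the keys used in the hunk above; values assumed
    QUERY = "query"
    FILES = "files"
    CONVERSATION_ID = "conversation_id"
    USER_ID = "user_id"


system_inputs = {
    SystemVariableKey.QUERY: "hello",
    SystemVariableKey.FILES: [],
    SystemVariableKey.CONVERSATION_ID: "c-1",
    SystemVariableKey.USER_ID: None,
}

# StrEnum keys behave as their string values when serialized:
print({str(k): v for k, v in system_inputs.items()})
# {'query': 'hello', 'files': [], 'conversation_id': 'c-1', 'user_id': None}

The trade-off is the usual one: the dict form is open-ended (any key/value can go in), while the removed pydantic model validated the field set at construction time.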

@@ -61,12 +61,12 @@ from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
  from core.model_runtime.entities.llm_entities import LLMUsage
  from core.ops.ops_trace_manager import TraceQueueManager
  from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, WorkflowType
+ from core.workflow.enums import SystemVariableKey
  from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
  from core.workflow.nodes import NodeType
  from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
  from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
  from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
- from core.workflow.system_variable import SystemVariable
  from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
  from events.message_event import message_was_created
  from extensions.ext_database import db
@@ -116,16 +116,16 @@ class AdvancedChatAppGenerateTaskPipeline:

          self._workflow_cycle_manager = WorkflowCycleManager(
              application_generate_entity=application_generate_entity,
-             workflow_system_variables=SystemVariable(
-                 query=message.query,
-                 files=application_generate_entity.files,
-                 conversation_id=conversation.id,
-                 user_id=user_session_id,
-                 dialogue_count=dialogue_count,
-                 app_id=application_generate_entity.app_config.app_id,
-                 workflow_id=workflow.id,
-                 workflow_execution_id=application_generate_entity.workflow_run_id,
-             ),
+             workflow_system_variables={
+                 SystemVariableKey.QUERY: message.query,
+                 SystemVariableKey.FILES: application_generate_entity.files,
+                 SystemVariableKey.CONVERSATION_ID: conversation.id,
+                 SystemVariableKey.USER_ID: user_session_id,
+                 SystemVariableKey.DIALOGUE_COUNT: dialogue_count,
+                 SystemVariableKey.APP_ID: application_generate_entity.app_config.app_id,
+                 SystemVariableKey.WORKFLOW_ID: workflow.id,
+                 SystemVariableKey.WORKFLOW_EXECUTION_ID: application_generate_entity.workflow_run_id,
+             },
              workflow_info=CycleManagerWorkflowInfo(
                  workflow_id=workflow.id,
                  workflow_type=WorkflowType(workflow.type),

@@ -11,7 +11,7 @@ from core.app.entities.app_invoke_entities import (
  )
  from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
  from core.workflow.entities.variable_pool import VariablePool
- from core.workflow.system_variable import SystemVariable
+ from core.workflow.enums import SystemVariableKey
  from core.workflow.variable_loader import VariableLoader
  from core.workflow.workflow_entry import WorkflowEntry
  from extensions.ext_database import db
@@ -95,14 +95,13 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
          files = self.application_generate_entity.files

          # Create a variable pool.
-
-         system_inputs = SystemVariable(
-             files=files,
-             user_id=user_id,
-             app_id=app_config.app_id,
-             workflow_id=app_config.workflow_id,
-             workflow_execution_id=self.application_generate_entity.workflow_execution_id,
-         )
+         system_inputs = {
+             SystemVariableKey.FILES: files,
+             SystemVariableKey.USER_ID: user_id,
+             SystemVariableKey.APP_ID: app_config.app_id,
+             SystemVariableKey.WORKFLOW_ID: app_config.workflow_id,
+             SystemVariableKey.WORKFLOW_EXECUTION_ID: self.application_generate_entity.workflow_execution_id,
+         }

          variable_pool = VariablePool(
              system_variables=system_inputs,

@@ -54,10 +54,10 @@ from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTas
  from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
  from core.ops.ops_trace_manager import TraceQueueManager
  from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
+ from core.workflow.enums import SystemVariableKey
  from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
  from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
  from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
- from core.workflow.system_variable import SystemVariable
  from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
  from extensions.ext_database import db
  from models.account import Account
@@ -107,13 +107,13 @@ class WorkflowAppGenerateTaskPipeline:

          self._workflow_cycle_manager = WorkflowCycleManager(
              application_generate_entity=application_generate_entity,
-             workflow_system_variables=SystemVariable(
-                 files=application_generate_entity.files,
-                 user_id=user_session_id,
-                 app_id=application_generate_entity.app_config.app_id,
-                 workflow_id=workflow.id,
-                 workflow_execution_id=application_generate_entity.workflow_execution_id,
-             ),
+             workflow_system_variables={
+                 SystemVariableKey.FILES: application_generate_entity.files,
+                 SystemVariableKey.USER_ID: user_session_id,
+                 SystemVariableKey.APP_ID: application_generate_entity.app_config.app_id,
+                 SystemVariableKey.WORKFLOW_ID: workflow.id,
+                 SystemVariableKey.WORKFLOW_EXECUTION_ID: application_generate_entity.workflow_execution_id,
+             },
              workflow_info=CycleManagerWorkflowInfo(
                  workflow_id=workflow.id,
                  workflow_type=WorkflowType(workflow.type),

@@ -62,7 +62,6 @@ from core.workflow.graph_engine.entities.event import (
  from core.workflow.graph_engine.entities.graph import Graph
  from core.workflow.nodes import NodeType
  from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
- from core.workflow.system_variable import SystemVariable
  from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool
  from core.workflow.workflow_entry import WorkflowEntry
  from extensions.ext_database import db
@@ -167,7 +166,7 @@ class WorkflowBasedAppRunner(AppRunner):

          # init variable pool
          variable_pool = VariablePool(
-             system_variables=SystemVariable.empty(),
+             system_variables={},
              user_inputs={},
              environment_variables=workflow.environment_variables,
          )
@@ -264,7 +263,7 @@ class WorkflowBasedAppRunner(AppRunner):

          # init variable pool
          variable_pool = VariablePool(
-             system_variables=SystemVariable.empty(),
+             system_variables={},
              user_inputs={},
              environment_variables=workflow.environment_variables,
          )

api/core/helper/provider_cache.py (new file, 84 lines)
@@ -0,0 +1,84 @@
+ import json
+ from abc import ABC, abstractmethod
+ from json import JSONDecodeError
+ from typing import Any, Optional
+
+ from extensions.ext_redis import redis_client
+
+
+ class ProviderCredentialsCache(ABC):
+     """Base class for provider credentials cache"""
+
+     def __init__(self, **kwargs):
+         self.cache_key = self._generate_cache_key(**kwargs)
+
+     @abstractmethod
+     def _generate_cache_key(self, **kwargs) -> str:
+         """Generate cache key based on subclass implementation"""
+         pass
+
+     def get(self) -> Optional[dict]:
+         """Get cached provider credentials"""
+         cached_credentials = redis_client.get(self.cache_key)
+         if cached_credentials:
+             try:
+                 cached_credentials = cached_credentials.decode("utf-8")
+                 return dict(json.loads(cached_credentials))
+             except JSONDecodeError:
+                 return None
+         return None
+
+     def set(self, config: dict[str, Any]) -> None:
+         """Cache provider credentials"""
+         redis_client.setex(self.cache_key, 86400, json.dumps(config))
+
+     def delete(self) -> None:
+         """Delete cached provider credentials"""
+         redis_client.delete(self.cache_key)
+
+
+ class SingletonProviderCredentialsCache(ProviderCredentialsCache):
+     """Cache for tool single provider credentials"""
+
+     def __init__(self, tenant_id: str, provider_type: str, provider_identity: str):
+         super().__init__(
+             tenant_id=tenant_id,
+             provider_type=provider_type,
+             provider_identity=provider_identity,
+         )
+
+     def _generate_cache_key(self, **kwargs) -> str:
+         tenant_id = kwargs["tenant_id"]
+         provider_type = kwargs["provider_type"]
+         identity_name = kwargs["provider_identity"]
+         identity_id = f"{provider_type}.{identity_name}"
+         return f"{provider_type}_credentials:tenant_id:{tenant_id}:id:{identity_id}"
+
+
+ class ToolProviderCredentialsCache(ProviderCredentialsCache):
+     """Cache for tool provider credentials"""
+
+     def __init__(self, tenant_id: str, provider: str, credential_id: str):
+         super().__init__(tenant_id=tenant_id, provider=provider, credential_id=credential_id)
+
+     def _generate_cache_key(self, **kwargs) -> str:
+         tenant_id = kwargs["tenant_id"]
+         provider = kwargs["provider"]
+         credential_id = kwargs["credential_id"]
+         return f"tool_credentials:tenant_id:{tenant_id}:provider:{provider}:credential_id:{credential_id}"
+
+
+ class NoOpProviderCredentialCache:
+     """No-op provider credential cache"""
+
+     def get(self) -> Optional[dict]:
+         """Get cached provider credentials"""
+         return None
+
+     def set(self, config: dict[str, Any]) -> None:
+         """Cache provider credentials"""
+         pass
+
+     def delete(self) -> None:
+         """Delete cached provider credentials"""
+         pass
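
Usage of the new cache classes follows directly from the file above; a short sketch, with tenant/provider/credential ids as placeholders:

cache = ToolProviderCredentialsCache(
    tenant_id="tenant-123",
    provider="langgenius/google/google",   # placeholder provider id
    credential_id="cred-456",
)

creds = cache.get()              # None on a miss or a non-JSON payload
if creds is None:
    creds = {"api_key": "decrypted-value"}  # placeholder for a real lookup
    cache.set(creds)             # stored with a 24h TTL (setex 86400)
cache.delete()                   # invalidate after credentials are rotated

Compared with the deleted single-class version below, keying by `credential_id` lets one tool provider hold several credential sets per tenant, which is what the new add/update/default-credential endpoints rely on; `NoOpProviderCredentialCache` slots in where callers want the same interface with caching disabled.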
@@ -1,51 +0,0 @@
- import json
- from enum import Enum
- from json import JSONDecodeError
- from typing import Optional
-
- from extensions.ext_redis import redis_client
-
-
- class ToolProviderCredentialsCacheType(Enum):
-     PROVIDER = "tool_provider"
-     ENDPOINT = "endpoint"
-
-
- class ToolProviderCredentialsCache:
-     def __init__(self, tenant_id: str, identity_id: str, cache_type: ToolProviderCredentialsCacheType):
-         self.cache_key = f"{cache_type.value}_credentials:tenant_id:{tenant_id}:id:{identity_id}"
-
-     def get(self) -> Optional[dict]:
-         """
-         Get cached model provider credentials.
-
-         :return:
-         """
-         cached_provider_credentials = redis_client.get(self.cache_key)
-         if cached_provider_credentials:
-             try:
-                 cached_provider_credentials = cached_provider_credentials.decode("utf-8")
-                 cached_provider_credentials = json.loads(cached_provider_credentials)
-             except JSONDecodeError:
-                 return None
-
-             return dict(cached_provider_credentials)
-         else:
-             return None
-
-     def set(self, credentials: dict) -> None:
-         """
-         Cache model provider credentials.
-
-         :param credentials: provider credentials
-         :return:
-         """
-         redis_client.setex(self.cache_key, 86400, json.dumps(credentials))
-
-     def delete(self) -> None:
-         """
-         Delete cached model provider credentials.
-
-         :return:
-         """
-         redis_client.delete(self.cache_key)

@@ -240,7 +240,7 @@ def refresh_authorization(
      response = requests.post(token_url, data=params)
      if not response.ok:
          raise ValueError(f"Token refresh failed: HTTP {response.status_code}")
-     return OAuthTokens.model_validate(response.json())
+     return OAuthTokens.parse_obj(response.json())


  def register_client(
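`model_validate` is the Pydantic v2 entry point and `parse_obj` the v1-era name (kept in v2 only as a deprecated alias), so this one-line change is effectively a Pydantic API-compatibility choice. Both calls validate a dict into a model; a tiny demonstration with a stand-in model, since the real OAuthTokens fields are not shown here:

from pydantic import BaseModel


class OAuthTokensSketch(BaseModel):  # stand-in for the real OAuthTokens model
    access_token: str
    expires_in: int | None = None


data = {"access_token": "abc", "expires_in": 3600}
t1 = OAuthTokensSketch.model_validate(data)  # Pydantic v2 API
t2 = OAuthTokensSketch.parse_obj(data)       # v1 API; deprecated alias under v2
assert t1 == t2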

@@ -1,7 +1,7 @@
  import logging
  import queue
  from collections.abc import Callable
- from concurrent.futures import Future, ThreadPoolExecutor, TimeoutError
+ from concurrent.futures import ThreadPoolExecutor
  from contextlib import ExitStack
  from datetime import timedelta
  from types import TracebackType
@@ -171,41 +171,23 @@ class BaseSession(
          self._session_read_timeout_seconds = read_timeout_seconds
          self._in_flight = {}
          self._exit_stack = ExitStack()
-         # Initialize executor and future to None for proper cleanup checks
-         self._executor: ThreadPoolExecutor | None = None
-         self._receiver_future: Future | None = None

      def __enter__(self) -> Self:
-         # The thread pool is dedicated to running `_receive_loop`. Setting `max_workers` to 1
-         # ensures no unnecessary threads are created.
-         self._executor = ThreadPoolExecutor(max_workers=1)
+         self._executor = ThreadPoolExecutor()
          self._receiver_future = self._executor.submit(self._receive_loop)
          return self

      def check_receiver_status(self) -> None:
          """`check_receiver_status` ensures that any exceptions raised during the
          execution of `_receive_loop` are retrieved and propagated."""
-         if self._receiver_future and self._receiver_future.done():
+         if self._receiver_future.done():
              self._receiver_future.result()

      def __exit__(
          self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
      ) -> None:
          self._exit_stack.close()
          self._read_stream.put(None)
          self._write_stream.put(None)
-
-         # Wait for the receiver loop to finish
-         if self._receiver_future:
-             try:
-                 self._receiver_future.result(timeout=5.0)  # Wait up to 5 seconds
-             except TimeoutError:
-                 # If the receiver loop is still running after timeout, we'll force shutdown
-                 pass
-
-         # Shutdown the executor
-         if self._executor:
-             self._executor.shutdown(wait=True)

      def send_request(
          self,
          request: SendRequestT,
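
The session keeps one background future running `_receive_loop` and surfaces its exceptions on demand rather than in the background thread. The pattern in miniature, with a schematic queue-driven loop standing in for the real MCP message handling:

import queue
from concurrent.futures import Future, ThreadPoolExecutor


class ReceiverSketch:
    """Minimal version of the submit/check pattern used by BaseSession."""

    def __init__(self) -> None:
        self._inbox: queue.Queue = queue.Queue()
        self._executor = ThreadPoolExecutor(max_workers=1)  # one dedicated receiver thread
        self._future: Future = self._executor.submit(self._receive_loop)

    def _receive_loop(self) -> None:
        # Runs until a None sentinel arrives; any exception it raises is
        # captured by the future instead of killing the thread silently.
        while (item := self._inbox.get()) is not None:
            pass  # dispatch `item` here

    def check_receiver_status(self) -> None:
        # If the loop has died, .result() re-raises its exception in the caller.
        if self._future.done():
            self._future.result()

    def close(self) -> None:
        self._inbox.put(None)             # sentinel stops the loop
        self._executor.shutdown(wait=True)

Calling `future.result()` only after `done()` is what lets `check_receiver_status` propagate receiver failures without ever blocking the caller.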
@@ -284,8 +284,7 @@ class AliyunDataTrace(BaseTraceInstance):
            else:
                node_span = self.build_workflow_task_span(trace_id, workflow_span_id, trace_info, node_execution)
            return node_span
        except Exception as e:
            logging.debug(f"Error occurred in build_workflow_node_span: {e}", exc_info=True)
        except Exception:
            return None

    def get_workflow_node_status(self, node_execution: WorkflowNodeExecution) -> Status:
@@ -307,7 +306,7 @@ class AliyunDataTrace(BaseTraceInstance):
            start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
            end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "",
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value,
                GEN_AI_FRAMEWORK: "dify",
                INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False),
@@ -382,7 +381,7 @@ class AliyunDataTrace(BaseTraceInstance):
            start_time=convert_datetime_to_nanoseconds(node_execution.created_at),
            end_time=convert_datetime_to_nanoseconds(node_execution.finished_at),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "",
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value,
                GEN_AI_FRAMEWORK: "dify",
                GEN_AI_MODEL_NAME: process_data.get("model_name", ""),
@@ -416,7 +415,7 @@ class AliyunDataTrace(BaseTraceInstance):
            start_time=convert_datetime_to_nanoseconds(trace_info.start_time),
            end_time=convert_datetime_to_nanoseconds(trace_info.end_time),
            attributes={
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "",
                GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""),
                GEN_AI_USER_ID: str(user_id),
                GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value,
                GEN_AI_FRAMEWORK: "dify",

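The two `GEN_AI_SESSION_ID` variants in these hunks differ in a subtle way: `dict.get(key, default)` only falls back when the key is absent, while `dict.get(key) or default` also replaces a stored `None` (or any falsy value). A quick illustration:

metadata = {"conversation_id": None}

assert metadata.get("conversation_id", "") is None    # key exists, so the default is unused
assert (metadata.get("conversation_id") or "") == ""  # falsy stored value is replaced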
@@ -1,16 +1,20 @@
from core.helper.provider_cache import SingletonProviderCredentialsCache
from core.plugin.entities.request import RequestInvokeEncrypt
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.utils.encryption import create_provider_encrypter
from models.account import Tenant


class PluginEncrypter:
    @classmethod
    def invoke_encrypt(cls, tenant: Tenant, payload: RequestInvokeEncrypt) -> dict:
        encrypter = ProviderConfigEncrypter(
        encrypter, cache = create_provider_encrypter(
            tenant_id=tenant.id,
            config=payload.config,
            provider_type=payload.namespace,
            provider_identity=payload.identity,
            cache=SingletonProviderCredentialsCache(
                tenant_id=tenant.id,
                provider_type=payload.namespace,
                provider_identity=payload.identity,
            ),
        )

        if payload.opt == "encrypt":
@@ -22,7 +26,7 @@ class PluginEncrypter:
                "data": encrypter.decrypt(payload.data),
            }
        elif payload.opt == "clear":
            encrypter.delete_tool_credentials_cache()
            cache.delete()
            return {
                "data": {},
            }

@@ -1,5 +1,5 @@
from collections.abc import Generator
from typing import Any
from typing import Any, Optional

from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler
from core.plugin.backwards_invocation.base import BaseBackwardsInvocation
@@ -23,6 +23,7 @@ class PluginToolBackwardsInvocation(BaseBackwardsInvocation):
        provider: str,
        tool_name: str,
        tool_parameters: dict[str, Any],
        credential_id: Optional[str] = None,
    ) -> Generator[ToolInvokeMessage, None, None]:
        """
        invoke tool
@@ -30,7 +31,7 @@ class PluginToolBackwardsInvocation(BaseBackwardsInvocation):
        # get tool runtime
        try:
            tool_runtime = ToolManager.get_tool_runtime_from_plugin(
                tool_type, tenant_id, provider, tool_name, tool_parameters
                tool_type, tenant_id, provider, tool_name, tool_parameters, credential_id
            )
            response = ToolEngine.generic_invoke(
                tool_runtime, tool_parameters, user_id, DifyWorkflowCallbackHandler(), workflow_call_depth=1

@@ -27,6 +27,20 @@ from core.workflow.nodes.question_classifier.entities import (
)


class InvokeCredentials(BaseModel):
    tool_credentials: dict[str, str] = Field(
        default_factory=dict,
        description="Map of tool provider to credential id, used to store the credential id for the tool provider.",
    )


class PluginInvokeContext(BaseModel):
    credentials: Optional[InvokeCredentials] = Field(
        default_factory=InvokeCredentials,
        description="Credentials context for the plugin invocation or backward invocation.",
    )


class RequestInvokeTool(BaseModel):
    """
    Request to invoke a tool
@@ -36,6 +50,7 @@ class RequestInvokeTool(BaseModel):
    provider: str
    tool: str
    tool_parameters: dict
    credential_id: Optional[str] = None


class BaseRequestInvokeModel(BaseModel):

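As a quick illustration of how these models compose (all values here are hypothetical), a caller can pin a stored credential per tool provider and serialize the context for a daemon request:

ctx = PluginInvokeContext(
    credentials=InvokeCredentials(
        tool_credentials={"some/provider": "credential-uuid"}  # hypothetical mapping
    )
)
payload = ctx.model_dump()  # what later lands in the request body's "context" field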
@@ -6,6 +6,7 @@ from core.plugin.entities.plugin import GenericProviderID
from core.plugin.entities.plugin_daemon import (
    PluginAgentProviderEntity,
)
from core.plugin.entities.request import PluginInvokeContext
from core.plugin.impl.base import BasePluginClient


@@ -83,6 +84,7 @@ class PluginAgentClient(BasePluginClient):
        conversation_id: Optional[str] = None,
        app_id: Optional[str] = None,
        message_id: Optional[str] = None,
        context: Optional[PluginInvokeContext] = None,
    ) -> Generator[AgentInvokeMessage, None, None]:
        """
        Invoke the agent with the given tenant, user, plugin, provider, name and parameters.
@@ -99,6 +101,7 @@ class PluginAgentClient(BasePluginClient):
                "conversation_id": conversation_id,
                "app_id": app_id,
                "message_id": message_id,
                "context": context.model_dump() if context else {},
                "data": {
                    "agent_strategy_provider": agent_provider_id.provider_name,
                    "agent_strategy": agent_strategy,

|
||||
user_id: str,
|
||||
plugin_id: str,
|
||||
provider: str,
|
||||
redirect_uri: str,
|
||||
system_credentials: Mapping[str, Any],
|
||||
) -> PluginOAuthAuthorizationUrlResponse:
|
||||
response = self._request_with_plugin_daemon_response_stream(
|
||||
"POST",
|
||||
f"plugin/{tenant_id}/dispatch/oauth/get_authorization_url",
|
||||
PluginOAuthAuthorizationUrlResponse,
|
||||
data={
|
||||
"user_id": user_id,
|
||||
"data": {
|
||||
"provider": provider,
|
||||
"system_credentials": system_credentials,
|
||||
try:
|
||||
response = self._request_with_plugin_daemon_response_stream(
|
||||
"POST",
|
||||
f"plugin/{tenant_id}/dispatch/oauth/get_authorization_url",
|
||||
PluginOAuthAuthorizationUrlResponse,
|
||||
data={
|
||||
"user_id": user_id,
|
||||
"data": {
|
||||
"provider": provider,
|
||||
"redirect_uri": redirect_uri,
|
||||
"system_credentials": system_credentials,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={
|
||||
"X-Plugin-ID": plugin_id,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
for resp in response:
|
||||
return resp
|
||||
raise ValueError("No response received from plugin daemon for authorization URL request.")
|
||||
headers={
|
||||
"X-Plugin-ID": plugin_id,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
for resp in response:
|
||||
return resp
|
||||
raise ValueError("No response received from plugin daemon for authorization URL request.")
|
||||
except Exception as e:
|
||||
raise ValueError(f"Error getting authorization URL: {e}")
|
||||
|
||||
def get_credentials(
|
||||
self,
|
||||
@ -43,6 +48,7 @@ class OAuthHandler(BasePluginClient):
|
||||
user_id: str,
|
||||
plugin_id: str,
|
||||
provider: str,
|
||||
redirect_uri: str,
|
||||
system_credentials: Mapping[str, Any],
|
||||
request: Request,
|
||||
) -> PluginOAuthCredentialsResponse:
|
||||
@ -50,30 +56,34 @@ class OAuthHandler(BasePluginClient):
|
||||
Get credentials from the given request.
|
||||
"""
|
||||
|
||||
# encode request to raw http request
|
||||
raw_request_bytes = self._convert_request_to_raw_data(request)
|
||||
|
||||
response = self._request_with_plugin_daemon_response_stream(
|
||||
"POST",
|
||||
f"plugin/{tenant_id}/dispatch/oauth/get_credentials",
|
||||
PluginOAuthCredentialsResponse,
|
||||
data={
|
||||
"user_id": user_id,
|
||||
"data": {
|
||||
"provider": provider,
|
||||
"system_credentials": system_credentials,
|
||||
# for json serialization
|
||||
"raw_http_request": binascii.hexlify(raw_request_bytes).decode(),
|
||||
try:
|
||||
# encode request to raw http request
|
||||
raw_request_bytes = self._convert_request_to_raw_data(request)
|
||||
response = self._request_with_plugin_daemon_response_stream(
|
||||
"POST",
|
||||
f"plugin/{tenant_id}/dispatch/oauth/get_credentials",
|
||||
PluginOAuthCredentialsResponse,
|
||||
data={
|
||||
"user_id": user_id,
|
||||
"data": {
|
||||
"provider": provider,
|
||||
"redirect_uri": redirect_uri,
|
||||
"system_credentials": system_credentials,
|
||||
# for json serialization
|
||||
"raw_http_request": binascii.hexlify(raw_request_bytes).decode(),
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={
|
||||
"X-Plugin-ID": plugin_id,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
for resp in response:
|
||||
return resp
|
||||
raise ValueError("No response received from plugin daemon for authorization URL request.")
|
||||
headers={
|
||||
"X-Plugin-ID": plugin_id,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
for resp in response:
|
||||
return resp
|
||||
raise ValueError("No response received from plugin daemon for authorization URL request.")
|
||||
except Exception as e:
|
||||
raise ValueError(f"Error getting credentials: {e}")
|
||||
|
||||
def _convert_request_to_raw_data(self, request: Request) -> bytes:
|
||||
"""
|
||||
|
||||
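Hex-encoding the raw HTTP request is what lets arbitrary bytes travel inside a JSON payload. A minimal round trip with the standard library:

import binascii

raw = b"GET /callback?code=abc HTTP/1.1\r\nHost: example.com\r\n\r\n"  # made-up request bytes
encoded = binascii.hexlify(raw).decode()    # JSON-safe ASCII string
assert binascii.unhexlify(encoded) == raw   # the daemon can recover the exact bytes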
@@ -6,7 +6,7 @@ from pydantic import BaseModel
from core.plugin.entities.plugin import GenericProviderID, ToolProviderID
from core.plugin.entities.plugin_daemon import PluginBasicBooleanResponse, PluginToolProviderEntity
from core.plugin.impl.base import BasePluginClient
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
from core.tools.entities.tool_entities import CredentialType, ToolInvokeMessage, ToolParameter


class PluginToolManager(BasePluginClient):
@@ -78,6 +78,7 @@ class PluginToolManager(BasePluginClient):
        tool_provider: str,
        tool_name: str,
        credentials: dict[str, Any],
        credential_type: CredentialType,
        tool_parameters: dict[str, Any],
        conversation_id: Optional[str] = None,
        app_id: Optional[str] = None,
@@ -102,6 +103,7 @@ class PluginToolManager(BasePluginClient):
                    "provider": tool_provider_id.provider_name,
                    "tool": tool_name,
                    "credentials": credentials,
                    "credential_type": credential_type,
                    "tool_parameters": tool_parameters,
                },
            },

@@ -158,7 +158,7 @@ class AdvancedPromptTransform(PromptTransform):

        if prompt_item.edition_type == "basic" or not prompt_item.edition_type:
            if self.with_variable_tmpl:
                vp = VariablePool.empty()
                vp = VariablePool()
                for k, v in inputs.items():
                    if k.startswith("#"):
                        vp.add(k[1:-1].split("."), v)

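The `k[1:-1].split(".")` expression above turns an input key wrapped in `#` markers into a variable selector list. With an illustrative key (not taken from the source):

k = "#sys.query#"
selector = k[1:-1].split(".")  # strip the surrounding '#' markers, then split on '.'
assert selector == ["sys", "query"]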
@@ -4,7 +4,7 @@ from openai import BaseModel
from pydantic import Field

from core.app.entities.app_invoke_entities import InvokeFrom
from core.tools.entities.tool_entities import ToolInvokeFrom
from core.tools.entities.tool_entities import CredentialType, ToolInvokeFrom


class ToolRuntime(BaseModel):
@@ -17,6 +17,7 @@ class ToolRuntime(BaseModel):
    invoke_from: Optional[InvokeFrom] = None
    tool_invoke_from: Optional[ToolInvokeFrom] = None
    credentials: dict[str, Any] = Field(default_factory=dict)
    credential_type: Optional[CredentialType] = CredentialType.API_KEY
    runtime_parameters: dict[str, Any] = Field(default_factory=dict)


@@ -7,7 +7,13 @@ from core.helper.module_import_helper import load_single_subclass_from_source
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.builtin_tool.tool import BuiltinTool
from core.tools.entities.tool_entities import ToolEntity, ToolProviderEntity, ToolProviderType
from core.tools.entities.tool_entities import (
    CredentialType,
    OAuthSchema,
    ToolEntity,
    ToolProviderEntity,
    ToolProviderType,
)
from core.tools.entities.values import ToolLabelEnum, default_tool_label_dict
from core.tools.errors import (
    ToolProviderNotFoundError,
@@ -39,10 +45,18 @@ class BuiltinToolProviderController(ToolProviderController):
            credential_dict = provider_yaml.get("credentials_for_provider", {}).get(credential, {})
            credentials_schema.append(credential_dict)

        oauth_schema = None
        if provider_yaml.get("oauth_schema", None) is not None:
            oauth_schema = OAuthSchema(
                client_schema=provider_yaml.get("oauth_schema", {}).get("client_schema", []),
                credentials_schema=provider_yaml.get("oauth_schema", {}).get("credentials_schema", []),
            )

        super().__init__(
            entity=ToolProviderEntity(
                identity=provider_yaml["identity"],
                credentials_schema=credentials_schema,
                oauth_schema=oauth_schema,
            ),
        )

@@ -97,10 +111,39 @@ class BuiltinToolProviderController(ToolProviderController):

        :return: the credentials schema
        """
        if not self.entity.credentials_schema:
            return []
        return self.get_credentials_schema_by_type(CredentialType.API_KEY.value)

        return self.entity.credentials_schema.copy()
    def get_credentials_schema_by_type(self, credential_type: str) -> list[ProviderConfig]:
        """
        returns the credentials schema of the provider

        :param credential_type: the type of the credential
        :return: the credentials schema of the provider
        """
        if credential_type == CredentialType.OAUTH2.value:
            return self.entity.oauth_schema.credentials_schema.copy() if self.entity.oauth_schema else []
        if credential_type == CredentialType.API_KEY.value:
            return self.entity.credentials_schema.copy() if self.entity.credentials_schema else []
        raise ValueError(f"Invalid credential type: {credential_type}")

    def get_oauth_client_schema(self) -> list[ProviderConfig]:
        """
        returns the oauth client schema of the provider

        :return: the oauth client schema
        """
        return self.entity.oauth_schema.client_schema.copy() if self.entity.oauth_schema else []

    def get_supported_credential_types(self) -> list[str]:
        """
        returns the supported credential types of the provider
        """
        types = []
        if self.entity.credentials_schema is not None and len(self.entity.credentials_schema) > 0:
            types.append(CredentialType.API_KEY.value)
        if self.entity.oauth_schema is not None and len(self.entity.oauth_schema.credentials_schema) > 0:
            types.append(CredentialType.OAUTH2.value)
        return types

    def get_tools(self) -> list[BuiltinTool]:
        """
@@ -123,7 +166,11 @@ class BuiltinToolProviderController(ToolProviderController):

        :return: whether the provider needs credentials
        """
        return self.entity.credentials_schema is not None and len(self.entity.credentials_schema) != 0
        return (
            self.entity.credentials_schema is not None
            and len(self.entity.credentials_schema) != 0
            or (self.entity.oauth_schema is not None and len(self.entity.oauth_schema.credentials_schema) != 0)
        )

    @property
    def provider_type(self) -> ToolProviderType:

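To make the schema dispatch concrete, here is a hedged sketch of how a caller might enumerate both auth modes of a provider (assuming `provider` is a constructed BuiltinToolProviderController and that ProviderConfig exposes a `name` field):

for credential_type in provider.get_supported_credential_types():
    schema = provider.get_credentials_schema_by_type(credential_type)
    print(credential_type, [config.name for config in schema])
# e.g. "api-key" with its secret-input fields, then "oauth2" with its token fields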
@@ -6,7 +6,7 @@ from pydantic import BaseModel, Field, field_validator
from core.model_runtime.utils.encoders import jsonable_encoder
from core.tools.__base.tool import ToolParameter
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.entities.tool_entities import CredentialType, ToolProviderType


class ToolApiEntity(BaseModel):
@@ -87,3 +87,22 @@ class ToolProviderApiEntity(BaseModel):
    def optional_field(self, key: str, value: Any) -> dict:
        """Return dict with key-value if value is truthy, empty dict otherwise."""
        return {key: value} if value else {}


class ToolProviderCredentialApiEntity(BaseModel):
    id: str = Field(description="The unique id of the credential")
    name: str = Field(description="The name of the credential")
    provider: str = Field(description="The provider of the credential")
    credential_type: CredentialType = Field(description="The type of the credential")
    is_default: bool = Field(
        default=False, description="Whether the credential is the default credential for the provider in the workspace"
    )
    credentials: dict = Field(description="The credentials of the provider")


class ToolProviderCredentialInfoApiEntity(BaseModel):
    supported_credential_types: list[str] = Field(description="The supported credential types of the provider")
    is_oauth_custom_client_enabled: bool = Field(
        default=False, description="Whether the OAuth custom client is enabled for the provider"
    )
    credentials: list[ToolProviderCredentialApiEntity] = Field(description="The credentials of the provider")

@@ -355,10 +355,18 @@ class ToolEntity(BaseModel):
        return v or []


class OAuthSchema(BaseModel):
    client_schema: list[ProviderConfig] = Field(default_factory=list, description="The schema of the OAuth client")
    credentials_schema: list[ProviderConfig] = Field(
        default_factory=list, description="The schema of the OAuth credentials"
    )


class ToolProviderEntity(BaseModel):
    identity: ToolProviderIdentity
    plugin_id: Optional[str] = None
    credentials_schema: list[ProviderConfig] = Field(default_factory=list)
    oauth_schema: Optional[OAuthSchema] = None


class ToolProviderEntityWithPlugin(ToolProviderEntity):
@@ -438,6 +446,7 @@ class ToolSelector(BaseModel):
    options: Optional[list[PluginParameterOption]] = None

    provider_id: str = Field(..., description="The id of the provider")
    credential_id: Optional[str] = Field(default=None, description="The id of the credential")
    tool_name: str = Field(..., description="The name of the tool")
    tool_description: str = Field(..., description="The description of the tool")
    tool_configuration: Mapping[str, Any] = Field(..., description="Configuration, type form")
@@ -445,3 +454,36 @@ class ToolSelector(BaseModel):

    def to_plugin_parameter(self) -> dict[str, Any]:
        return self.model_dump()


class CredentialType(enum.StrEnum):
    API_KEY = "api-key"
    OAUTH2 = "oauth2"

    def get_name(self):
        if self == CredentialType.API_KEY:
            return "API KEY"
        elif self == CredentialType.OAUTH2:
            return "AUTH"
        else:
            return self.value.replace("-", " ").upper()

    def is_editable(self):
        return self == CredentialType.API_KEY

    def is_validate_allowed(self):
        return self == CredentialType.API_KEY

    @classmethod
    def values(cls):
        return [item.value for item in cls]

    @classmethod
    def of(cls, credential_type: str) -> "CredentialType":
        type_name = credential_type.lower()
        if type_name == "api-key":
            return cls.API_KEY
        elif type_name == "oauth2":
            return cls.OAUTH2
        else:
            raise ValueError(f"Invalid credential type: {credential_type}")

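Since `CredentialType` is a `StrEnum`, members compare equal to their string values; the helpers mainly add display names and normalization. A few illustrative calls:

ct = CredentialType.of("OAuth2")
assert ct is CredentialType.OAUTH2                       # lookup is case-insensitive
assert ct == "oauth2"                                    # StrEnum members equal their values
assert ct.get_name() == "AUTH"
assert CredentialType.values() == ["api-key", "oauth2"]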
@@ -44,6 +44,7 @@ class PluginTool(Tool):
            tool_provider=self.entity.identity.provider,
            tool_name=self.entity.identity.name,
            credentials=self.runtime.credentials,
            credential_type=self.runtime.credential_type,
            tool_parameters=tool_parameters,
            conversation_id=conversation_id,
            app_id=app_id,

|
||||
from yarl import URL
|
||||
|
||||
import contexts
|
||||
from core.helper.provider_cache import ToolProviderCredentialsCache
|
||||
from core.plugin.entities.plugin import ToolProviderID
|
||||
from core.plugin.impl.tool import PluginToolManager
|
||||
from core.tools.__base.tool_provider import ToolProviderController
|
||||
@ -17,6 +18,7 @@ from core.tools.mcp_tool.provider import MCPToolProviderController
|
||||
from core.tools.mcp_tool.tool import MCPTool
|
||||
from core.tools.plugin_tool.provider import PluginToolProviderController
|
||||
from core.tools.plugin_tool.tool import PluginTool
|
||||
from core.tools.utils.uuid_utils import is_valid_uuid
|
||||
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
|
||||
from core.workflow.entities.variable_pool import VariablePool
|
||||
from services.tools.mcp_tools_mange_service import MCPToolManageService
|
||||
@ -24,7 +26,6 @@ from services.tools.mcp_tools_mange_service import MCPToolManageService
|
||||
if TYPE_CHECKING:
|
||||
from core.workflow.nodes.tool.entities import ToolEntity
|
||||
|
||||
|
||||
from configs import dify_config
|
||||
from core.agent.entities import AgentToolEntity
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
@ -41,16 +42,17 @@ from core.tools.entities.api_entities import ToolProviderApiEntity, ToolProvider
|
||||
from core.tools.entities.common_entities import I18nObject
|
||||
from core.tools.entities.tool_entities import (
|
||||
ApiProviderAuthType,
|
||||
CredentialType,
|
||||
ToolInvokeFrom,
|
||||
ToolParameter,
|
||||
ToolProviderType,
|
||||
)
|
||||
from core.tools.errors import ToolNotFoundError, ToolProviderNotFoundError
|
||||
from core.tools.errors import ToolProviderNotFoundError
|
||||
from core.tools.tool_label_manager import ToolLabelManager
|
||||
from core.tools.utils.configuration import (
|
||||
ProviderConfigEncrypter,
|
||||
ToolParameterConfigurationManager,
|
||||
)
|
||||
from core.tools.utils.encryption import create_provider_encrypter, create_tool_provider_encrypter
|
||||
from core.tools.workflow_as_tool.tool import WorkflowTool
|
||||
from extensions.ext_database import db
|
||||
from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider
|
||||
@ -68,8 +70,11 @@ class ToolManager:
|
||||
@classmethod
|
||||
def get_hardcoded_provider(cls, provider: str) -> BuiltinToolProviderController:
|
||||
"""
|
||||
|
||||
get the hardcoded provider
|
||||
|
||||
"""
|
||||
|
||||
if len(cls._hardcoded_providers) == 0:
|
||||
# init the builtin providers
|
||||
cls.load_hardcoded_providers_cache()
|
||||
@ -113,7 +118,12 @@ class ToolManager:
|
||||
contexts.plugin_tool_providers.set({})
|
||||
contexts.plugin_tool_providers_lock.set(Lock())
|
||||
|
||||
plugin_tool_providers = contexts.plugin_tool_providers.get()
|
||||
if provider in plugin_tool_providers:
|
||||
return plugin_tool_providers[provider]
|
||||
|
||||
with contexts.plugin_tool_providers_lock.get():
|
||||
# double check
|
||||
plugin_tool_providers = contexts.plugin_tool_providers.get()
|
||||
if provider in plugin_tool_providers:
|
||||
return plugin_tool_providers[provider]
|
||||
@ -131,25 +141,7 @@ class ToolManager:
|
||||
)
|
||||
|
||||
plugin_tool_providers[provider] = controller
|
||||
|
||||
return controller
|
||||
|
||||
@classmethod
|
||||
def get_builtin_tool(cls, provider: str, tool_name: str, tenant_id: str) -> BuiltinTool | PluginTool | None:
|
||||
"""
|
||||
get the builtin tool
|
||||
|
||||
:param provider: the name of the provider
|
||||
:param tool_name: the name of the tool
|
||||
:param tenant_id: the id of the tenant
|
||||
:return: the provider, the tool
|
||||
"""
|
||||
provider_controller = cls.get_builtin_provider(provider, tenant_id)
|
||||
tool = provider_controller.get_tool(tool_name)
|
||||
if tool is None:
|
||||
raise ToolNotFoundError(f"tool {tool_name} not found")
|
||||
|
||||
return tool
|
||||
return controller
|
||||
|
||||
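The provider lookup above is the classic double-checked locking idiom over context-local state: an unlocked fast path, then a re-check under the lock before the expensive load. A minimal sketch of the same idiom with plain module state (assumed names, not the `contexts` module):

from threading import Lock

_providers: dict[str, object] = {}
_providers_lock = Lock()


def get_provider(name: str) -> object:
    # fast path: no lock when already cached
    if name in _providers:
        return _providers[name]
    with _providers_lock:
        # double check: another thread may have filled it while we waited
        if name in _providers:
            return _providers[name]
        controller = object()  # stand-in for the expensive provider load
        _providers[name] = controller
        return controller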
    @classmethod
    def get_tool_runtime(
@@ -160,6 +152,7 @@ class ToolManager:
        tenant_id: str,
        invoke_from: InvokeFrom = InvokeFrom.DEBUGGER,
        tool_invoke_from: ToolInvokeFrom = ToolInvokeFrom.AGENT,
        credential_id: Optional[str] = None,
    ) -> Union[BuiltinTool, PluginTool, ApiTool, WorkflowTool, MCPTool]:
        """
        get the tool runtime
@@ -170,6 +163,7 @@ class ToolManager:
        :param tenant_id: the tenant id
        :param invoke_from: invoke from
        :param tool_invoke_from: the tool invoke from
        :param credential_id: the credential id

        :return: the tool
        """
@@ -193,49 +187,70 @@ class ToolManager:
                    )
                ),
            )

            builtin_provider = None
            if isinstance(provider_controller, PluginToolProviderController):
                provider_id_entity = ToolProviderID(provider_id)
                # get credentials
                builtin_provider: BuiltinToolProvider | None = (
                    db.session.query(BuiltinToolProvider)
                    .filter(
                        BuiltinToolProvider.tenant_id == tenant_id,
                        (BuiltinToolProvider.provider == str(provider_id_entity))
                        | (BuiltinToolProvider.provider == provider_id_entity.provider_name),
                    )
                    .first()
                )

                # get specific credentials
                if is_valid_uuid(credential_id):
                    try:
                        builtin_provider = (
                            db.session.query(BuiltinToolProvider)
                            .filter(
                                BuiltinToolProvider.tenant_id == tenant_id,
                                BuiltinToolProvider.id == credential_id,
                            )
                            .first()
                        )
                    except Exception as e:
                        builtin_provider = None
                        logger.info(f"Error getting builtin provider {credential_id}:{e}", exc_info=True)
                    # if the provider has been deleted, raise an error
                    if builtin_provider is None:
                        raise ToolProviderNotFoundError(f"provider has been deleted: {credential_id}")

                # fallback to the default provider
                if builtin_provider is None:
                    raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")
                    # use the default provider
                    builtin_provider = (
                        db.session.query(BuiltinToolProvider)
                        .filter(
                            BuiltinToolProvider.tenant_id == tenant_id,
                            (BuiltinToolProvider.provider == str(provider_id_entity))
                            | (BuiltinToolProvider.provider == provider_id_entity.provider_name),
                        )
                        .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
                        .first()
                    )
                    if builtin_provider is None:
                        raise ToolProviderNotFoundError(f"no default provider for {provider_id}")
            else:
                builtin_provider = (
                    db.session.query(BuiltinToolProvider)
                    .filter(BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == provider_id))
                    .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
                    .first()
                )

                if builtin_provider is None:
                    raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")

            # decrypt the credentials
            credentials = builtin_provider.credentials
            tool_configuration = ProviderConfigEncrypter(
            encrypter, _ = create_provider_encrypter(
                tenant_id=tenant_id,
                config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
                provider_type=provider_controller.provider_type.value,
                provider_identity=provider_controller.entity.identity.name,
                config=[
                    x.to_basic_provider_config()
                    for x in provider_controller.get_credentials_schema_by_type(builtin_provider.credential_type)
                ],
                cache=ToolProviderCredentialsCache(
                    tenant_id=tenant_id, provider=provider_id, credential_id=builtin_provider.id
                ),
            )

            decrypted_credentials = tool_configuration.decrypt(credentials)

            return cast(
                BuiltinTool,
                builtin_tool.fork_tool_runtime(
                    runtime=ToolRuntime(
                        tenant_id=tenant_id,
                        credentials=decrypted_credentials,
                        credentials=encrypter.decrypt(builtin_provider.credentials),
                        credential_type=CredentialType.of(builtin_provider.credential_type),
                        runtime_parameters={},
                        invoke_from=invoke_from,
                        tool_invoke_from=tool_invoke_from,
@@ -245,22 +260,16 @@ class ToolManager:

        elif provider_type == ToolProviderType.API:
            api_provider, credentials = cls.get_api_provider_controller(tenant_id, provider_id)

            # decrypt the credentials
            tool_configuration = ProviderConfigEncrypter(
            encrypter, _ = create_tool_provider_encrypter(
                tenant_id=tenant_id,
                config=[x.to_basic_provider_config() for x in api_provider.get_credentials_schema()],
                provider_type=api_provider.provider_type.value,
                provider_identity=api_provider.entity.identity.name,
                controller=api_provider,
            )
            decrypted_credentials = tool_configuration.decrypt(credentials)

            return cast(
                ApiTool,
                api_provider.get_tool(tool_name).fork_tool_runtime(
                    runtime=ToolRuntime(
                        tenant_id=tenant_id,
                        credentials=decrypted_credentials,
                        credentials=encrypter.decrypt(credentials),
                        invoke_from=invoke_from,
                        tool_invoke_from=tool_invoke_from,
                    )
@@ -320,6 +329,7 @@ class ToolManager:
                tenant_id=tenant_id,
                invoke_from=invoke_from,
                tool_invoke_from=ToolInvokeFrom.AGENT,
                credential_id=agent_tool.credential_id,
            )
            runtime_parameters = {}
            parameters = tool_entity.get_merged_runtime_parameters()
@@ -362,6 +372,7 @@ class ToolManager:
            tenant_id=tenant_id,
            invoke_from=invoke_from,
            tool_invoke_from=ToolInvokeFrom.WORKFLOW,
            credential_id=workflow_tool.credential_id,
        )

        parameters = tool_runtime.get_merged_runtime_parameters()
@@ -391,6 +402,7 @@ class ToolManager:
        provider: str,
        tool_name: str,
        tool_parameters: dict[str, Any],
        credential_id: Optional[str] = None,
    ) -> Tool:
        """
        get tool runtime from plugin
@@ -402,6 +414,7 @@ class ToolManager:
            tenant_id=tenant_id,
            invoke_from=InvokeFrom.SERVICE_API,
            tool_invoke_from=ToolInvokeFrom.PLUGIN,
            credential_id=credential_id,
        )
        runtime_parameters = {}
        parameters = tool_entity.get_merged_runtime_parameters()
@@ -551,6 +564,22 @@ class ToolManager:

        return cls._builtin_tools_labels[tool_name]

    @classmethod
    def list_default_builtin_providers(cls, tenant_id: str) -> list[BuiltinToolProvider]:
        """
        list the default builtin provider record for each provider of the tenant
        """
        # with multiple credentials per provider, prefer the record with is_default=True,
        # then the oldest created_at, for compatibility with older versions
        sql = """
            SELECT DISTINCT ON (tenant_id, provider) id
            FROM tool_builtin_providers
            WHERE tenant_id = :tenant_id
            ORDER BY tenant_id, provider, is_default DESC, created_at ASC
        """
        ids = [row.id for row in db.session.execute(db.text(sql), {"tenant_id": tenant_id}).all()]
        return db.session.query(BuiltinToolProvider).filter(BuiltinToolProvider.id.in_(ids)).all()

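`SELECT DISTINCT ON (...)` is PostgreSQL-specific: it keeps the first row of each `(tenant_id, provider)` group under the ORDER BY, so the trailing sort columns decide which credential wins. A hedged pure-Python equivalent over already-fetched rows, mirroring the is_default-then-oldest intent:

from itertools import groupby


def pick_defaults(rows):
    # rows: objects with tenant_id, provider, is_default, created_at (assumed shape)
    keyed = sorted(rows, key=lambda r: (r.tenant_id, r.provider, not r.is_default, r.created_at))
    return [next(group) for _, group in groupby(keyed, key=lambda r: (r.tenant_id, r.provider))]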
    @classmethod
    def list_providers_from_api(
        cls, user_id: str, tenant_id: str, typ: ToolProviderTypeApiLiteral
@@ -565,21 +594,13 @@ class ToolManager:

        with db.session.no_autoflush:
            if "builtin" in filters:
                # get builtin providers
                builtin_providers = cls.list_builtin_providers(tenant_id)

                # get db builtin providers
                db_builtin_providers: list[BuiltinToolProvider] = (
                    db.session.query(BuiltinToolProvider).filter(BuiltinToolProvider.tenant_id == tenant_id).all()
                )

                # rewrite db_builtin_providers
                for db_provider in db_builtin_providers:
                    tool_provider_id = str(ToolProviderID(db_provider.provider))
                    db_provider.provider = tool_provider_id

                def find_db_builtin_provider(provider):
                    return next((x for x in db_builtin_providers if x.provider == provider), None)
                # key: provider name, value: provider
                db_builtin_providers = {
                    str(ToolProviderID(provider.provider)): provider
                    for provider in cls.list_default_builtin_providers(tenant_id)
                }

                # append builtin providers
                for provider in builtin_providers:
@@ -591,10 +612,9 @@ class ToolManager:
                        name_func=lambda x: x.identity.name,
                    ):
                        continue

                    user_provider = ToolTransformService.builtin_provider_to_user_provider(
                        provider_controller=provider,
                        db_provider=find_db_builtin_provider(provider.entity.identity.name),
                        db_provider=db_builtin_providers.get(provider.entity.identity.name),
                        decrypt_credentials=False,
                    )

@@ -604,7 +624,6 @@ class ToolManager:
                    result_providers[f"builtin_provider.{user_provider.name}"] = user_provider

            # get db api providers

            if "api" in filters:
                db_api_providers: list[ApiToolProvider] = (
                    db.session.query(ApiToolProvider).filter(ApiToolProvider.tenant_id == tenant_id).all()
@@ -764,15 +783,12 @@ class ToolManager:
                auth_type,
            )
            # init tool configuration
            tool_configuration = ProviderConfigEncrypter(
            encrypter, _ = create_tool_provider_encrypter(
                tenant_id=tenant_id,
                config=[x.to_basic_provider_config() for x in controller.get_credentials_schema()],
                provider_type=controller.provider_type.value,
                provider_identity=controller.entity.identity.name,
                controller=controller,
            )

            decrypted_credentials = tool_configuration.decrypt(credentials)
            masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials)
            masked_credentials = encrypter.mask_tool_credentials(encrypter.decrypt(credentials))

            try:
                icon = json.loads(provider_obj.icon)

@@ -1,12 +1,8 @@
from copy import deepcopy
from typing import Any

from pydantic import BaseModel

from core.entities.provider_entities import BasicProviderConfig
from core.helper import encrypter
from core.helper.tool_parameter_cache import ToolParameterCache, ToolParameterCacheType
from core.helper.tool_provider_cache import ToolProviderCredentialsCache, ToolProviderCredentialsCacheType
from core.tools.__base.tool import Tool
from core.tools.entities.tool_entities import (
    ToolParameter,
@@ -14,110 +10,6 @@ from core.tools.entities.tool_entities import (
)


class ProviderConfigEncrypter(BaseModel):
    tenant_id: str
    config: list[BasicProviderConfig]
    provider_type: str
    provider_identity: str

    def _deep_copy(self, data: dict[str, str]) -> dict[str, str]:
        """
        deep copy data
        """
        return deepcopy(data)

    def encrypt(self, data: dict[str, str]) -> dict[str, str]:
        """
        encrypt tool credentials with tenant id

        return a deep copy of credentials with encrypted values
        """
        data = self._deep_copy(data)

        # get fields that need to be encrypted
        fields = dict[str, BasicProviderConfig]()
        for credential in self.config:
            fields[credential.name] = credential

        for field_name, field in fields.items():
            if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                if field_name in data:
                    encrypted = encrypter.encrypt_token(self.tenant_id, data[field_name] or "")
                    data[field_name] = encrypted

        return data

    def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]:
        """
        mask tool credentials

        return a deep copy of credentials with masked values
        """
        data = self._deep_copy(data)

        # get fields that need to be masked
        fields = dict[str, BasicProviderConfig]()
        for credential in self.config:
            fields[credential.name] = credential

        for field_name, field in fields.items():
            if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                if field_name in data:
                    if len(data[field_name]) > 6:
                        data[field_name] = (
                            data[field_name][:2] + "*" * (len(data[field_name]) - 4) + data[field_name][-2:]
                        )
                    else:
                        data[field_name] = "*" * len(data[field_name])

        return data

    def decrypt(self, data: dict[str, str], use_cache: bool = True) -> dict[str, str]:
        """
        decrypt tool credentials with tenant id

        return a deep copy of credentials with decrypted values
        """
        if use_cache:
            cache = ToolProviderCredentialsCache(
                tenant_id=self.tenant_id,
                identity_id=f"{self.provider_type}.{self.provider_identity}",
                cache_type=ToolProviderCredentialsCacheType.PROVIDER,
            )
            cached_credentials = cache.get()
            if cached_credentials:
                return cached_credentials
        data = self._deep_copy(data)
        # get fields that need to be decrypted
        fields = dict[str, BasicProviderConfig]()
        for credential in self.config:
            fields[credential.name] = credential

        for field_name, field in fields.items():
            if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                if field_name in data:
                    try:
                        # if the value is None or empty string, skip decrypt
                        if not data[field_name]:
                            continue

                        data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name])
                    except Exception:
                        pass

        if use_cache:
            cache.set(data)
        return data

    def delete_tool_credentials_cache(self):
        cache = ToolProviderCredentialsCache(
            tenant_id=self.tenant_id,
            identity_id=f"{self.provider_type}.{self.provider_identity}",
            cache_type=ToolProviderCredentialsCacheType.PROVIDER,
        )
        cache.delete()


class ToolParameterConfigurationManager:
    """
    Tool parameter configuration manager

141 api/core/tools/utils/encryption.py Normal file
@@ -0,0 +1,141 @@
from copy import deepcopy
from typing import Any, Optional, Protocol

from core.entities.provider_entities import BasicProviderConfig
from core.helper import encrypter
from core.helper.provider_cache import SingletonProviderCredentialsCache
from core.tools.__base.tool_provider import ToolProviderController


class ProviderConfigCache(Protocol):
    """
    Interface for provider configuration cache operations
    """

    def get(self) -> Optional[dict]:
        """Get cached provider configuration"""
        ...

    def set(self, config: dict[str, Any]) -> None:
        """Cache provider configuration"""
        ...

    def delete(self) -> None:
        """Delete cached provider configuration"""
        ...


class ProviderConfigEncrypter:
    tenant_id: str
    config: list[BasicProviderConfig]
    provider_config_cache: ProviderConfigCache

    def __init__(
        self,
        tenant_id: str,
        config: list[BasicProviderConfig],
        provider_config_cache: ProviderConfigCache,
    ):
        self.tenant_id = tenant_id
        self.config = config
        self.provider_config_cache = provider_config_cache

    def _deep_copy(self, data: dict[str, str]) -> dict[str, str]:
        """
        deep copy data
        """
        return deepcopy(data)

    def encrypt(self, data: dict[str, str]) -> dict[str, str]:
        """
        encrypt tool credentials with tenant id

        return a deep copy of credentials with encrypted values
        """
        data = self._deep_copy(data)

        # get fields that need to be encrypted
        fields = dict[str, BasicProviderConfig]()
        for credential in self.config:
            fields[credential.name] = credential

        for field_name, field in fields.items():
            if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                if field_name in data:
                    encrypted = encrypter.encrypt_token(self.tenant_id, data[field_name] or "")
                    data[field_name] = encrypted

        return data

    def mask_tool_credentials(self, data: dict[str, Any]) -> dict[str, Any]:
        """
        mask tool credentials

        return a deep copy of credentials with masked values
        """
        data = self._deep_copy(data)

        # get fields that need to be masked
        fields = dict[str, BasicProviderConfig]()
        for credential in self.config:
            fields[credential.name] = credential

        for field_name, field in fields.items():
            if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                if field_name in data:
                    if len(data[field_name]) > 6:
                        data[field_name] = (
                            data[field_name][:2] + "*" * (len(data[field_name]) - 4) + data[field_name][-2:]
                        )
                    else:
                        data[field_name] = "*" * len(data[field_name])

        return data

    def decrypt(self, data: dict[str, str]) -> dict[str, Any]:
        """
        decrypt tool credentials with tenant id

        return a deep copy of credentials with decrypted values
        """
        cached_credentials = self.provider_config_cache.get()
        if cached_credentials:
            return cached_credentials

        data = self._deep_copy(data)
        # get fields that need to be decrypted
        fields = dict[str, BasicProviderConfig]()
        for credential in self.config:
            fields[credential.name] = credential

        for field_name, field in fields.items():
            if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                if field_name in data:
                    try:
                        # if the value is None or empty string, skip decrypt
                        if not data[field_name]:
                            continue

                        data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name])
                    except Exception:
                        pass

        self.provider_config_cache.set(data)
        return data


def create_provider_encrypter(tenant_id: str, config: list[BasicProviderConfig], cache: ProviderConfigCache):
    return ProviderConfigEncrypter(tenant_id=tenant_id, config=config, provider_config_cache=cache), cache


def create_tool_provider_encrypter(tenant_id: str, controller: ToolProviderController):
    cache = SingletonProviderCredentialsCache(
        tenant_id=tenant_id,
        provider_type=controller.provider_type.value,
        provider_identity=controller.entity.identity.name,
    )
    encrypt = ProviderConfigEncrypter(
        tenant_id=tenant_id,
        config=[x.to_basic_provider_config() for x in controller.get_credentials_schema()],
        provider_config_cache=cache,
    )
    return encrypt, cache
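Because `ProviderConfigCache` is a typing `Protocol`, any object with matching `get`/`set`/`delete` methods satisfies it structurally; no inheritance is needed. A hedged sketch with an in-memory cache (real callers use the Redis-backed caches from `core.helper.provider_cache`):

class InMemoryCache:
    """Illustrative cache that satisfies ProviderConfigCache structurally."""

    def __init__(self) -> None:
        self._data: dict | None = None

    def get(self) -> dict | None:
        return self._data

    def set(self, config: dict) -> None:
        self._data = config

    def delete(self) -> None:
        self._data = None


enc, cache = create_provider_encrypter(
    tenant_id="tenant-1",  # hypothetical tenant id
    config=[],             # a real caller passes the provider's BasicProviderConfig list
    cache=InMemoryCache(),
)
enc.decrypt({"api_key": "value"})  # no secret fields configured: passes through and is cached
cache.delete()                     # drop the cached copy, as the "clear" operation does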
192 api/core/tools/utils/system_oauth_encryption.py Normal file
@@ -0,0 +1,192 @@
import base64
import hashlib
import json
import logging
from typing import Any, Optional

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad, unpad

from configs import dify_config

logger = logging.getLogger(__name__)


class OAuthEncryptionError(Exception):
    """OAuth encryption/decryption specific error"""

    pass


class SystemOAuthEncrypter:
    """
    A simple OAuth parameters encrypter using AES-CBC encryption.

    This class provides methods to encrypt and decrypt OAuth parameters
    using AES-CBC mode with a key derived from the application's SECRET_KEY.
    """

    def __init__(self, secret_key: Optional[str] = None):
        """
        Initialize the OAuth encrypter.

        Args:
            secret_key: Optional secret key. If not provided, uses dify_config.SECRET_KEY

        Note:
            Falls back to an empty string when no SECRET_KEY is configured.
        """
        secret_key = secret_key or dify_config.SECRET_KEY or ""

        # Generate a fixed 256-bit key using SHA-256
        self.key = hashlib.sha256(secret_key.encode()).digest()

    def encrypt_oauth_params(self, oauth_params: str) -> str:
        """
        Encrypt OAuth parameters.

        Args:
            oauth_params: JSON-serialized OAuth parameters, e.g., '{"client_id": "xxx", "client_secret": "xxx"}'

        Returns:
            Base64-encoded encrypted string

        Raises:
            OAuthEncryptionError: If encryption fails
            ValueError: If oauth_params is invalid
        """

        if not oauth_params:
            raise ValueError("oauth_params cannot be empty")

        try:
            # Generate random IV (16 bytes)
            iv = get_random_bytes(16)

            # Create AES cipher (CBC mode)
            cipher = AES.new(self.key, AES.MODE_CBC, iv)

            # Encrypt data
            padded_data = pad(oauth_params.encode("utf-8"), AES.block_size)
            encrypted_data = cipher.encrypt(padded_data)

            # Combine IV and encrypted data
            combined = iv + encrypted_data

            # Return base64 encoded string
            return base64.b64encode(combined).decode()

        except Exception as e:
            raise OAuthEncryptionError(f"Encryption failed: {str(e)}") from e

    def decrypt_oauth_params(self, encrypted_data: str) -> dict[str, Any]:
        """
        Decrypt OAuth parameters.

        Args:
            encrypted_data: Base64-encoded encrypted string

        Returns:
            Decrypted OAuth parameters dictionary

        Raises:
            OAuthEncryptionError: If decryption fails
            ValueError: If encrypted_data is invalid
        """
        if not isinstance(encrypted_data, str):
            raise ValueError("encrypted_data must be a string")

        if not encrypted_data:
            raise ValueError("encrypted_data cannot be empty")

        try:
            # Base64 decode
            combined = base64.b64decode(encrypted_data)

            # Check minimum length (IV + at least one AES block)
            if len(combined) < 32:  # 16 bytes IV + 16 bytes minimum encrypted data
                raise ValueError("Invalid encrypted data format")

            # Separate IV and encrypted data
            iv = combined[:16]
            encrypted_data_bytes = combined[16:]

            # Create AES cipher
            cipher = AES.new(self.key, AES.MODE_CBC, iv)

            # Decrypt data
            decrypted_data = cipher.decrypt(encrypted_data_bytes)
            unpadded_data = unpad(decrypted_data, AES.block_size)

            # Parse JSON
            params_json = unpadded_data.decode("utf-8")
            oauth_params = json.loads(params_json)

            if not isinstance(oauth_params, dict):
                raise ValueError("Decrypted data is not a valid dictionary")

            return oauth_params

        except (ValueError, TypeError) as e:
            raise OAuthEncryptionError(f"Decryption failed: {str(e)}") from e
        except Exception as e:
            raise OAuthEncryptionError(f"Decryption failed: {str(e)}") from e


# Factory function for creating encrypter instances
def create_system_oauth_encrypter(secret_key: Optional[str] = None) -> SystemOAuthEncrypter:
    """
    Create an OAuth encrypter instance.

    Args:
        secret_key: Optional secret key. If not provided, uses dify_config.SECRET_KEY

    Returns:
        SystemOAuthEncrypter instance
    """
    return SystemOAuthEncrypter(secret_key=secret_key)


# Global encrypter instance (for backward compatibility)
_oauth_encrypter: Optional[SystemOAuthEncrypter] = None


def get_system_oauth_encrypter() -> SystemOAuthEncrypter:
    """
    Get the global OAuth encrypter instance.

    Returns:
        SystemOAuthEncrypter instance
    """
    global _oauth_encrypter
    if _oauth_encrypter is None:
        _oauth_encrypter = SystemOAuthEncrypter()
    return _oauth_encrypter


# Convenience functions for backward compatibility
def encrypt_system_oauth_params(oauth_params: str) -> str:
    """
    Encrypt OAuth parameters using the global encrypter.

    Args:
        oauth_params: JSON-serialized OAuth parameters string

    Returns:
        Base64-encoded encrypted string
    """
    return get_system_oauth_encrypter().encrypt_oauth_params(oauth_params)


def decrypt_system_oauth_params(encrypted_data: str) -> dict[str, Any]:
    """
    Decrypt OAuth parameters using the global encrypter.

    Args:
        encrypted_data: Base64-encoded encrypted string

    Returns:
        Decrypted OAuth parameters dictionary
    """
    return get_system_oauth_encrypter().decrypt_oauth_params(encrypted_data)
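A hedged round trip through the module's convenience API (the key is derived from the app's SECRET_KEY; the payload here is made up). Note the asymmetry: encryption takes a JSON string, decryption returns the parsed dict:

import json

payload = json.dumps({"client_id": "abc", "client_secret": "xyz"})  # hypothetical values
token = encrypt_system_oauth_params(payload)   # base64(IV || AES-CBC ciphertext)
params = decrypt_system_oauth_params(token)
assert params == {"client_id": "abc", "client_secret": "xyz"}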
@@ -1,8 +1,11 @@
import uuid


def is_valid_uuid(uuid_str: str) -> bool:
def is_valid_uuid(uuid_str: str | None) -> bool:
    if uuid_str is None or len(uuid_str) == 0:
        return False
    try:

        uuid.UUID(uuid_str)
        return True
    except Exception:

@@ -1,9 +1,9 @@
import json
import sys
from collections.abc import Mapping, Sequence
from typing import Annotated, Any, TypeAlias
from typing import Any

from pydantic import BaseModel, ConfigDict, Discriminator, Tag, field_validator
from pydantic import BaseModel, ConfigDict, field_validator

from core.file import File

@@ -11,11 +11,6 @@ from .types import SegmentType


class Segment(BaseModel):
    """Segment is a runtime type used during the execution of a workflow.

    Note: this class is abstract, you should use subclasses of this class instead.
    """

    model_config = ConfigDict(frozen=True)

    value_type: SegmentType
@@ -78,7 +73,7 @@ class StringSegment(Segment):


class FloatSegment(Segment):
    value_type: SegmentType = SegmentType.FLOAT
    value_type: SegmentType = SegmentType.NUMBER
    value: float
    # NOTE(QuantumGhost): seems that the equality for FloatSegment with `NaN` value has some problems.
    # The following tests cannot pass.
@@ -97,7 +92,7 @@ class FloatSegment(Segment):


class IntegerSegment(Segment):
    value_type: SegmentType = SegmentType.INTEGER
    value_type: SegmentType = SegmentType.NUMBER
    value: int


@@ -186,46 +181,3 @@ class ArrayFileSegment(ArraySegment):
    @property
    def text(self) -> str:
        return ""


def get_segment_discriminator(v: Any) -> SegmentType | None:
    if isinstance(v, Segment):
        return v.value_type
    elif isinstance(v, dict):
        value_type = v.get("value_type")
        if value_type is None:
            return None
        try:
            seg_type = SegmentType(value_type)
        except ValueError:
            return None
        return seg_type
    else:
        # return None if the discriminator value isn't found
        return None


# The `SegmentUnion` type is used to enable serialization and deserialization with Pydantic.
# Use `Segment` for type hinting when serialization is not required.
#
# Note:
# - All variants in `SegmentUnion` must inherit from the `Segment` class.
# - The union must include all non-abstract subclasses of `Segment`, except:
#   - `SegmentGroup`, which is not added to the variable pool.
#   - `Variable` and its subclasses, which are handled by `VariableUnion`.
SegmentUnion: TypeAlias = Annotated[
    (
        Annotated[NoneSegment, Tag(SegmentType.NONE)]
        | Annotated[StringSegment, Tag(SegmentType.STRING)]
        | Annotated[FloatSegment, Tag(SegmentType.FLOAT)]
        | Annotated[IntegerSegment, Tag(SegmentType.INTEGER)]
        | Annotated[ObjectSegment, Tag(SegmentType.OBJECT)]
        | Annotated[FileSegment, Tag(SegmentType.FILE)]
        | Annotated[ArrayAnySegment, Tag(SegmentType.ARRAY_ANY)]
        | Annotated[ArrayStringSegment, Tag(SegmentType.ARRAY_STRING)]
        | Annotated[ArrayNumberSegment, Tag(SegmentType.ARRAY_NUMBER)]
        | Annotated[ArrayObjectSegment, Tag(SegmentType.ARRAY_OBJECT)]
        | Annotated[ArrayFileSegment, Tag(SegmentType.ARRAY_FILE)]
    ),
    Discriminator(get_segment_discriminator),
]

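The callable form of pydantic's `Discriminator`, as used above, lets one union dispatch on `value_type` whether the input is an already-built model or a plain dict. A self-contained mini version of the same pattern (toy models, not the Segment hierarchy):

from typing import Annotated, Any, Literal, Union

from pydantic import BaseModel, Discriminator, Tag, TypeAdapter


class Cat(BaseModel):
    kind: Literal["cat"] = "cat"


class Dog(BaseModel):
    kind: Literal["dog"] = "dog"


def kind_of(v: Any) -> str | None:
    if isinstance(v, BaseModel):
        return getattr(v, "kind", None)
    if isinstance(v, dict):
        return v.get("kind")
    return None  # unknown input: pydantic reports a validation error


Pet = Annotated[
    Union[Annotated[Cat, Tag("cat")], Annotated[Dog, Tag("dog")]],
    Discriminator(kind_of),
]
assert isinstance(TypeAdapter(Pet).validate_python({"kind": "dog"}), Dog)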
@@ -1,27 +1,8 @@
from collections.abc import Mapping
from enum import StrEnum
from typing import Any, Optional

from core.file.models import File


class ArrayValidation(StrEnum):
    """Strategy for validating array elements"""

    # Skip element validation (only check array container)
    NONE = "none"

    # Validate the first element (if array is non-empty)
    FIRST = "first"

    # Validate all elements in the array.
    ALL = "all"


class SegmentType(StrEnum):
    NUMBER = "number"
    INTEGER = "integer"
    FLOAT = "float"
    STRING = "string"
    OBJECT = "object"
    SECRET = "secret"
@@ -38,141 +19,16 @@ class SegmentType(StrEnum):

    GROUP = "group"

    def is_array_type(self) -> bool:
    def is_array_type(self):
        return self in _ARRAY_TYPES

    @classmethod
    def infer_segment_type(cls, value: Any) -> Optional["SegmentType"]:
        """
        Attempt to infer the `SegmentType` based on the Python type of the `value` parameter.

        Returns `None` if no appropriate `SegmentType` can be determined for the given `value`.
        For example, this may occur if the input is a generic Python object of type `object`.
        """

        if isinstance(value, list):
            elem_types: set[SegmentType] = set()
            for i in value:
                segment_type = cls.infer_segment_type(i)
                if segment_type is None:
                    return None

                elem_types.add(segment_type)

            if len(elem_types) != 1:
                if elem_types.issubset(_NUMERICAL_TYPES):
                    return SegmentType.ARRAY_NUMBER
                return SegmentType.ARRAY_ANY
            elif all(i.is_array_type() for i in elem_types):
                return SegmentType.ARRAY_ANY
            match elem_types.pop():
                case SegmentType.STRING:
                    return SegmentType.ARRAY_STRING
                case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT:
                    return SegmentType.ARRAY_NUMBER
                case SegmentType.OBJECT:
                    return SegmentType.ARRAY_OBJECT
                case SegmentType.FILE:
                    return SegmentType.ARRAY_FILE
                case SegmentType.NONE:
                    return SegmentType.ARRAY_ANY
                case _:
                    # This should be unreachable.
                    raise ValueError(f"not supported value {value}")
        if value is None:
            return SegmentType.NONE
        elif isinstance(value, int) and not isinstance(value, bool):
            return SegmentType.INTEGER
        elif isinstance(value, float):
            return SegmentType.FLOAT
        elif isinstance(value, str):
            return SegmentType.STRING
        elif isinstance(value, dict):
            return SegmentType.OBJECT
        elif isinstance(value, File):
            return SegmentType.FILE
        else:
            return None

    def _validate_array(self, value: Any, array_validation: ArrayValidation) -> bool:
        if not isinstance(value, list):
            return False
        # Skip element validation if array is empty
        if len(value) == 0:
            return True
        if self == SegmentType.ARRAY_ANY:
            return True
        element_type = _ARRAY_ELEMENT_TYPES_MAPPING[self]

        if array_validation == ArrayValidation.NONE:
            return True
        elif array_validation == ArrayValidation.FIRST:
            return element_type.is_valid(value[0])
        else:
            # the generator must yield the boolean itself; wrapping the call in a
            # one-element list would make every element truthy
            return all(element_type.is_valid(i, array_validation=ArrayValidation.NONE) for i in value)

    def is_valid(self, value: Any, array_validation: ArrayValidation = ArrayValidation.FIRST) -> bool:
        """
        Check if a value matches the segment type.
        Users of `SegmentType` should call this method instead of using
        `isinstance` manually.

        Args:
            value: The value to validate
            array_validation: Validation strategy for array types (ignored for non-array types)

        Returns:
            True if the value matches the type under the given validation strategy
        """
        if self.is_array_type():
            return self._validate_array(value, array_validation)
        elif self == SegmentType.NUMBER:
            return isinstance(value, (int, float))
        elif self == SegmentType.STRING:
            return isinstance(value, str)
        elif self == SegmentType.OBJECT:
            return isinstance(value, dict)
        elif self == SegmentType.SECRET:
            return isinstance(value, str)
        elif self == SegmentType.FILE:
            return isinstance(value, File)
        elif self == SegmentType.NONE:
            return value is None
        else:
            raise AssertionError("this statement should be unreachable.")

    def exposed_type(self) -> "SegmentType":
        """Returns the type exposed to the frontend.

        The frontend treats `INTEGER` and `FLOAT` as `NUMBER`, so these are returned as `NUMBER` here.
        """
        if self in (SegmentType.INTEGER, SegmentType.FLOAT):
            return SegmentType.NUMBER
        return self


_ARRAY_ELEMENT_TYPES_MAPPING: Mapping[SegmentType, SegmentType] = {
|
||||
# ARRAY_ANY does not have correpond element type.
|
||||
SegmentType.ARRAY_STRING: SegmentType.STRING,
|
||||
SegmentType.ARRAY_NUMBER: SegmentType.NUMBER,
|
||||
SegmentType.ARRAY_OBJECT: SegmentType.OBJECT,
|
||||
SegmentType.ARRAY_FILE: SegmentType.FILE,
|
||||
}
|
||||
|
||||
_ARRAY_TYPES = frozenset(
|
||||
list(_ARRAY_ELEMENT_TYPES_MAPPING.keys())
|
||||
+ [
|
||||
SegmentType.ARRAY_ANY,
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
_NUMERICAL_TYPES = frozenset(
|
||||
[
|
||||
SegmentType.NUMBER,
|
||||
SegmentType.INTEGER,
|
||||
SegmentType.FLOAT,
|
||||
SegmentType.ARRAY_ANY,
|
||||
SegmentType.ARRAY_STRING,
|
||||
SegmentType.ARRAY_NUMBER,
|
||||
SegmentType.ARRAY_OBJECT,
|
||||
SegmentType.ARRAY_FILE,
|
||||
]
|
||||
)
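
A minimal sketch of how the helpers above behave (illustrative only; it assumes the imports and enum values shown in this file):

    assert SegmentType.infer_segment_type(3) == SegmentType.INTEGER
    assert SegmentType.infer_segment_type(3.5) == SegmentType.FLOAT
    assert SegmentType.infer_segment_type(["a", "b"]) == SegmentType.ARRAY_STRING
    assert SegmentType.infer_segment_type(object()) is None  # unsupported type -> None
    assert SegmentType.NUMBER.is_valid(42)
    # The default ArrayValidation.FIRST strategy only checks the first element:
    assert not SegmentType.ARRAY_STRING.is_valid([1, "a"])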

@@ -1,8 +1,8 @@
from collections.abc import Sequence
from typing import Annotated, TypeAlias, cast
from typing import cast
from uuid import uuid4

from pydantic import Discriminator, Field, Tag
from pydantic import Field

from core.helper import encrypter

@@ -20,7 +20,6 @@ from .segments import (
    ObjectSegment,
    Segment,
    StringSegment,
    get_segment_discriminator,
)
from .types import SegmentType

@@ -28,10 +27,6 @@ from .types import SegmentType
class Variable(Segment):
    """
    A variable is a segment that has a name.

    It is mainly used to store segments and their selector in VariablePool.

    Note: this class is abstract, you should use subclasses of this class instead.
    """

    id: str = Field(
@@ -98,28 +93,3 @@ class FileVariable(FileSegment, Variable):

class ArrayFileVariable(ArrayFileSegment, ArrayVariable):
    pass


# The `VariableUnion` type is used to enable serialization and deserialization with Pydantic.
# Use `Variable` for type hinting when serialization is not required.
#
# Note:
# - All variants in `VariableUnion` must inherit from the `Variable` class.
# - The union must include all non-abstract subclasses of `Segment`, except:
VariableUnion: TypeAlias = Annotated[
    (
        Annotated[NoneVariable, Tag(SegmentType.NONE)]
        | Annotated[StringVariable, Tag(SegmentType.STRING)]
        | Annotated[FloatVariable, Tag(SegmentType.FLOAT)]
        | Annotated[IntegerVariable, Tag(SegmentType.INTEGER)]
        | Annotated[ObjectVariable, Tag(SegmentType.OBJECT)]
        | Annotated[FileVariable, Tag(SegmentType.FILE)]
        | Annotated[ArrayAnyVariable, Tag(SegmentType.ARRAY_ANY)]
        | Annotated[ArrayStringVariable, Tag(SegmentType.ARRAY_STRING)]
        | Annotated[ArrayNumberVariable, Tag(SegmentType.ARRAY_NUMBER)]
        | Annotated[ArrayObjectVariable, Tag(SegmentType.ARRAY_OBJECT)]
        | Annotated[ArrayFileVariable, Tag(SegmentType.ARRAY_FILE)]
        | Annotated[SecretVariable, Tag(SegmentType.SECRET)]
    ),
    Discriminator(get_segment_discriminator),
]
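
A short sketch of the round trip this discriminated union enables (illustrative; the exact required field set of `Variable` is an assumption based on this diff):

    from pydantic import TypeAdapter

    adapter = TypeAdapter(VariableUnion)
    # A plain dict is routed to the concrete subclass via get_segment_discriminator.
    var = adapter.validate_python(
        {"name": "greeting", "value": "hi", "value_type": "string", "selector": ["node", "greeting"]}
    )
    assert isinstance(var, StringVariable)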

@@ -1,7 +1,7 @@
import re
from collections import defaultdict
from collections.abc import Mapping, Sequence
from typing import Annotated, Any, Union, cast
from typing import Any, Union

from pydantic import BaseModel, Field

@@ -9,9 +9,8 @@ from core.file import File, FileAttribute, file_manager
from core.variables import Segment, SegmentGroup, Variable
from core.variables.consts import MIN_SELECTORS_LENGTH
from core.variables.segments import FileSegment, NoneSegment
from core.variables.variables import VariableUnion
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from core.workflow.system_variable import SystemVariable
from core.workflow.enums import SystemVariableKey
from factories import variable_factory

VariableValue = Union[str, int, float, dict, list, File]
@@ -24,31 +23,31 @@ class VariablePool(BaseModel):
    # The first element of the selector is the node id, it's the first-level key in the dictionary.
    # Other elements of the selector are the keys in the second-level dictionary. To get the key, we hash the
    # elements of the selector except the first one.
    variable_dictionary: defaultdict[str, Annotated[dict[int, VariableUnion], Field(default_factory=dict)]] = Field(
    variable_dictionary: dict[str, dict[int, Segment]] = Field(
        description="Variables mapping",
        default=defaultdict(dict),
    )

    # The `user_inputs` is used only when constructing the inputs for the `StartNode`. It's not used elsewhere.
    # TODO: These user inputs are not used by the pool itself.
    user_inputs: Mapping[str, Any] = Field(
        description="User inputs",
        default_factory=dict,
    )
    system_variables: SystemVariable = Field(
    system_variables: Mapping[SystemVariableKey, Any] = Field(
        description="System variables",
        default_factory=dict,
    )
    environment_variables: Sequence[VariableUnion] = Field(
    environment_variables: Sequence[Variable] = Field(
        description="Environment variables.",
        default_factory=list,
    )
    conversation_variables: Sequence[VariableUnion] = Field(
    conversation_variables: Sequence[Variable] = Field(
        description="Conversation variables.",
        default_factory=list,
    )

    def model_post_init(self, context: Any, /) -> None:
        # Create a mapping from field names to SystemVariableKey enum values
        self._add_system_variables(self.system_variables)
        for key, value in self.system_variables.items():
            self.add((SYSTEM_VARIABLE_NODE_ID, key.value), value)
        # Add environment variables to the variable pool
        for var in self.environment_variables:
            self.add((ENVIRONMENT_VARIABLE_NODE_ID, var.name), var)
@@ -84,22 +83,8 @@ class VariablePool(BaseModel):
        segment = variable_factory.build_segment(value)
        variable = variable_factory.segment_to_variable(segment=segment, selector=selector)

        key, hash_key = self._selector_to_keys(selector)
        # Based on the definition of `VariableUnion`,
        # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
        self.variable_dictionary[key][hash_key] = cast(VariableUnion, variable)

    @classmethod
    def _selector_to_keys(cls, selector: Sequence[str]) -> tuple[str, int]:
        return selector[0], hash(tuple(selector[1:]))

    def _has(self, selector: Sequence[str]) -> bool:
        key, hash_key = self._selector_to_keys(selector)
        if key not in self.variable_dictionary:
            return False
        if hash_key not in self.variable_dictionary[key]:
            return False
        return True
        hash_key = hash(tuple(selector[1:]))
        self.variable_dictionary[selector[0]][hash_key] = variable

    def get(self, selector: Sequence[str], /) -> Segment | None:
        """
@@ -117,8 +102,8 @@ class VariablePool(BaseModel):
        if len(selector) < MIN_SELECTORS_LENGTH:
            return None

        key, hash_key = self._selector_to_keys(selector)
        value: Segment | None = self.variable_dictionary[key].get(hash_key)
        hash_key = hash(tuple(selector[1:]))
        value = self.variable_dictionary[selector[0]].get(hash_key)

        if value is None:
            selector, attr = selector[:-1], selector[-1]
@@ -151,9 +136,8 @@ class VariablePool(BaseModel):
        if len(selector) == 1:
            self.variable_dictionary[selector[0]] = {}
            return
        key, hash_key = self._selector_to_keys(selector)
        hash_key = hash(tuple(selector[1:]))
        self.variable_dictionary[key].pop(hash_key, None)
        self.variable_dictionary[selector[0]].pop(hash_key, None)

    def convert_template(self, template: str, /):
        parts = VARIABLE_PATTERN.split(template)
@@ -170,20 +154,3 @@ class VariablePool(BaseModel):
        if isinstance(segment, FileSegment):
            return segment
        return None

    def _add_system_variables(self, system_variable: SystemVariable):
        sys_var_mapping = system_variable.to_dict()
        for key, value in sys_var_mapping.items():
            if value is None:
                continue
            selector = (SYSTEM_VARIABLE_NODE_ID, key)
            # If the system variable already exists, do not add it again.
            # This ensures that we can keep the id of the system variables intact.
            if self._has(selector):
                continue
            self.add(selector, value)  # type: ignore

    @classmethod
    def empty(cls) -> "VariablePool":
        """Create an empty variable pool."""
        return cls(system_variables=SystemVariable.empty())
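
A minimal sketch of the selector scheme described above (illustrative; `add` builds the segment and variable internally):

    pool = VariablePool.empty()
    pool.add(("start_node", "answer"), "hello")   # selector[0] is the node id
    segment = pool.get(("start_node", "answer"))  # the remaining parts are hashed into one key
    assert segment is not None and segment.text == "hello"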

@@ -17,12 +17,8 @@ class GraphRuntimeState(BaseModel):
    """total tokens"""
    llm_usage: LLMUsage = LLMUsage.empty_usage()
    """llm usage info"""

    # The `outputs` field stores the final output values generated by executing workflows or chatflows.
    #
    # Note: Since the type of this field is `dict[str, Any]`, its values may not remain consistent
    # after a serialization and deserialization round trip.
    outputs: dict[str, Any] = {}
    """outputs"""

    node_run_steps: int = 0
    """node run steps"""

@@ -4,6 +4,7 @@ from collections.abc import Generator, Mapping, Sequence
from typing import Any, Optional, cast

from packaging.version import Version
from pydantic import ValidationError
from sqlalchemy import select
from sqlalchemy.orm import Session

@@ -13,10 +14,16 @@ from core.agent.strategy.plugin import PluginAgentStrategy
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance, ModelManager
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
from core.plugin.entities.request import InvokeCredentials
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.plugin.impl.plugin import PluginInstaller
from core.provider_manager import ProviderManager
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolProviderType
from core.tools.entities.tool_entities import (
    ToolIdentity,
    ToolInvokeMessage,
    ToolParameter,
    ToolProviderType,
)
from core.tools.tool_manager import ToolManager
from core.variables.segments import StringSegment
from core.workflow.entities.node_entities import NodeRunResult
@@ -84,6 +91,7 @@ class AgentNode(ToolNode):
                for_log=True,
                strategy=strategy,
            )
            credentials = self._generate_credentials(parameters=parameters)

            # get conversation id
            conversation_id = self.graph_runtime_state.variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID])
@@ -94,6 +102,7 @@ class AgentNode(ToolNode):
                user_id=self.user_id,
                app_id=self.app_id,
                conversation_id=conversation_id.text if conversation_id else None,
                credentials=credentials,
            )
        except Exception as e:
            yield RunCompletedEvent(
@@ -246,6 +255,7 @@ class AgentNode(ToolNode):
                tool_name=tool.get("tool_name", ""),
                tool_parameters=parameters,
                plugin_unique_identifier=tool.get("plugin_unique_identifier", None),
                credential_id=tool.get("credential_id", None),
            )

            extra = tool.get("extra", {})
@@ -276,6 +286,7 @@ class AgentNode(ToolNode):
                {
                    **tool_runtime.entity.model_dump(mode="json"),
                    "runtime_parameters": runtime_parameters,
                    "credential_id": tool.get("credential_id", None),
                    "provider_type": provider_type.value,
                }
            )
@@ -305,6 +316,27 @@ class AgentNode(ToolNode):

        return result

    def _generate_credentials(
        self,
        parameters: dict[str, Any],
    ) -> InvokeCredentials:
        """
        Generate credentials based on the given agent parameters.
        """

        credentials = InvokeCredentials()

        # generate credentials for tools selector
        credentials.tool_credentials = {}
        for tool in parameters.get("tools", []):
            if tool.get("credential_id"):
                try:
                    identity = ToolIdentity.model_validate(tool.get("identity", {}))
                    credentials.tool_credentials[identity.provider] = tool.get("credential_id", None)
                except ValidationError:
                    continue
        return credentials
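
A sketch of the shape `_generate_credentials` consumes and produces (illustrative; the `parameters` layout is an assumption inferred from the `tool.get(...)` calls above):

    # Hypothetical input: each tool entry may carry a credential_id; entries whose
    # identity fails ToolIdentity validation are skipped silently.
    parameters = {"tools": [{"identity": {"provider": "search_provider"}, "credential_id": "cred-123"}]}
    credentials = agent_node._generate_credentials(parameters=parameters)
    # If validation passes: credentials.tool_credentials == {"search_provider": "cred-123"}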

    @classmethod
    def _extract_variable_selector_to_variable_mapping(
        cls,

@@ -1,29 +1,11 @@
from collections.abc import Mapping
from typing import Annotated, Any, Literal, Optional
from typing import Any, Literal, Optional

from pydantic import AfterValidator, BaseModel, Field
from pydantic import BaseModel, Field

from core.variables.types import SegmentType
from core.workflow.nodes.base import BaseLoopNodeData, BaseLoopState, BaseNodeData
from core.workflow.utils.condition.entities import Condition

_VALID_VAR_TYPE = frozenset(
    [
        SegmentType.STRING,
        SegmentType.NUMBER,
        SegmentType.OBJECT,
        SegmentType.ARRAY_STRING,
        SegmentType.ARRAY_NUMBER,
        SegmentType.ARRAY_OBJECT,
    ]
)


def _is_valid_var_type(seg_type: SegmentType) -> SegmentType:
    if seg_type not in _VALID_VAR_TYPE:
        raise ValueError(f"invalid variable type for loop variable: {seg_type}")
    return seg_type


class LoopVariableData(BaseModel):
    """
@@ -31,7 +13,7 @@ class LoopVariableData(BaseModel):
    """

    label: str
    var_type: Annotated[SegmentType, AfterValidator(_is_valid_var_type)]
    var_type: Literal["string", "number", "object", "array[string]", "array[number]", "array[object]"]
    value_type: Literal["variable", "constant"]
    value: Optional[Any | list[str]] = None
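
A small sketch of the `AfterValidator` guard on `var_type` (illustrative):

    from pydantic import ValidationError

    LoopVariableData(label="count", var_type=SegmentType.NUMBER, value_type="constant", value=1)
    try:
        # SECRET is not in _VALID_VAR_TYPE, so _is_valid_var_type rejects it.
        LoopVariableData(label="s", var_type=SegmentType.SECRET, value_type="constant")
    except ValidationError:
        pass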


@@ -7,9 +7,14 @@ from typing import TYPE_CHECKING, Any, Literal, cast

from configs import dify_config
from core.variables import (
    ArrayNumberSegment,
    ArrayObjectSegment,
    ArrayStringSegment,
    IntegerSegment,
    ObjectSegment,
    Segment,
    SegmentType,
    StringSegment,
)
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
@@ -34,7 +39,6 @@ from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.event import NodeEvent, RunCompletedEvent
from core.workflow.nodes.loop.entities import LoopNodeData
from core.workflow.utils.condition.processor import ConditionProcessor
from factories.variable_factory import TypeMismatchError, build_segment_with_type

if TYPE_CHECKING:
    from core.workflow.entities.variable_pool import VariablePool
@@ -501,21 +505,23 @@ class LoopNode(BaseNode[LoopNodeData]):
        return variable_mapping

    @staticmethod
    def _get_segment_for_constant(var_type: SegmentType, value: Any) -> Segment:
    def _get_segment_for_constant(var_type: str, value: Any) -> Segment:
        """Get the appropriate segment type for a constant value."""
        segment_mapping: dict[str, tuple[type[Segment], SegmentType]] = {
            "string": (StringSegment, SegmentType.STRING),
            "number": (IntegerSegment, SegmentType.NUMBER),
            "object": (ObjectSegment, SegmentType.OBJECT),
            "array[string]": (ArrayStringSegment, SegmentType.ARRAY_STRING),
            "array[number]": (ArrayNumberSegment, SegmentType.ARRAY_NUMBER),
            "array[object]": (ArrayObjectSegment, SegmentType.ARRAY_OBJECT),
        }
        if var_type in ["array[string]", "array[number]", "array[object]"]:
            if value and isinstance(value, str):
            if value:
                value = json.loads(value)
            else:
                value = []
        try:
            return build_segment_with_type(var_type, value)
        except TypeMismatchError as type_exc:
            # Attempt to parse the value as a JSON-encoded string, if applicable.
            if not isinstance(value, str):
                raise
            try:
                value = json.loads(value)
            except ValueError:
                raise type_exc
            return build_segment_with_type(var_type, value)
        segment_info = segment_mapping.get(var_type)
        if not segment_info:
            raise ValueError(f"Invalid variable type: {var_type}")
        segment_class, value_type = segment_info
        return segment_class(value=value, value_type=value_type)
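
A sketch of the constant-parsing fallback above (illustrative; it assumes the SegmentType-based signature and that array constants may arrive as JSON-encoded strings):

    seg = LoopNode._get_segment_for_constant(SegmentType.ARRAY_NUMBER, "[1, 2, 3]")
    assert seg.value == [1, 2, 3]  # the JSON string is parsed before the segment is built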

@@ -16,7 +16,7 @@ class StartNode(BaseNode[StartNodeData]):

    def _run(self) -> NodeRunResult:
        node_inputs = dict(self.graph_runtime_state.variable_pool.user_inputs)
        system_inputs = self.graph_runtime_state.variable_pool.system_variables.to_dict()
        system_inputs = self.graph_runtime_state.variable_pool.system_variables

        # TODO: System variables should be directly accessible, no need for special handling
        # Set system variables as node outputs.

@@ -14,6 +14,7 @@ class ToolEntity(BaseModel):
    tool_name: str
    tool_label: str  # redundancy
    tool_configurations: dict[str, Any]
    credential_id: str | None = None
    plugin_unique_identifier: str | None = None  # redundancy

    @field_validator("tool_configurations", mode="before")

@@ -130,7 +130,6 @@ class VariableAssignerNode(BaseNode[VariableAssignerData]):


def get_zero_value(t: SegmentType):
    # TODO(QuantumGhost): this should be a method of `SegmentType`.
    match t:
        case SegmentType.ARRAY_OBJECT | SegmentType.ARRAY_STRING | SegmentType.ARRAY_NUMBER:
            return variable_factory.build_segment([])
@@ -138,10 +137,6 @@ def get_zero_value(t: SegmentType):
            return variable_factory.build_segment({})
        case SegmentType.STRING:
            return variable_factory.build_segment("")
        case SegmentType.INTEGER:
            return variable_factory.build_segment(0)
        case SegmentType.FLOAT:
            return variable_factory.build_segment(0.0)
        case SegmentType.NUMBER:
            return variable_factory.build_segment(0)
        case _:

@@ -1,6 +1,5 @@
from core.variables import SegmentType

# Note: This mapping is duplicated with `get_zero_value`. Consider refactoring to avoid redundancy.
EMPTY_VALUE_MAPPING = {
    SegmentType.STRING: "",
    SegmentType.NUMBER: 0,

@@ -10,16 +10,10 @@ def is_operation_supported(*, variable_type: SegmentType, operation: Operation):
        case Operation.OVER_WRITE | Operation.CLEAR:
            return True
        case Operation.SET:
            return variable_type in {
                SegmentType.OBJECT,
                SegmentType.STRING,
                SegmentType.NUMBER,
                SegmentType.INTEGER,
                SegmentType.FLOAT,
            }
            return variable_type in {SegmentType.OBJECT, SegmentType.STRING, SegmentType.NUMBER}
        case Operation.ADD | Operation.SUBTRACT | Operation.MULTIPLY | Operation.DIVIDE:
            # Only number variable can be added, subtracted, multiplied or divided
            return variable_type in {SegmentType.NUMBER, SegmentType.INTEGER, SegmentType.FLOAT}
            return variable_type == SegmentType.NUMBER
        case Operation.APPEND | Operation.EXTEND:
            # Only array variable can be appended or extended
            return variable_type in {
@@ -52,7 +46,7 @@ def is_constant_input_supported(*, variable_type: SegmentType, operation: Operation):
    match variable_type:
        case SegmentType.STRING | SegmentType.OBJECT:
            return operation in {Operation.OVER_WRITE, Operation.SET}
        case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT:
        case SegmentType.NUMBER:
            return operation in {
                Operation.OVER_WRITE,
                Operation.SET,
@@ -72,7 +66,7 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, value: Any):
        case SegmentType.STRING:
            return isinstance(value, str)

        case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT:
        case SegmentType.NUMBER:
            if not isinstance(value, int | float):
                return False
            if operation == Operation.DIVIDE and value == 0:

@@ -1,89 +0,0 @@
from collections.abc import Sequence
from typing import Any

from pydantic import AliasChoices, BaseModel, ConfigDict, Field, model_validator

from core.file.models import File
from core.workflow.enums import SystemVariableKey


class SystemVariable(BaseModel):
    """A model for managing system variables.

    Fields with a value of `None` are treated as absent and will not be included
    in the variable pool.
    """

    model_config = ConfigDict(
        extra="forbid",
        serialize_by_alias=True,
        validate_by_alias=True,
    )

    user_id: str | None = None

    # Ideally, `app_id` and `workflow_id` should be required and not `None`.
    # However, there are scenarios in the codebase where these fields are not set.
    # To maintain compatibility, they are marked as optional here.
    app_id: str | None = None
    workflow_id: str | None = None

    files: Sequence[File] = Field(default_factory=list)

    # NOTE: The `workflow_execution_id` field was previously named `workflow_run_id`.
    # To maintain compatibility with existing workflows, it must be serialized
    # as `workflow_run_id` in dictionaries or JSON objects, and also referenced
    # as `workflow_run_id` in the variable pool.
    workflow_execution_id: str | None = Field(
        validation_alias=AliasChoices("workflow_execution_id", "workflow_run_id"),
        serialization_alias="workflow_run_id",
        default=None,
    )
    # Chatflow related fields.
    query: str | None = None
    conversation_id: str | None = None
    dialogue_count: int | None = None

    @model_validator(mode="before")
    @classmethod
    def validate_json_fields(cls, data):
        if isinstance(data, dict):
            # For JSON validation, only allow workflow_run_id
            if "workflow_execution_id" in data and "workflow_run_id" not in data:
                # This is likely from direct instantiation, allow it
                return data
            elif "workflow_execution_id" in data and "workflow_run_id" in data:
                # Both present, remove workflow_execution_id
                data = data.copy()
                data.pop("workflow_execution_id")
                return data
        return data

    @classmethod
    def empty(cls) -> "SystemVariable":
        return cls()

    def to_dict(self) -> dict[SystemVariableKey, Any]:
        # NOTE: This method is provided for compatibility with legacy code.
        # New code should use the `SystemVariable` object directly instead of converting
        # it to a dictionary, as this conversion results in the loss of type information
        # for each key, making static analysis more difficult.

        d: dict[SystemVariableKey, Any] = {
            SystemVariableKey.FILES: self.files,
        }
        if self.user_id is not None:
            d[SystemVariableKey.USER_ID] = self.user_id
        if self.app_id is not None:
            d[SystemVariableKey.APP_ID] = self.app_id
        if self.workflow_id is not None:
            d[SystemVariableKey.WORKFLOW_ID] = self.workflow_id
        if self.workflow_execution_id is not None:
            d[SystemVariableKey.WORKFLOW_EXECUTION_ID] = self.workflow_execution_id
        if self.query is not None:
            d[SystemVariableKey.QUERY] = self.query
        if self.conversation_id is not None:
            d[SystemVariableKey.CONVERSATION_ID] = self.conversation_id
        if self.dialogue_count is not None:
            d[SystemVariableKey.DIALOGUE_COUNT] = self.dialogue_count
        return d
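
A sketch of the aliasing behavior documented above (illustrative; relies on the `serialize_by_alias`/`validate_by_alias` configuration of this model):

    sv = SystemVariable.model_validate({"workflow_run_id": "run-1", "user_id": "u-1"})
    assert sv.workflow_execution_id == "run-1"            # the legacy key is accepted on input
    assert sv.model_dump()["workflow_run_id"] == "run-1"  # and emitted again on output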

@@ -26,7 +26,6 @@ from core.workflow.entities.workflow_node_execution import (
from core.workflow.enums import SystemVariableKey
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.system_variable import SystemVariable
from core.workflow.workflow_entry import WorkflowEntry
from libs.datetime_utils import naive_utc_now

@@ -44,7 +43,7 @@ class WorkflowCycleManager:
        self,
        *,
        application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity],
        workflow_system_variables: SystemVariable,
        workflow_system_variables: dict[SystemVariableKey, Any],
        workflow_info: CycleManagerWorkflowInfo,
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
@@ -57,22 +56,17 @@ class WorkflowCycleManager:

    def handle_workflow_run_start(self) -> WorkflowExecution:
        inputs = {**self._application_generate_entity.inputs}

        # Iterate over SystemVariable fields using Pydantic's model_fields
        if self._workflow_system_variables:
            for field_name, value in self._workflow_system_variables.to_dict().items():
                if field_name == SystemVariableKey.CONVERSATION_ID:
                    continue
                inputs[f"sys.{field_name}"] = value
        for key, value in (self._workflow_system_variables or {}).items():
            if key.value == "conversation":
                continue
            inputs[f"sys.{key.value}"] = value

        # handle special values
        inputs = dict(WorkflowEntry.handle_special_values(inputs) or {})

        # init workflow run
        # TODO: This workflow_run_id should always not be None, maybe we can use a more elegant way to handle this
        execution_id = str(
            self._workflow_system_variables.workflow_execution_id if self._workflow_system_variables else None
        ) or str(uuid4())
        execution_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_EXECUTION_ID) or uuid4())
        execution = WorkflowExecution.new(
            id_=execution_id,
            workflow_id=self._workflow_info.workflow_id,

@@ -21,7 +21,6 @@ from core.workflow.nodes import NodeType
from core.workflow.nodes.base import BaseNode
from core.workflow.nodes.event import NodeEvent
from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
from core.workflow.system_variable import SystemVariable
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool
from factories import file_factory
from models.enums import UserFrom
@@ -255,7 +254,7 @@ class WorkflowEntry:

        # init variable pool
        variable_pool = VariablePool(
            system_variables=SystemVariable.empty(),
            system_variables={},
            user_inputs={},
            environment_variables=[],
        )

@@ -20,6 +20,7 @@ def handle(sender, **kwargs):
        provider_id=tool_entity.provider_id,
        tool_name=tool_entity.tool_name,
        tenant_id=app.tenant_id,
        credential_id=tool_entity.credential_id,
    )
    manager = ToolParameterConfigurationManager(
        tenant_id=app.tenant_id,

@@ -18,6 +18,7 @@ def init_app(app: DifyApp):
        reset_email,
        reset_encrypt_key_pair,
        reset_password,
        setup_system_tool_oauth_client,
        upgrade_db,
        vdb_migrate,
    )
@@ -40,6 +41,7 @@ def init_app(app: DifyApp):
        clear_free_plan_tenant_expired_logs,
        clear_orphaned_file_records,
        remove_orphaned_files_on_storage,
        setup_system_tool_oauth_client,
    ]
    for cmd in cmds_to_register:
        app.cli.add_command(cmd)

@@ -91,13 +91,9 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequence[str]):
            result = StringVariable.model_validate(mapping)
        case SegmentType.SECRET:
            result = SecretVariable.model_validate(mapping)
        case SegmentType.NUMBER | SegmentType.INTEGER if isinstance(value, int):
            mapping = dict(mapping)
            mapping["value_type"] = SegmentType.INTEGER
        case SegmentType.NUMBER if isinstance(value, int):
            result = IntegerVariable.model_validate(mapping)
        case SegmentType.NUMBER | SegmentType.FLOAT if isinstance(value, float):
            mapping = dict(mapping)
            mapping["value_type"] = SegmentType.FLOAT
        case SegmentType.NUMBER if isinstance(value, float):
            result = FloatVariable.model_validate(mapping)
        case SegmentType.NUMBER if not isinstance(value, float | int):
            raise VariableError(f"invalid number value {value}")
@@ -123,8 +119,6 @@ def infer_segment_type_from_value(value: Any, /) -> SegmentType:


def build_segment(value: Any, /) -> Segment:
    # NOTE: If you have runtime type information available, consider using the `build_segment_with_type`
    # below
    if value is None:
        return NoneSegment()
    if isinstance(value, str):
@@ -140,17 +134,12 @@ def build_segment(value: Any, /) -> Segment:
    if isinstance(value, list):
        items = [build_segment(item) for item in value]
        types = {item.value_type for item in items}
        if all(isinstance(item, ArraySegment) for item in items):
        if len(types) != 1 or all(isinstance(item, ArraySegment) for item in items):
            return ArrayAnySegment(value=value)
        elif len(types) != 1:
            if types.issubset({SegmentType.NUMBER, SegmentType.INTEGER, SegmentType.FLOAT}):
                return ArrayNumberSegment(value=value)
            return ArrayAnySegment(value=value)

        match types.pop():
            case SegmentType.STRING:
                return ArrayStringSegment(value=value)
            case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT:
            case SegmentType.NUMBER:
                return ArrayNumberSegment(value=value)
            case SegmentType.OBJECT:
                return ArrayObjectSegment(value=value)
@@ -164,22 +153,6 @@ def build_segment(value: Any, /) -> Segment:
    raise ValueError(f"not supported value {value}")


_segment_factory: Mapping[SegmentType, type[Segment]] = {
    SegmentType.NONE: NoneSegment,
    SegmentType.STRING: StringSegment,
    SegmentType.INTEGER: IntegerSegment,
    SegmentType.FLOAT: FloatSegment,
    SegmentType.FILE: FileSegment,
    SegmentType.OBJECT: ObjectSegment,
    # Array types
    SegmentType.ARRAY_ANY: ArrayAnySegment,
    SegmentType.ARRAY_STRING: ArrayStringSegment,
    SegmentType.ARRAY_NUMBER: ArrayNumberSegment,
    SegmentType.ARRAY_OBJECT: ArrayObjectSegment,
    SegmentType.ARRAY_FILE: ArrayFileSegment,
}


def build_segment_with_type(segment_type: SegmentType, value: Any) -> Segment:
    """
    Build a segment with explicit type checking.
@@ -217,7 +190,7 @@ def build_segment_with_type(segment_type: SegmentType, value: Any) -> Segment:
        if segment_type == SegmentType.NONE:
            return NoneSegment()
        else:
            raise TypeMismatchError(f"Type mismatch: expected {segment_type}, but got None")
            raise TypeMismatchError(f"Expected {segment_type}, but got None")

    # Handle empty list special case for array types
    if isinstance(value, list) and len(value) == 0:
@@ -232,25 +205,21 @@ def build_segment_with_type(segment_type: SegmentType, value: Any) -> Segment:
        elif segment_type == SegmentType.ARRAY_FILE:
            return ArrayFileSegment(value=value)
        else:
            raise TypeMismatchError(f"Type mismatch: expected {segment_type}, but got empty list")
            raise TypeMismatchError(f"Expected {segment_type}, but got empty list")

    # Build segment using existing logic to infer actual type
    inferred_segment = build_segment(value)
    inferred_type = inferred_segment.value_type

    inferred_type = SegmentType.infer_segment_type(value)
    # Type compatibility checking
    if inferred_type is None:
        raise TypeMismatchError(
            f"Type mismatch: expected {segment_type}, but got python object, type={type(value)}, value={value}"
        )
    if inferred_type == segment_type:
        segment_class = _segment_factory[segment_type]
        return segment_class(value_type=segment_type, value=value)
    elif segment_type == SegmentType.NUMBER and inferred_type in (
        SegmentType.INTEGER,
        SegmentType.FLOAT,
    ):
        segment_class = _segment_factory[inferred_type]
        return segment_class(value_type=inferred_type, value=value)
    else:
        raise TypeMismatchError(f"Type mismatch: expected {segment_type}, but got {inferred_type}, value={value}")
        return inferred_segment

    # Type mismatch - raise error with descriptive message
    raise TypeMismatchError(
        f"Type mismatch: expected {segment_type}, but value '{value}' "
        f"(type: {type(value).__name__}) corresponds to {inferred_type}"
    )
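
Illustrative behavior of the type-checked factory above (a sketch, assuming the segment classes listed in `_segment_factory`):

    seg = build_segment_with_type(SegmentType.NUMBER, 3)
    assert seg.value_type == SegmentType.INTEGER  # NUMBER accepts an int via the INTEGER branch
    try:
        build_segment_with_type(SegmentType.ARRAY_STRING, "oops")
    except TypeMismatchError:
        pass  # a plain str does not match an array type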


def segment_to_variable(
@@ -278,6 +247,6 @@ def segment_to_variable(
            name=name,
            description=description,
            value=segment.value,
            selector=list(selector),
            selector=selector,
        ),
    )

@@ -1,15 +0,0 @@
from typing import TypedDict

from core.variables.segments import Segment
from core.variables.types import SegmentType


class _VarTypedDict(TypedDict, total=False):
    value_type: SegmentType


def serialize_value_type(v: _VarTypedDict | Segment) -> str:
    if isinstance(v, Segment):
        return v.value_type.exposed_type().value
    else:
        return v["value_type"].exposed_type().value

@@ -188,7 +188,6 @@ app_detail_fields_with_site = {
    "site": fields.Nested(site_fields),
    "api_base_url": fields.String,
    "use_icon_as_answer_icon": fields.Boolean,
    "max_active_requests": fields.Integer,
    "created_by": fields.String,
    "created_at": TimestampField,
    "updated_by": fields.String,

@@ -2,12 +2,10 @@ from flask_restful import fields

from libs.helper import TimestampField

from ._value_type_serializer import serialize_value_type

conversation_variable_fields = {
    "id": fields.String,
    "name": fields.String,
    "value_type": fields.String(attribute=serialize_value_type),
    "value_type": fields.String(attribute="value_type.value"),
    "value": fields.String,
    "description": fields.String,
    "created_at": TimestampField,

@@ -5,8 +5,6 @@ from core.variables import SecretVariable, SegmentType, Variable
from fields.member_fields import simple_account_fields
from libs.helper import TimestampField

from ._value_type_serializer import serialize_value_type

ENVIRONMENT_VARIABLE_SUPPORTED_TYPES = (SegmentType.STRING, SegmentType.NUMBER, SegmentType.SECRET)


@@ -26,16 +24,11 @@ class EnvironmentVariableField(fields.Raw):
                "id": value.id,
                "name": value.name,
                "value": value.value,
                "value_type": value.value_type.exposed_type().value,
                "value_type": value.value_type.value,
                "description": value.description,
            }
        if isinstance(value, dict):
            value_type_str = value.get("value_type")
            if not isinstance(value_type_str, str):
                raise TypeError(
                    f"unexpected type for value_type field, value={value_type_str}, type={type(value_type_str)}"
                )
            value_type = SegmentType(value_type_str).exposed_type()
            value_type = value.get("value_type")
            if value_type not in ENVIRONMENT_VARIABLE_SUPPORTED_TYPES:
                raise ValueError(f"Unsupported environment variable value type: {value_type}")
            return value
@@ -44,7 +37,7 @@
conversation_variable_fields = {
    "id": fields.String,
    "name": fields.String,
    "value_type": fields.String(attribute=serialize_value_type),
    "value_type": fields.String(attribute="value_type.value"),
    "value": fields.Raw,
    "description": fields.String,
}

@@ -1,164 +0,0 @@
import secrets
import struct
import time
import uuid

# Reference for UUIDv7 specification:
# RFC 9562, Section 5.7 - https://www.rfc-editor.org/rfc/rfc9562.html#section-5.7

# Define the format for packing the timestamp as an unsigned 64-bit integer (big-endian).
#
# For details on the `struct.pack` format, refer to:
# https://docs.python.org/3/library/struct.html#byte-order-size-and-alignment
_PACK_TIMESTAMP = ">Q"

# Define the format for packing the 12-bit random data A (as specified in RFC 9562 Section 5.7)
# into an unsigned 16-bit integer (big-endian).
_PACK_RAND_A = ">H"


def _create_uuidv7_bytes(timestamp_ms: int, random_bytes: bytes) -> bytes:
    """Create UUIDv7 byte structure with given timestamp and random bytes.

    This is a private helper function that handles the common logic for creating
    UUIDv7 byte structure according to RFC 9562 specification.

    UUIDv7 Structure:
    - 48 bits: timestamp (milliseconds since Unix epoch)
    - 12 bits: random data A (with version bits)
    - 62 bits: random data B (with variant bits)

    The function performs the following operations:
    1. Creates a 128-bit (16-byte) UUID structure
    2. Packs the timestamp into the first 48 bits (6 bytes)
    3. Sets the version bits to 7 (0111) in the correct position
    4. Sets the variant bits to 10 (binary) in the correct position
    5. Fills the remaining bits with the provided random bytes

    Args:
        timestamp_ms: The timestamp in milliseconds since Unix epoch (48 bits).
        random_bytes: Random bytes to use for the random portions (must be 10 bytes).
            First 2 bytes are used for random data A (12 bits after version).
            Last 8 bytes are used for random data B (62 bits after variant).

    Returns:
        A 16-byte bytes object representing the complete UUIDv7 structure.

    Note:
        This function assumes the random_bytes parameter is exactly 10 bytes.
        The caller is responsible for providing appropriate random data.
    """
    # Create the 128-bit UUID structure
    uuid_bytes = bytearray(16)

    # Pack timestamp (48 bits) into first 6 bytes
    uuid_bytes[0:6] = struct.pack(_PACK_TIMESTAMP, timestamp_ms)[2:8]  # Take last 6 bytes of 8-byte big-endian

    # Next 16 bits: random data A (12 bits) + version (4 bits)
    # Take first 2 random bytes and set version to 7
    rand_a = struct.unpack(_PACK_RAND_A, random_bytes[0:2])[0]
    # Clear the highest 4 bits to make room for the version field
    # by performing a bitwise AND with 0x0FFF (binary: 0b0000_1111_1111_1111).
    rand_a = rand_a & 0x0FFF
    # Set the version field to 7 (binary: 0111) by performing a bitwise OR with 0x7000 (binary: 0b0111_0000_0000_0000).
    rand_a = rand_a | 0x7000
    uuid_bytes[6:8] = struct.pack(_PACK_RAND_A, rand_a)

    # Last 64 bits: random data B (62 bits) + variant (2 bits)
    # Use remaining 8 random bytes and set variant to 10 (binary)
    uuid_bytes[8:16] = random_bytes[2:10]

    # Set variant bits (first 2 bits of byte 8 should be '10')
    uuid_bytes[8] = (uuid_bytes[8] & 0x3F) | 0x80  # Set variant to 10xxxxxx

    return bytes(uuid_bytes)


def uuidv7(timestamp_ms: int | None = None) -> uuid.UUID:
    """Generate a UUID version 7 according to RFC 9562 specification.

    UUIDv7 features a time-ordered value field derived from the widely
    implemented and well known Unix Epoch timestamp source, the number of
    milliseconds since midnight 1 Jan 1970 UTC, leap seconds excluded.

    Structure:
    - 48 bits: timestamp (milliseconds since Unix epoch)
    - 12 bits: random data A (with version bits)
    - 62 bits: random data B (with variant bits)

    Args:
        timestamp_ms: The timestamp used when generating UUID, use the current time if unspecified.
            Should be an integer representing milliseconds since Unix epoch.

    Returns:
        A UUID object representing a UUIDv7.

    Example:
        >>> import time
        >>> # Generate UUIDv7 with current time
        >>> uuid_current = uuidv7()
        >>> # Generate UUIDv7 with specific timestamp
        >>> uuid_specific = uuidv7(int(time.time() * 1000))
    """
    if timestamp_ms is None:
        timestamp_ms = int(time.time() * 1000)

    # Generate 10 random bytes for the random portions
    random_bytes = secrets.token_bytes(10)

    # Create UUIDv7 bytes using the helper function
    uuid_bytes = _create_uuidv7_bytes(timestamp_ms, random_bytes)

    return uuid.UUID(bytes=uuid_bytes)


def uuidv7_timestamp(id_: uuid.UUID) -> int:
    """Extract the timestamp from a UUIDv7.

    UUIDv7 contains a 48-bit timestamp field representing milliseconds since
    the Unix epoch (1970-01-01 00:00:00 UTC). This function extracts and
    returns that timestamp as an integer representing milliseconds since the epoch.

    Args:
        id_: A UUID object that should be a UUIDv7 (version 7).

    Returns:
        The timestamp as an integer representing milliseconds since Unix epoch.

    Raises:
        ValueError: If the provided UUID is not version 7.

    Example:
        >>> uuid_v7 = uuidv7()
        >>> timestamp = uuidv7_timestamp(uuid_v7)
        >>> print(f"UUID was created at: {timestamp} ms")
    """
    # Verify this is a UUIDv7
    if id_.version != 7:
        raise ValueError(f"Expected UUIDv7 (version 7), got version {id_.version}")

    # Extract the UUID bytes
    uuid_bytes = id_.bytes

    # Extract the first 48 bits (6 bytes) as the timestamp in milliseconds
    # Pad with 2 zero bytes at the beginning to make it 8 bytes for unpacking as Q (unsigned long long)
    timestamp_bytes = b"\x00\x00" + uuid_bytes[0:6]
    ts_in_ms = struct.unpack(_PACK_TIMESTAMP, timestamp_bytes)[0]

    # Return timestamp directly in milliseconds as integer
    assert isinstance(ts_in_ms, int)
    return ts_in_ms


def uuidv7_boundary(timestamp_ms: int) -> uuid.UUID:
    """Generate a non-random uuidv7 with the given timestamp (first 48 bits) and
    all random bits to 0. As the smallest possible uuidv7 for that timestamp,
    it may be used as a boundary for partitions.
    """
    # Use zero bytes for all random portions
    zero_random_bytes = b"\x00" * 10

    # Create UUIDv7 bytes using the helper function
    uuid_bytes = _create_uuidv7_bytes(timestamp_ms, zero_random_bytes)

    return uuid.UUID(bytes=uuid_bytes)
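
A brief sketch of how these helpers fit together (illustrative):

    b1 = uuidv7_boundary(1_700_000_000_000)
    b2 = uuidv7_boundary(1_700_000_000_001)
    assert b1 < b2                                    # boundaries order by timestamp
    assert uuidv7_timestamp(b1) == 1_700_000_000_000  # the 48-bit timestamp round-trips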

api/migrations/versions/2025_05_15_1635-16081485540c_.py (new file, 41 lines)
@@ -0,0 +1,41 @@
"""empty message

Revision ID: 16081485540c
Revises: d28f2004b072
Create Date: 2025-05-15 16:35:39.113777

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '16081485540c'
down_revision = '2adcbe1f5dfb'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tenant_plugin_auto_upgrade_strategies',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
    sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
    sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
    sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
    sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
    sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('tenant_plugin_auto_upgrade_strategies')
    # ### end Alembic commands ###
@@ -12,7 +12,7 @@ import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '4474872b0ee6'
down_revision = '2adcbe1f5dfb'
down_revision = '16081485540c'
branch_labels = None
depends_on = None


@@ -1,86 +0,0 @@
"""add uuidv7 function in SQL

Revision ID: 1c9ba48be8e4
Revises: 58eb7bdb93fe
Create Date: 2025-07-02 23:32:38.484499

"""

"""
The functions in this file come from https://github.com/dverite/postgres-uuidv7-sql/, with minor modifications.

LICENSE:

# Copyright and License

Copyright (c) 2024, Daniel Vérité

Permission to use, copy, modify, and distribute this software and its documentation for any purpose, without fee, and without a written agreement is hereby granted, provided that the above copyright notice and this paragraph and the following two paragraphs appear in all copies.

In no event shall Daniel Vérité be liable to any party for direct, indirect, special, incidental, or consequential damages, including lost profits, arising out of the use of this software and its documentation, even if Daniel Vérité has been advised of the possibility of such damage.

Daniel Vérité specifically disclaims any warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose. The software provided hereunder is on an "AS IS" basis, and Daniel Vérité has no obligations to provide maintenance, support, updates, enhancements, or modifications.
"""

from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '1c9ba48be8e4'
down_revision = '58eb7bdb93fe'
branch_labels: None = None
depends_on: None = None


def upgrade():
    # This implementation differs slightly from the original uuidv7 function in
    # https://github.com/dverite/postgres-uuidv7-sql/.
    # The ability to specify source timestamp has been removed because its type signature is incompatible with
    # PostgreSQL 18's `uuidv7` function. This capability is rarely needed in practice, as IDs can be
    # generated and controlled within the application layer.
    op.execute(sa.text(r"""
/* Main function to generate a uuidv7 value with millisecond precision */
CREATE FUNCTION uuidv7() RETURNS uuid
AS
$$
-- Replace the first 48 bits of a uuidv4 with the current
-- number of milliseconds since 1970-01-01 UTC
-- and set the "ver" field to 7 by setting additional bits
SELECT encode(
               set_bit(
                       set_bit(
                               overlay(uuid_send(gen_random_uuid()) placing
                                       substring(int8send((extract(epoch from clock_timestamp()) * 1000)::bigint) from
                                                 3)
                                       from 1 for 6),
                               52, 1),
                       53, 1), 'hex')::uuid;
$$ LANGUAGE SQL VOLATILE PARALLEL SAFE;

COMMENT ON FUNCTION uuidv7 IS
    'Generate a uuid-v7 value with a 48-bit timestamp (millisecond precision) and 74 bits of randomness';
"""))

    op.execute(sa.text(r"""
CREATE FUNCTION uuidv7_boundary(timestamptz) RETURNS uuid
AS
$$
/* uuid fields: version=0b0111, variant=0b10 */
SELECT encode(
               overlay('\x00000000000070008000000000000000'::bytea
                       placing substring(int8send(floor(extract(epoch from $1) * 1000)::bigint) from 3)
                       from 1 for 6),
               'hex')::uuid;
$$ LANGUAGE SQL STABLE STRICT PARALLEL SAFE;

COMMENT ON FUNCTION uuidv7_boundary(timestamptz) IS
    'Generate a non-random uuidv7 with the given timestamp (first 48 bits) and all random bits to 0. As the smallest possible uuidv7 for that timestamp, it may be used as a boundary for partitions.';
"""
    ))


def downgrade():
    op.execute(sa.text("DROP FUNCTION uuidv7"))
    op.execute(sa.text("DROP FUNCTION uuidv7_boundary"))
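
One way to exercise the SQL functions after this migration runs (a sketch; the DSN and engine setup are assumptions):

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql://localhost/dify")  # hypothetical connection string
    with engine.connect() as conn:
        new_id = conn.execute(text("SELECT uuidv7()")).scalar_one()
        boundary = conn.execute(text("SELECT uuidv7_boundary(now())")).scalar_one()
        # `boundary` sorts no later than any uuidv7 generated from now() onward.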
|
||||
@ -0,0 +1,62 @@
|
||||
"""tool oauth
|
||||
|
||||
Revision ID: 71f5020c6470
|
||||
Revises: 4474872b0ee6
|
||||
Create Date: 2025-06-24 17:05:43.118647
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '71f5020c6470'
|
||||
down_revision = '58eb7bdb93fe'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('tool_oauth_system_clients',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
|
||||
sa.Column('plugin_id', sa.String(length=512), nullable=False),
|
||||
sa.Column('provider', sa.String(length=255), nullable=False),
|
||||
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
|
||||
sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
|
||||
)
|
||||
op.create_table('tool_oauth_tenant_clients',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('plugin_id', sa.String(length=512), nullable=False),
|
||||
sa.Column('provider', sa.String(length=255), nullable=False),
|
||||
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
|
||||
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
|
||||
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False))
|
||||
batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
|
||||
batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False))
|
||||
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
|
||||
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
|
||||
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
|
||||
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider'])
|
||||
batch_op.drop_column('credential_type')
|
||||
batch_op.drop_column('is_default')
|
||||
batch_op.drop_column('name')
|
||||
|
||||
op.drop_table('tool_oauth_tenant_clients')
|
||||
op.drop_table('tool_oauth_system_clients')
|
||||
# ### end Alembic commands ###
|
||||
@@ -21,6 +21,43 @@ from .model import Account, App, Tenant
from .types import StringUUID


# system level tool oauth client params (client_id, client_secret, etc.)
class ToolOAuthSystemClient(Base):
    __tablename__ = "tool_oauth_system_clients"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="tool_oauth_system_client_pkey"),
        db.UniqueConstraint("plugin_id", "provider", name="tool_oauth_system_client_plugin_id_provider_idx"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    plugin_id: Mapped[str] = mapped_column(db.String(512), nullable=False)
    provider: Mapped[str] = mapped_column(db.String(255), nullable=False)
    # oauth params of the tool provider
    encrypted_oauth_params: Mapped[str] = mapped_column(db.Text, nullable=False)


# tenant level tool oauth client params (client_id, client_secret, etc.)
class ToolOAuthTenantClient(Base):
    __tablename__ = "tool_oauth_tenant_clients"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="tool_oauth_tenant_client_pkey"),
        db.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_tool_oauth_tenant_client"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    # tenant id
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    plugin_id: Mapped[str] = mapped_column(db.String(512), nullable=False)
    provider: Mapped[str] = mapped_column(db.String(255), nullable=False)
    enabled: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("true"))
    # oauth params of the tool provider
    encrypted_oauth_params: Mapped[str] = mapped_column(db.Text, nullable=False)

    @property
    def oauth_params(self) -> dict:
        return cast(dict, json.loads(self.encrypted_oauth_params or "{}"))


class BuiltinToolProvider(Base):
    """
    This table stores the tool provider information for built-in tools for each tenant.
@@ -29,12 +66,14 @@ class BuiltinToolProvider(Base):
    __tablename__ = "tool_builtin_providers"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="tool_builtin_provider_pkey"),
        # one tenant can only have one tool provider with the same name
        db.UniqueConstraint("tenant_id", "provider", name="unique_builtin_tool_provider"),
        db.UniqueConstraint("tenant_id", "provider", "name", name="unique_builtin_tool_provider"),
    )

    # id of the tool provider
    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    name: Mapped[str] = mapped_column(
        db.String(256), nullable=False, server_default=db.text("'API KEY 1'::character varying")
    )
    # id of the tenant
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=True)
    # who created this tool provider
@@ -49,6 +88,11 @@
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
    )
    is_default: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
    # credential type, e.g., "api-key", "oauth2"
    credential_type: Mapped[str] = mapped_column(
        db.String(32), nullable=False, server_default=db.text("'api-key'::character varying")
    )

    @property
    def credentials(self) -> dict:
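The widened constraint is what enables multiple credential sets per provider. A hedged usage sketch (the provider id, tenant/user ids, and session are hypothetical; field names follow the model above):

provider_key = BuiltinToolProvider(
    tenant_id=tenant_id,                   # assumed in scope
    user_id=user_id,
    provider="langgenius/google/google",   # hypothetical provider id
    name="API KEY 1",
    credential_type="api-key",
    encrypted_credentials="{}",
)
provider_oauth = BuiltinToolProvider(
    tenant_id=tenant_id,
    user_id=user_id,
    provider="langgenius/google/google",
    name="OAuth 1",
    credential_type="oauth2",
    encrypted_credentials="{}",
)
# both rows coexist: the unique key is now (tenant_id, provider, name)
session.add_all([provider_key, provider_oauth])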
@@ -68,7 +112,7 @@ class ApiToolProvider(Base):

    id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    # name of the api provider
    name = db.Column(db.String(255), nullable=False)
    name = db.Column(db.String(255), nullable=False, server_default=db.text("'API KEY 1'::character varying"))
    # icon
    icon = db.Column(db.String(255), nullable=False)
    # original schema
@@ -281,18 +325,17 @@ class MCPToolProvider(Base):

    @property
    def decrypted_credentials(self) -> dict:
        from core.helper.provider_cache import NoOpProviderCredentialCache
        from core.tools.mcp_tool.provider import MCPToolProviderController
        from core.tools.utils.configuration import ProviderConfigEncrypter
        from core.tools.utils.encryption import create_provider_encrypter

        provider_controller = MCPToolProviderController._from_db(self)

        tool_configuration = ProviderConfigEncrypter(
        return create_provider_encrypter(
            tenant_id=self.tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.provider_id,
        )
        return tool_configuration.decrypt(self.credentials, use_cache=False)
            config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
            cache=NoOpProviderCredentialCache(),
        )[0].decrypt(self.credentials)


class ToolModelInvoke(Base):
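The pattern this hunk migrates to, shown as a self-contained toy: the new factory returns an (encrypter, cache) pair, which is why the call above ends with )[0].decrypt(...). The classes below are stand-ins for illustration, not the real helpers in core.tools.utils.encryption.

import json

class NoOpCache:
    """Stand-in for NoOpProviderCredentialCache: caches nothing."""
    def get(self):
        return None
    def delete(self):
        pass

class ReversingEncrypter:
    """Stand-in cipher; the real code encrypts with tenant-scoped keys."""
    def encrypt(self, data: dict) -> dict:
        return {k: v[::-1] for k, v in data.items()}
    def decrypt(self, data: dict) -> dict:
        return {k: v[::-1] for k, v in data.items()}

def create_provider_encrypter(**_config):
    # returns the pair, so callers can decrypt and invalidate independently
    return ReversingEncrypter(), NoOpCache()

encrypter, cache = create_provider_encrypter()
stored = json.dumps(encrypter.encrypt({"api_key": "secret"}))
assert encrypter.decrypt(json.loads(stored)) == {"api_key": "secret"}
cache.delete()  # the second element is the cache handle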
@@ -12,7 +12,6 @@ from sqlalchemy import orm
from core.file.constants import maybe_file_object
from core.file.models import File
from core.variables import utils as variable_utils
from core.variables.variables import FloatVariable, IntegerVariable, StringVariable
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from core.workflow.nodes.enums import NodeType
from factories.variable_factory import TypeMismatchError, build_segment_with_type
@@ -348,7 +347,7 @@ class Workflow(Base):
    )

    @property
    def environment_variables(self) -> Sequence[StringVariable | IntegerVariable | FloatVariable | SecretVariable]:
    def environment_variables(self) -> Sequence[Variable]:
        # TODO: find some way to init `self._environment_variables` when instance created.
        if self._environment_variables is None:
            self._environment_variables = "{}"
@@ -368,15 +367,11 @@
        def decrypt_func(var):
            if isinstance(var, SecretVariable):
                return var.model_copy(update={"value": encrypter.decrypt_token(tenant_id=tenant_id, token=var.value)})
            elif isinstance(var, (StringVariable, IntegerVariable, FloatVariable)):
                return var
            else:
                raise AssertionError("this statement should be unreachable.")
            return var

        decrypted_results: list[SecretVariable | StringVariable | IntegerVariable | FloatVariable] = list(
            map(decrypt_func, results)
        )
        return decrypted_results
        results = list(map(decrypt_func, results))
        return results

    @environment_variables.setter
    def environment_variables(self, value: Sequence[Variable]):
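A standalone sketch of the simplified decryption step above: only SecretVariable values are decrypted, every other variable type now passes through unchanged. decrypt_token and the class below are illustrative stand-ins for the real helpers.

class SecretVariable:
    def __init__(self, value: str):
        self.value = value
    def model_copy(self, update: dict) -> "SecretVariable":
        return SecretVariable(update["value"])

def decrypt_token(tenant_id: str, token: str) -> str:
    # stand-in for core.helper.encrypter.decrypt_token
    return token.removeprefix("enc:")

def decrypt_func(var):
    if isinstance(var, SecretVariable):
        return var.model_copy(update={"value": decrypt_token(tenant_id="t1", token=var.value)})
    return var  # string/integer/float variables pass through untouched

assert decrypt_func(SecretVariable("enc:hunter2")).value == "hunter2"
assert decrypt_func(42) == 42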
@@ -575,13 +575,26 @@ class AppDslService:
            raise ValueError("Missing draft workflow configuration, please check.")

        workflow_dict = workflow.to_dict(include_secret=include_secret)
        # TODO: refactor: we need a better way to filter workspace related data from nodes
        for node in workflow_dict.get("graph", {}).get("nodes", []):
            if node.get("data", {}).get("type", "") == NodeType.KNOWLEDGE_RETRIEVAL.value:
                dataset_ids = node["data"].get("dataset_ids", [])
                node["data"]["dataset_ids"] = [
            node_data = node.get("data", {})
            if not node_data:
                continue
            data_type = node_data.get("type", "")
            if data_type == NodeType.KNOWLEDGE_RETRIEVAL.value:
                dataset_ids = node_data.get("dataset_ids", [])
                node_data["dataset_ids"] = [
                    cls.encrypt_dataset_id(dataset_id=dataset_id, tenant_id=app_model.tenant_id)
                    for dataset_id in dataset_ids
                ]
            # filter credential id from tool node
            if not include_secret and data_type == NodeType.TOOL.value:
                node_data.pop("credential_id", None)
            # filter credential id from agent node
            if not include_secret and data_type == NodeType.AGENT.value:
                for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []):
                    tool.pop("credential_id", None)

        export_data["workflow"] = workflow_dict
        dependencies = cls._extract_dependencies_from_workflow(workflow)
        export_data["dependencies"] = [
@@ -602,7 +615,15 @@
        if not app_model_config:
            raise ValueError("Missing app configuration, please check.")

        export_data["model_config"] = app_model_config.to_dict()
        model_config = app_model_config.to_dict()

        # TODO: refactor: we need a better way to filter workspace related data from model config
        # filter credential id from model config
        for tool in model_config.get("agent_mode", {}).get("tools", []):
            tool.pop("credential_id", None)

        export_data["model_config"] = model_config

        dependencies = cls._extract_dependencies_from_model_config(app_model_config.to_dict())
        export_data["dependencies"] = [
            jsonable_encoder(d.model_dump())
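A hedged sketch of the credential_id scrub this hunk adds to DSL export: when secrets are excluded, workspace-specific credential bindings are popped from tool and agent nodes so the exported DSL stays portable. Function name and the "tool"/"agent" type strings are illustrative.

def scrub_credential_ids(workflow_dict: dict, include_secret: bool) -> dict:
    for node in workflow_dict.get("graph", {}).get("nodes", []):
        node_data = node.get("data", {})
        if not node_data or include_secret:
            continue
        if node_data.get("type") == "tool":
            node_data.pop("credential_id", None)  # drop workspace-bound credential reference
        if node_data.get("type") == "agent":
            for tool in node_data.get("agent_parameters", {}).get("tools", {}).get("value", []):
                tool.pop("credential_id", None)
    return workflow_dict

doc = {"graph": {"nodes": [{"data": {"type": "tool", "credential_id": "abc"}}]}}
assert "credential_id" not in scrub_credential_ids(doc, include_secret=False)["graph"]["nodes"][0]["data"]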
@@ -233,7 +233,6 @@ class AppService:
        app.icon = args.get("icon")
        app.icon_background = args.get("icon_background")
        app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
        app.max_active_requests = args.get("max_active_requests")
        app.updated_by = current_user.id
        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
        db.session.commit()

@@ -6,7 +6,7 @@ from sqlalchemy.orm import Session
from core.plugin.entities.parameters import PluginParameterOption
from core.plugin.impl.dynamic_select import DynamicSelectClient
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.utils.encryption import create_tool_provider_encrypter
from extensions.ext_database import db
from models.tools import BuiltinToolProvider

@@ -38,11 +38,9 @@ class PluginParameterService:
            case "tool":
                provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
                # init tool configuration
                tool_configuration = ProviderConfigEncrypter(
                encrypter, _ = create_tool_provider_encrypter(
                    tenant_id=tenant_id,
                    config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
                    provider_type=provider_controller.provider_type.value,
                    provider_identity=provider_controller.entity.identity.name,
                    controller=provider_controller,
                )

                # check if credentials are required
@@ -63,7 +61,7 @@
                if db_record is None:
                    raise ValueError(f"Builtin provider {provider} not found when fetching credentials")

                credentials = tool_configuration.decrypt(db_record.credentials)
                credentials = encrypter.decrypt(db_record.credentials)
            case _:
                raise ValueError(f"Invalid provider type: {provider_type}")


@@ -196,6 +196,17 @@ class PluginService:
        manager = PluginInstaller()
        return manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier)

    @staticmethod
    def is_plugin_verified(tenant_id: str, plugin_unique_identifier: str) -> bool:
        """
        Check if the plugin is verified
        """
        manager = PluginInstaller()
        try:
            return manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier).verified
        except Exception:
            return False

    @staticmethod
    def fetch_install_tasks(tenant_id: str, page: int, page_size: int) -> Sequence[PluginInstallTask]:
        """

@@ -18,7 +18,7 @@ from core.tools.entities.tool_entities import (
)
from core.tools.tool_label_manager import ToolLabelManager
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.utils.encryption import create_tool_provider_encrypter
from core.tools.utils.parser import ApiBasedToolSchemaParser
from extensions.ext_database import db
from models.tools import ApiToolProvider
@@ -164,15 +164,11 @@ class ApiToolManageService:
        provider_controller.load_bundled_tools(tool_bundles)

        # encrypt credentials
        tool_configuration = ProviderConfigEncrypter(
        encrypter, _ = create_tool_provider_encrypter(
            tenant_id=tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.entity.identity.name,
            controller=provider_controller,
        )

        encrypted_credentials = tool_configuration.encrypt(credentials)
        db_provider.credentials_str = json.dumps(encrypted_credentials)
        db_provider.credentials_str = json.dumps(encrypter.encrypt(credentials))

        db.session.add(db_provider)
        db.session.commit()
@@ -297,28 +293,26 @@ class ApiToolManageService:
        provider_controller.load_bundled_tools(tool_bundles)

        # get original credentials if exists
        tool_configuration = ProviderConfigEncrypter(
        encrypter, cache = create_tool_provider_encrypter(
            tenant_id=tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.entity.identity.name,
            controller=provider_controller,
        )

        original_credentials = tool_configuration.decrypt(provider.credentials)
        masked_credentials = tool_configuration.mask_tool_credentials(original_credentials)
        original_credentials = encrypter.decrypt(provider.credentials)
        masked_credentials = encrypter.mask_tool_credentials(original_credentials)
        # check if the credential has changed, save the original credential
        for name, value in credentials.items():
            if name in masked_credentials and value == masked_credentials[name]:
                credentials[name] = original_credentials[name]

        credentials = tool_configuration.encrypt(credentials)
        credentials = encrypter.encrypt(credentials)
        provider.credentials_str = json.dumps(credentials)

        db.session.add(provider)
        db.session.commit()

        # delete cache
        tool_configuration.delete_tool_credentials_cache()
        cache.delete()

        # update labels
        ToolLabelManager.update_tool_labels(provider_controller, labels)
@@ -416,15 +410,13 @@ class ApiToolManageService:

        # decrypt credentials
        if db_provider.id:
            tool_configuration = ProviderConfigEncrypter(
            encrypter, _ = create_tool_provider_encrypter(
                tenant_id=tenant_id,
                config=list(provider_controller.get_credentials_schema()),
                provider_type=provider_controller.provider_type.value,
                provider_identity=provider_controller.entity.identity.name,
                controller=provider_controller,
            )
            decrypted_credentials = tool_configuration.decrypt(credentials)
            decrypted_credentials = encrypter.decrypt(credentials)
            # check if the credential has changed, save the original credential
            masked_credentials = tool_configuration.mask_tool_credentials(decrypted_credentials)
            masked_credentials = encrypter.mask_tool_credentials(decrypted_credentials)
            for name, value in credentials.items():
                if name in masked_credentials and value == masked_credentials[name]:
                    credentials[name] = decrypted_credentials[name]
@@ -446,7 +438,7 @@ class ApiToolManageService:
        return {"result": result or "empty response"}

    @staticmethod
    def list_api_tools(user_id: str, tenant_id: str) -> list[ToolProviderApiEntity]:
    def list_api_tools(tenant_id: str) -> list[ToolProviderApiEntity]:
        """
        list api tools
        """
@@ -474,7 +466,7 @@
        for tool in tools or []:
            user_provider.tools.append(
                ToolTransformService.convert_tool_entity_to_api_entity(
                    tenant_id=tenant_id, tool=tool, credentials=user_provider.original_credentials, labels=labels
                    tenant_id=tenant_id, tool=tool, labels=labels
                )
            )
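A standalone sketch of the masked-credential round trip these services share: the UI echoes back masked values, so any submitted value that still equals its mask is replaced with the stored original before re-encrypting. Names are illustrative.

def merge_submitted_credentials(submitted: dict, original: dict, masked: dict) -> dict:
    merged = dict(submitted)
    for name, value in submitted.items():
        if name in masked and value == masked[name]:
            merged[name] = original[name]  # field unchanged by the user: keep the stored secret
    return merged

original = {"api_key": "sk-real-key"}
masked = {"api_key": "sk-****key"}
assert merge_submitted_credentials({"api_key": "sk-****key"}, original, masked) == original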
@@ -1,28 +1,83 @@
import json
import logging
import re
from pathlib import Path
from typing import Any, Optional

from sqlalchemy.orm import Session

from configs import dify_config
from constants import HIDDEN_VALUE, UNKNOWN_VALUE
from core.helper.position_helper import is_filtered
from core.model_runtime.utils.encoders import jsonable_encoder
from core.helper.provider_cache import NoOpProviderCredentialCache, ToolProviderCredentialsCache
from core.plugin.entities.plugin import ToolProviderID
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.tools.builtin_tool.provider import BuiltinToolProviderController
from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
from core.tools.errors import ToolNotFoundError, ToolProviderCredentialValidationError, ToolProviderNotFoundError
from core.tools.entities.api_entities import (
    ToolApiEntity,
    ToolProviderApiEntity,
    ToolProviderCredentialApiEntity,
    ToolProviderCredentialInfoApiEntity,
)
from core.tools.entities.tool_entities import CredentialType
from core.tools.errors import ToolProviderNotFoundError
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.tools.tool_label_manager import ToolLabelManager
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.utils.encryption import create_provider_encrypter
from core.tools.utils.system_oauth_encryption import decrypt_system_oauth_params
from extensions.ext_database import db
from models.tools import BuiltinToolProvider
from extensions.ext_redis import redis_client
from models.tools import BuiltinToolProvider, ToolOAuthSystemClient, ToolOAuthTenantClient
from services.plugin.plugin_service import PluginService
from services.tools.tools_transform_service import ToolTransformService

logger = logging.getLogger(__name__)


class BuiltinToolManageService:
    __MAX_BUILTIN_TOOL_PROVIDER_COUNT__ = 100

    @staticmethod
    def delete_custom_oauth_client_params(tenant_id: str, provider: str):
        """
        delete custom oauth client params
        """
        tool_provider = ToolProviderID(provider)
        with Session(db.engine) as session:
            session.query(ToolOAuthTenantClient).filter_by(
                tenant_id=tenant_id,
                provider=tool_provider.provider_name,
                plugin_id=tool_provider.plugin_id,
            ).delete()
            session.commit()
        return {"result": "success"}

    @staticmethod
    def get_builtin_tool_provider_oauth_client_schema(tenant_id: str, provider_name: str):
        """
        get builtin tool provider oauth client schema
        """
        provider = ToolManager.get_builtin_provider(provider_name, tenant_id)
        verified = not isinstance(provider, PluginToolProviderController) or PluginService.is_plugin_verified(
            tenant_id, provider.plugin_unique_identifier
        )

        is_oauth_custom_client_enabled = BuiltinToolManageService.is_oauth_custom_client_enabled(
            tenant_id, provider_name
        )
        is_system_oauth_params_exists = verified and BuiltinToolManageService.is_oauth_system_client_exists(
            provider_name
        )
        result = {
            "schema": provider.get_oauth_client_schema(),
            "is_oauth_custom_client_enabled": is_oauth_custom_client_enabled,
            "is_system_oauth_params_exists": is_system_oauth_params_exists,
            "client_params": BuiltinToolManageService.get_custom_oauth_client_params(tenant_id, provider_name),
            "redirect_uri": f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_name}/tool/callback",
        }
        return result

    @staticmethod
    def list_builtin_tool_provider_tools(tenant_id: str, provider: str) -> list[ToolApiEntity]:
        """
@@ -36,27 +91,11 @@ class BuiltinToolManageService:
        provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
        tools = provider_controller.get_tools()

        tool_provider_configurations = ProviderConfigEncrypter(
            tenant_id=tenant_id,
            config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.entity.identity.name,
        )
        # check if user has added the provider
        builtin_provider = BuiltinToolManageService._fetch_builtin_provider(provider, tenant_id)

        credentials = {}
        if builtin_provider is not None:
            # get credentials
            credentials = builtin_provider.credentials
            credentials = tool_provider_configurations.decrypt(credentials)

        result: list[ToolApiEntity] = []
        for tool in tools or []:
            result.append(
                ToolTransformService.convert_tool_entity_to_api_entity(
                    tool=tool,
                    credentials=credentials,
                    tenant_id=tenant_id,
                    labels=ToolLabelManager.get_tool_labels(provider_controller),
                )
@@ -65,25 +104,15 @@
        return result

    @staticmethod
    def get_builtin_tool_provider_info(user_id: str, tenant_id: str, provider: str):
    def get_builtin_tool_provider_info(tenant_id: str, provider: str):
        """
        get builtin tool provider info
        """
        provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
        tool_provider_configurations = ProviderConfigEncrypter(
            tenant_id=tenant_id,
            config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.entity.identity.name,
        )
        # check if user has added the provider
        builtin_provider = BuiltinToolManageService._fetch_builtin_provider(provider, tenant_id)

        credentials = {}
        if builtin_provider is not None:
            # get credentials
            credentials = builtin_provider.credentials
            credentials = tool_provider_configurations.decrypt(credentials)
        builtin_provider = BuiltinToolManageService.get_builtin_provider(provider, tenant_id)
        if builtin_provider is None:
            raise ValueError(f"you have not added provider {provider}")

        entity = ToolTransformService.builtin_provider_to_user_provider(
            provider_controller=provider_controller,
@@ -92,128 +121,407 @@
        )

        entity.original_credentials = {}

        return entity

    @staticmethod
    def list_builtin_provider_credentials_schema(provider_name: str, tenant_id: str):
    def list_builtin_provider_credentials_schema(provider_name: str, credential_type: CredentialType, tenant_id: str):
        """
        list builtin provider credentials schema

        :param credential_type: credential type
        :param provider_name: the name of the provider
        :param tenant_id: the id of the tenant
        :return: the list of tool providers
        """
        provider = ToolManager.get_builtin_provider(provider_name, tenant_id)
        return jsonable_encoder(provider.get_credentials_schema())
        return provider.get_credentials_schema_by_type(credential_type)

    @staticmethod
    def update_builtin_tool_provider(
        session: Session, user_id: str, tenant_id: str, provider_name: str, credentials: dict
        user_id: str,
        tenant_id: str,
        provider: str,
        credential_id: str,
        credentials: dict | None = None,
        name: str | None = None,
    ):
        """
        update builtin tool provider
        """
        # get if the provider exists
        provider = BuiltinToolManageService._fetch_builtin_provider(provider_name, tenant_id)

        try:
            # get provider
            provider_controller = ToolManager.get_builtin_provider(provider_name, tenant_id)
            if not provider_controller.need_credentials:
                raise ValueError(f"provider {provider_name} does not need credentials")
            tool_configuration = ProviderConfigEncrypter(
                tenant_id=tenant_id,
                config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
                provider_type=provider_controller.provider_type.value,
                provider_identity=provider_controller.entity.identity.name,
        with Session(db.engine) as session:
            # get if the provider exists
            db_provider = (
                session.query(BuiltinToolProvider)
                .filter(
                    BuiltinToolProvider.tenant_id == tenant_id,
                    BuiltinToolProvider.id == credential_id,
                )
                .first()
            )
            if db_provider is None:
                raise ValueError(f"you have not added provider {provider}")

            # get original credentials if exists
            if provider is not None:
                original_credentials = tool_configuration.decrypt(provider.credentials)
                masked_credentials = tool_configuration.mask_tool_credentials(original_credentials)
                # check if the credential has changed, save the original credential
                for name, value in credentials.items():
                    if name in masked_credentials and value == masked_credentials[name]:
                        credentials[name] = original_credentials[name]
                # validate credentials
                provider_controller.validate_credentials(user_id, credentials)
                # encrypt credentials
                credentials = tool_configuration.encrypt(credentials)
        except (
            PluginDaemonClientSideError,
            ToolProviderNotFoundError,
            ToolNotFoundError,
            ToolProviderCredentialValidationError,
        ) as e:
            raise ValueError(str(e))
            try:
                if CredentialType.of(db_provider.credential_type).is_editable() and credentials:
                    provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
                    if not provider_controller.need_credentials:
                        raise ValueError(f"provider {provider} does not need credentials")

        if provider is None:
            # create provider
            provider = BuiltinToolProvider(
                tenant_id=tenant_id,
                user_id=user_id,
                provider=provider_name,
                encrypted_credentials=json.dumps(credentials),
            )
                    encrypter, cache = BuiltinToolManageService.create_tool_encrypter(
                        tenant_id, db_provider, provider, provider_controller
                    )

            db.session.add(provider)
        else:
            provider.encrypted_credentials = json.dumps(credentials)
                    original_credentials = encrypter.decrypt(db_provider.credentials)
                    new_credentials: dict = {
                        key: value if value != HIDDEN_VALUE else original_credentials.get(key, UNKNOWN_VALUE)
                        for key, value in credentials.items()
                    }

        # delete cache
        tool_configuration.delete_tool_credentials_cache()
                    if CredentialType.of(db_provider.credential_type).is_validate_allowed():
                        provider_controller.validate_credentials(user_id, new_credentials)

        db.session.commit()
                    # encrypt credentials
                    db_provider.encrypted_credentials = json.dumps(encrypter.encrypt(new_credentials))

                    cache.delete()

                # update name if provided
                if name and name != db_provider.name:
                    # check if the name is already used
                    if (
                        session.query(BuiltinToolProvider)
                        .filter_by(tenant_id=tenant_id, provider=provider, name=name)
                        .count()
                        > 0
                    ):
                        raise ValueError(f"the credential name '{name}' is already used")

                    db_provider.name = name

                session.commit()
            except Exception as e:
                session.rollback()
                raise ValueError(str(e))
        return {"result": "success"}

    @staticmethod
    def get_builtin_tool_provider_credentials(tenant_id: str, provider_name: str):
    def add_builtin_tool_provider(
        user_id: str,
        api_type: CredentialType,
        tenant_id: str,
        provider: str,
        credentials: dict,
        name: str | None = None,
    ):
        """
        add builtin tool provider
        """
        try:
            with Session(db.engine) as session:
                lock = f"builtin_tool_provider_create_lock:{tenant_id}_{provider}"
                with redis_client.lock(lock, timeout=20):
                    provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
                    if not provider_controller.need_credentials:
                        raise ValueError(f"provider {provider} does not need credentials")

                    provider_count = (
                        session.query(BuiltinToolProvider).filter_by(tenant_id=tenant_id, provider=provider).count()
                    )

                    # check if the provider count is reached the limit
                    if provider_count >= BuiltinToolManageService.__MAX_BUILTIN_TOOL_PROVIDER_COUNT__:
                        raise ValueError(f"you have reached the maximum number of providers for {provider}")

                    # validate credentials if allowed
                    if CredentialType.of(api_type).is_validate_allowed():
                        provider_controller.validate_credentials(user_id, credentials)

                    # generate name if not provided
                    if name is None or name == "":
                        name = BuiltinToolManageService.generate_builtin_tool_provider_name(
                            session=session, tenant_id=tenant_id, provider=provider, credential_type=api_type
                        )
                    else:
                        # check if the name is already used
                        if (
                            session.query(BuiltinToolProvider)
                            .filter_by(tenant_id=tenant_id, provider=provider, name=name)
                            .count()
                            > 0
                        ):
                            raise ValueError(f"the credential name '{name}' is already used")

                    # create encrypter
                    encrypter, _ = create_provider_encrypter(
                        tenant_id=tenant_id,
                        config=[
                            x.to_basic_provider_config()
                            for x in provider_controller.get_credentials_schema_by_type(api_type)
                        ],
                        cache=NoOpProviderCredentialCache(),
                    )

                    db_provider = BuiltinToolProvider(
                        tenant_id=tenant_id,
                        user_id=user_id,
                        provider=provider,
                        encrypted_credentials=json.dumps(encrypter.encrypt(credentials)),
                        credential_type=api_type.value,
                        name=name,
                    )

                    session.add(db_provider)
                    session.commit()
        except Exception as e:
            session.rollback()
            raise ValueError(str(e))
        return {"result": "success"}
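A hedged sketch of the create-path locking above: a short-lived Redis lock keyed by (tenant, provider) serializes concurrent credential creation, so the per-provider count check and the insert happen atomically. Connection details and the body are assumptions.

import redis

redis_client = redis.Redis()  # assumption: real deployments configure host/port/auth

def add_provider_credential(tenant_id: str, provider: str):
    lock_name = f"builtin_tool_provider_create_lock:{tenant_id}_{provider}"
    # redis-py locks work as context managers; timeout bounds how long the lock is held
    with redis_client.lock(lock_name, timeout=20):
        # count existing credentials, enforce the limit, then insert the new row
        ...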
    @staticmethod
    def create_tool_encrypter(
        tenant_id: str,
        db_provider: BuiltinToolProvider,
        provider: str,
        provider_controller: BuiltinToolProviderController,
    ):
        encrypter, cache = create_provider_encrypter(
            tenant_id=tenant_id,
            config=[
                x.to_basic_provider_config()
                for x in provider_controller.get_credentials_schema_by_type(db_provider.credential_type)
            ],
            cache=ToolProviderCredentialsCache(tenant_id=tenant_id, provider=provider, credential_id=db_provider.id),
        )
        return encrypter, cache

    @staticmethod
    def generate_builtin_tool_provider_name(
        session: Session, tenant_id: str, provider: str, credential_type: CredentialType
    ) -> str:
        try:
            db_providers = (
                session.query(BuiltinToolProvider)
                .filter_by(
                    tenant_id=tenant_id,
                    provider=provider,
                    credential_type=credential_type.value,
                )
                .order_by(BuiltinToolProvider.created_at.desc())
                .all()
            )

            # Get the default name pattern
            default_pattern = f"{credential_type.get_name()}"

            # Find all names that match the default pattern: "{default_pattern} {number}"
            pattern = rf"^{re.escape(default_pattern)}\s+(\d+)$"
            numbers = []

            for db_provider in db_providers:
                if db_provider.name:
                    match = re.match(pattern, db_provider.name.strip())
                    if match:
                        numbers.append(int(match.group(1)))

            # If no default pattern names found, start with 1
            if not numbers:
                return f"{default_pattern} 1"

            # Find the next number
            max_number = max(numbers)
            return f"{default_pattern} {max_number + 1}"
        except Exception as e:
            logger.warning(f"Error generating next provider name for {provider}: {str(e)}")
            # fallback
            return f"{credential_type.get_name()} 1"
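The same auto-naming scheme, reduced to a runnable standalone sketch: scan existing names for "<type name> <number>" and return the next number, falling back to "... 1". Function name is illustrative.

import re

def next_default_name(existing_names: list[str], default_pattern: str) -> str:
    pattern = rf"^{re.escape(default_pattern)}\s+(\d+)$"
    numbers = [int(m.group(1)) for name in existing_names
               if (m := re.match(pattern, name.strip()))]
    return f"{default_pattern} {max(numbers) + 1 if numbers else 1}"

assert next_default_name(["API KEY 1", "API KEY 3", "prod"], "API KEY") == "API KEY 4"
assert next_default_name([], "OAuth") == "OAuth 1"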
    @staticmethod
    def get_builtin_tool_provider_credentials(
        tenant_id: str, provider_name: str
    ) -> list[ToolProviderCredentialApiEntity]:
        """
        get builtin tool provider credentials
        """
        provider_obj = BuiltinToolManageService._fetch_builtin_provider(provider_name, tenant_id)
        with db.session.no_autoflush:
            providers = (
                db.session.query(BuiltinToolProvider)
                .filter_by(tenant_id=tenant_id, provider=provider_name)
                .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
                .all()
            )

        if provider_obj is None:
            return {}
        if len(providers) == 0:
            return []

        provider_controller = ToolManager.get_builtin_provider(provider_obj.provider, tenant_id)
        tool_configuration = ProviderConfigEncrypter(
            tenant_id=tenant_id,
            config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.entity.identity.name,
        )
        credentials = tool_configuration.decrypt(provider_obj.credentials)
        credentials = tool_configuration.mask_tool_credentials(credentials)
        return credentials
        default_provider = providers[0]
        default_provider.is_default = True
        provider_controller = ToolManager.get_builtin_provider(default_provider.provider, tenant_id)

        credentials: list[ToolProviderCredentialApiEntity] = []
        encrypters = {}
        for provider in providers:
            credential_type = provider.credential_type
            if credential_type not in encrypters:
                encrypters[credential_type] = BuiltinToolManageService.create_tool_encrypter(
                    tenant_id, provider, provider.provider, provider_controller
                )[0]
            encrypter = encrypters[credential_type]
            decrypt_credential = encrypter.mask_tool_credentials(encrypter.decrypt(provider.credentials))
            credential_entity = ToolTransformService.convert_builtin_provider_to_credential_entity(
                provider=provider,
                credentials=decrypt_credential,
            )
            credentials.append(credential_entity)
        return credentials

    @staticmethod
    def delete_builtin_tool_provider(user_id: str, tenant_id: str, provider_name: str):
    def get_builtin_tool_provider_credential_info(tenant_id: str, provider: str) -> ToolProviderCredentialInfoApiEntity:
        """
        get builtin tool provider credential info
        """
        provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
        supported_credential_types = provider_controller.get_supported_credential_types()
        credentials = BuiltinToolManageService.get_builtin_tool_provider_credentials(tenant_id, provider)
        credential_info = ToolProviderCredentialInfoApiEntity(
            supported_credential_types=supported_credential_types,
            is_oauth_custom_client_enabled=BuiltinToolManageService.is_oauth_custom_client_enabled(tenant_id, provider),
            credentials=credentials,
        )

        return credential_info

    @staticmethod
    def delete_builtin_tool_provider(tenant_id: str, provider: str, credential_id: str):
        """
        delete tool provider
        """
        provider_obj = BuiltinToolManageService._fetch_builtin_provider(provider_name, tenant_id)
        with Session(db.engine) as session:
            db_provider = (
                session.query(BuiltinToolProvider)
                .filter(
                    BuiltinToolProvider.tenant_id == tenant_id,
                    BuiltinToolProvider.id == credential_id,
                )
                .first()
            )

        if provider_obj is None:
            raise ValueError(f"you have not added provider {provider_name}")
            if db_provider is None:
                raise ValueError(f"you have not added provider {provider}")

        db.session.delete(provider_obj)
        db.session.commit()
            session.delete(db_provider)
            session.commit()

        # delete cache
        provider_controller = ToolManager.get_builtin_provider(provider_name, tenant_id)
        tool_configuration = ProviderConfigEncrypter(
            tenant_id=tenant_id,
            config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.entity.identity.name,
        )
        tool_configuration.delete_tool_credentials_cache()
            # delete cache
            provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
            _, cache = BuiltinToolManageService.create_tool_encrypter(
                tenant_id, db_provider, provider, provider_controller
            )
            cache.delete()

        return {"result": "success"}

    @staticmethod
    def set_default_provider(tenant_id: str, user_id: str, provider: str, id: str):
        """
        set default provider
        """
        with Session(db.engine) as session:
            # get provider
            target_provider = session.query(BuiltinToolProvider).filter_by(id=id).first()
            if target_provider is None:
                raise ValueError("provider not found")

            # clear default provider
            session.query(BuiltinToolProvider).filter_by(
                tenant_id=tenant_id, user_id=user_id, provider=provider, is_default=True
            ).update({"is_default": False})

            # set new default provider
            target_provider.is_default = True
            session.commit()
        return {"result": "success"}

    @staticmethod
    def is_oauth_system_client_exists(provider_name: str) -> bool:
        """
        check if oauth system client exists
        """
        tool_provider = ToolProviderID(provider_name)
        with Session(db.engine).no_autoflush as session:
            system_client: ToolOAuthSystemClient | None = (
                session.query(ToolOAuthSystemClient)
                .filter_by(plugin_id=tool_provider.plugin_id, provider=tool_provider.provider_name)
                .first()
            )
            return system_client is not None

    @staticmethod
    def is_oauth_custom_client_enabled(tenant_id: str, provider: str) -> bool:
        """
        check if oauth custom client is enabled
        """
        tool_provider = ToolProviderID(provider)
        with Session(db.engine).no_autoflush as session:
            user_client: ToolOAuthTenantClient | None = (
                session.query(ToolOAuthTenantClient)
                .filter_by(
                    tenant_id=tenant_id,
                    provider=tool_provider.provider_name,
                    plugin_id=tool_provider.plugin_id,
                    enabled=True,
                )
                .first()
            )
            return user_client is not None and user_client.enabled

    @staticmethod
    def get_oauth_client(tenant_id: str, provider: str) -> dict[str, Any] | None:
        """
        get builtin tool provider
        """
        tool_provider = ToolProviderID(provider)
        provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
        encrypter, _ = create_provider_encrypter(
            tenant_id=tenant_id,
            config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()],
            cache=NoOpProviderCredentialCache(),
        )
        with Session(db.engine).no_autoflush as session:
            user_client: ToolOAuthTenantClient | None = (
                session.query(ToolOAuthTenantClient)
                .filter_by(
                    tenant_id=tenant_id,
                    provider=tool_provider.provider_name,
                    plugin_id=tool_provider.plugin_id,
                    enabled=True,
                )
                .first()
            )
            oauth_params: dict[str, Any] | None = None
            if user_client:
                oauth_params = encrypter.decrypt(user_client.oauth_params)
                return oauth_params

            # only verified provider can use custom oauth client
            is_verified = not isinstance(provider, PluginToolProviderController) or PluginService.is_plugin_verified(
                tenant_id, provider.plugin_unique_identifier
            )
            if not is_verified:
                return oauth_params

            system_client: ToolOAuthSystemClient | None = (
                session.query(ToolOAuthSystemClient)
                .filter_by(plugin_id=tool_provider.plugin_id, provider=tool_provider.provider_name)
                .first()
            )
            if system_client:
                try:
                    oauth_params = decrypt_system_oauth_params(system_client.encrypted_oauth_params)
                except Exception as e:
                    raise ValueError(f"Error decrypting system oauth params: {e}")

            return oauth_params
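The resolution order implemented above, reduced to a standalone sketch: a tenant's own enabled OAuth client wins; otherwise only plugins that pass verification may fall back to the platform-level system client. The dict shapes are illustrative stand-ins for the ORM rows.

def resolve_oauth_client(tenant_client, system_client, plugin_is_verified: bool):
    if tenant_client is not None and tenant_client.get("enabled"):
        return tenant_client["params"]   # custom client configured by the tenant
    if not plugin_is_verified:
        return None                      # unverified plugins get no system fallback
    if system_client is not None:
        return system_client["params"]   # shared, platform-managed client
    return None

assert resolve_oauth_client({"enabled": True, "params": {"client_id": "tenant"}}, None, False) == {"client_id": "tenant"}
assert resolve_oauth_client(None, {"params": {"client_id": "system"}}, True) == {"client_id": "system"}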
    @staticmethod
    def get_builtin_tool_provider_icon(provider: str):
        """
@@ -234,9 +542,7 @@ class BuiltinToolManageService:

        with db.session.no_autoflush:
            # get all user added providers
            db_providers: list[BuiltinToolProvider] = (
                db.session.query(BuiltinToolProvider).filter(BuiltinToolProvider.tenant_id == tenant_id).all() or []
            )
            db_providers: list[BuiltinToolProvider] = ToolManager.list_default_builtin_providers(tenant_id)

            # rewrite db_providers
            for db_provider in db_providers:
@@ -275,7 +581,6 @@
                        ToolTransformService.convert_tool_entity_to_api_entity(
                            tenant_id=tenant_id,
                            tool=tool,
                            credentials=user_builtin_provider.original_credentials,
                            labels=ToolLabelManager.get_tool_labels(provider_controller),
                        )
                    )
@@ -287,43 +592,153 @@
        return BuiltinToolProviderSort.sort(result)

    @staticmethod
    def _fetch_builtin_provider(provider_name: str, tenant_id: str) -> BuiltinToolProvider | None:
        try:
            full_provider_name = provider_name
            provider_id_entity = ToolProviderID(provider_name)
            provider_name = provider_id_entity.provider_name
            if provider_id_entity.organization != "langgenius":
                provider_obj = (
                    db.session.query(BuiltinToolProvider)
                    .filter(
                        BuiltinToolProvider.tenant_id == tenant_id,
                        BuiltinToolProvider.provider == full_provider_name,
    def get_builtin_provider(provider_name: str, tenant_id: str) -> Optional[BuiltinToolProvider]:
        """
        This method is used to fetch the builtin provider from the database
        1.if the default provider exists, return the default provider
        2.if the default provider does not exist, return the oldest provider
        """
        with Session(db.engine) as session:
            try:
                full_provider_name = provider_name
                provider_id_entity = ToolProviderID(provider_name)
                provider_name = provider_id_entity.provider_name

                if provider_id_entity.organization != "langgenius":
                    provider = (
                        session.query(BuiltinToolProvider)
                        .filter(
                            BuiltinToolProvider.tenant_id == tenant_id,
                            BuiltinToolProvider.provider == full_provider_name,
                        )
                        .order_by(
                            BuiltinToolProvider.is_default.desc(),  # default=True first
                            BuiltinToolProvider.created_at.asc(),  # oldest first
                        )
                        .first()
                    )
                    .first()
                )
            else:
                provider_obj = (
                    db.session.query(BuiltinToolProvider)
                    .filter(
                        BuiltinToolProvider.tenant_id == tenant_id,
                        (BuiltinToolProvider.provider == provider_name)
                        | (BuiltinToolProvider.provider == full_provider_name),
                else:
                    provider = (
                        session.query(BuiltinToolProvider)
                        .filter(
                            BuiltinToolProvider.tenant_id == tenant_id,
                            (BuiltinToolProvider.provider == provider_name)
                            | (BuiltinToolProvider.provider == full_provider_name),
                        )
                        .order_by(
                            BuiltinToolProvider.is_default.desc(),  # default=True first
                            BuiltinToolProvider.created_at.asc(),  # oldest first
                        )
                        .first()
                    )

                if provider is None:
                    return None

                provider.provider = ToolProviderID(provider.provider).to_string()
                return provider
            except Exception:
                # it's an old provider without organization
                return (
                    session.query(BuiltinToolProvider)
                    .filter(BuiltinToolProvider.tenant_id == tenant_id, BuiltinToolProvider.provider == provider_name)
                    .order_by(
                        BuiltinToolProvider.is_default.desc(),  # default=True first
                        BuiltinToolProvider.created_at.asc(),  # oldest first
                    )
                    .first()
                )

        if provider_obj is None:
            return None
    @staticmethod
    def save_custom_oauth_client_params(
        tenant_id: str,
        provider: str,
        client_params: Optional[dict] = None,
        enable_oauth_custom_client: Optional[bool] = None,
    ):
        """
        setup oauth custom client
        """
        if client_params is None and enable_oauth_custom_client is None:
            return {"result": "success"}

            provider_obj.provider = ToolProviderID(provider_obj.provider).to_string()
            return provider_obj
        except Exception:
            # it's an old provider without organization
            return (
                db.session.query(BuiltinToolProvider)
                .filter(
                    BuiltinToolProvider.tenant_id == tenant_id,
                    (BuiltinToolProvider.provider == provider_name),
        tool_provider = ToolProviderID(provider)
        provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
        if not provider_controller:
            raise ToolProviderNotFoundError(f"Provider {provider} not found")

        if not isinstance(provider_controller, (BuiltinToolProviderController, PluginToolProviderController)):
            raise ValueError(f"Provider {provider} is not a builtin or plugin provider")

        with Session(db.engine) as session:
            custom_client_params = (
                session.query(ToolOAuthTenantClient)
                .filter_by(
                    tenant_id=tenant_id,
                    plugin_id=tool_provider.plugin_id,
                    provider=tool_provider.provider_name,
                )
                .first()
            )

            # if the record does not exist, create a basic record
            if custom_client_params is None:
                custom_client_params = ToolOAuthTenantClient(
                    tenant_id=tenant_id,
                    plugin_id=tool_provider.plugin_id,
                    provider=tool_provider.provider_name,
                )
                session.add(custom_client_params)

            if client_params is not None:
                encrypter, _ = create_provider_encrypter(
                    tenant_id=tenant_id,
                    config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()],
                    cache=NoOpProviderCredentialCache(),
                )
                original_params = encrypter.decrypt(custom_client_params.oauth_params)
                new_params: dict = {
                    key: value if value != HIDDEN_VALUE else original_params.get(key, UNKNOWN_VALUE)
                    for key, value in client_params.items()
                }
                custom_client_params.encrypted_oauth_params = json.dumps(encrypter.encrypt(new_params))

            if enable_oauth_custom_client is not None:
                custom_client_params.enabled = enable_oauth_custom_client

            session.commit()
        return {"result": "success"}

    @staticmethod
    def get_custom_oauth_client_params(tenant_id: str, provider: str):
        """
        get custom oauth client params
        """
        with Session(db.engine) as session:
            tool_provider = ToolProviderID(provider)
            custom_oauth_client_params: ToolOAuthTenantClient | None = (
                session.query(ToolOAuthTenantClient)
                .filter_by(
                    tenant_id=tenant_id,
                    plugin_id=tool_provider.plugin_id,
                    provider=tool_provider.provider_name,
                )
                .first()
            )
            if custom_oauth_client_params is None:
                return {}

            provider_controller = ToolManager.get_builtin_provider(provider, tenant_id)
            if not provider_controller:
                raise ToolProviderNotFoundError(f"Provider {provider} not found")

            if not isinstance(provider_controller, BuiltinToolProviderController):
                raise ValueError(f"Provider {provider} is not a builtin or plugin provider")

            encrypter, _ = create_provider_encrypter(
                tenant_id=tenant_id,
                config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()],
                cache=NoOpProviderCredentialCache(),
            )

            return encrypter.mask_tool_credentials(encrypter.decrypt(custom_oauth_client_params.oauth_params))

@@ -7,13 +7,14 @@ from sqlalchemy import or_
from sqlalchemy.exc import IntegrityError

from core.helper import encrypter
from core.helper.provider_cache import NoOpProviderCredentialCache
from core.mcp.error import MCPAuthError, MCPError
from core.mcp.mcp_client import MCPClient
from core.tools.entities.api_entities import ToolProviderApiEntity
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.mcp_tool.provider import MCPToolProviderController
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.utils.encryption import ProviderConfigEncrypter
from extensions.ext_database import db
from models.tools import MCPToolProvider
from services.tools.tools_transform_service import ToolTransformService
@@ -69,6 +70,7 @@ class MCPToolManageService:
                    MCPToolProvider.server_url_hash == server_url_hash,
                    MCPToolProvider.server_identifier == server_identifier,
                ),
                MCPToolProvider.tenant_id == tenant_id,
            )
            .first()
        )
@@ -197,8 +199,7 @@ class MCPToolManageService:
        tool_configuration = ProviderConfigEncrypter(
            tenant_id=mcp_provider.tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.provider_id,
            provider_config_cache=NoOpProviderCredentialCache(),
        )
        credentials = tool_configuration.encrypt(credentials)
        mcp_provider.updated_at = datetime.now()

@@ -5,21 +5,23 @@ from typing import Any, Optional, Union, cast
from yarl import URL

from configs import dify_config
from core.helper.provider_cache import ToolProviderCredentialsCache
from core.mcp.types import Tool as MCPTool
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.builtin_tool.provider import BuiltinToolProviderController
from core.tools.custom_tool.provider import ApiToolProviderController
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity, ToolProviderCredentialApiEntity
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import (
    ApiProviderAuthType,
    CredentialType,
    ToolParameter,
    ToolProviderType,
)
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.utils.encryption import create_provider_encrypter, create_tool_provider_encrypter
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
from core.tools.workflow_as_tool.tool import WorkflowTool
from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider
@@ -119,7 +121,12 @@ class ToolTransformService:
            result.plugin_unique_identifier = provider_controller.plugin_unique_identifier

        # get credentials schema
        schema = {x.to_basic_provider_config().name: x for x in provider_controller.get_credentials_schema()}
        schema = {
            x.to_basic_provider_config().name: x
            for x in provider_controller.get_credentials_schema_by_type(
                CredentialType.of(db_provider.credential_type) if db_provider else CredentialType.API_KEY
            )
        }

        for name, value in schema.items():
            if result.masked_credentials:
@@ -136,15 +143,23 @@
            credentials = db_provider.credentials

            # init tool configuration
            tool_configuration = ProviderConfigEncrypter(
            encrypter, _ = create_provider_encrypter(
                tenant_id=db_provider.tenant_id,
                config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
                provider_type=provider_controller.provider_type.value,
                provider_identity=provider_controller.entity.identity.name,
                config=[
                    x.to_basic_provider_config()
                    for x in provider_controller.get_credentials_schema_by_type(
                        CredentialType.of(db_provider.credential_type)
                    )
                ],
                cache=ToolProviderCredentialsCache(
                    tenant_id=db_provider.tenant_id,
                    provider=db_provider.provider,
                    credential_id=db_provider.id,
                ),
            )
            # decrypt the credentials and mask the credentials
            decrypted_credentials = tool_configuration.decrypt(data=credentials)
            masked_credentials = tool_configuration.mask_tool_credentials(data=decrypted_credentials)
            decrypted_credentials = encrypter.decrypt(data=credentials)
            masked_credentials = encrypter.mask_tool_credentials(data=decrypted_credentials)

            result.masked_credentials = masked_credentials
            result.original_credentials = decrypted_credentials
@@ -287,16 +302,14 @@

        if decrypt_credentials:
            # init tool configuration
            tool_configuration = ProviderConfigEncrypter(
            encrypter, _ = create_tool_provider_encrypter(
                tenant_id=db_provider.tenant_id,
                config=[x.to_basic_provider_config() for x in provider_controller.get_credentials_schema()],
                provider_type=provider_controller.provider_type.value,
                provider_identity=provider_controller.entity.identity.name,
                controller=provider_controller,
            )

            # decrypt the credentials and mask the credentials
            decrypted_credentials = tool_configuration.decrypt(data=credentials)
            masked_credentials = tool_configuration.mask_tool_credentials(data=decrypted_credentials)
            decrypted_credentials = encrypter.decrypt(data=credentials)
            masked_credentials = encrypter.mask_tool_credentials(data=decrypted_credentials)

            result.masked_credentials = masked_credentials

@@ -306,7 +319,6 @@
    def convert_tool_entity_to_api_entity(
        tool: Union[ApiToolBundle, WorkflowTool, Tool],
        tenant_id: str,
        credentials: dict | None = None,
        labels: list[str] | None = None,
    ) -> ToolApiEntity:
        """
@@ -316,7 +328,7 @@
        # fork tool runtime
        tool = tool.fork_tool_runtime(
            runtime=ToolRuntime(
                credentials=credentials or {},
                credentials={},
                tenant_id=tenant_id,
            )
        )
@@ -357,6 +369,19 @@
            labels=labels or [],
        )

    @staticmethod
    def convert_builtin_provider_to_credential_entity(
        provider: BuiltinToolProvider, credentials: dict
    ) -> ToolProviderCredentialApiEntity:
        return ToolProviderCredentialApiEntity(
            id=provider.id,
            name=provider.name,
            provider=provider.provider,
            credential_type=CredentialType.of(provider.credential_type),
            is_default=provider.is_default,
            credentials=credentials,
        )

    @staticmethod
    def convert_mcp_schema_to_parameter(schema: dict) -> list["ToolParameter"]:
        """

@@ -3,7 +3,7 @@ import time
import uuid
from collections.abc import Callable, Generator, Mapping, Sequence
from datetime import UTC, datetime
from typing import Any, Optional, cast
from typing import Any, Optional
from uuid import uuid4

from sqlalchemy import select
@@ -15,10 +15,10 @@ from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.file import File
from core.repositories import DifyCoreRepositoryFactory
from core.variables import Variable
from core.variables.variables import VariableUnion
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph_engine.entities.event import InNodeEvent
from core.workflow.nodes import NodeType
@@ -28,7 +28,6 @@ from core.workflow.nodes.event import RunCompletedEvent
from core.workflow.nodes.event.types import NodeEvent
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.nodes.start.entities import StartNodeData
from core.workflow.system_variable import SystemVariable
from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
@@ -370,7 +369,7 @@ class WorkflowService:

        else:
            variable_pool = VariablePool(
                system_variables=SystemVariable.empty(),
                system_variables={},
                user_inputs=user_inputs,
                environment_variables=draft_workflow.environment_variables,
                conversation_variables=[],
@@ -686,30 +685,36 @@ def _setup_variable_pool(
):
    # Only inject system variables for START node type.
    if node_type == NodeType.START:
        system_variable = SystemVariable(
            user_id=user_id,
            app_id=workflow.app_id,
            workflow_id=workflow.id,
            files=files or [],
            workflow_execution_id=str(uuid.uuid4()),
        )
        # Create a variable pool.
        system_inputs: dict[SystemVariableKey, Any] = {
            # From inputs:
            SystemVariableKey.FILES: files,
            SystemVariableKey.USER_ID: user_id,
            # From workflow model
            SystemVariableKey.APP_ID: workflow.app_id,
            SystemVariableKey.WORKFLOW_ID: workflow.id,
            # Randomly generated.
            SystemVariableKey.WORKFLOW_EXECUTION_ID: str(uuid.uuid4()),
        }

        # Only add chatflow-specific variables for non-workflow types
        if workflow.type != WorkflowType.WORKFLOW.value:
            system_variable.query = query
            system_variable.conversation_id = conversation_id
            system_variable.dialogue_count = 0
            system_inputs.update(
                {
                    SystemVariableKey.QUERY: query,
                    SystemVariableKey.CONVERSATION_ID: conversation_id,
                    SystemVariableKey.DIALOGUE_COUNT: 0,
                }
            )
    else:
        system_variable = SystemVariable.empty()
        system_inputs = {}

    # init variable pool
    variable_pool = VariablePool(
        system_variables=system_variable,
        system_variables=system_inputs,
        user_inputs=user_inputs,
        environment_variables=workflow.environment_variables,
        # Based on the definition of `VariableUnion`,
        # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
        conversation_variables=cast(list[VariableUnion], conversation_variables),  #
        conversation_variables=conversation_variables,
    )

    return variable_pool
@ -9,12 +9,12 @@ from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes.code.code_node import CodeNode
from core.workflow.nodes.code.entities import CodeNodeData
from core.workflow.system_variable import SystemVariable
from models.enums import UserFrom
from models.workflow import WorkflowType
from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock
@ -50,7 +50,7 @@ def init_code_node(code_config: dict):

# construct variable pool
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="aaa", files=[]),
system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"},
user_inputs={},
environment_variables=[],
conversation_variables=[],

@ -6,11 +6,11 @@ import pytest

from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes.http_request.node import HttpRequestNode
from core.workflow.system_variable import SystemVariable
from models.enums import UserFrom
from models.workflow import WorkflowType
from tests.integration_tests.workflow.nodes.__mock.http import setup_http_mock
@ -44,7 +44,7 @@ def init_http_node(config: dict):

# construct variable pool
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="aaa", files=[]),
system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"},
user_inputs={},
environment_variables=[],
conversation_variables=[],

@ -13,12 +13,12 @@ from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage
from core.model_runtime.entities.message_entities import AssistantPromptMessage
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes.event import RunCompletedEvent
from core.workflow.nodes.llm.node import LLMNode
from core.workflow.system_variable import SystemVariable
from extensions.ext_database import db
from models.enums import UserFrom
from models.workflow import WorkflowType
@ -62,14 +62,12 @@ def init_llm_node(config: dict) -> LLMNode:

# construct variable pool
variable_pool = VariablePool(
system_variables=SystemVariable(
user_id="aaa",
app_id=app_id,
workflow_id=workflow_id,
files=[],
query="what's the weather today?",
conversation_id="abababa",
),
system_variables={
SystemVariableKey.QUERY: "what's the weather today?",
SystemVariableKey.FILES: [],
SystemVariableKey.CONVERSATION_ID: "abababa",
SystemVariableKey.USER_ID: "aaa",
},
user_inputs={},
environment_variables=[],
conversation_variables=[],

@ -8,11 +8,11 @@ from core.app.entities.app_invoke_entities import InvokeFrom
from core.model_runtime.entities import AssistantPromptMessage
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes.parameter_extractor.parameter_extractor_node import ParameterExtractorNode
from core.workflow.system_variable import SystemVariable
from extensions.ext_database import db
from models.enums import UserFrom
from tests.integration_tests.workflow.nodes.__mock.model import get_mocked_fetch_model_config
@ -64,9 +64,12 @@ def init_parameter_extractor_node(config: dict):

# construct variable pool
variable_pool = VariablePool(
system_variables=SystemVariable(
user_id="aaa", files=[], query="what's the weather in SF", conversation_id="abababa"
),
system_variables={
SystemVariableKey.QUERY: "what's the weather in SF",
SystemVariableKey.FILES: [],
SystemVariableKey.CONVERSATION_ID: "abababa",
SystemVariableKey.USER_ID: "aaa",
},
user_inputs={},
environment_variables=[],
conversation_variables=[],

@ -6,11 +6,11 @@ import pytest
from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode
from core.workflow.system_variable import SystemVariable
from models.enums import UserFrom
from models.workflow import WorkflowType
from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock
@ -61,7 +61,7 @@ def test_execute_code(setup_code_executor_mock):

# construct variable pool
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="aaa", files=[]),
system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"},
user_inputs={},
environment_variables=[],
conversation_variables=[],

@ -6,12 +6,12 @@ from core.app.entities.app_invoke_entities import InvokeFrom
from core.tools.utils.configuration import ToolParameterConfigurationManager
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph import Graph
from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes.event.event import RunCompletedEvent
from core.workflow.nodes.tool.tool_node import ToolNode
from core.workflow.system_variable import SystemVariable
from models.enums import UserFrom
from models.workflow import WorkflowType

@ -44,7 +44,7 @@ def init_tool_node(config: dict):

# construct variable pool
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="aaa", files=[]),
system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"},
user_inputs={},
environment_variables=[],
conversation_variables=[],

@ -88,7 +88,6 @@ def test_flask_configs(monkeypatch):
"pool_pre_ping": False,
"pool_recycle": 3600,
"pool_size": 30,
"pool_use_lifo": False,
}

assert config["CONSOLE_WEB_URL"] == "https://example.com"

@ -1,380 +0,0 @@
from unittest.mock import MagicMock, patch

import pytest
from flask import Flask
from flask_login import LoginManager, UserMixin

from controllers.console.error import NotInitValidateError, NotSetupError, UnauthorizedAndForceLogout
from controllers.console.workspace.error import AccountNotInitializedError
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_rate_limit_check,
cloud_edition_billing_resource_check,
enterprise_license_required,
only_edition_cloud,
only_edition_enterprise,
only_edition_self_hosted,
setup_required,
)
from models.account import AccountStatus
from services.feature_service import LicenseStatus


class MockUser(UserMixin):
"""Simple User class for testing."""

def __init__(self, user_id: str):
self.id = user_id
self.current_tenant_id = "tenant123"

def get_id(self) -> str:
return self.id


def create_app_with_login():
"""Create a Flask app with LoginManager configured."""
app = Flask(__name__)
app.config["SECRET_KEY"] = "test-secret-key"

login_manager = LoginManager()
login_manager.init_app(app)

@login_manager.user_loader
def load_user(user_id: str):
return MockUser(user_id)

return app


class TestAccountInitialization:
"""Test account initialization decorator"""

def test_should_allow_initialized_account(self):
"""Test that initialized accounts can access protected views"""
# Arrange
mock_user = MagicMock()
mock_user.status = AccountStatus.ACTIVE

@account_initialization_required
def protected_view():
return "success"

# Act
with patch("controllers.console.wraps.current_user", mock_user):
result = protected_view()

# Assert
assert result == "success"

def test_should_reject_uninitialized_account(self):
"""Test that uninitialized accounts raise AccountNotInitializedError"""
# Arrange
mock_user = MagicMock()
mock_user.status = AccountStatus.UNINITIALIZED

@account_initialization_required
def protected_view():
return "success"

# Act & Assert
with patch("controllers.console.wraps.current_user", mock_user):
with pytest.raises(AccountNotInitializedError):
protected_view()


class TestEditionChecks:
"""Test edition-specific decorators"""

def test_only_edition_cloud_allows_cloud_edition(self):
"""Test cloud edition decorator allows CLOUD edition"""

# Arrange
@only_edition_cloud
def cloud_view():
return "cloud_success"

# Act
with patch("controllers.console.wraps.dify_config.EDITION", "CLOUD"):
result = cloud_view()

# Assert
assert result == "cloud_success"

def test_only_edition_cloud_rejects_other_editions(self):
"""Test cloud edition decorator rejects non-CLOUD editions"""
# Arrange
app = Flask(__name__)

@only_edition_cloud
def cloud_view():
return "cloud_success"

# Act & Assert
with app.test_request_context():
with patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"):
with pytest.raises(Exception) as exc_info:
cloud_view()
assert exc_info.value.code == 404

def test_only_edition_enterprise_allows_when_enabled(self):
"""Test enterprise edition decorator allows when ENTERPRISE_ENABLED is True"""

# Arrange
@only_edition_enterprise
def enterprise_view():
return "enterprise_success"

# Act
with patch("controllers.console.wraps.dify_config.ENTERPRISE_ENABLED", True):
result = enterprise_view()

# Assert
assert result == "enterprise_success"

def test_only_edition_self_hosted_allows_self_hosted(self):
"""Test self-hosted edition decorator allows SELF_HOSTED edition"""

# Arrange
@only_edition_self_hosted
def self_hosted_view():
return "self_hosted_success"

# Act
with patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"):
result = self_hosted_view()

# Assert
assert result == "self_hosted_success"


class TestBillingResourceLimits:
"""Test billing resource limit decorators"""

def test_should_allow_when_under_resource_limit(self):
"""Test that requests are allowed when under resource limits"""
# Arrange
mock_features = MagicMock()
mock_features.billing.enabled = True
mock_features.members.limit = 10
mock_features.members.size = 5

@cloud_edition_billing_resource_check("members")
def add_member():
return "member_added"

# Act
with patch("controllers.console.wraps.current_user"):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
result = add_member()

# Assert
assert result == "member_added"

def test_should_reject_when_over_resource_limit(self):
"""Test that requests are rejected when over resource limits"""
# Arrange
app = create_app_with_login()
mock_features = MagicMock()
mock_features.billing.enabled = True
mock_features.members.limit = 10
mock_features.members.size = 10

@cloud_edition_billing_resource_check("members")
def add_member():
return "member_added"

# Act & Assert
with app.test_request_context():
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
with pytest.raises(Exception) as exc_info:
add_member()
assert exc_info.value.code == 403
assert "members has reached the limit" in str(exc_info.value.description)

def test_should_check_source_for_documents_limit(self):
"""Test document limit checks request source"""
# Arrange
app = create_app_with_login()
mock_features = MagicMock()
mock_features.billing.enabled = True
mock_features.documents_upload_quota.limit = 100
mock_features.documents_upload_quota.size = 100

@cloud_edition_billing_resource_check("documents")
def upload_document():
return "document_uploaded"

# Test 1: Should reject when source is datasets
with app.test_request_context("/?source=datasets"):
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
with pytest.raises(Exception) as exc_info:
upload_document()
assert exc_info.value.code == 403

# Test 2: Should allow when source is not datasets
with app.test_request_context("/?source=other"):
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features):
result = upload_document()
assert result == "document_uploaded"


class TestRateLimiting:
"""Test rate limiting decorator"""

@patch("controllers.console.wraps.redis_client")
@patch("controllers.console.wraps.db")
def test_should_allow_requests_within_rate_limit(self, mock_db, mock_redis):
"""Test that requests within rate limit are allowed"""
# Arrange
mock_rate_limit = MagicMock()
mock_rate_limit.enabled = True
mock_rate_limit.limit = 10
mock_redis.zcard.return_value = 5 # 5 requests in window

@cloud_edition_billing_rate_limit_check("knowledge")
def knowledge_request():
return "knowledge_success"

# Act
with patch("controllers.console.wraps.current_user"):
with patch(
"controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit
):
result = knowledge_request()

# Assert
assert result == "knowledge_success"
mock_redis.zadd.assert_called_once()
mock_redis.zremrangebyscore.assert_called_once()

@patch("controllers.console.wraps.redis_client")
@patch("controllers.console.wraps.db")
def test_should_reject_requests_over_rate_limit(self, mock_db, mock_redis):
"""Test that requests over rate limit are rejected and logged"""
# Arrange
app = create_app_with_login()
mock_rate_limit = MagicMock()
mock_rate_limit.enabled = True
mock_rate_limit.limit = 10
mock_rate_limit.subscription_plan = "pro"
mock_redis.zcard.return_value = 11 # Over limit

mock_session = MagicMock()
mock_db.session = mock_session

@cloud_edition_billing_rate_limit_check("knowledge")
def knowledge_request():
return "knowledge_success"

# Act & Assert
with app.test_request_context():
with patch("controllers.console.wraps.current_user", MockUser("test_user")):
with patch(
"controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit
):
with pytest.raises(Exception) as exc_info:
knowledge_request()

# Verify error
assert exc_info.value.code == 403
assert "rate limit" in str(exc_info.value.description)

# Verify rate limit log was created
mock_session.add.assert_called_once()
mock_session.commit.assert_called_once()


class TestSystemSetup:
"""Test system setup decorator"""

@patch("controllers.console.wraps.db")
def test_should_allow_when_setup_complete(self, mock_db):
"""Test that requests are allowed when setup is complete"""
# Arrange
mock_db.session.query.return_value.first.return_value = MagicMock() # Setup exists

@setup_required
def admin_view():
return "admin_success"

# Act
with patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"):
result = admin_view()

# Assert
assert result == "admin_success"

@patch("controllers.console.wraps.db")
@patch("controllers.console.wraps.os.environ.get")
def test_should_raise_not_init_validate_error_with_init_password(self, mock_environ_get, mock_db):
"""Test NotInitValidateError when INIT_PASSWORD is set but setup not complete"""
# Arrange
mock_db.session.query.return_value.first.return_value = None # No setup
mock_environ_get.return_value = "some_password"

@setup_required
def admin_view():
return "admin_success"

# Act & Assert
with patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"):
with pytest.raises(NotInitValidateError):
admin_view()

@patch("controllers.console.wraps.db")
@patch("controllers.console.wraps.os.environ.get")
def test_should_raise_not_setup_error_without_init_password(self, mock_environ_get, mock_db):
"""Test NotSetupError when no INIT_PASSWORD and setup not complete"""
# Arrange
mock_db.session.query.return_value.first.return_value = None # No setup
mock_environ_get.return_value = None # No INIT_PASSWORD

@setup_required
def admin_view():
return "admin_success"

# Act & Assert
with patch("controllers.console.wraps.dify_config.EDITION", "SELF_HOSTED"):
with pytest.raises(NotSetupError):
admin_view()


class TestEnterpriseLicense:
"""Test enterprise license decorator"""

def test_should_allow_with_valid_license(self):
"""Test that valid licenses allow access"""
# Arrange
mock_settings = MagicMock()
mock_settings.license.status = LicenseStatus.ACTIVE

@enterprise_license_required
def enterprise_feature():
return "enterprise_success"

# Act
with patch("controllers.console.wraps.FeatureService.get_system_features", return_value=mock_settings):
result = enterprise_feature()

# Assert
assert result == "enterprise_success"

@pytest.mark.parametrize("invalid_status", [LicenseStatus.INACTIVE, LicenseStatus.EXPIRED, LicenseStatus.LOST])
def test_should_reject_with_invalid_license(self, invalid_status):
"""Test that invalid licenses raise UnauthorizedAndForceLogout"""
# Arrange
mock_settings = MagicMock()
mock_settings.license.status = invalid_status

@enterprise_license_required
def enterprise_feature():
return "enterprise_success"

# Act & Assert
with patch("controllers.console.wraps.FeatureService.get_system_features", return_value=mock_settings):
with pytest.raises(UnauthorizedAndForceLogout) as exc_info:
enterprise_feature()
assert "license is invalid" in str(exc_info.value)
@ -1,49 +1,14 @@
import dataclasses

from pydantic import BaseModel

from core.file import File, FileTransferMethod, FileType
from core.helper import encrypter
from core.variables.segments import (
ArrayAnySegment,
ArrayFileSegment,
ArrayNumberSegment,
ArrayObjectSegment,
ArrayStringSegment,
FileSegment,
FloatSegment,
IntegerSegment,
NoneSegment,
ObjectSegment,
Segment,
SegmentUnion,
StringSegment,
get_segment_discriminator,
)
from core.variables.types import SegmentType
from core.variables.variables import (
ArrayAnyVariable,
ArrayFileVariable,
ArrayNumberVariable,
ArrayObjectVariable,
ArrayStringVariable,
FileVariable,
FloatVariable,
IntegerVariable,
NoneVariable,
ObjectVariable,
SecretVariable,
StringVariable,
Variable,
VariableUnion,
)
from core.variables import SecretVariable, StringVariable
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.system_variable import SystemVariable
from core.workflow.enums import SystemVariableKey


def test_segment_group_to_text():
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="fake-user-id"),
system_variables={
SystemVariableKey("user_id"): "fake-user-id",
},
user_inputs={},
environment_variables=[
SecretVariable(name="secret_key", value="fake-secret-key"),
@ -65,7 +30,7 @@ def test_segment_group_to_text():

def test_convert_constant_to_segment_group():
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="1", app_id="1", workflow_id="1"),
system_variables={},
user_inputs={},
environment_variables=[],
conversation_variables=[],
@ -78,7 +43,9 @@ def test_convert_constant_to_segment_group():

def test_convert_variable_to_segment_group():
variable_pool = VariablePool(
system_variables=SystemVariable(user_id="fake-user-id"),
system_variables={
SystemVariableKey("user_id"): "fake-user-id",
},
user_inputs={},
environment_variables=[],
conversation_variables=[],
@ -89,297 +56,3 @@ def test_convert_variable_to_segment_group():
assert segments_group.log == "fake-user-id"
assert isinstance(segments_group.value[0], StringVariable)
assert segments_group.value[0].value == "fake-user-id"


class _Segments(BaseModel):
segments: list[SegmentUnion]


class _Variables(BaseModel):
variables: list[VariableUnion]


def create_test_file(
file_type: FileType = FileType.DOCUMENT,
transfer_method: FileTransferMethod = FileTransferMethod.LOCAL_FILE,
filename: str = "test.txt",
extension: str = ".txt",
mime_type: str = "text/plain",
size: int = 1024,
) -> File:
"""Factory function to create File objects for testing"""
return File(
tenant_id="test-tenant",
type=file_type,
transfer_method=transfer_method,
filename=filename,
extension=extension,
mime_type=mime_type,
size=size,
related_id="test-file-id" if transfer_method != FileTransferMethod.REMOTE_URL else None,
remote_url="https://example.com/file.txt" if transfer_method == FileTransferMethod.REMOTE_URL else None,
storage_key="test-storage-key",
)


class TestSegmentDumpAndLoad:
"""Test suite for segment and variable serialization/deserialization"""

def test_segments(self):
"""Test basic segment serialization compatibility"""
model = _Segments(segments=[IntegerSegment(value=1), StringSegment(value="a")])
json = model.model_dump_json()
print("Json: ", json)
loaded = _Segments.model_validate_json(json)
assert loaded == model

def test_segment_number(self):
"""Test number segment serialization compatibility"""
model = _Segments(segments=[IntegerSegment(value=1), FloatSegment(value=1.0)])
json = model.model_dump_json()
print("Json: ", json)
loaded = _Segments.model_validate_json(json)
assert loaded == model

def test_variables(self):
"""Test variable serialization compatibility"""
model = _Variables(variables=[IntegerVariable(value=1, name="int"), StringVariable(value="a", name="str")])
json = model.model_dump_json()
print("Json: ", json)
restored = _Variables.model_validate_json(json)
assert restored == model

def test_all_segments_serialization(self):
"""Test serialization/deserialization of all segment types"""
# Create one instance of each segment type
test_file = create_test_file()

all_segments: list[SegmentUnion] = [
NoneSegment(),
StringSegment(value="test string"),
IntegerSegment(value=42),
FloatSegment(value=3.14),
ObjectSegment(value={"key": "value", "number": 123}),
FileSegment(value=test_file),
ArrayAnySegment(value=[1, "string", 3.14, {"key": "value"}]),
ArrayStringSegment(value=["hello", "world"]),
ArrayNumberSegment(value=[1, 2.5, 3]),
ArrayObjectSegment(value=[{"id": 1}, {"id": 2}]),
ArrayFileSegment(value=[]), # Empty array to avoid file complexity
]

# Test serialization and deserialization
model = _Segments(segments=all_segments)
json_str = model.model_dump_json()
loaded = _Segments.model_validate_json(json_str)

# Verify all segments are preserved
assert len(loaded.segments) == len(all_segments)

for original, loaded_segment in zip(all_segments, loaded.segments):
assert type(loaded_segment) == type(original)
assert loaded_segment.value_type == original.value_type

# For file segments, compare key properties instead of exact equality
if isinstance(original, FileSegment) and isinstance(loaded_segment, FileSegment):
orig_file = original.value
loaded_file = loaded_segment.value
assert isinstance(orig_file, File)
assert isinstance(loaded_file, File)
assert loaded_file.tenant_id == orig_file.tenant_id
assert loaded_file.type == orig_file.type
assert loaded_file.filename == orig_file.filename
else:
assert loaded_segment.value == original.value

def test_all_variables_serialization(self):
"""Test serialization/deserialization of all variable types"""
# Create one instance of each variable type
test_file = create_test_file()

all_variables: list[VariableUnion] = [
NoneVariable(name="none_var"),
StringVariable(value="test string", name="string_var"),
IntegerVariable(value=42, name="int_var"),
FloatVariable(value=3.14, name="float_var"),
ObjectVariable(value={"key": "value", "number": 123}, name="object_var"),
FileVariable(value=test_file, name="file_var"),
ArrayAnyVariable(value=[1, "string", 3.14, {"key": "value"}], name="array_any_var"),
ArrayStringVariable(value=["hello", "world"], name="array_string_var"),
ArrayNumberVariable(value=[1, 2.5, 3], name="array_number_var"),
ArrayObjectVariable(value=[{"id": 1}, {"id": 2}], name="array_object_var"),
ArrayFileVariable(value=[], name="array_file_var"), # Empty array to avoid file complexity
]

# Test serialization and deserialization
model = _Variables(variables=all_variables)
json_str = model.model_dump_json()
loaded = _Variables.model_validate_json(json_str)

# Verify all variables are preserved
assert len(loaded.variables) == len(all_variables)

for original, loaded_variable in zip(all_variables, loaded.variables):
assert type(loaded_variable) == type(original)
assert loaded_variable.value_type == original.value_type
assert loaded_variable.name == original.name

# For file variables, compare key properties instead of exact equality
if isinstance(original, FileVariable) and isinstance(loaded_variable, FileVariable):
orig_file = original.value
loaded_file = loaded_variable.value
assert isinstance(orig_file, File)
assert isinstance(loaded_file, File)
assert loaded_file.tenant_id == orig_file.tenant_id
assert loaded_file.type == orig_file.type
assert loaded_file.filename == orig_file.filename
else:
assert loaded_variable.value == original.value

def test_segment_discriminator_function_for_segment_types(self):
"""Test the segment discriminator function"""

@dataclasses.dataclass
class TestCase:
segment: Segment
expected_segment_type: SegmentType

file1 = create_test_file()
file2 = create_test_file(filename="test2.txt")

cases = [
TestCase(
NoneSegment(),
SegmentType.NONE,
),
TestCase(
StringSegment(value=""),
SegmentType.STRING,
),
TestCase(
FloatSegment(value=0.0),
SegmentType.FLOAT,
),
TestCase(
IntegerSegment(value=0),
SegmentType.INTEGER,
),
TestCase(
ObjectSegment(value={}),
SegmentType.OBJECT,
),
TestCase(
FileSegment(value=file1),
SegmentType.FILE,
),
TestCase(
ArrayAnySegment(value=[0, 0.0, ""]),
SegmentType.ARRAY_ANY,
),
TestCase(
ArrayStringSegment(value=[""]),
SegmentType.ARRAY_STRING,
),
TestCase(
ArrayNumberSegment(value=[0, 0.0]),
SegmentType.ARRAY_NUMBER,
),
TestCase(
ArrayObjectSegment(value=[{}]),
SegmentType.ARRAY_OBJECT,
),
TestCase(
ArrayFileSegment(value=[file1, file2]),
SegmentType.ARRAY_FILE,
),
]

for test_case in cases:
segment = test_case.segment
assert get_segment_discriminator(segment) == test_case.expected_segment_type, (
f"get_segment_discriminator failed for type {type(segment)}"
)
model_dict = segment.model_dump(mode="json")
assert get_segment_discriminator(model_dict) == test_case.expected_segment_type, (
f"get_segment_discriminator failed for serialized form of type {type(segment)}"
)

def test_variable_discriminator_function_for_variable_types(self):
"""Test the variable discriminator function"""

@dataclasses.dataclass
class TestCase:
variable: Variable
expected_segment_type: SegmentType

file1 = create_test_file()
file2 = create_test_file(filename="test2.txt")

cases = [
TestCase(
NoneVariable(name="none_var"),
SegmentType.NONE,
),
TestCase(
StringVariable(value="test", name="string_var"),
SegmentType.STRING,
),
TestCase(
FloatVariable(value=0.0, name="float_var"),
SegmentType.FLOAT,
),
TestCase(
IntegerVariable(value=0, name="int_var"),
SegmentType.INTEGER,
),
TestCase(
ObjectVariable(value={}, name="object_var"),
SegmentType.OBJECT,
),
TestCase(
FileVariable(value=file1, name="file_var"),
SegmentType.FILE,
),
TestCase(
SecretVariable(value="secret", name="secret_var"),
SegmentType.SECRET,
),
TestCase(
ArrayAnyVariable(value=[0, 0.0, ""], name="array_any_var"),
SegmentType.ARRAY_ANY,
),
TestCase(
ArrayStringVariable(value=[""], name="array_string_var"),
SegmentType.ARRAY_STRING,
),
TestCase(
ArrayNumberVariable(value=[0, 0.0], name="array_number_var"),
SegmentType.ARRAY_NUMBER,
),
TestCase(
ArrayObjectVariable(value=[{}], name="array_object_var"),
SegmentType.ARRAY_OBJECT,
),
TestCase(
ArrayFileVariable(value=[file1, file2], name="array_file_var"),
SegmentType.ARRAY_FILE,
),
]

for test_case in cases:
variable = test_case.variable
assert get_segment_discriminator(variable) == test_case.expected_segment_type, (
f"get_segment_discriminator failed for type {type(variable)}"
)
model_dict = variable.model_dump(mode="json")
assert get_segment_discriminator(model_dict) == test_case.expected_segment_type, (
f"get_segment_discriminator failed for serialized form of type {type(variable)}"
)

def test_invlaid_value_for_discriminator(self):
# Test invalid cases
assert get_segment_discriminator({"value_type": "invalid"}) is None
assert get_segment_discriminator({}) is None
assert get_segment_discriminator("not_a_dict") is None
assert get_segment_discriminator(42) is None
assert get_segment_discriminator(object) is None

@ -1,60 +0,0 @@
from core.variables.types import SegmentType


class TestSegmentTypeIsArrayType:
"""
Test class for SegmentType.is_array_type method.

Provides comprehensive coverage of all SegmentType values to ensure
correct identification of array and non-array types.
"""

def test_is_array_type(self):
"""
Test that all SegmentType enum values are covered in our test cases.

Ensures comprehensive coverage by verifying that every SegmentType
value is tested for the is_array_type method.
"""
# Arrange
all_segment_types = set(SegmentType)
expected_array_types = [
SegmentType.ARRAY_ANY,
SegmentType.ARRAY_STRING,
SegmentType.ARRAY_NUMBER,
SegmentType.ARRAY_OBJECT,
SegmentType.ARRAY_FILE,
]
expected_non_array_types = [
SegmentType.INTEGER,
SegmentType.FLOAT,
SegmentType.NUMBER,
SegmentType.STRING,
SegmentType.OBJECT,
SegmentType.SECRET,
SegmentType.FILE,
SegmentType.NONE,
SegmentType.GROUP,
]

for seg_type in expected_array_types:
assert seg_type.is_array_type()

for seg_type in expected_non_array_types:
assert not seg_type.is_array_type()

# Act & Assert
covered_types = set(expected_array_types) | set(expected_non_array_types)
assert covered_types == set(SegmentType), "All SegmentType values should be covered in tests"

def test_all_enum_values_are_supported(self):
"""
Test that all enum values are supported and return boolean values.

Validates that every SegmentType enum value can be processed by
is_array_type method and returns a boolean value.
"""
enum_values: list[SegmentType] = list(SegmentType)
for seg_type in enum_values:
is_array = seg_type.is_array_type()
assert isinstance(is_array, bool), f"is_array_type does not return a boolean for segment type {seg_type}"
@ -11,7 +11,6 @@ from core.variables import (
SegmentType,
StringVariable,
)
from core.variables.variables import Variable


def test_frozen_variables():
@ -76,7 +75,7 @@ def test_object_variable_to_object():


def test_variable_to_object():
var: Variable = StringVariable(name="text", value="text")
var = StringVariable(name="text", value="text")
assert var.to_object() == "text"
var = IntegerVariable(name="integer", value=42)
assert var.to_object() == 42

@ -1,146 +0,0 @@
import time
from decimal import Decimal

from core.model_runtime.entities.llm_entities import LLMUsage
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.graph_engine.entities.runtime_route_state import RuntimeRouteState
from core.workflow.system_variable import SystemVariable


def create_test_graph_runtime_state() -> GraphRuntimeState:
"""Factory function to create a GraphRuntimeState with non-empty values for testing."""
# Create a variable pool with system variables
system_vars = SystemVariable(
user_id="test_user_123",
app_id="test_app_456",
workflow_id="test_workflow_789",
workflow_execution_id="test_execution_001",
query="test query",
conversation_id="test_conv_123",
dialogue_count=5,
)
variable_pool = VariablePool(system_variables=system_vars)

# Add some variables to the variable pool
variable_pool.add(["test_node", "test_var"], "test_value")
variable_pool.add(["another_node", "another_var"], 42)

# Create LLM usage with realistic values
llm_usage = LLMUsage(
prompt_tokens=150,
prompt_unit_price=Decimal("0.001"),
prompt_price_unit=Decimal(1000),
prompt_price=Decimal("0.15"),
completion_tokens=75,
completion_unit_price=Decimal("0.002"),
completion_price_unit=Decimal(1000),
completion_price=Decimal("0.15"),
total_tokens=225,
total_price=Decimal("0.30"),
currency="USD",
latency=1.25,
)

# Create runtime route state with some node states
node_run_state = RuntimeRouteState()
node_state = node_run_state.create_node_state("test_node_1")
node_run_state.add_route(node_state.id, "target_node_id")

return GraphRuntimeState(
variable_pool=variable_pool,
start_at=time.perf_counter(),
total_tokens=100,
llm_usage=llm_usage,
outputs={
"string_output": "test result",
"int_output": 42,
"float_output": 3.14,
"list_output": ["item1", "item2", "item3"],
"dict_output": {"key1": "value1", "key2": 123},
"nested_dict": {"level1": {"level2": ["nested", "list", 456]}},
},
node_run_steps=5,
node_run_state=node_run_state,
)


def test_basic_round_trip_serialization():
"""Test basic round-trip serialization ensures GraphRuntimeState values remain unchanged."""
# Create a state with non-empty values
original_state = create_test_graph_runtime_state()

# Serialize to JSON and deserialize back
json_data = original_state.model_dump_json()
deserialized_state = GraphRuntimeState.model_validate_json(json_data)

# Core test: ensure the round-trip preserves all values
assert deserialized_state == original_state

# Serialize to JSON and deserialize back
dict_data = original_state.model_dump(mode="python")
deserialized_state = GraphRuntimeState.model_validate(dict_data)
assert deserialized_state == original_state

# Serialize to JSON and deserialize back
dict_data = original_state.model_dump(mode="json")
deserialized_state = GraphRuntimeState.model_validate(dict_data)
assert deserialized_state == original_state


def test_outputs_field_round_trip():
"""Test the problematic outputs field maintains values through round-trip serialization."""
original_state = create_test_graph_runtime_state()

# Serialize and deserialize
json_data = original_state.model_dump_json()
deserialized_state = GraphRuntimeState.model_validate_json(json_data)

# Verify the outputs field specifically maintains its values
assert deserialized_state.outputs == original_state.outputs
assert deserialized_state == original_state


def test_empty_outputs_round_trip():
"""Test round-trip serialization with empty outputs field."""
variable_pool = VariablePool.empty()
original_state = GraphRuntimeState(
variable_pool=variable_pool,
start_at=time.perf_counter(),
outputs={}, # Empty outputs
)

json_data = original_state.model_dump_json()
deserialized_state = GraphRuntimeState.model_validate_json(json_data)

assert deserialized_state == original_state


def test_llm_usage_round_trip():
# Create LLM usage with specific decimal values
llm_usage = LLMUsage(
prompt_tokens=100,
prompt_unit_price=Decimal("0.0015"),
prompt_price_unit=Decimal(1000),
prompt_price=Decimal("0.15"),
completion_tokens=50,
completion_unit_price=Decimal("0.003"),
completion_price_unit=Decimal(1000),
completion_price=Decimal("0.15"),
total_tokens=150,
total_price=Decimal("0.30"),
currency="USD",
latency=2.5,
)

json_data = llm_usage.model_dump_json()
deserialized = LLMUsage.model_validate_json(json_data)
assert deserialized == llm_usage

dict_data = llm_usage.model_dump(mode="python")
deserialized = LLMUsage.model_validate(dict_data)
assert deserialized == llm_usage

dict_data = llm_usage.model_dump(mode="json")
deserialized = LLMUsage.model_validate(dict_data)
assert deserialized == llm_usage
@ -1,401 +0,0 @@
|
||||
import json
|
||||
import uuid
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from core.workflow.entities.node_entities import NodeRunResult
|
||||
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
|
||||
from core.workflow.graph_engine.entities.runtime_route_state import RouteNodeState, RuntimeRouteState
|
||||
|
||||
_TEST_DATETIME = datetime(2024, 1, 15, 10, 30, 45)
|
||||
|
||||
|
||||
class TestRouteNodeStateSerialization:
|
||||
"""Test cases for RouteNodeState Pydantic serialization/deserialization."""
|
||||
|
||||
def _test_route_node_state(self):
|
||||
"""Test comprehensive RouteNodeState serialization with all core fields validation."""
|
||||
|
||||
node_run_result = NodeRunResult(
|
||||
status=WorkflowNodeExecutionStatus.SUCCEEDED,
|
||||
inputs={"input_key": "input_value"},
|
||||
outputs={"output_key": "output_value"},
|
||||
)
|
||||
|
||||
node_state = RouteNodeState(
|
||||
node_id="comprehensive_test_node",
|
||||
start_at=_TEST_DATETIME,
|
||||
finished_at=_TEST_DATETIME,
|
||||
status=RouteNodeState.Status.SUCCESS,
|
||||
node_run_result=node_run_result,
|
||||
index=5,
|
||||
paused_at=_TEST_DATETIME,
|
||||
paused_by="user_123",
|
||||
failed_reason="test_reason",
|
||||
)
|
||||
return node_state
|
||||
|
||||
def test_route_node_state_comprehensive_field_validation(self):
|
||||
"""Test comprehensive RouteNodeState serialization with all core fields validation."""
|
||||
node_state = self._test_route_node_state()
|
||||
serialized = node_state.model_dump()
|
||||
|
||||
# Comprehensive validation of all RouteNodeState fields
|
||||
assert serialized["node_id"] == "comprehensive_test_node"
|
||||
assert serialized["status"] == RouteNodeState.Status.SUCCESS
|
||||
assert serialized["start_at"] == _TEST_DATETIME
|
||||
assert serialized["finished_at"] == _TEST_DATETIME
|
||||
assert serialized["paused_at"] == _TEST_DATETIME
|
||||
assert serialized["paused_by"] == "user_123"
|
||||
assert serialized["failed_reason"] == "test_reason"
|
||||
assert serialized["index"] == 5
|
||||
assert "id" in serialized
|
||||
assert isinstance(serialized["id"], str)
|
||||
uuid.UUID(serialized["id"]) # Validate UUID format
|
||||
|
||||
# Validate nested NodeRunResult structure
|
||||
assert serialized["node_run_result"] is not None
|
||||
assert serialized["node_run_result"]["status"] == WorkflowNodeExecutionStatus.SUCCEEDED
|
||||
assert serialized["node_run_result"]["inputs"] == {"input_key": "input_value"}
|
||||
assert serialized["node_run_result"]["outputs"] == {"output_key": "output_value"}
|
||||
|
||||
def test_route_node_state_minimal_required_fields(self):
|
||||
"""Test RouteNodeState with only required fields, focusing on defaults."""
|
||||
node_state = RouteNodeState(node_id="minimal_node", start_at=_TEST_DATETIME)
|
||||
|
||||
serialized = node_state.model_dump()
|
||||
|
||||
# Focus on required fields and default values (not re-testing all fields)
|
||||
assert serialized["node_id"] == "minimal_node"
|
||||
assert serialized["start_at"] == _TEST_DATETIME
|
||||
assert serialized["status"] == RouteNodeState.Status.RUNNING # Default status
|
||||
assert serialized["index"] == 1 # Default index
|
||||
assert serialized["node_run_result"] is None # Default None
|
||||
json = node_state.model_dump_json()
|
||||
deserialized = RouteNodeState.model_validate_json(json)
|
||||
assert deserialized == node_state
|
||||
|
||||
def test_route_node_state_deserialization_from_dict(self):
|
||||
"""Test RouteNodeState deserialization from dictionary data."""
|
||||
test_datetime = datetime(2024, 1, 15, 10, 30, 45)
|
||||
test_id = str(uuid.uuid4())
|
||||
|
||||
dict_data = {
|
||||
"id": test_id,
|
||||
"node_id": "deserialized_node",
|
||||
"start_at": test_datetime,
|
||||
"status": "success",
|
||||
"finished_at": test_datetime,
|
||||
"index": 3,
|
||||
}
|
||||
|
||||
node_state = RouteNodeState.model_validate(dict_data)
|
||||
|
||||
# Focus on deserialization accuracy
|
||||
assert node_state.id == test_id
|
||||
assert node_state.node_id == "deserialized_node"
|
||||
assert node_state.start_at == test_datetime
|
||||
assert node_state.status == RouteNodeState.Status.SUCCESS
|
||||
assert node_state.finished_at == test_datetime
|
||||
assert node_state.index == 3
|
||||
|
||||
def test_route_node_state_round_trip_consistency(self):
|
||||
node_states = (
|
||||
self._test_route_node_state(),
|
||||
RouteNodeState(node_id="minimal_node", start_at=_TEST_DATETIME),
|
||||
)
|
||||
for node_state in node_states:
|
||||
json = node_state.model_dump_json()
|
||||
deserialized = RouteNodeState.model_validate_json(json)
|
||||
assert deserialized == node_state
|
||||
|
||||
dict_ = node_state.model_dump(mode="python")
|
||||
deserialized = RouteNodeState.model_validate(dict_)
|
||||
assert deserialized == node_state
|
||||
|
||||
dict_ = node_state.model_dump(mode="json")
|
||||
deserialized = RouteNodeState.model_validate(dict_)
|
||||
assert deserialized == node_state
|
||||
|
||||
|
||||
class TestRouteNodeStateEnumSerialization:
|
||||
"""Dedicated tests for RouteNodeState Status enum serialization behavior."""
|
||||
|
||||
def test_status_enum_model_dump_behavior(self):
|
||||
"""Test Status enum serialization in model_dump() returns enum objects."""
|
||||
|
||||
for status_enum in RouteNodeState.Status:
|
||||
node_state = RouteNodeState(node_id="enum_test", start_at=_TEST_DATETIME, status=status_enum)
|
||||
serialized = node_state.model_dump(mode="python")
|
||||
assert serialized["status"] == status_enum
|
||||
serialized = node_state.model_dump(mode="json")
|
||||
assert serialized["status"] == status_enum.value
|
||||
|
||||
def test_status_enum_json_serialization_behavior(self):
|
||||
"""Test Status enum serialization in JSON returns string values."""
|
||||
test_datetime = datetime(2024, 1, 15, 10, 30, 45)
|
||||
|
||||
enum_to_string_mapping = {
|
||||
RouteNodeState.Status.RUNNING: "running",
|
||||
RouteNodeState.Status.SUCCESS: "success",
|
||||
RouteNodeState.Status.FAILED: "failed",
|
||||
RouteNodeState.Status.PAUSED: "paused",
|
||||
RouteNodeState.Status.EXCEPTION: "exception",
|
||||
}
|
||||
|
||||
for status_enum, expected_string in enum_to_string_mapping.items():
|
||||
node_state = RouteNodeState(node_id="json_enum_test", start_at=test_datetime, status=status_enum)
|
||||
|
||||
json_data = json.loads(node_state.model_dump_json())
|
||||
assert json_data["status"] == expected_string
|
||||
|
||||
def test_status_enum_deserialization_from_string(self):
|
||||
"""Test Status enum deserialization from string values."""
|
||||
test_datetime = datetime(2024, 1, 15, 10, 30, 45)
|
||||
|
||||
string_to_enum_mapping = {
|
||||
"running": RouteNodeState.Status.RUNNING,
|
||||
"success": RouteNodeState.Status.SUCCESS,
|
||||
"failed": RouteNodeState.Status.FAILED,
|
||||
"paused": RouteNodeState.Status.PAUSED,
|
||||
"exception": RouteNodeState.Status.EXCEPTION,
|
||||
}
|
||||
|
||||
for status_string, expected_enum in string_to_enum_mapping.items():
|
||||
dict_data = {
|
||||
"node_id": "enum_deserialize_test",
|
||||
"start_at": test_datetime,
|
||||
"status": status_string,
|
||||
}
|
||||
|
||||
node_state = RouteNodeState.model_validate(dict_data)
|
||||
assert node_state.status == expected_enum
|
||||
|
||||
|
||||
class TestRuntimeRouteStateSerialization:
|
||||
"""Test cases for RuntimeRouteState Pydantic serialization/deserialization."""
|
||||
|
||||
_NODE1_ID = "node_1"
|
||||
_ROUTE_STATE1_ID = str(uuid.uuid4())
|
||||
_NODE2_ID = "node_2"
|
||||
_ROUTE_STATE2_ID = str(uuid.uuid4())
|
||||
_NODE3_ID = "node_3"
|
||||
_ROUTE_STATE3_ID = str(uuid.uuid4())
|
||||
|
||||
def _get_runtime_route_state(self):
|
||||
# Create node states with different configurations
|
||||
node_state_1 = RouteNodeState(
|
||||
id=self._ROUTE_STATE1_ID,
|
||||
node_id=self._NODE1_ID,
|
||||
start_at=_TEST_DATETIME,
|
||||
index=1,
|
||||
)
|
||||
node_state_2 = RouteNodeState(
|
||||
id=self._ROUTE_STATE2_ID,
|
||||
node_id=self._NODE2_ID,
|
||||
start_at=_TEST_DATETIME,
|
||||
status=RouteNodeState.Status.SUCCESS,
|
||||
finished_at=_TEST_DATETIME,
|
||||
index=2,
|
||||
)
|
||||
node_state_3 = RouteNodeState(
|
||||
id=self._ROUTE_STATE3_ID,
|
||||
node_id=self._NODE3_ID,
|
||||
start_at=_TEST_DATETIME,
|
||||
status=RouteNodeState.Status.FAILED,
|
||||
failed_reason="Test failure",
|
||||
index=3,
|
||||
)
|
||||
|
||||
runtime_state = RuntimeRouteState(
|
||||
routes={node_state_1.id: [node_state_2.id, node_state_3.id], node_state_2.id: [node_state_3.id]},
|
||||
node_state_mapping={
|
||||
node_state_1.id: node_state_1,
|
||||
node_state_2.id: node_state_2,
|
||||
node_state_3.id: node_state_3,
|
||||
},
|
||||
)
|
||||
|
||||
return runtime_state
|
||||
|
||||
def test_runtime_route_state_comprehensive_structure_validation(self):
|
||||
"""Test comprehensive RuntimeRouteState serialization with full structure validation."""
|
||||
|
||||
runtime_state = self._get_runtime_route_state()
|
||||
serialized = runtime_state.model_dump()
|
||||
|
||||
# Comprehensive validation of RuntimeRouteState structure
|
||||
assert "routes" in serialized
|
||||
assert "node_state_mapping" in serialized
|
||||
assert isinstance(serialized["routes"], dict)
|
||||
assert isinstance(serialized["node_state_mapping"], dict)
|
||||
|
||||
# Validate routes dictionary structure and content
|
||||
assert len(serialized["routes"]) == 2
|
||||
assert self._ROUTE_STATE1_ID in serialized["routes"]
|
||||
assert self._ROUTE_STATE2_ID in serialized["routes"]
|
||||
assert serialized["routes"][self._ROUTE_STATE1_ID] == [self._ROUTE_STATE2_ID, self._ROUTE_STATE3_ID]
|
||||
assert serialized["routes"][self._ROUTE_STATE2_ID] == [self._ROUTE_STATE3_ID]
|
||||
|
||||
# Validate node_state_mapping dictionary structure and content
|
||||
assert len(serialized["node_state_mapping"]) == 3
|
||||
for state_id in [
|
||||
self._ROUTE_STATE1_ID,
|
||||
self._ROUTE_STATE2_ID,
|
||||
self._ROUTE_STATE3_ID,
|
||||
]:
|
||||
assert state_id in serialized["node_state_mapping"]
|
||||
node_data = serialized["node_state_mapping"][state_id]
|
||||
node_state = runtime_state.node_state_mapping[state_id]
|
||||
assert node_data["node_id"] == node_state.node_id
|
||||
assert node_data["status"] == node_state.status
|
||||
assert node_data["index"] == node_state.index

    def test_runtime_route_state_empty_collections(self):
        """Test RuntimeRouteState with empty collections, focusing on default behavior."""
        runtime_state = RuntimeRouteState()
        serialized = runtime_state.model_dump()

        # Focus on default empty collection behavior
        assert serialized["routes"] == {}
        assert serialized["node_state_mapping"] == {}
        assert isinstance(serialized["routes"], dict)
        assert isinstance(serialized["node_state_mapping"], dict)

    def test_runtime_route_state_json_serialization_structure(self):
        """Test RuntimeRouteState JSON serialization structure."""
        node_state = RouteNodeState(node_id="json_node", start_at=_TEST_DATETIME)

        runtime_state = RuntimeRouteState(
            routes={"source": ["target1", "target2"]}, node_state_mapping={node_state.id: node_state}
        )

        json_str = runtime_state.model_dump_json()
        json_data = json.loads(json_str)

        # Focus on JSON structure validation
        assert isinstance(json_str, str)
        assert isinstance(json_data, dict)
        assert "routes" in json_data
        assert "node_state_mapping" in json_data
        assert json_data["routes"]["source"] == ["target1", "target2"]
        assert node_state.id in json_data["node_state_mapping"]

    def test_runtime_route_state_deserialization_from_dict(self):
        """Test RuntimeRouteState deserialization from dictionary data."""
        node_id = str(uuid.uuid4())

        dict_data = {
            "routes": {"source_node": ["target_node_1", "target_node_2"]},
            "node_state_mapping": {
                node_id: {
                    "id": node_id,
                    "node_id": "test_node",
                    "start_at": _TEST_DATETIME,
                    "status": "running",
                    "index": 1,
                }
            },
        }

        runtime_state = RuntimeRouteState.model_validate(dict_data)

        # Focus on deserialization accuracy
        assert runtime_state.routes == {"source_node": ["target_node_1", "target_node_2"]}
        assert len(runtime_state.node_state_mapping) == 1
        assert node_id in runtime_state.node_state_mapping

        deserialized_node = runtime_state.node_state_mapping[node_id]
        assert deserialized_node.node_id == "test_node"
        assert deserialized_node.status == RouteNodeState.Status.RUNNING
        assert deserialized_node.index == 1
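
        # Note the coercion above: the plain string "running" was parsed into
        # RouteNodeState.Status.RUNNING, while strings outside the enum are
        # rejected (see TestSerializationEdgeCases below).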

    def test_runtime_route_state_round_trip_consistency(self):
        """Test RuntimeRouteState round-trip serialization consistency."""
        original = self._get_runtime_route_state()

        # Dictionary round trip
        dict_data = original.model_dump(mode="python")
        reconstructed = RuntimeRouteState.model_validate(dict_data)
        assert reconstructed == original

        dict_data = original.model_dump(mode="json")
        reconstructed = RuntimeRouteState.model_validate(dict_data)
        assert reconstructed == original

        # JSON round trip
        json_str = original.model_dump_json()
        json_reconstructed = RuntimeRouteState.model_validate_json(json_str)
        assert json_reconstructed == original
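

# Illustrative sketch, not part of the suite: the same Pydantic round trip the
# test above exercises, as a caller might use it to persist and later restore a
# RuntimeRouteState snapshot. The helper name is hypothetical.
def _example_snapshot_round_trip() -> None:
    state = RuntimeRouteState()
    payload = state.model_dump_json()  # serialize the snapshot to a JSON string
    restored = RuntimeRouteState.model_validate_json(payload)  # parse it back
    assert restored == state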


class TestSerializationEdgeCases:
    """Test edge cases and error conditions for serialization/deserialization."""

    def test_invalid_status_deserialization(self):
        """Test deserialization with invalid status values."""
        test_datetime = _TEST_DATETIME
        invalid_data = {
            "node_id": "invalid_test",
            "start_at": test_datetime,
            "status": "invalid_status",
        }

        with pytest.raises(ValidationError) as exc_info:
            RouteNodeState.model_validate(invalid_data)
        assert "status" in str(exc_info.value)

    def test_missing_required_fields_deserialization(self):
        """Test deserialization with missing required fields."""
        incomplete_data = {"id": str(uuid.uuid4())}

        with pytest.raises(ValidationError) as exc_info:
            RouteNodeState.model_validate(incomplete_data)
        error_str = str(exc_info.value)
        assert "node_id" in error_str or "start_at" in error_str

    def test_invalid_datetime_deserialization(self):
        """Test deserialization with invalid datetime values."""
        invalid_data = {
            "node_id": "datetime_test",
            "start_at": "invalid_datetime",
            "status": "running",
        }

        with pytest.raises(ValidationError) as exc_info:
            RouteNodeState.model_validate(invalid_data)
        assert "start_at" in str(exc_info.value)

    def test_invalid_routes_structure_deserialization(self):
        """Test RuntimeRouteState deserialization with invalid routes structure."""
        invalid_data = {
            "routes": "invalid_routes_structure",  # Should be dict
            "node_state_mapping": {},
        }

        with pytest.raises(ValidationError) as exc_info:
            RuntimeRouteState.model_validate(invalid_data)
        assert "routes" in str(exc_info.value)

    def test_timezone_handling_in_datetime_fields(self):
        """Test timezone handling in datetime field serialization."""
        utc_datetime = datetime.now(UTC)
        naive_datetime = utc_datetime.replace(tzinfo=None)

        node_state = RouteNodeState(node_id="timezone_test", start_at=naive_datetime)
        dict_ = node_state.model_dump()

        assert dict_["start_at"] == naive_datetime

        # Test round trip
        reconstructed = RouteNodeState.model_validate(dict_)
        assert reconstructed.start_at == naive_datetime
        assert reconstructed.start_at.tzinfo is None

        json_str = node_state.model_dump_json()

        reconstructed = RouteNodeState.model_validate_json(json_str)
        assert reconstructed.start_at == naive_datetime
        assert reconstructed.start_at.tzinfo is None
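

# Complementary sketch (illustrative, not asserted by the suite above): an
# aware UTC datetime should survive the same JSON round trip with its offset
# intact, since Pydantic emits ISO-8601 strings such as "...+00:00" for aware
# datetimes. The helper name is hypothetical.
def _example_aware_datetime_round_trip() -> None:
    aware = datetime.now(UTC)
    node_state = RouteNodeState(node_id="tz_aware_test", start_at=aware)
    restored = RouteNodeState.model_validate_json(node_state.model_dump_json())
    assert restored.start_at == aware  # same instant, tzinfo preserved
    assert restored.start_at.tzinfo is not None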

@@ -8,6 +8,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom
 from core.workflow.entities.node_entities import NodeRunResult, WorkflowNodeExecutionMetadataKey
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
+from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.event import (
     BaseNodeEvent,
     GraphRunFailedEvent,
@@ -26,7 +27,6 @@ from core.workflow.nodes.code.code_node import CodeNode
 from core.workflow.nodes.event import RunCompletedEvent, RunStreamChunkEvent
 from core.workflow.nodes.llm.node import LLMNode
 from core.workflow.nodes.question_classifier.question_classifier_node import QuestionClassifierNode
-from core.workflow.system_variable import SystemVariable
 from models.enums import UserFrom
 from models.workflow import WorkflowType
 
@@ -171,8 +171,7 @@ def test_run_parallel_in_workflow(mock_close, mock_remove):
     graph = Graph.init(graph_config=graph_config)
 
     variable_pool = VariablePool(
-        system_variables=SystemVariable(user_id="aaa", app_id="1", workflow_id="1", files=[]),
-        user_inputs={"query": "hi"},
+        system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"}, user_inputs={"query": "hi"}
     )
 
     graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
@@ -294,12 +293,12 @@ def test_run_parallel_in_chatflow(mock_close, mock_remove):
     graph = Graph.init(graph_config=graph_config)
 
     variable_pool = VariablePool(
-        system_variables=SystemVariable(
-            user_id="aaa",
-            files=[],
-            query="what's the weather in SF",
-            conversation_id="abababa",
-        ),
+        system_variables={
+            SystemVariableKey.QUERY: "what's the weather in SF",
+            SystemVariableKey.FILES: [],
+            SystemVariableKey.CONVERSATION_ID: "abababa",
+            SystemVariableKey.USER_ID: "aaa",
+        },
         user_inputs={},
     )
 
@@ -475,12 +474,12 @@ def test_run_branch(mock_close, mock_remove):
     graph = Graph.init(graph_config=graph_config)
 
     variable_pool = VariablePool(
-        system_variables=SystemVariable(
-            user_id="aaa",
-            files=[],
-            query="hi",
-            conversation_id="abababa",
-        ),
+        system_variables={
+            SystemVariableKey.QUERY: "hi",
+            SystemVariableKey.FILES: [],
+            SystemVariableKey.CONVERSATION_ID: "abababa",
+            SystemVariableKey.USER_ID: "aaa",
+        },
         user_inputs={"uid": "takato"},
     )
 
@@ -805,22 +804,18 @@ def test_condition_parallel_correct_output(mock_close, mock_remove, app):
 
     # construct variable pool
     pool = VariablePool(
-        system_variables=SystemVariable(
-            user_id="1",
-            files=[],
-            query="dify",
-            conversation_id="abababa",
-        ),
+        system_variables={
+            SystemVariableKey.QUERY: "dify",
+            SystemVariableKey.FILES: [],
+            SystemVariableKey.CONVERSATION_ID: "abababa",
+            SystemVariableKey.USER_ID: "1",
+        },
         user_inputs={},
         environment_variables=[],
     )
     pool.add(["pe", "list_output"], ["dify-1", "dify-2"])
     variable_pool = VariablePool(
-        system_variables=SystemVariable(
-            user_id="aaa",
-            files=[],
-        ),
-        user_inputs={"query": "hi"},
+        system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"}, user_inputs={"query": "hi"}
     )
 
     graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())

@@ -5,11 +5,11 @@ from unittest.mock import MagicMock
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus
+from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.graph import Graph
 from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams
 from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
 from core.workflow.nodes.answer.answer_node import AnswerNode
-from core.workflow.system_variable import SystemVariable
 from extensions.ext_database import db
 from models.enums import UserFrom
 from models.workflow import WorkflowType
@@ -51,7 +51,7 @@ def test_execute_answer():
 
     # construct variable pool
     pool = VariablePool(
-        system_variables=SystemVariable(user_id="aaa", files=[]),
+        system_variables={SystemVariableKey.FILES: [], SystemVariableKey.USER_ID: "aaa"},
         user_inputs={},
         environment_variables=[],
     )

@@ -3,6 +3,7 @@ from collections.abc import Generator
 from datetime import UTC, datetime
 
 from core.workflow.entities.variable_pool import VariablePool
+from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.event import (
     GraphEngineEvent,
     NodeRunStartedEvent,
@@ -14,7 +15,6 @@ from core.workflow.graph_engine.entities.runtime_route_state import RouteNodeState
 from core.workflow.nodes.answer.answer_stream_processor import AnswerStreamProcessor
 from core.workflow.nodes.enums import NodeType
 from core.workflow.nodes.start.entities import StartNodeData
-from core.workflow.system_variable import SystemVariable
 
 
 def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]:
@@ -180,12 +180,12 @@ def test_process():
     graph = Graph.init(graph_config=graph_config)
 
     variable_pool = VariablePool(
-        system_variables=SystemVariable(
-            user_id="aaa",
-            files=[],
-            query="what's the weather in SF",
-            conversation_id="abababa",
-        ),
+        system_variables={
+            SystemVariableKey.QUERY: "what's the weather in SF",
+            SystemVariableKey.FILES: [],
+            SystemVariableKey.CONVERSATION_ID: "abababa",
+            SystemVariableKey.USER_ID: "aaa",
+        },
         user_inputs={},
     )
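
Taken together, these hunks switch the system_variables argument of VariablePool from the SystemVariable model object to a plain mapping keyed by SystemVariableKey, adjusting the imports to match. A minimal sketch of the mapping style, assembled from the hunks above and intended as an illustration rather than the canonical API:

from core.workflow.entities.variable_pool import VariablePool
from core.workflow.enums import SystemVariableKey

# Mapping-style construction, as introduced by the hunks above.
variable_pool = VariablePool(
    system_variables={
        SystemVariableKey.QUERY: "what's the weather in SF",
        SystemVariableKey.FILES: [],
        SystemVariableKey.CONVERSATION_ID: "abababa",
        SystemVariableKey.USER_ID: "aaa",
    },
    user_inputs={},
)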
Some files were not shown because too many files have changed in this diff.