Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine

This commit is contained in:
-LAN-
2025-09-10 02:03:45 +08:00
99 changed files with 849 additions and 494 deletions

View File

@@ -3278,7 +3278,7 @@ class TestRegisterService:
redis_client.setex(cache_key, 24 * 60 * 60, account_id)
# Execute invitation retrieval
-        result = RegisterService._get_invitation_by_token(
+        result = RegisterService.get_invitation_by_token(
token=token,
workspace_id=workspace_id,
email=email,
@@ -3316,7 +3316,7 @@ class TestRegisterService:
redis_client.setex(token_key, 24 * 60 * 60, json.dumps(invitation_data))
# Execute invitation retrieval
-        result = RegisterService._get_invitation_by_token(token=token)
+        result = RegisterService.get_invitation_by_token(token=token)
# Verify result contains expected data
assert result is not None

View File

@@ -14,6 +14,7 @@ from core.app.app_config.entities import (
VariableEntityType,
)
from core.model_runtime.entities.llm_entities import LLMMode
+from core.prompt.utils.prompt_template_parser import PromptTemplateParser
from models.account import Account, Tenant
from models.api_based_extension import APIBasedExtension
from models.model import App, AppMode, AppModelConfig
@@ -37,7 +38,7 @@ class TestWorkflowConverter:
# Setup default mock returns
mock_encrypter.decrypt_token.return_value = "decrypted_api_key"
mock_prompt_transform.return_value.get_prompt_template.return_value = {
"prompt_template": type("obj", (object,), {"template": "You are a helpful assistant {{text_input}}"})(),
"prompt_template": PromptTemplateParser(template="You are a helpful assistant {{text_input}}"),
"prompt_rules": {"human_prefix": "Human", "assistant_prefix": "Assistant"},
}
mock_agent_chat_config_manager.get_app_config.return_value = self._create_mock_app_config()