Compare commits

..

10 Commits

Author SHA1 Message Date
2c121b38af refactor: update app creation tracking to use appMode instead of source identifiers 2026-04-13 14:37:13 +08:00
2d2b107a75 feat: implement app creation tracking and attribution handling 2026-04-13 14:12:14 +08:00
6cf4d1002f chore: refine .github configs for dependabot, PR template, and stale workflow (#35035)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-13 04:11:31 +00:00
a111d56ea3 refactor: use sessionmaker in workflow_tools_manage_service.py (#34896)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-04-13 03:47:29 +00:00
8436470fcb refactor: replace bare dict with TypedDicts in annotation_service (#34998) 2026-04-13 03:46:33 +00:00
17da0e4146 test: migrate BillingService permission-check tests to Testcontainers integration tests (#34993)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-04-13 03:44:14 +00:00
ea41e9ab4e test: implement Account/Tenant model integration tests to replace db-mocked unit tests (#34994)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-04-13 03:39:16 +00:00
5770b5feef chore(deps): bump the opentelemetry group across 1 directory with 16 updates (#35028)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-04-13 03:28:46 +00:00
b5259a3a85 refactor(api): enable reportUntypedFunctionDecorator in pyright config (#26412) (#35031) 2026-04-13 03:28:23 +00:00
596559efc9 fix(rag): include is_summary and original_chunk_id in default vector projection (#34950)
Co-authored-by: VFootball Dev <vfootball@example.com>
2026-04-13 03:11:08 +00:00
33 changed files with 1153 additions and 690 deletions

100
.github/dependabot.yml vendored
View File

@ -1,106 +1,6 @@
version: 2
updates:
- package-ecosystem: "pip"
directory: "/api"
open-pull-requests-limit: 10
schedule:
interval: "weekly"
groups:
flask:
patterns:
- "flask"
- "flask-*"
- "werkzeug"
- "gunicorn"
google:
patterns:
- "google-*"
- "googleapis-*"
opentelemetry:
patterns:
- "opentelemetry-*"
pydantic:
patterns:
- "pydantic"
- "pydantic-*"
llm:
patterns:
- "langfuse"
- "langsmith"
- "litellm"
- "mlflow*"
- "opik"
- "weave*"
- "arize*"
- "tiktoken"
- "transformers"
database:
patterns:
- "sqlalchemy"
- "psycopg2*"
- "psycogreen"
- "redis*"
- "alembic*"
storage:
patterns:
- "boto3*"
- "botocore*"
- "azure-*"
- "bce-*"
- "cos-python-*"
- "esdk-obs-*"
- "google-cloud-storage"
- "opendal"
- "oss2"
- "supabase*"
- "tos*"
vdb:
patterns:
- "alibabacloud*"
- "chromadb"
- "clickhouse-*"
- "clickzetta-*"
- "couchbase"
- "elasticsearch"
- "opensearch-py"
- "oracledb"
- "pgvect*"
- "pymilvus"
- "pymochow"
- "pyobvector"
- "qdrant-client"
- "intersystems-*"
- "tablestore"
- "tcvectordb"
- "tidb-vector"
- "upstash-*"
- "volcengine-*"
- "weaviate-*"
- "xinference-*"
- "mo-vector"
- "mysql-connector-*"
dev:
patterns:
- "coverage"
- "dotenv-linter"
- "faker"
- "lxml-stubs"
- "basedpyright"
- "ruff"
- "pytest*"
- "types-*"
- "boto3-stubs"
- "hypothesis"
- "pandas-stubs"
- "scipy-stubs"
- "import-linter"
- "celery-types"
- "mypy*"
- "pyrefly"
python-packages:
patterns:
- "*"
- package-ecosystem: "uv"
directory: "/api"
open-pull-requests-limit: 10

View File

@ -18,7 +18,7 @@
## Checklist
- [ ] This change requires a documentation update, included: [Dify Document](https://github.com/langgenius/dify-docs)
- [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
- [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [x] I've updated the documentation accordingly.
- [x] I ran `make lint` and `make type-check` (backend) and `cd web && pnpm exec vp staged` (frontend) to appease the lint gods
- [ ] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
- [ ] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [ ] I've updated the documentation accordingly.
- [ ] I ran `make lint && make type-check` (backend) and `cd web && pnpm exec vp staged` (frontend) to appease the lint gods

View File

@ -23,8 +23,8 @@ jobs:
days-before-issue-stale: 15
days-before-issue-close: 3
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "Close due to it's no longer active, if you have any questions, you can reopen it."
stale-pr-message: "Close due to it's no longer active, if you have any questions, you can reopen it."
stale-issue-message: "Closed due to inactivity. If you have any questions, you can reopen it."
stale-pr-message: "Closed due to inactivity. If you have any questions, you can reopen it."
stale-issue-label: 'no-issue-activity'
stale-pr-label: 'no-pr-activity'
any-of-labels: 'duplicate,question,invalid,wontfix,no-issue-activity,no-pr-activity,enhancement,cant-reproduce,help-wanted'
any-of-labels: '🌚 invalid,🙋‍♂️ question,wont-fix,no-issue-activity,no-pr-activity,💪 enhancement,🤔 cant-reproduce,🙏 help wanted'

View File

@ -25,7 +25,13 @@ from fields.annotation_fields import (
)
from libs.helper import uuid_value
from libs.login import login_required
from services.annotation_service import AppAnnotationService
from services.annotation_service import (
AppAnnotationService,
EnableAnnotationArgs,
UpdateAnnotationArgs,
UpdateAnnotationSettingArgs,
UpsertAnnotationArgs,
)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
@ -120,7 +126,12 @@ class AnnotationReplyActionApi(Resource):
args = AnnotationReplyPayload.model_validate(console_ns.payload)
match action:
case "enable":
result = AppAnnotationService.enable_app_annotation(args.model_dump(), app_id)
enable_args: EnableAnnotationArgs = {
"score_threshold": args.score_threshold,
"embedding_provider_name": args.embedding_provider_name,
"embedding_model_name": args.embedding_model_name,
}
result = AppAnnotationService.enable_app_annotation(enable_args, app_id)
case "disable":
result = AppAnnotationService.disable_app_annotation(app_id)
return result, 200
@ -161,7 +172,8 @@ class AppAnnotationSettingUpdateApi(Resource):
args = AnnotationSettingUpdatePayload.model_validate(console_ns.payload)
result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, args.model_dump())
setting_args: UpdateAnnotationSettingArgs = {"score_threshold": args.score_threshold}
result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, setting_args)
return result, 200
@ -237,8 +249,16 @@ class AnnotationApi(Resource):
def post(self, app_id):
app_id = str(app_id)
args = CreateAnnotationPayload.model_validate(console_ns.payload)
data = args.model_dump(exclude_none=True)
annotation = AppAnnotationService.up_insert_app_annotation_from_message(data, app_id)
upsert_args: UpsertAnnotationArgs = {}
if args.answer is not None:
upsert_args["answer"] = args.answer
if args.content is not None:
upsert_args["content"] = args.content
if args.message_id is not None:
upsert_args["message_id"] = args.message_id
if args.question is not None:
upsert_args["question"] = args.question
annotation = AppAnnotationService.up_insert_app_annotation_from_message(upsert_args, app_id)
return Annotation.model_validate(annotation, from_attributes=True).model_dump(mode="json")
@setup_required
@ -315,9 +335,12 @@ class AnnotationUpdateDeleteApi(Resource):
app_id = str(app_id)
annotation_id = str(annotation_id)
args = UpdateAnnotationPayload.model_validate(console_ns.payload)
annotation = AppAnnotationService.update_app_annotation_directly(
args.model_dump(exclude_none=True), app_id, annotation_id
)
update_args: UpdateAnnotationArgs = {}
if args.answer is not None:
update_args["answer"] = args.answer
if args.question is not None:
update_args["question"] = args.question
annotation = AppAnnotationService.update_app_annotation_directly(update_args, app_id, annotation_id)
return Annotation.model_validate(annotation, from_attributes=True).model_dump(mode="json")
@setup_required

View File

@ -94,10 +94,9 @@ def get_user_tenant[**P, R](view_func: Callable[P, R]) -> Callable[P, R]:
def plugin_data[**P, R](
view: Callable[P, R] | None = None,
*,
payload_type: type[BaseModel],
) -> Callable[P, R] | Callable[[Callable[P, R]], Callable[P, R]]:
) -> Callable[[Callable[P, R]], Callable[P, R]]:
def decorator(view_func: Callable[P, R]) -> Callable[P, R]:
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
@ -116,7 +115,4 @@ def plugin_data[**P, R](
return decorated_view
if view is None:
return decorator
else:
return decorator(view)
return decorator

View File

@ -12,7 +12,12 @@ from controllers.service_api.wraps import validate_app_token
from extensions.ext_redis import redis_client
from fields.annotation_fields import Annotation, AnnotationList
from models.model import App
from services.annotation_service import AppAnnotationService
from services.annotation_service import (
AppAnnotationService,
EnableAnnotationArgs,
InsertAnnotationArgs,
UpdateAnnotationArgs,
)
class AnnotationCreatePayload(BaseModel):
@ -46,10 +51,15 @@ class AnnotationReplyActionApi(Resource):
@validate_app_token
def post(self, app_model: App, action: Literal["enable", "disable"]):
"""Enable or disable annotation reply feature."""
args = AnnotationReplyActionPayload.model_validate(service_api_ns.payload or {}).model_dump()
payload = AnnotationReplyActionPayload.model_validate(service_api_ns.payload or {})
match action:
case "enable":
result = AppAnnotationService.enable_app_annotation(args, app_model.id)
enable_args: EnableAnnotationArgs = {
"score_threshold": payload.score_threshold,
"embedding_provider_name": payload.embedding_provider_name,
"embedding_model_name": payload.embedding_model_name,
}
result = AppAnnotationService.enable_app_annotation(enable_args, app_model.id)
case "disable":
result = AppAnnotationService.disable_app_annotation(app_model.id)
return result, 200
@ -135,8 +145,9 @@ class AnnotationListApi(Resource):
@validate_app_token
def post(self, app_model: App):
"""Create a new annotation."""
args = AnnotationCreatePayload.model_validate(service_api_ns.payload or {}).model_dump()
annotation = AppAnnotationService.insert_app_annotation_directly(args, app_model.id)
payload = AnnotationCreatePayload.model_validate(service_api_ns.payload or {})
insert_args: InsertAnnotationArgs = {"question": payload.question, "answer": payload.answer}
annotation = AppAnnotationService.insert_app_annotation_directly(insert_args, app_model.id)
response = Annotation.model_validate(annotation, from_attributes=True)
return response.model_dump(mode="json"), HTTPStatus.CREATED
@ -164,8 +175,9 @@ class AnnotationUpdateDeleteApi(Resource):
@edit_permission_required
def put(self, app_model: App, annotation_id: str):
"""Update an existing annotation."""
args = AnnotationCreatePayload.model_validate(service_api_ns.payload or {}).model_dump()
annotation = AppAnnotationService.update_app_annotation_directly(args, app_model.id, annotation_id)
payload = AnnotationCreatePayload.model_validate(service_api_ns.payload or {})
update_args: UpdateAnnotationArgs = {"question": payload.question, "answer": payload.answer}
annotation = AppAnnotationService.update_app_annotation_directly(update_args, app_model.id, annotation_id)
response = Annotation.model_validate(annotation, from_attributes=True)
return response.model_dump(mode="json")

View File

@ -41,7 +41,23 @@ class AbstractVectorFactory(ABC):
class Vector:
def __init__(self, dataset: Dataset, attributes: list | None = None):
if attributes is None:
attributes = ["doc_id", "dataset_id", "document_id", "doc_hash", "doc_type"]
# `is_summary` and `original_chunk_id` are stored on summary vectors
# by `SummaryIndexService` and read back by `RetrievalService` to
# route summary hits through their original parent chunks. They
# must be listed here so vector backends that use this list as an
# explicit return-properties projection (notably Weaviate) actually
# return those fields; without them, summary hits silently
# collapse into `is_summary = False` branches and the summary
# retrieval path is a no-op. See #34884.
attributes = [
"doc_id",
"dataset_id",
"document_id",
"doc_hash",
"doc_type",
"is_summary",
"original_chunk_id",
]
self._dataset = dataset
self._embeddings = self._get_embeddings()
self._attributes = attributes

View File

@ -17,7 +17,6 @@ def http_status_message(code):
def register_external_error_handlers(api: Api):
@api.errorhandler(HTTPException)
def handle_http_exception(e: HTTPException):
got_request_exception.send(current_app, exception=e)
@ -74,27 +73,18 @@ def register_external_error_handlers(api: Api):
headers["Set-Cookie"] = build_force_logout_cookie_headers()
return data, status_code, headers
_ = handle_http_exception
@api.errorhandler(ValueError)
def handle_value_error(e: ValueError):
got_request_exception.send(current_app, exception=e)
status_code = 400
data = {"code": "invalid_param", "message": str(e), "status": status_code}
return data, status_code
_ = handle_value_error
@api.errorhandler(AppInvokeQuotaExceededError)
def handle_quota_exceeded(e: AppInvokeQuotaExceededError):
got_request_exception.send(current_app, exception=e)
status_code = 429
data = {"code": "too_many_requests", "message": str(e), "status": status_code}
return data, status_code
_ = handle_quota_exceeded
@api.errorhandler(Exception)
def handle_general_exception(e: Exception):
got_request_exception.send(current_app, exception=e)
@ -113,7 +103,10 @@ def register_external_error_handlers(api: Api):
return data, status_code
_ = handle_general_exception
api.errorhandler(HTTPException)(handle_http_exception)
api.errorhandler(ValueError)(handle_value_error)
api.errorhandler(AppInvokeQuotaExceededError)(handle_quota_exceeded)
api.errorhandler(Exception)(handle_general_exception)
class ExternalApi(Api):

View File

@ -9,7 +9,7 @@ dependencies = [
"azure-identity==1.25.3",
"beautifulsoup4==4.14.3",
"boto3==1.42.88",
"bs4~=0.0.2",
"bs4~=0.0.1",
"cachetools~=7.0.5",
"celery~=5.6.3",
"charset-normalizer>=3.4.7",
@ -41,23 +41,23 @@ dependencies = [
"openpyxl~=3.1.5",
"opik~=1.11.2",
"litellm==1.83.0", # Pinned to avoid madoka dependency issue
"opentelemetry-api==1.40.0",
"opentelemetry-distro==0.61b0",
"opentelemetry-exporter-otlp==1.40.0",
"opentelemetry-exporter-otlp-proto-common==1.40.0",
"opentelemetry-exporter-otlp-proto-grpc==1.40.0",
"opentelemetry-exporter-otlp-proto-http==1.40.0",
"opentelemetry-instrumentation==0.61b0",
"opentelemetry-instrumentation-celery==0.61b0",
"opentelemetry-instrumentation-flask==0.61b0",
"opentelemetry-instrumentation-httpx==0.61b0",
"opentelemetry-instrumentation-redis==0.61b0",
"opentelemetry-instrumentation-sqlalchemy==0.61b0",
"opentelemetry-api==1.41.0",
"opentelemetry-distro==0.62b0",
"opentelemetry-exporter-otlp==1.41.0",
"opentelemetry-exporter-otlp-proto-common==1.41.0",
"opentelemetry-exporter-otlp-proto-grpc==1.41.0",
"opentelemetry-exporter-otlp-proto-http==1.41.0",
"opentelemetry-instrumentation==0.62b0",
"opentelemetry-instrumentation-celery==0.62b0",
"opentelemetry-instrumentation-flask==0.62b0",
"opentelemetry-instrumentation-httpx==0.62b0",
"opentelemetry-instrumentation-redis==0.62b0",
"opentelemetry-instrumentation-sqlalchemy==0.62b0",
"opentelemetry-propagator-b3==1.41.0",
"opentelemetry-proto==1.40.0",
"opentelemetry-sdk==1.40.0",
"opentelemetry-semantic-conventions==0.61b0",
"opentelemetry-util-http==0.61b0",
"opentelemetry-proto==1.41.0",
"opentelemetry-sdk==1.41.0",
"opentelemetry-semantic-conventions==0.62b0",
"opentelemetry-util-http==0.62b0",
"pandas[excel,output-formatting,performance]~=3.0.2",
"psycogreen~=1.0.2",
"psycopg2-binary~=2.9.11",
@ -65,10 +65,10 @@ dependencies = [
"pydantic~=2.12.5",
"pydantic-settings~=2.13.1",
"pyjwt~=2.12.1",
"pypdfium2==5.7.0",
"pypdfium2==5.6.0",
"python-docx~=1.2.0",
"python-dotenv==1.2.2",
"pyyaml~=6.0.3",
"pyyaml~=6.0.1",
"readabilipy~=0.3.0",
"redis[hiredis]~=7.4.0",
"resend~=2.27.0",
@ -77,11 +77,11 @@ dependencies = [
"starlette==1.0.0",
"tiktoken~=0.12.0",
"transformers~=5.3.0",
"unstructured[docx,epub,md,ppt,pptx]~=0.22.18",
"pypandoc~=1.17",
"unstructured[docx,epub,md,ppt,pptx]~=0.21.5",
"pypandoc~=1.13",
"yarl~=1.23.0",
"sseclient-py~=1.9.0",
"httpx-sse~=0.4.3",
"httpx-sse~=0.4.0",
"sendgrid~=6.12.5",
"flask-restx~=1.3.2",
"packaging~=26.0",
@ -193,7 +193,7 @@ storage = [
############################################################
# [ Tools ] dependency group
############################################################
tools = ["cloudscraper~=1.2.71", "nltk~=3.9.4"]
tools = ["cloudscraper~=1.2.71", "nltk~=3.9.1"]
############################################################
# [ VDB ] dependency group

View File

@ -47,7 +47,6 @@
"reportMissingTypeArgument": "hint",
"reportUnnecessaryComparison": "hint",
"reportUnnecessaryIsInstance": "hint",
"reportUntypedFunctionDecorator": "hint",
"reportUnnecessaryTypeIgnoreComment": "hint",
"reportAttributeAccessIssue": "hint",
"pythonVersion": "3.12",

View File

@ -1,11 +1,8 @@
import logging
import uuid
import pandas as pd
logger = logging.getLogger(__name__)
from typing import TypedDict
import pandas as pd
from sqlalchemy import delete, or_, select, update
from werkzeug.datastructures import FileStorage
from werkzeug.exceptions import NotFound
@ -24,6 +21,8 @@ from tasks.annotation.disable_annotation_reply_task import disable_annotation_re
from tasks.annotation.enable_annotation_reply_task import enable_annotation_reply_task
from tasks.annotation.update_annotation_to_index_task import update_annotation_to_index_task
logger = logging.getLogger(__name__)
class AnnotationJobStatusDict(TypedDict):
job_id: str
@ -46,9 +45,50 @@ class AnnotationSettingDisabledDict(TypedDict):
enabled: bool
class EnableAnnotationArgs(TypedDict):
"""Expected shape of the args dict passed to enable_app_annotation."""
score_threshold: float
embedding_provider_name: str
embedding_model_name: str
class UpsertAnnotationArgs(TypedDict, total=False):
"""Expected shape of the args dict passed to up_insert_app_annotation_from_message."""
answer: str
content: str
message_id: str
question: str
class InsertAnnotationArgs(TypedDict):
"""Expected shape of the args dict passed to insert_app_annotation_directly."""
question: str
answer: str
class UpdateAnnotationArgs(TypedDict, total=False):
"""Expected shape of the args dict passed to update_app_annotation_directly.
Both fields are optional at the type level; the service validates at runtime
and raises ValueError if either is missing.
"""
answer: str
question: str
class UpdateAnnotationSettingArgs(TypedDict):
"""Expected shape of the args dict passed to update_app_annotation_setting."""
score_threshold: float
class AppAnnotationService:
@classmethod
def up_insert_app_annotation_from_message(cls, args: dict, app_id: str) -> MessageAnnotation:
def up_insert_app_annotation_from_message(cls, args: UpsertAnnotationArgs, app_id: str) -> MessageAnnotation:
# get app info
current_user, current_tenant_id = current_account_with_tenant()
app = db.session.scalar(
@ -62,8 +102,9 @@ class AppAnnotationService:
if answer is None:
raise ValueError("Either 'answer' or 'content' must be provided")
if args.get("message_id"):
message_id = str(args["message_id"])
raw_message_id = args.get("message_id")
if raw_message_id:
message_id = str(raw_message_id)
message = db.session.scalar(
select(Message).where(Message.id == message_id, Message.app_id == app.id).limit(1)
)
@ -87,9 +128,10 @@ class AppAnnotationService:
account_id=current_user.id,
)
else:
question = args.get("question")
if not question:
maybe_question = args.get("question")
if not maybe_question:
raise ValueError("'question' is required when 'message_id' is not provided")
question = maybe_question
annotation = MessageAnnotation(app_id=app.id, content=answer, question=question, account_id=current_user.id)
db.session.add(annotation)
@ -110,7 +152,7 @@ class AppAnnotationService:
return annotation
@classmethod
def enable_app_annotation(cls, args: dict, app_id: str) -> AnnotationJobStatusDict:
def enable_app_annotation(cls, args: EnableAnnotationArgs, app_id: str) -> AnnotationJobStatusDict:
enable_app_annotation_key = f"enable_app_annotation_{str(app_id)}"
cache_result = redis_client.get(enable_app_annotation_key)
if cache_result is not None:
@ -217,7 +259,7 @@ class AppAnnotationService:
return annotations
@classmethod
def insert_app_annotation_directly(cls, args: dict, app_id: str) -> MessageAnnotation:
def insert_app_annotation_directly(cls, args: InsertAnnotationArgs, app_id: str) -> MessageAnnotation:
# get app info
current_user, current_tenant_id = current_account_with_tenant()
app = db.session.scalar(
@ -251,7 +293,7 @@ class AppAnnotationService:
return annotation
@classmethod
def update_app_annotation_directly(cls, args: dict, app_id: str, annotation_id: str):
def update_app_annotation_directly(cls, args: UpdateAnnotationArgs, app_id: str, annotation_id: str):
# get app info
_, current_tenant_id = current_account_with_tenant()
app = db.session.scalar(
@ -270,7 +312,11 @@ class AppAnnotationService:
if question is None:
raise ValueError("'question' is required")
annotation.content = args["answer"]
answer = args.get("answer")
if answer is None:
raise ValueError("'answer' is required")
annotation.content = answer
annotation.question = question
db.session.commit()
@ -613,7 +659,7 @@ class AppAnnotationService:
@classmethod
def update_app_annotation_setting(
cls, app_id: str, annotation_setting_id: str, args: dict
cls, app_id: str, annotation_setting_id: str, args: UpdateAnnotationSettingArgs
) -> AnnotationSettingDict:
current_user, current_tenant_id = current_account_with_tenant()
# get app info

View File

@ -4,7 +4,7 @@ from datetime import datetime
from graphon.model_runtime.utils.encoders import jsonable_encoder
from sqlalchemy import delete, or_, select
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
@ -42,32 +42,43 @@ class WorkflowToolManageService:
labels: list[str] | None = None,
):
# check if the name is unique
existing_workflow_tool_provider = db.session.scalar(
select(WorkflowToolProvider)
.where(
WorkflowToolProvider.tenant_id == tenant_id,
# name or app_id
or_(WorkflowToolProvider.name == name, WorkflowToolProvider.app_id == workflow_app_id),
existing_workflow_tool_provider: WorkflowToolProvider | None = None
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
# query if the name or app_id exists
existing_workflow_tool_provider = _session.scalar(
select(WorkflowToolProvider)
.where(
WorkflowToolProvider.tenant_id == tenant_id,
# name or app_id
or_(WorkflowToolProvider.name == name, WorkflowToolProvider.app_id == workflow_app_id),
)
.limit(1)
)
.limit(1)
)
# if the name or app_id exists raise error
if existing_workflow_tool_provider is not None:
raise ValueError(f"Tool with name {name} or app_id {workflow_app_id} already exists")
app: App | None = db.session.scalar(
select(App).where(App.id == workflow_app_id, App.tenant_id == tenant_id).limit(1)
)
# query the app
app: App | None = None
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
app = _session.scalar(select(App).where(App.id == workflow_app_id, App.tenant_id == tenant_id).limit(1))
# if not found raise error
if app is None:
raise ValueError(f"App {workflow_app_id} not found")
# query the workflow
workflow: Workflow | None = app.workflow
# if not found raise error
if workflow is None:
raise ValueError(f"Workflow not found for app {workflow_app_id}")
# check if workflow configuration is synced
WorkflowToolConfigurationUtils.ensure_no_human_input_nodes(workflow.graph_dict)
# create workflow tool provider
workflow_tool_provider = WorkflowToolProvider(
tenant_id=tenant_id,
user_id=user_id,
@ -87,13 +98,15 @@ class WorkflowToolManageService:
logger.warning(e, exc_info=True)
raise ValueError(str(e))
with Session(db.engine, expire_on_commit=False) as session, session.begin():
session.add(workflow_tool_provider)
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
_session.add(workflow_tool_provider)
# keep the session open to make orm instances in the same session
if labels is not None:
ToolLabelManager.update_tool_labels(
ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), labels
)
return {"result": "success"}
@classmethod
@ -112,6 +125,7 @@ class WorkflowToolManageService:
):
"""
Update a workflow tool.
:param user_id: the user id
:param tenant_id: the tenant id
:param workflow_tool_id: workflow tool id
@ -187,28 +201,32 @@ class WorkflowToolManageService:
def list_tenant_workflow_tools(cls, user_id: str, tenant_id: str) -> list[ToolProviderApiEntity]:
"""
List workflow tools.
:param user_id: the user id
:param tenant_id: the tenant id
:return: the list of tools
"""
db_tools = db.session.scalars(
select(WorkflowToolProvider).where(WorkflowToolProvider.tenant_id == tenant_id)
).all()
providers: list[WorkflowToolProvider] = []
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
providers = list(
_session.scalars(select(WorkflowToolProvider).where(WorkflowToolProvider.tenant_id == tenant_id)).all()
)
# Create a mapping from provider_id to app_id
provider_id_to_app_id = {provider.id: provider.app_id for provider in db_tools}
provider_id_to_app_id = {provider.id: provider.app_id for provider in providers}
tools: list[WorkflowToolProviderController] = []
for provider in db_tools:
for provider in providers:
try:
tools.append(ToolTransformService.workflow_provider_to_controller(provider))
except Exception:
# skip deleted tools
logger.exception("Failed to load workflow tool provider %s", provider.id)
labels = ToolLabelManager.get_tools_labels([t for t in tools if isinstance(t, ToolProviderController)])
labels = ToolLabelManager.get_tools_labels([tool for tool in tools if isinstance(tool, ToolProviderController)])
result = []
result: list[ToolProviderApiEntity] = []
for tool in tools:
workflow_app_id = provider_id_to_app_id.get(tool.provider_id)
@ -233,17 +251,18 @@ class WorkflowToolManageService:
def delete_workflow_tool(cls, user_id: str, tenant_id: str, workflow_tool_id: str):
"""
Delete a workflow tool.
:param user_id: the user id
:param tenant_id: the tenant id
:param workflow_tool_id: the workflow tool id
"""
db.session.execute(
delete(WorkflowToolProvider).where(
WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id
)
)
db.session.commit()
with sessionmaker(db.engine).begin() as _session:
_ = _session.execute(
delete(WorkflowToolProvider).where(
WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id
)
)
return {"result": "success"}
@ -251,47 +270,59 @@ class WorkflowToolManageService:
def get_workflow_tool_by_tool_id(cls, user_id: str, tenant_id: str, workflow_tool_id: str):
"""
Get a workflow tool.
:param user_id: the user id
:param tenant_id: the tenant id
:param workflow_tool_id: the workflow tool id
:return: the tool
"""
db_tool: WorkflowToolProvider | None = db.session.scalar(
select(WorkflowToolProvider)
.where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id)
.limit(1)
)
return cls._get_workflow_tool(tenant_id, db_tool)
tool_provider: WorkflowToolProvider | None = None
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
tool_provider = _session.scalar(
select(WorkflowToolProvider)
.where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id)
.limit(1)
)
return cls._get_workflow_tool(tenant_id, tool_provider)
@classmethod
def get_workflow_tool_by_app_id(cls, user_id: str, tenant_id: str, workflow_app_id: str):
"""
Get a workflow tool.
:param user_id: the user id
:param tenant_id: the tenant id
:param workflow_app_id: the workflow app id
:return: the tool
"""
db_tool: WorkflowToolProvider | None = db.session.scalar(
select(WorkflowToolProvider)
.where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == workflow_app_id)
.limit(1)
)
return cls._get_workflow_tool(tenant_id, db_tool)
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
tool_provider: WorkflowToolProvider | None = _session.scalar(
select(WorkflowToolProvider)
.where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.app_id == workflow_app_id)
.limit(1)
)
return cls._get_workflow_tool(tenant_id, tool_provider)
@classmethod
def _get_workflow_tool(cls, tenant_id: str, db_tool: WorkflowToolProvider | None):
"""
Get a workflow tool.
:db_tool: the database tool
:return: the tool
"""
if db_tool is None:
raise ValueError("Tool not found")
workflow_app: App | None = db.session.scalar(
select(App).where(App.id == db_tool.app_id, App.tenant_id == db_tool.tenant_id).limit(1)
)
workflow_app: App | None = None
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
workflow_app = _session.scalar(
select(App).where(App.id == db_tool.app_id, App.tenant_id == db_tool.tenant_id).limit(1)
)
if workflow_app is None:
raise ValueError(f"App {db_tool.app_id} not found")
@ -331,28 +362,32 @@ class WorkflowToolManageService:
def list_single_workflow_tools(cls, user_id: str, tenant_id: str, workflow_tool_id: str) -> list[ToolApiEntity]:
"""
List workflow tool provider tools.
:param user_id: the user id
:param tenant_id: the tenant id
:param workflow_tool_id: the workflow tool id
:return: the list of tools
"""
db_tool: WorkflowToolProvider | None = db.session.scalar(
select(WorkflowToolProvider)
.where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id)
.limit(1)
)
if db_tool is None:
provider: WorkflowToolProvider | None = None
with sessionmaker(db.engine, expire_on_commit=False).begin() as _session:
provider = _session.scalar(
select(WorkflowToolProvider)
.where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id)
.limit(1)
)
if provider is None:
raise ValueError(f"Tool {workflow_tool_id} not found")
tool = ToolTransformService.workflow_provider_to_controller(db_tool)
tool = ToolTransformService.workflow_provider_to_controller(provider)
workflow_tools: list[WorkflowTool] = tool.get_tools(tenant_id)
if len(workflow_tools) == 0:
raise ValueError(f"Tool {workflow_tool_id} not found")
return [
ToolTransformService.convert_tool_entity_to_api_entity(
tool=tool.get_tools(db_tool.tenant_id)[0],
tool=tool.get_tools(provider.tenant_id)[0],
labels=ToolLabelManager.get_tool_labels(tool),
tenant_id=tenant_id,
)

View File

@ -1,79 +1,202 @@
# import secrets
"""
Integration tests for Account and Tenant model methods that interact with the database.
# import pytest
# from sqlalchemy import select
# from sqlalchemy.orm import Session
# from sqlalchemy.orm.exc import DetachedInstanceError
Migrated from unit_tests/models/test_account_models.py, replacing
@patch("models.account.db") mock patches with real PostgreSQL operations.
# from libs.datetime_utils import naive_utc_now
# from models.account import Account, Tenant, TenantAccountJoin
Covers:
- Account.current_tenant setter (sets _current_tenant and role from TenantAccountJoin)
- Account.set_tenant_id (resolves tenant + role from real join row)
- Account.get_by_openid (AccountIntegrate lookup then Account fetch)
- Tenant.get_accounts (returns accounts linked via TenantAccountJoin)
"""
from collections.abc import Generator
from uuid import uuid4
import pytest
from sqlalchemy import delete
from sqlalchemy.orm import Session
from models.account import Account, AccountIntegrate, Tenant, TenantAccountJoin, TenantAccountRole
# @pytest.fixture
# def session(db_session_with_containers):
# with Session(db_session_with_containers.get_bind()) as session:
# yield session
def _cleanup_tracked_rows(db_session: Session, tracked: list) -> None:
"""Delete rows tracked during the test so committed state does not leak into the DB.
Rolls back any pending (uncommitted) session state first, then issues DELETE
statements by primary key for each tracked entity (in reverse creation order)
and commits. This cleans up rows created via either flush() or commit().
"""
db_session.rollback()
for entity in reversed(tracked):
db_session.execute(delete(type(entity)).where(type(entity).id == entity.id))
db_session.commit()
# @pytest.fixture
# def account(session):
# account = Account(
# name="test account",
# email=f"test_{secrets.token_hex(8)}@example.com",
# )
# session.add(account)
# session.commit()
# return account
def _build_tenant() -> Tenant:
return Tenant(name=f"Tenant {uuid4()}")
# @pytest.fixture
# def tenant(session):
# tenant = Tenant(name="test tenant")
# session.add(tenant)
# session.commit()
# return tenant
def _build_account(email_prefix: str = "account") -> Account:
return Account(
name=f"Account {uuid4()}",
email=f"{email_prefix}_{uuid4()}@example.com",
password="hashed-password",
password_salt="salt",
interface_language="en-US",
timezone="UTC",
)
# @pytest.fixture
# def tenant_account_join(session, account, tenant):
# tenant_join = TenantAccountJoin(account_id=account.id, tenant_id=tenant.id)
# session.add(tenant_join)
# session.commit()
# yield tenant_join
# session.delete(tenant_join)
# session.commit()
class _DBTrackingTestBase:
"""Base class providing a tracker list and shared row factories for account/tenant tests."""
_tracked: list
@pytest.fixture(autouse=True)
def _setup_cleanup(self, db_session_with_containers: Session) -> Generator[None, None, None]:
self._tracked = []
yield
_cleanup_tracked_rows(db_session_with_containers, self._tracked)
def _create_tenant(self, db_session: Session) -> Tenant:
tenant = _build_tenant()
db_session.add(tenant)
db_session.flush()
self._tracked.append(tenant)
return tenant
def _create_account(self, db_session: Session, email_prefix: str = "account") -> Account:
account = _build_account(email_prefix)
db_session.add(account)
db_session.flush()
self._tracked.append(account)
return account
def _create_join(
self, db_session: Session, tenant_id: str, account_id: str, role: TenantAccountRole, current: bool = True
) -> TenantAccountJoin:
join = TenantAccountJoin(tenant_id=tenant_id, account_id=account_id, role=role, current=current)
db_session.add(join)
db_session.flush()
self._tracked.append(join)
return join
# class TestAccountTenant:
# def test_set_current_tenant_should_reload_tenant(
# self,
# db_session_with_containers,
# account,
# tenant,
# tenant_account_join,
# ):
# with Session(db_session_with_containers.get_bind(), expire_on_commit=True) as session:
# scoped_tenant = session.scalars(select(Tenant).where(Tenant.id == tenant.id)).one()
# account.current_tenant = scoped_tenant
# scoped_tenant.created_at = naive_utc_now()
# # session.commit()
class TestAccountCurrentTenantSetter(_DBTrackingTestBase):
"""Integration tests for Account.current_tenant property setter."""
# # Ensure the tenant used in assignment is detached.
# with pytest.raises(DetachedInstanceError):
# _ = scoped_tenant.name
def test_current_tenant_property_returns_cached_tenant(self, db_session_with_containers: Session) -> None:
"""current_tenant getter returns the in-memory _current_tenant without DB access."""
account = self._create_account(db_session_with_containers)
tenant = self._create_tenant(db_session_with_containers)
account._current_tenant = tenant
# assert account._current_tenant.id == tenant.id
# assert account._current_tenant.id == tenant.id
assert account.current_tenant is tenant
# def test_set_tenant_id_should_load_tenant_as_not_expire(
# self,
# flask_app_with_containers,
# account,
# tenant,
# tenant_account_join,
# ):
# with flask_app_with_containers.test_request_context():
# account.set_tenant_id(tenant.id)
def test_current_tenant_setter_sets_tenant_and_role_when_join_exists(
self, db_session_with_containers: Session
) -> None:
"""Setting current_tenant loads the join row and assigns role when relationship exists."""
tenant = self._create_tenant(db_session_with_containers)
account = self._create_account(db_session_with_containers)
self._create_join(db_session_with_containers, tenant.id, account.id, TenantAccountRole.OWNER)
db_session_with_containers.commit()
# assert account._current_tenant.id == tenant.id
# assert account._current_tenant.id == tenant.id
account.current_tenant = tenant
assert account._current_tenant is not None
assert account._current_tenant.id == tenant.id
assert account.role == TenantAccountRole.OWNER
def test_current_tenant_setter_sets_none_when_no_join_exists(self, db_session_with_containers: Session) -> None:
"""Setting current_tenant results in _current_tenant=None when no join row exists."""
tenant = self._create_tenant(db_session_with_containers)
account = self._create_account(db_session_with_containers)
db_session_with_containers.commit()
account.current_tenant = tenant
assert account._current_tenant is None
class TestAccountSetTenantId(_DBTrackingTestBase):
"""Integration tests for Account.set_tenant_id method."""
def test_set_tenant_id_sets_tenant_and_role_when_relationship_exists(
self, db_session_with_containers: Session
) -> None:
"""set_tenant_id loads the tenant and assigns role when a join row exists."""
tenant = self._create_tenant(db_session_with_containers)
account = self._create_account(db_session_with_containers)
self._create_join(db_session_with_containers, tenant.id, account.id, TenantAccountRole.ADMIN)
db_session_with_containers.commit()
account.set_tenant_id(tenant.id)
assert account._current_tenant is not None
assert account._current_tenant.id == tenant.id
assert account.role == TenantAccountRole.ADMIN
def test_set_tenant_id_does_not_set_tenant_when_no_relationship_exists(
self, db_session_with_containers: Session
) -> None:
"""set_tenant_id does nothing when no join row matches the tenant."""
tenant = self._create_tenant(db_session_with_containers)
account = self._create_account(db_session_with_containers)
db_session_with_containers.commit()
account.set_tenant_id(tenant.id)
assert account._current_tenant is None
class TestAccountGetByOpenId(_DBTrackingTestBase):
"""Integration tests for Account.get_by_openid class method."""
def test_get_by_openid_returns_account_when_integrate_exists(self, db_session_with_containers: Session) -> None:
"""get_by_openid returns the Account when a matching AccountIntegrate row exists."""
account = self._create_account(db_session_with_containers, email_prefix="openid")
provider = "google"
open_id = f"google_{uuid4()}"
integrate = AccountIntegrate(
account_id=account.id,
provider=provider,
open_id=open_id,
encrypted_token="token",
)
db_session_with_containers.add(integrate)
db_session_with_containers.flush()
self._tracked.append(integrate)
result = Account.get_by_openid(provider, open_id)
assert result is not None
assert result.id == account.id
def test_get_by_openid_returns_none_when_no_integrate_exists(self, db_session_with_containers: Session) -> None:
"""get_by_openid returns None when no AccountIntegrate row matches."""
result = Account.get_by_openid("github", f"github_{uuid4()}")
assert result is None
class TestTenantGetAccounts(_DBTrackingTestBase):
"""Integration tests for Tenant.get_accounts method."""
def test_get_accounts_returns_linked_accounts(self, db_session_with_containers: Session) -> None:
"""get_accounts returns all accounts linked to the tenant via TenantAccountJoin."""
tenant = self._create_tenant(db_session_with_containers)
account1 = self._create_account(db_session_with_containers, email_prefix="tenant_member")
account2 = self._create_account(db_session_with_containers, email_prefix="tenant_member")
self._create_join(db_session_with_containers, tenant.id, account1.id, TenantAccountRole.OWNER, current=False)
self._create_join(db_session_with_containers, tenant.id, account2.id, TenantAccountRole.NORMAL, current=False)
accounts = tenant.get_accounts()
assert len(accounts) == 2
account_ids = {a.id for a in accounts}
assert account1.id in account_ids
assert account2.id in account_ids

View File

@ -1,9 +1,13 @@
import json
from collections.abc import Generator
from unittest.mock import patch
from uuid import uuid4
import pytest
from sqlalchemy.orm import Session
from extensions.ext_redis import redis_client
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from services.billing_service import BillingService
@ -363,3 +367,62 @@ class TestBillingServiceGetPlanBulkWithCache:
assert ttl_1_new <= 600
assert ttl_2 > 0
assert ttl_2 <= 600
class TestBillingServiceIsTenantOwnerOrAdmin:
"""
Integration tests for BillingService.is_tenant_owner_or_admin.
Verifies that non-privileged roles (EDITOR, DATASET_OPERATOR) raise ValueError
when checked against real TenantAccountJoin rows in PostgreSQL.
"""
@pytest.fixture(autouse=True)
def _auto_rollback(self, db_session_with_containers: Session) -> Generator[None, None, None]:
yield
db_session_with_containers.rollback()
def _create_account_with_tenant_role(self, db_session: Session, role: TenantAccountRole) -> tuple[Account, Tenant]:
tenant = Tenant(name=f"Tenant {uuid4()}")
db_session.add(tenant)
db_session.flush()
account = Account(
name=f"Account {uuid4()}",
email=f"billing_{uuid4()}@example.com",
password="hashed-password",
password_salt="salt",
interface_language="en-US",
timezone="UTC",
)
db_session.add(account)
db_session.flush()
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=role,
current=True,
)
db_session.add(join)
db_session.flush()
# Wire up in-memory reference so current_tenant_id resolves
account._current_tenant = tenant
return account, tenant
def test_is_tenant_owner_or_admin_editor_role_raises_error(self, db_session_with_containers: Session) -> None:
"""is_tenant_owner_or_admin raises ValueError for EDITOR role."""
account, _ = self._create_account_with_tenant_role(db_session_with_containers, TenantAccountRole.EDITOR)
with pytest.raises(ValueError, match="Only team owner or team admin can perform this action"):
BillingService.is_tenant_owner_or_admin(account)
def test_is_tenant_owner_or_admin_dataset_operator_raises_error(self, db_session_with_containers: Session) -> None:
"""is_tenant_owner_or_admin raises ValueError for DATASET_OPERATOR role."""
account, _ = self._create_account_with_tenant_role(
db_session_with_containers, TenantAccountRole.DATASET_OPERATOR
)
with pytest.raises(ValueError, match="Only team owner or team admin can perform this action"):
BillingService.is_tenant_owner_or_admin(account)

View File

@ -121,7 +121,18 @@ def test_vector_init_uses_default_and_custom_attributes(vector_factory_module):
default_vector = vector_factory_module.Vector(dataset)
custom_vector = vector_factory_module.Vector(dataset, attributes=["doc_id"])
assert default_vector._attributes == ["doc_id", "dataset_id", "document_id", "doc_hash", "doc_type"]
# `is_summary` and `original_chunk_id` must be in the default return-properties
# projection so summary index retrieval works on backends that honor the list
# as an explicit projection (e.g. Weaviate). See #34884.
assert default_vector._attributes == [
"doc_id",
"dataset_id",
"document_id",
"doc_hash",
"doc_type",
"is_summary",
"original_chunk_id",
]
assert custom_vector._attributes == ["doc_id"]
assert default_vector._embeddings == "embeddings"
assert default_vector._vector_processor == "processor"

View File

@ -12,7 +12,6 @@ This test suite covers:
import base64
import secrets
from datetime import UTC, datetime
from unittest.mock import MagicMock, patch
from uuid import uuid4
import pytest
@ -310,90 +309,6 @@ class TestAccountStatusTransitions:
class TestTenantRelationshipIntegrity:
"""Test suite for tenant relationship integrity."""
@patch("models.account.db")
def test_account_current_tenant_property(self, mock_db):
"""Test the current_tenant property getter."""
# Arrange
account = Account(
name="Test User",
email="test@example.com",
)
account.id = str(uuid4())
tenant = Tenant(name="Test Tenant")
tenant.id = str(uuid4())
account._current_tenant = tenant
# Act
result = account.current_tenant
# Assert
assert result == tenant
@patch("models.account.Session")
@patch("models.account.db")
def test_account_current_tenant_setter_with_valid_tenant(self, mock_db, mock_session_class):
"""Test setting current_tenant with a valid tenant relationship."""
# Arrange
account = Account(
name="Test User",
email="test@example.com",
)
account.id = str(uuid4())
tenant = Tenant(name="Test Tenant")
tenant.id = str(uuid4())
# Mock the session and queries
mock_session = MagicMock()
mock_session_class.return_value.__enter__.return_value = mock_session
# Mock TenantAccountJoin query result
tenant_join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER,
)
mock_session.scalar.return_value = tenant_join
# Mock Tenant query result
mock_session.scalars.return_value.one.return_value = tenant
# Act
account.current_tenant = tenant
# Assert
assert account._current_tenant == tenant
assert account.role == TenantAccountRole.OWNER
@patch("models.account.Session")
@patch("models.account.db")
def test_account_current_tenant_setter_without_relationship(self, mock_db, mock_session_class):
"""Test setting current_tenant when no relationship exists."""
# Arrange
account = Account(
name="Test User",
email="test@example.com",
)
account.id = str(uuid4())
tenant = Tenant(name="Test Tenant")
tenant.id = str(uuid4())
# Mock the session and queries
mock_session = MagicMock()
mock_session_class.return_value.__enter__.return_value = mock_session
# Mock no TenantAccountJoin found
mock_session.scalar.return_value = None
# Act
account.current_tenant = tenant
# Assert
assert account._current_tenant is None
def test_account_current_tenant_id_property(self):
"""Test the current_tenant_id property."""
# Arrange
@ -418,61 +333,6 @@ class TestTenantRelationshipIntegrity:
# Assert
assert tenant_id_none is None
@patch("models.account.Session")
@patch("models.account.db")
def test_account_set_tenant_id_method(self, mock_db, mock_session_class):
"""Test the set_tenant_id method."""
# Arrange
account = Account(
name="Test User",
email="test@example.com",
)
account.id = str(uuid4())
tenant = Tenant(name="Test Tenant")
tenant.id = str(uuid4())
tenant_join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.ADMIN,
)
# Mock the session and queries
mock_session = MagicMock()
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.first.return_value = (tenant, tenant_join)
# Act
account.set_tenant_id(tenant.id)
# Assert
assert account._current_tenant == tenant
assert account.role == TenantAccountRole.ADMIN
@patch("models.account.Session")
@patch("models.account.db")
def test_account_set_tenant_id_with_no_relationship(self, mock_db, mock_session_class):
"""Test set_tenant_id when no relationship exists."""
# Arrange
account = Account(
name="Test User",
email="test@example.com",
)
account.id = str(uuid4())
tenant_id = str(uuid4())
# Mock the session and queries
mock_session = MagicMock()
mock_session_class.return_value.__enter__.return_value = mock_session
mock_session.execute.return_value.first.return_value = None
# Act
account.set_tenant_id(tenant_id)
# Assert - should not set tenant when no relationship exists
# The method returns early without setting _current_tenant
class TestAccountRolePermissions:
"""Test suite for account role permissions."""
@ -605,51 +465,6 @@ class TestAccountRolePermissions:
assert current_role == TenantAccountRole.EDITOR
class TestAccountGetByOpenId:
"""Test suite for get_by_openid class method."""
@patch("models.account.db")
def test_get_by_openid_success(self, mock_db):
"""Test successful retrieval of account by OpenID."""
# Arrange
provider = "google"
open_id = "google_user_123"
account_id = str(uuid4())
mock_account_integrate = MagicMock()
mock_account_integrate.account_id = account_id
mock_account = Account(name="Test User", email="test@example.com")
mock_account.id = account_id
# Mock db.session.execute().scalar_one_or_none() for AccountIntegrate lookup
mock_db.session.execute.return_value.scalar_one_or_none.return_value = mock_account_integrate
# Mock db.session.scalar() for Account lookup
mock_db.session.scalar.return_value = mock_account
# Act
result = Account.get_by_openid(provider, open_id)
# Assert
assert result == mock_account
@patch("models.account.db")
def test_get_by_openid_not_found(self, mock_db):
"""Test get_by_openid when account integrate doesn't exist."""
# Arrange
provider = "github"
open_id = "github_user_456"
# Mock db.session.execute().scalar_one_or_none() to return None
mock_db.session.execute.return_value.scalar_one_or_none.return_value = None
# Act
result = Account.get_by_openid(provider, open_id)
# Assert
assert result is None
class TestTenantAccountJoinModel:
"""Test suite for TenantAccountJoin model."""
@ -760,31 +575,6 @@ class TestTenantModel:
# Assert
assert tenant.custom_config == '{"feature1": true, "feature2": "value"}'
@patch("models.account.db")
def test_tenant_get_accounts(self, mock_db):
"""Test getting accounts associated with a tenant."""
# Arrange
tenant = Tenant(name="Test Workspace")
tenant.id = str(uuid4())
account1 = Account(name="User 1", email="user1@example.com")
account1.id = str(uuid4())
account2 = Account(name="User 2", email="user2@example.com")
account2.id = str(uuid4())
# Mock the query chain
mock_scalars = MagicMock()
mock_scalars.all.return_value = [account1, account2]
mock_db.session.scalars.return_value = mock_scalars
# Act
accounts = tenant.get_accounts()
# Assert
assert len(accounts) == 2
assert account1 in accounts
assert account2 in accounts
class TestTenantStatusEnum:
"""Test suite for TenantStatus enum."""

View File

@ -1117,42 +1117,6 @@ class TestBillingServiceEdgeCases:
# Assert
assert result["history_id"] == history_id
def test_is_tenant_owner_or_admin_editor_role_raises_error(self):
"""Test tenant owner/admin check raises error for editor role."""
# Arrange
current_user = MagicMock(spec=Account)
current_user.id = "account-123"
current_user.current_tenant_id = "tenant-456"
mock_join = MagicMock(spec=TenantAccountJoin)
mock_join.role = TenantAccountRole.EDITOR # Editor is not privileged
with patch("services.billing_service.db.session") as mock_session:
mock_session.scalar.return_value = mock_join
# Act & Assert
with pytest.raises(ValueError) as exc_info:
BillingService.is_tenant_owner_or_admin(current_user)
assert "Only team owner or team admin can perform this action" in str(exc_info.value)
def test_is_tenant_owner_or_admin_dataset_operator_raises_error(self):
"""Test tenant owner/admin check raises error for dataset operator role."""
# Arrange
current_user = MagicMock(spec=Account)
current_user.id = "account-123"
current_user.current_tenant_id = "tenant-456"
mock_join = MagicMock(spec=TenantAccountJoin)
mock_join.role = TenantAccountRole.DATASET_OPERATOR # Dataset operator is not privileged
with patch("services.billing_service.db.session") as mock_session:
mock_session.scalar.return_value = mock_join
# Act & Assert
with pytest.raises(ValueError) as exc_info:
BillingService.is_tenant_owner_or_admin(current_user)
assert "Only team owner or team admin can perform this action" in str(exc_info.value)
class TestBillingServiceSubscriptionOperations:
"""Unit tests for subscription operations in BillingService.

146
api/uv.lock generated
View File

@ -1511,23 +1511,23 @@ requires-dist = [
{ name = "mlflow-skinny", specifier = ">=3.11.1" },
{ name = "numpy", specifier = "~=2.4.4" },
{ name = "openpyxl", specifier = "~=3.1.5" },
{ name = "opentelemetry-api", specifier = "==1.40.0" },
{ name = "opentelemetry-distro", specifier = "==0.61b0" },
{ name = "opentelemetry-exporter-otlp", specifier = "==1.40.0" },
{ name = "opentelemetry-exporter-otlp-proto-common", specifier = "==1.40.0" },
{ name = "opentelemetry-exporter-otlp-proto-grpc", specifier = "==1.40.0" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = "==1.40.0" },
{ name = "opentelemetry-instrumentation", specifier = "==0.61b0" },
{ name = "opentelemetry-instrumentation-celery", specifier = "==0.61b0" },
{ name = "opentelemetry-instrumentation-flask", specifier = "==0.61b0" },
{ name = "opentelemetry-instrumentation-httpx", specifier = "==0.61b0" },
{ name = "opentelemetry-instrumentation-redis", specifier = "==0.61b0" },
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.61b0" },
{ name = "opentelemetry-api", specifier = "==1.41.0" },
{ name = "opentelemetry-distro", specifier = "==0.62b0" },
{ name = "opentelemetry-exporter-otlp", specifier = "==1.41.0" },
{ name = "opentelemetry-exporter-otlp-proto-common", specifier = "==1.41.0" },
{ name = "opentelemetry-exporter-otlp-proto-grpc", specifier = "==1.41.0" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = "==1.41.0" },
{ name = "opentelemetry-instrumentation", specifier = "==0.62b0" },
{ name = "opentelemetry-instrumentation-celery", specifier = "==0.62b0" },
{ name = "opentelemetry-instrumentation-flask", specifier = "==0.62b0" },
{ name = "opentelemetry-instrumentation-httpx", specifier = "==0.62b0" },
{ name = "opentelemetry-instrumentation-redis", specifier = "==0.62b0" },
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.62b0" },
{ name = "opentelemetry-propagator-b3", specifier = "==1.41.0" },
{ name = "opentelemetry-proto", specifier = "==1.40.0" },
{ name = "opentelemetry-sdk", specifier = "==1.40.0" },
{ name = "opentelemetry-semantic-conventions", specifier = "==0.61b0" },
{ name = "opentelemetry-util-http", specifier = "==0.61b0" },
{ name = "opentelemetry-proto", specifier = "==1.41.0" },
{ name = "opentelemetry-sdk", specifier = "==1.41.0" },
{ name = "opentelemetry-semantic-conventions", specifier = "==0.62b0" },
{ name = "opentelemetry-util-http", specifier = "==0.62b0" },
{ name = "opik", specifier = "~=1.11.2" },
{ name = "packaging", specifier = "~=26.0" },
{ name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=3.0.2" },
@ -3708,59 +3708,59 @@ wheels = [
[[package]]
name = "opentelemetry-api"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "importlib-metadata" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" }
sdist = { url = "https://files.pythonhosted.org/packages/47/8e/3778a7e87801d994869a9396b9fc2a289e5f9be91ff54a27d41eace494b0/opentelemetry_api-1.41.0.tar.gz", hash = "sha256:9421d911326ec12dee8bc933f7839090cad7a3f13fcfb0f9e82f8174dc003c09", size = 71416, upload-time = "2026-04-09T14:38:34.544Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" },
{ url = "https://files.pythonhosted.org/packages/58/ee/99ab786653b3bda9c37ade7e24a7b607a1b1f696063172768417539d876d/opentelemetry_api-1.41.0-py3-none-any.whl", hash = "sha256:0e77c806e6a89c9e4f8d372034622f3e1418a11bdbe1c80a50b3d3397ad0fa4f", size = 69007, upload-time = "2026-04-09T14:38:11.833Z" },
]
[[package]]
name = "opentelemetry-distro"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-sdk" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f5/00/1f8acc51326956a596fefaf67751380001af36029132a7a07d4debce3c06/opentelemetry_distro-0.61b0.tar.gz", hash = "sha256:975b845f50181ad53753becf4fd4b123b54fa04df5a9d78812264436d6518981", size = 2590, upload-time = "2026-03-04T14:20:12.453Z" }
sdist = { url = "https://files.pythonhosted.org/packages/72/c6/52b0dbcc8fbdecf179047921940516cbb8aaf05f6b737faa526ad76fec51/opentelemetry_distro-0.62b0.tar.gz", hash = "sha256:aa0308fbe50ad8f17d4446982dbf26870e20b8031ba38d8e1224ecf7aedd3184", size = 2611, upload-time = "2026-04-09T14:40:20.404Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/56/2c/efcc995cd7484e6e55b1d26bd7fa6c55ca96bd415ff94310b52c19f330b0/opentelemetry_distro-0.61b0-py3-none-any.whl", hash = "sha256:f21d1ac0627549795d75e332006dd068877f00e461b1b2e8fe4568d6eb7b9590", size = 3349, upload-time = "2026-03-04T14:18:57.788Z" },
{ url = "https://files.pythonhosted.org/packages/b3/7e/5858bba1c7ed880c7b0fe7d9a1ea40ab8affd18c9ebc1e16c2d69c501da1/opentelemetry_distro-0.62b0-py3-none-any.whl", hash = "sha256:23e9065a35cef12868ad5efb18ce9c88a9103800256b318dec4c9c850c6c78c1", size = 3348, upload-time = "2026-04-09T14:39:17.406Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-exporter-otlp-proto-grpc" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d0/37/b6708e0eff5c5fb9aba2e0ea09f7f3bcbfd12a592d2a780241b5f6014df7/opentelemetry_exporter_otlp-1.40.0.tar.gz", hash = "sha256:7caa0870b95e2fcb59d64e16e2b639ecffb07771b6cd0000b5d12e5e4fef765a", size = 6152, upload-time = "2026-03-04T14:17:23.235Z" }
sdist = { url = "https://files.pythonhosted.org/packages/65/b7/845565a2ab5d22c1486bc7729a06b05cd0964c61539d766e1f107c9eea0c/opentelemetry_exporter_otlp-1.41.0.tar.gz", hash = "sha256:97ff847321f8d4c919032a67d20d3137fb7b34eac0c47f13f71112858927fc5b", size = 6152, upload-time = "2026-04-09T14:38:35.895Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2d/fc/aea77c28d9f3ffef2fdafdc3f4a235aee4091d262ddabd25882f47ce5c5f/opentelemetry_exporter_otlp-1.40.0-py3-none-any.whl", hash = "sha256:48c87e539ec9afb30dc443775a1334cc5487de2f72a770a4c00b1610bf6c697d", size = 7023, upload-time = "2026-03-04T14:17:03.612Z" },
{ url = "https://files.pythonhosted.org/packages/e0/f2/f1076fff152858773f22cda146713f9ae3661795af6bacd411a76f2151ac/opentelemetry_exporter_otlp-1.41.0-py3-none-any.whl", hash = "sha256:443b6a45c990ae4c55e147f97049a86c5f5b704f3d78b48b44a073a886ec4d6e", size = 7022, upload-time = "2026-04-09T14:38:13.934Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-proto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/51/bc/1559d46557fe6eca0b46c88d4c2676285f1f3be2e8d06bb5d15fbffc814a/opentelemetry_exporter_otlp_proto_common-1.40.0.tar.gz", hash = "sha256:1cbee86a4064790b362a86601ee7934f368b81cd4cc2f2e163902a6e7818a0fa", size = 20416, upload-time = "2026-03-04T14:17:23.801Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8c/28/e8eca94966fe9a1465f6094dc5ddc5398473682180279c94020bc23b4906/opentelemetry_exporter_otlp_proto_common-1.41.0.tar.gz", hash = "sha256:966bbce537e9edb166154779a7c4f8ab6b8654a03a28024aeaf1a3eacb07d6ee", size = 20411, upload-time = "2026-04-09T14:38:36.572Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/ca/8f122055c97a932311a3f640273f084e738008933503d0c2563cd5d591fc/opentelemetry_exporter_otlp_proto_common-1.40.0-py3-none-any.whl", hash = "sha256:7081ff453835a82417bf38dccf122c827c3cbc94f2079b03bba02a3165f25149", size = 18369, upload-time = "2026-03-04T14:17:04.796Z" },
{ url = "https://files.pythonhosted.org/packages/26/c4/78b9bf2d9c1d5e494f44932988d9d91c51a66b9a7b48adf99b62f7c65318/opentelemetry_exporter_otlp_proto_common-1.41.0-py3-none-any.whl", hash = "sha256:7a99177bf61f85f4f9ed2072f54d676364719c066f6d11f515acc6c745c7acf0", size = 18366, upload-time = "2026-04-09T14:38:15.135Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "googleapis-common-protos" },
@ -3771,14 +3771,14 @@ dependencies = [
{ name = "opentelemetry-sdk" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8f/7f/b9e60435cfcc7590fa87436edad6822240dddbc184643a2a005301cc31f4/opentelemetry_exporter_otlp_proto_grpc-1.40.0.tar.gz", hash = "sha256:bd4015183e40b635b3dab8da528b27161ba83bf4ef545776b196f0fb4ec47740", size = 25759, upload-time = "2026-03-04T14:17:24.4Z" }
sdist = { url = "https://files.pythonhosted.org/packages/42/46/d75a3f8c91915f2e58f61d0a2e4ada63891e7c7a37a20ff7949ba184a6b2/opentelemetry_exporter_otlp_proto_grpc-1.41.0.tar.gz", hash = "sha256:f704201251c6f65772b11bddea1c948000554459101bdbb0116e0a01b70592f6", size = 25754, upload-time = "2026-04-09T14:38:37.423Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/6f/7ee0980afcbdcd2d40362da16f7f9796bd083bf7f0b8e038abfbc0300f5d/opentelemetry_exporter_otlp_proto_grpc-1.40.0-py3-none-any.whl", hash = "sha256:2aa0ca53483fe0cf6405087a7491472b70335bc5c7944378a0a8e72e86995c52", size = 20304, upload-time = "2026-03-04T14:17:05.942Z" },
{ url = "https://files.pythonhosted.org/packages/81/f6/b09e2e0c9f0b5750cebc6eaf31527b910821453cef40a5a0fe93550422b2/opentelemetry_exporter_otlp_proto_grpc-1.41.0-py3-none-any.whl", hash = "sha256:3a1a86bd24806ccf136ec9737dbfa4c09b069f9130ff66b0acb014f9c5255fd1", size = 20299, upload-time = "2026-04-09T14:38:17.01Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp-proto-http"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "googleapis-common-protos" },
@ -3789,14 +3789,14 @@ dependencies = [
{ name = "requests" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2e/fa/73d50e2c15c56be4d000c98e24221d494674b0cc95524e2a8cb3856d95a4/opentelemetry_exporter_otlp_proto_http-1.40.0.tar.gz", hash = "sha256:db48f5e0f33217588bbc00274a31517ba830da576e59503507c839b38fa0869c", size = 17772, upload-time = "2026-03-04T14:17:25.324Z" }
sdist = { url = "https://files.pythonhosted.org/packages/19/63/d9f43cd75f3fabb7e01148c89cfa9491fc18f6580a6764c554ff7c953c46/opentelemetry_exporter_otlp_proto_http-1.41.0.tar.gz", hash = "sha256:dcd6e0686f56277db4eecbadd5262124e8f2cc739cadbc3fae3d08a12c976cf5", size = 24139, upload-time = "2026-04-09T14:38:38.128Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a0/3a/8865d6754e61c9fb170cdd530a124a53769ee5f740236064816eb0ca7301/opentelemetry_exporter_otlp_proto_http-1.40.0-py3-none-any.whl", hash = "sha256:a8d1dab28f504c5d96577d6509f80a8150e44e8f45f82cdbe0e34c99ab040069", size = 19960, upload-time = "2026-03-04T14:17:07.153Z" },
{ url = "https://files.pythonhosted.org/packages/64/b5/a214cd907eedc17699d1c2d602288ae17cb775526df04db3a3b3585329d2/opentelemetry_exporter_otlp_proto_http-1.41.0-py3-none-any.whl", hash = "sha256:a9c4ee69cce9c3f4d7ee736ad1b44e3c9654002c0816900abbafd9f3cf289751", size = 22673, upload-time = "2026-04-09T14:38:18.349Z" },
]
[[package]]
name = "opentelemetry-instrumentation"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3804,14 +3804,14 @@ dependencies = [
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/37/6bf8e66bfcee5d3c6515b79cb2ee9ad05fe573c20f7ceb288d0e7eeec28c/opentelemetry_instrumentation-0.61b0.tar.gz", hash = "sha256:cb21b48db738c9de196eba6b805b4ff9de3b7f187e4bbf9a466fa170514f1fc7", size = 32606, upload-time = "2026-03-04T14:20:16.825Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/fd/b8e90bb340957f059084376f94cff336b0e871a42feba7d3f7342365e987/opentelemetry_instrumentation-0.62b0.tar.gz", hash = "sha256:aa1b0b9ab2e1722c2a8a5384fb016fc28d30bba51826676c8036074790d2861e", size = 34042, upload-time = "2026-04-09T14:40:22.843Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/3e/f6f10f178b6316de67f0dfdbbb699a24fbe8917cf1743c1595fb9dcdd461/opentelemetry_instrumentation-0.61b0-py3-none-any.whl", hash = "sha256:92a93a280e69788e8f88391247cc530fd81f16f2b011979d4d6398f805cfbc63", size = 33448, upload-time = "2026-03-04T14:19:02.447Z" },
{ url = "https://files.pythonhosted.org/packages/00/b6/3356d2e335e3c449c5183e9b023f30f04f1b7073a6583c68745ea2e704b1/opentelemetry_instrumentation-0.62b0-py3-none-any.whl", hash = "sha256:30d4e76486eae64fb095264a70c2c809c4bed17b73373e53091470661f7d477c", size = 34158, upload-time = "2026-04-09T14:39:21.428Z" },
]
[[package]]
name = "opentelemetry-instrumentation-asgi"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref" },
@ -3820,28 +3820,28 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/00/3e/143cf5c034e58037307e6a24f06e0dd64b2c49ae60a965fc580027581931/opentelemetry_instrumentation_asgi-0.61b0.tar.gz", hash = "sha256:9d08e127244361dc33976d39dd4ca8f128b5aa5a7ae425208400a80a095019b5", size = 26691, upload-time = "2026-03-04T14:20:21.038Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/38/999bf777774878971c2716de4b7a03cd57a7decb4af25090e703b79fa0e5/opentelemetry_instrumentation_asgi-0.62b0.tar.gz", hash = "sha256:93cde8c62e5918a3c1ff9ba020518127300e5e0816b7e8b14baf46a26ba619fc", size = 26779, upload-time = "2026-04-09T14:40:26.566Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/19/78/154470cf9d741a7487fbb5067357b87386475bbb77948a6707cae982e158/opentelemetry_instrumentation_asgi-0.61b0-py3-none-any.whl", hash = "sha256:e4b3ce6b66074e525e717efff20745434e5efd5d9df6557710856fba356da7a4", size = 16980, upload-time = "2026-03-04T14:19:10.894Z" },
{ url = "https://files.pythonhosted.org/packages/25/cf/29df82f5870178143bdb5c9a7be044b9f78c71e1c5dcf995242e86d80158/opentelemetry_instrumentation_asgi-0.62b0-py3-none-any.whl", hash = "sha256:89b62a6f996b260b162f515c25e6d78e39286e4cbe2f935899e51b32f31027e2", size = 17011, upload-time = "2026-04-09T14:39:27.305Z" },
]
[[package]]
name = "opentelemetry-instrumentation-celery"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-semantic-conventions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8d/43/e79108a804d16b1dc8ff28edd0e94ac393cf6359a5adcd7cdd2ec4be85f4/opentelemetry_instrumentation_celery-0.61b0.tar.gz", hash = "sha256:0e352a567dc89ed8bc083fc635035ce3c5b96bbbd92831ffd676e93b87f8e94f", size = 14780, upload-time = "2026-03-04T14:20:27.776Z" }
sdist = { url = "https://files.pythonhosted.org/packages/01/b4/20a3c8c669dc45aa3703c0370041d67e8be613f1829523cdaf634a5f9626/opentelemetry_instrumentation_celery-0.62b0.tar.gz", hash = "sha256:55e8fa48e5b886bcca448fa32e28a6cc2165157745e8328de479a826d3903095", size = 14808, upload-time = "2026-04-09T14:40:31.603Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/ed/c05f3c84b455654eb6c047474ffde61ed92efc24030f64213c98bca9d44b/opentelemetry_instrumentation_celery-0.61b0-py3-none-any.whl", hash = "sha256:01235733ff0cdf571cb03b270645abb14b9c8d830313dc5842097ec90146320b", size = 13856, upload-time = "2026-03-04T14:19:20.98Z" },
{ url = "https://files.pythonhosted.org/packages/f6/60/cf951e6bd6ec62ec55bd2384e0ba9841ea38f2d128c773d85dc60da97172/opentelemetry_instrumentation_celery-0.62b0-py3-none-any.whl", hash = "sha256:cadfd3e65287a36099dce5ba7e05d98e4c5f9479a455241e01d140ecc5c10935", size = 13864, upload-time = "2026-04-09T14:39:35.009Z" },
]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3850,14 +3850,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/37/35/aa727bb6e6ef930dcdc96a617b83748fece57b43c47d83ba8d83fbeca657/opentelemetry_instrumentation_fastapi-0.61b0.tar.gz", hash = "sha256:3a24f35b07c557ae1bbc483bf8412221f25d79a405f8b047de8b670722e2fa9f", size = 24800, upload-time = "2026-03-04T14:20:32.759Z" }
sdist = { url = "https://files.pythonhosted.org/packages/37/09/92740c6d114d1bef392557a03ae6de64065c83c1b331dae9b57fe718497c/opentelemetry_instrumentation_fastapi-0.62b0.tar.gz", hash = "sha256:e4748e4e575077e08beaf2c5d2f369da63dd90882d89d73c4192a97356637dec", size = 25056, upload-time = "2026-04-09T14:40:36.438Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/91/05/acfeb2cccd434242a0a7d0ea29afaf077e04b42b35b485d89aee4e0d9340/opentelemetry_instrumentation_fastapi-0.61b0-py3-none-any.whl", hash = "sha256:a1a844d846540d687d377516b2ff698b51d87c781b59f47c214359c4a241047c", size = 13485, upload-time = "2026-03-04T14:19:30.351Z" },
{ url = "https://files.pythonhosted.org/packages/64/bb/186ffe0fde0ad33ceb50e1d3596cc849b732d3b825592a6a507a40c8c49b/opentelemetry_instrumentation_fastapi-0.62b0-py3-none-any.whl", hash = "sha256:06d3272ad15f9daea5a0a27c32831aff376110a4b0394197120256ef6d610e6e", size = 13482, upload-time = "2026-04-09T14:39:43.446Z" },
]
[[package]]
name = "opentelemetry-instrumentation-flask"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3867,14 +3867,14 @@ dependencies = [
{ name = "opentelemetry-util-http" },
{ name = "packaging" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d9/33/d6852d8f2c3eef86f2f8c858d6f5315983c7063e07e595519e96d4c31c06/opentelemetry_instrumentation_flask-0.61b0.tar.gz", hash = "sha256:e9faf58dfd9860a1868442d180142645abdafc1a652dd73d469a5efd106a7d49", size = 24071, upload-time = "2026-03-04T14:20:33.437Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8e/86/522294f6a80d59560d8f722da59513d2ed2d53c6178fa109789dacc5dd50/opentelemetry_instrumentation_flask-0.62b0.tar.gz", hash = "sha256:330e903c0e92b06aae32f9eb7b8a923599d7a29440f50841a59dbba34ec6dd9f", size = 24100, upload-time = "2026-04-09T14:40:37.111Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3e/41/619f3530324a58491f2d20f216a10dd7393629b29db4610dda642a27f4ed/opentelemetry_instrumentation_flask-0.61b0-py3-none-any.whl", hash = "sha256:e8ce474d7ce543bfbbb3e93f8a6f8263348af9d7b45502f387420cf3afa71253", size = 15996, upload-time = "2026-03-04T14:19:31.304Z" },
{ url = "https://files.pythonhosted.org/packages/bc/c8/9f3bb38281bcb50c93c3d2358b303645f6917bf972c167484c09f9a97ff1/opentelemetry_instrumentation_flask-0.62b0-py3-none-any.whl", hash = "sha256:8c1f8986ec3887d08899d2eb654625252c929105174911b3b50dcf12b1001807", size = 16006, upload-time = "2026-04-09T14:39:44.401Z" },
]
[[package]]
name = "opentelemetry-instrumentation-httpx"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3883,14 +3883,14 @@ dependencies = [
{ name = "opentelemetry-util-http" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cd/2a/e2becd55e33c29d1d9ef76e2579040ed1951cb33bacba259f6aff2fdd2a6/opentelemetry_instrumentation_httpx-0.61b0.tar.gz", hash = "sha256:6569ec097946c5551c2a4252f74c98666addd1bf047c1dde6b4ef426719ff8dd", size = 24104, upload-time = "2026-03-04T14:20:34.752Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/a7/63e2c6325c8e99cd9b8e0229a8b61c37520ee537214a2c8d514e84486a94/opentelemetry_instrumentation_httpx-0.62b0.tar.gz", hash = "sha256:d865398db3f3c289ba226e355bf4d94460a4301c0c8916e3136caea55ae18000", size = 24182, upload-time = "2026-04-09T14:40:38.719Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/af/88/dde310dce56e2d85cf1a09507f5888544955309edc4b8d22971d6d3d1417/opentelemetry_instrumentation_httpx-0.61b0-py3-none-any.whl", hash = "sha256:dee05c93a6593a5dc3ae5d9d5c01df8b4e2c5d02e49275e5558534ee46343d5e", size = 17198, upload-time = "2026-03-04T14:19:33.585Z" },
{ url = "https://files.pythonhosted.org/packages/c0/5e/7d5fc28487637871b015128cd5dbb3c36f6d343a9098b893bd803d5a9cca/opentelemetry_instrumentation_httpx-0.62b0-py3-none-any.whl", hash = "sha256:c7660b939c12608fec67743126e9b4dc23dceef0ed631c415924966b0d1579e3", size = 17200, upload-time = "2026-04-09T14:39:46.618Z" },
]
[[package]]
name = "opentelemetry-instrumentation-redis"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3898,14 +3898,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cf/21/26205f89358a5f2be3ee5512d3d3bce16b622977f64aeaa9d3fa8887dd39/opentelemetry_instrumentation_redis-0.61b0.tar.gz", hash = "sha256:ae0fbb56be9a641e621d55b02a7d62977a2c77c5ee760addd79b9b266e46e523", size = 14781, upload-time = "2026-03-04T14:20:45.694Z" }
sdist = { url = "https://files.pythonhosted.org/packages/55/7d/5acdb4e4e36c522f9393cfa91f7a431ee089663c77855e524bc97f993020/opentelemetry_instrumentation_redis-0.62b0.tar.gz", hash = "sha256:513bc6679ee251436f0aff7be7ddab6186637dde09a795a8dc9659103f103bef", size = 14796, upload-time = "2026-04-09T14:40:48.391Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a5/e1/8f4c8e4194291dbe828aeabe779050a8497b379ad90040a5a0a7074b1d08/opentelemetry_instrumentation_redis-0.61b0-py3-none-any.whl", hash = "sha256:8d4e850bbb5f8eeafa44c0eac3a007990c7125de187bc9c3659e29ff7e091172", size = 15506, upload-time = "2026-03-04T14:19:48.588Z" },
{ url = "https://files.pythonhosted.org/packages/de/42/a13a7da074c972a51c14277e7f747e90037b9d815515c73b802e95897690/opentelemetry_instrumentation_redis-0.62b0-py3-none-any.whl", hash = "sha256:92ada3d7bdf395785f660549b0e6e8e5bac7cab80e7f1369a7d02228b27684c3", size = 15501, upload-time = "2026-04-09T14:40:00.69Z" },
]
[[package]]
name = "opentelemetry-instrumentation-sqlalchemy"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3914,14 +3914,14 @@ dependencies = [
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9e/4f/3a325b180944610697a0a926d49d782b41a86120050d44fefb2715b630ac/opentelemetry_instrumentation_sqlalchemy-0.61b0.tar.gz", hash = "sha256:13a3a159a2043a52f0180b3757fbaa26741b0e08abb50deddce4394c118956e6", size = 15343, upload-time = "2026-03-04T14:20:47.648Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2a/3d/40adc8c38e5be017ceb230a28ca57ca81981d4dc0c4b902cc930c77fd14f/opentelemetry_instrumentation_sqlalchemy-0.62b0.tar.gz", hash = "sha256:d02f85b83f349e9ef70a34cb3f4c3a3481fa15b11747f09209818663e161cac4", size = 18539, upload-time = "2026-04-09T14:40:50.251Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1f/97/b906a930c6a1a20c53ecc8b58cabc2cdd0ce560a2b5d44259084ffe4333e/opentelemetry_instrumentation_sqlalchemy-0.61b0-py3-none-any.whl", hash = "sha256:f115e0be54116ba4c327b8d7b68db4045ee18d44439d888ab8130a549c50d1c1", size = 14547, upload-time = "2026-03-04T14:19:53.088Z" },
{ url = "https://files.pythonhosted.org/packages/e7/e0/77954ac593f34740dc32e28a15fe7170e90f6ba6398eaaa5c88b34c05ed1/opentelemetry_instrumentation_sqlalchemy-0.62b0-py3-none-any.whl", hash = "sha256:ec576e0660080d9d15ce4fa44d2a07fff8cb4b796a84344cb0f2c9e5d6e26f79", size = 15534, upload-time = "2026-04-09T14:40:03.957Z" },
]
[[package]]
name = "opentelemetry-instrumentation-wsgi"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@ -3929,9 +3929,9 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/89/e5/189f2845362cfe78e356ba127eab21456309def411c6874aa4800c3de816/opentelemetry_instrumentation_wsgi-0.61b0.tar.gz", hash = "sha256:380f2ae61714e5303275a80b2e14c58571573cd1fddf496d8c39fb9551c5e532", size = 19898, upload-time = "2026-03-04T14:20:54.068Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b7/5c/ed45ff053d76c94c59173f2bcde3d61052adb10214f70f028f760aa56625/opentelemetry_instrumentation_wsgi-0.62b0.tar.gz", hash = "sha256:d179f969ecce0c29a15ffd4d982580dfae57c8ff2fd4d9366e299a6d4815e668", size = 19922, upload-time = "2026-04-09T14:40:56.227Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/75/d6b42ba26f3c921be6d01b16561b7bb863f843bad7ac3a5011f62617bcab/opentelemetry_instrumentation_wsgi-0.61b0-py3-none-any.whl", hash = "sha256:bd33b0824166f24134a3400648805e8d2e6a7951f070241294e8b8866611d7fa", size = 14628, upload-time = "2026-03-04T14:20:03.934Z" },
{ url = "https://files.pythonhosted.org/packages/f6/cb/753dbbe624df88594fa35a3ff26302fea22623385ed64462f6c8ee7c81eb/opentelemetry_instrumentation_wsgi-0.62b0-py3-none-any.whl", hash = "sha256:2714ab5ab2f35e67dc181ffa3a43fa15313c85c09b4d024c36d72cf1efa29c9a", size = 14628, upload-time = "2026-04-09T14:40:13.529Z" },
]
[[package]]
@ -3949,50 +3949,50 @@ wheels = [
[[package]]
name = "opentelemetry-proto"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "protobuf" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4c/77/dd38991db037fdfce45849491cb61de5ab000f49824a00230afb112a4392/opentelemetry_proto-1.40.0.tar.gz", hash = "sha256:03f639ca129ba513f5819810f5b1f42bcb371391405d99c168fe6937c62febcd", size = 45667, upload-time = "2026-03-04T14:17:31.194Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e0/d9/08e3dc6156878713e8c811682bc76151f5fe1a3cb7f3abda3966fd56e71e/opentelemetry_proto-1.41.0.tar.gz", hash = "sha256:95d2e576f9fb1800473a3e4cfcca054295d06bdb869fda4dc9f4f779dc68f7b6", size = 45669, upload-time = "2026-04-09T14:38:45.978Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b9/b2/189b2577dde745b15625b3214302605b1353436219d42b7912e77fa8dc24/opentelemetry_proto-1.40.0-py3-none-any.whl", hash = "sha256:266c4385d88923a23d63e353e9761af0f47a6ed0d486979777fe4de59dc9b25f", size = 72073, upload-time = "2026-03-04T14:17:16.673Z" },
{ url = "https://files.pythonhosted.org/packages/49/8c/65ef7a9383a363864772022e822b5d5c6988e6f9dabeebb9278f5b86ebc3/opentelemetry_proto-1.41.0-py3-none-any.whl", hash = "sha256:b970ab537309f9eed296be482c3e7cca05d8aca8165346e929f658dbe153b247", size = 72074, upload-time = "2026-04-09T14:38:29.38Z" },
]
[[package]]
name = "opentelemetry-sdk"
version = "1.40.0"
version = "1.41.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/58/fd/3c3125b20ba18ce2155ba9ea74acb0ae5d25f8cd39cfd37455601b7955cc/opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2", size = 184252, upload-time = "2026-03-04T14:17:31.87Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/0e/a586df1186f9f56b5a0879d52653effc40357b8e88fc50fe300038c3c08b/opentelemetry_sdk-1.41.0.tar.gz", hash = "sha256:7bddf3961131b318fc2d158947971a8e37e38b1cd23470cfb72b624e7cc108bd", size = 230181, upload-time = "2026-04-09T14:38:47.225Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/c5/6a852903d8bfac758c6dc6e9a68b015d3c33f2f1be5e9591e0f4b69c7e0a/opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1", size = 141951, upload-time = "2026-03-04T14:17:17.961Z" },
{ url = "https://files.pythonhosted.org/packages/2c/13/a7825118208cb32e6a4edcd0a99f925cbef81e77b3b0aedfd9125583c543/opentelemetry_sdk-1.41.0-py3-none-any.whl", hash = "sha256:a596f5687964a3e0d7f8edfdcf5b79cbca9c93c7025ebf5fb00f398a9443b0bd", size = 180214, upload-time = "2026-04-09T14:38:30.657Z" },
]
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/c0/4ae7973f3c2cfd2b6e321f1675626f0dab0a97027cc7a297474c9c8f3d04/opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a", size = 145755, upload-time = "2026-03-04T14:17:32.664Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a3/b0/c14f723e86c049b7bf8ff431160d982519b97a7be2857ed2247377397a24/opentelemetry_semantic_conventions-0.62b0.tar.gz", hash = "sha256:cbfb3c8fc259575cf68a6e1b94083cc35adc4a6b06e8cf431efa0d62606c0097", size = 145753, upload-time = "2026-04-09T14:38:48.274Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b2/37/cc6a55e448deaa9b27377d087da8615a3416d8ad523d5960b78dbeadd02a/opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2", size = 231621, upload-time = "2026-03-04T14:17:19.33Z" },
{ url = "https://files.pythonhosted.org/packages/58/6c/5e86fa1759a525ef91c2d8b79d668574760ff3f900d114297765eb8786cb/opentelemetry_semantic_conventions-0.62b0-py3-none-any.whl", hash = "sha256:0ddac1ce59eaf1a827d9987ab60d9315fb27aea23304144242d1fcad9e16b489", size = 231619, upload-time = "2026-04-09T14:38:32.394Z" },
]
[[package]]
name = "opentelemetry-util-http"
version = "0.61b0"
version = "0.62b0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/57/3c/f0196223efc5c4ca19f8fad3d5462b171ac6333013335ce540c01af419e9/opentelemetry_util_http-0.61b0.tar.gz", hash = "sha256:1039cb891334ad2731affdf034d8fb8b48c239af9b6dd295e5fabd07f1c95572", size = 11361, upload-time = "2026-03-04T14:20:57.01Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/830f7c57135158eb8a8efd3f94ab191a89e3b8a49bed314a35ee501da3f2/opentelemetry_util_http-0.62b0.tar.gz", hash = "sha256:a62e4b19b8a432c0de657f167dee3455516136bb9c6ed463ca8063019970d835", size = 11393, upload-time = "2026-04-09T14:40:59.442Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0d/e5/c08aaaf2f64288d2b6ef65741d2de5454e64af3e050f34285fb1907492fe/opentelemetry_util_http-0.61b0-py3-none-any.whl", hash = "sha256:8e715e848233e9527ea47e275659ea60a57a75edf5206a3b937e236a6da5fc33", size = 9281, upload-time = "2026-03-04T14:20:08.364Z" },
{ url = "https://files.pythonhosted.org/packages/3d/7f/5c1b7d4385852b9e5eacd4e7f9d8b565d3d351d17463b24916ad098adf1a/opentelemetry_util_http-0.62b0-py3-none-any.whl", hash = "sha256:c20462808d8cc95b69b0dc4a3e02a9d36beb663347e96c931f51ffd78bd318ad", size = 9294, upload-time = "2026-04-09T14:40:19.014Z" },
]
[[package]]

View File

@ -9,6 +9,7 @@ import {
EDUCATION_VERIFYING_LOCALSTORAGE_ITEM,
} from '@/app/education-apply/constants'
import { usePathname, useRouter, useSearchParams } from '@/next/navigation'
import { rememberCreateAppExternalAttribution } from '@/utils/create-app-tracking'
import { sendGAEvent } from '@/utils/gtag'
import { fetchSetupStatusWithCache } from '@/utils/setup-status'
import { resolvePostLoginRedirect } from '../signin/utils/post-login-redirect'
@ -45,6 +46,8 @@ export const AppInitializer = ({
(async () => {
const action = searchParams.get('action')
rememberCreateAppExternalAttribution({ searchParams })
if (oauthNewUser) {
let utmInfo = null
const utmInfoStr = Cookies.get('utm_info')

View File

@ -4,7 +4,6 @@ import { AppModeEnum } from '@/types/app'
import Apps from '../index'
const mockUseExploreAppList = vi.fn()
const mockTrackEvent = vi.fn()
const mockImportDSL = vi.fn()
const mockFetchAppDetail = vi.fn()
const mockHandleCheckPluginDependencies = vi.fn()
@ -12,6 +11,7 @@ const mockGetRedirection = vi.fn()
const mockPush = vi.fn()
const mockToastSuccess = vi.fn()
const mockToastError = vi.fn()
const mockTrackCreateApp = vi.fn()
let latestDebounceFn = () => {}
vi.mock('ahooks', () => ({
@ -92,8 +92,8 @@ vi.mock('@/app/components/base/ui/toast', () => ({
error: (...args: unknown[]) => mockToastError(...args),
},
}))
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: (...args: unknown[]) => mockTrackEvent(...args),
vi.mock('@/utils/create-app-tracking', () => ({
trackCreateApp: (...args: unknown[]) => mockTrackCreateApp(...args),
}))
vi.mock('@/service/apps', () => ({
importDSL: (...args: unknown[]) => mockImportDSL(...args),
@ -246,10 +246,9 @@ describe('Apps', () => {
}))
})
expect(mockTrackEvent).toHaveBeenCalledWith('create_app_with_template', expect.objectContaining({
template_id: 'Alpha',
template_name: 'Alpha',
}))
expect(mockTrackCreateApp).toHaveBeenCalledWith({
appMode: AppModeEnum.CHAT,
})
expect(mockToastSuccess).toHaveBeenCalledWith('app.newApp.appCreated')
expect(onSuccess).toHaveBeenCalled()
expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('created-app-id')

View File

@ -8,7 +8,6 @@ import * as React from 'react'
import { useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import AppTypeSelector from '@/app/components/app/type-selector'
import { trackEvent } from '@/app/components/base/amplitude'
import Divider from '@/app/components/base/divider'
import Input from '@/app/components/base/input'
import Loading from '@/app/components/base/loading'
@ -25,6 +24,7 @@ import { useExploreAppList } from '@/service/use-explore'
import { AppModeEnum } from '@/types/app'
import { getRedirection } from '@/utils/app-redirection'
import { cn } from '@/utils/classnames'
import { trackCreateApp } from '@/utils/create-app-tracking'
import AppCard from '../app-card'
import Sidebar, { AppCategories, AppCategoryLabel } from './sidebar'
@ -127,14 +127,7 @@ const Apps = ({
icon_background,
description,
})
// Track app creation from template
trackEvent('create_app_with_template', {
app_mode: mode,
template_id: currApp?.app.id,
template_name: currApp?.app.name,
description,
})
trackCreateApp({ appMode: mode })
setIsShowCreateModal(false)
toast.success(t('newApp.appCreated', { ns: 'app' }))

View File

@ -1,7 +1,6 @@
import type { App } from '@/types/app'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'
import { trackEvent } from '@/app/components/base/amplitude'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
@ -10,6 +9,7 @@ import { useRouter } from '@/next/navigation'
import { createApp } from '@/service/apps'
import { AppModeEnum } from '@/types/app'
import { getRedirection } from '@/utils/app-redirection'
import { trackCreateApp } from '@/utils/create-app-tracking'
import CreateAppModal from '../index'
const ahooksMocks = vi.hoisted(() => ({
@ -31,8 +31,8 @@ vi.mock('ahooks', () => ({
vi.mock('@/next/navigation', () => ({
useRouter: vi.fn(),
}))
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: vi.fn(),
vi.mock('@/utils/create-app-tracking', () => ({
trackCreateApp: vi.fn(),
}))
vi.mock('@/service/apps', () => ({
createApp: vi.fn(),
@ -87,7 +87,7 @@ vi.mock('@/hooks/use-theme', () => ({
const mockUseRouter = vi.mocked(useRouter)
const mockPush = vi.fn()
const mockCreateApp = vi.mocked(createApp)
const mockTrackEvent = vi.mocked(trackEvent)
const mockTrackCreateApp = vi.mocked(trackCreateApp)
const mockGetRedirection = vi.mocked(getRedirection)
const mockUseProviderContext = vi.mocked(useProviderContext)
const mockUseAppContext = vi.mocked(useAppContext)
@ -178,10 +178,7 @@ describe('CreateAppModal', () => {
mode: AppModeEnum.ADVANCED_CHAT,
}))
expect(mockTrackEvent).toHaveBeenCalledWith('create_app', {
app_mode: AppModeEnum.ADVANCED_CHAT,
description: '',
})
expect(mockTrackCreateApp).toHaveBeenCalledWith({ appMode: AppModeEnum.ADVANCED_CHAT })
expect(mockToastSuccess).toHaveBeenCalledWith('app.newApp.appCreated')
expect(onSuccess).toHaveBeenCalled()
expect(onClose).toHaveBeenCalled()

View File

@ -6,7 +6,6 @@ import { RiArrowRightLine, RiArrowRightSLine, RiExchange2Fill } from '@remixicon
import { useDebounceFn, useKeyPress } from 'ahooks'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { trackEvent } from '@/app/components/base/amplitude'
import AppIcon from '@/app/components/base/app-icon'
import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider'
@ -25,6 +24,7 @@ import { createApp } from '@/service/apps'
import { AppModeEnum } from '@/types/app'
import { getRedirection } from '@/utils/app-redirection'
import { cn } from '@/utils/classnames'
import { trackCreateApp } from '@/utils/create-app-tracking'
import { basePath } from '@/utils/var'
import AppIconPicker from '../../base/app-icon-picker'
import ShortcutsName from '../../workflow/shortcuts-name'
@ -80,11 +80,7 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate, defaultAppMode }:
mode: appMode,
})
// Track app creation success
trackEvent('create_app', {
app_mode: appMode,
description,
})
trackCreateApp({ appMode: app.mode })
toast.success(t('newApp.appCreated', { ns: 'app' }))
onSuccess()

View File

@ -2,12 +2,13 @@
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { DSLImportMode, DSLImportStatus } from '@/models/app'
import { AppModeEnum } from '@/types/app'
import CreateFromDSLModal, { CreateFromDSLModalTab } from '../index'
const mockPush = vi.fn()
const mockImportDSL = vi.fn()
const mockImportDSLConfirm = vi.fn()
const mockTrackEvent = vi.fn()
const mockTrackCreateApp = vi.fn()
const mockHandleCheckPluginDependencies = vi.fn()
const mockGetRedirection = vi.fn()
const toastMocks = vi.hoisted(() => ({
@ -43,8 +44,8 @@ vi.mock('@/next/navigation', () => ({
}),
}))
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: (...args: unknown[]) => mockTrackEvent(...args),
vi.mock('@/utils/create-app-tracking', () => ({
trackCreateApp: (...args: unknown[]) => mockTrackCreateApp(...args),
}))
vi.mock('@/service/apps', () => ({
@ -172,7 +173,7 @@ describe('CreateFromDSLModal', () => {
id: 'import-1',
status: DSLImportStatus.COMPLETED,
app_id: 'app-1',
app_mode: 'chat',
app_mode: AppModeEnum.CHAT,
})
render(
@ -196,10 +197,7 @@ describe('CreateFromDSLModal', () => {
mode: DSLImportMode.YAML_URL,
yaml_url: 'https://example.com/app.yml',
})
expect(mockTrackEvent).toHaveBeenCalledWith('create_app_with_dsl', expect.objectContaining({
creation_method: 'dsl_url',
has_warnings: false,
}))
expect(mockTrackCreateApp).toHaveBeenCalledWith({ appMode: AppModeEnum.CHAT })
expect(handleSuccess).toHaveBeenCalledTimes(1)
expect(handleClose).toHaveBeenCalledTimes(1)
expect(localStorage.getItem(NEED_REFRESH_APP_LIST_KEY)).toBe('1')
@ -212,7 +210,7 @@ describe('CreateFromDSLModal', () => {
id: 'import-2',
status: DSLImportStatus.COMPLETED_WITH_WARNINGS,
app_id: 'app-2',
app_mode: 'chat',
app_mode: AppModeEnum.CHAT,
})
render(
@ -275,7 +273,7 @@ describe('CreateFromDSLModal', () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
app_id: 'app-3',
app_mode: 'workflow',
app_mode: AppModeEnum.WORKFLOW,
})
render(
@ -305,6 +303,7 @@ describe('CreateFromDSLModal', () => {
expect(mockImportDSLConfirm).toHaveBeenCalledWith({
import_id: 'import-3',
})
expect(mockTrackCreateApp).toHaveBeenCalledWith({ appMode: AppModeEnum.WORKFLOW })
})
it('should ignore empty import responses and prevent duplicate submissions while a request is in flight', async () => {
@ -332,7 +331,7 @@ describe('CreateFromDSLModal', () => {
id: 'import-in-flight',
status: DSLImportStatus.COMPLETED,
app_id: 'app-1',
app_mode: 'chat',
app_mode: AppModeEnum.CHAT,
})
})

View File

@ -6,7 +6,6 @@ import { useDebounceFn, useKeyPress } from 'ahooks'
import { noop } from 'es-toolkit/function'
import { useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { trackEvent } from '@/app/components/base/amplitude'
import Button from '@/app/components/base/button'
import Input from '@/app/components/base/input'
import Modal from '@/app/components/base/modal'
@ -27,6 +26,7 @@ import {
} from '@/service/apps'
import { getRedirection } from '@/utils/app-redirection'
import { cn } from '@/utils/classnames'
import { trackCreateApp } from '@/utils/create-app-tracking'
import ShortcutsName from '../../workflow/shortcuts-name'
import Uploader from './uploader'
@ -112,12 +112,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
return
const { id, status, app_id, app_mode, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
// Track app creation from DSL import
trackEvent('create_app_with_dsl', {
app_mode,
creation_method: currentTab === CreateFromDSLModalTab.FROM_FILE ? 'dsl_file' : 'dsl_url',
has_warnings: status === DSLImportStatus.COMPLETED_WITH_WARNINGS,
})
trackCreateApp({ appMode: app_mode })
if (onSuccess)
onSuccess()
@ -179,6 +174,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
const { status, app_id, app_mode } = response
if (status === DSLImportStatus.COMPLETED) {
trackCreateApp({ appMode: app_mode })
if (onSuccess)
onSuccess()
if (onClose)
@ -228,7 +224,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
isShow={show}
onClose={noop}
>
<div className="flex items-center justify-between pb-3 pl-6 pr-5 pt-6 text-text-primary title-2xl-semi-bold">
<div className="flex items-center justify-between pt-6 pr-5 pb-3 pl-6 title-2xl-semi-bold text-text-primary">
{t('importFromDSL', { ns: 'app' })}
<div
className="flex h-8 w-8 cursor-pointer items-center"
@ -237,7 +233,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
<RiCloseLine className="h-5 w-5 text-text-tertiary" />
</div>
</div>
<div className="flex h-9 items-center space-x-6 border-b border-divider-subtle px-6 text-text-tertiary system-md-semibold">
<div className="flex h-9 items-center space-x-6 border-b border-divider-subtle px-6 system-md-semibold text-text-tertiary">
{
tabs.map(tab => (
<div
@ -271,7 +267,7 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
{
currentTab === CreateFromDSLModalTab.FROM_URL && (
<div>
<div className="mb-1 text-text-secondary system-md-semibold">DSL URL</div>
<div className="mb-1 system-md-semibold text-text-secondary">DSL URL</div>
<Input
placeholder={t('importFromDSLUrlPlaceholder', { ns: 'app' }) || ''}
value={dslUrlValue}
@ -305,8 +301,8 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="text-text-primary title-2xl-semi-bold">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="flex grow flex-col text-text-secondary system-md-regular">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="flex grow flex-col system-md-regular text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />

View File

@ -1,12 +1,48 @@
import type { ReactNode } from 'react'
import type { App } from '@/models/explore'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { render, screen } from '@testing-library/react'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { useContextSelector } from 'use-context-selector'
import AppListContext from '@/context/app-list-context'
import { fetchAppDetail } from '@/service/explore'
import { AppModeEnum } from '@/types/app'
import Apps from '../index'
let documentTitleCalls: string[] = []
let educationInitCalls: number = 0
const mockHandleImportDSL = vi.fn()
const mockHandleImportDSLConfirm = vi.fn()
const mockTrackCreateApp = vi.fn()
const mockFetchAppDetail = vi.mocked(fetchAppDetail)
const mockTemplateApp: App = {
app_id: 'template-1',
category: 'Assistant',
app: {
id: 'template-1',
mode: AppModeEnum.CHAT,
icon_type: 'emoji',
icon: '🤖',
icon_background: '#fff',
icon_url: '',
name: 'Sample App',
description: 'Sample App',
use_icon_as_answer_icon: false,
},
description: 'Sample App',
can_trial: true,
copyright: '',
privacy_policy: null,
custom_disclaimer: null,
position: 1,
is_listed: true,
install_count: 0,
installed: false,
editable: false,
is_agent: false,
}
vi.mock('@/hooks/use-document-title', () => ({
default: (title: string) => {
@ -22,17 +58,80 @@ vi.mock('@/app/education-apply/hooks', () => ({
vi.mock('@/hooks/use-import-dsl', () => ({
useImportDSL: () => ({
handleImportDSL: vi.fn(),
handleImportDSLConfirm: vi.fn(),
handleImportDSL: mockHandleImportDSL,
handleImportDSLConfirm: mockHandleImportDSLConfirm,
versions: [],
isFetching: false,
}),
}))
vi.mock('../list', () => ({
default: () => {
return React.createElement('div', { 'data-testid': 'apps-list' }, 'Apps List')
},
vi.mock('../list', () => {
const MockList = () => {
const setShowTryAppPanel = useContextSelector(AppListContext, ctx => ctx.setShowTryAppPanel)
return React.createElement(
'div',
{ 'data-testid': 'apps-list' },
React.createElement('span', null, 'Apps List'),
React.createElement(
'button',
{
'data-testid': 'open-preview',
'onClick': () => setShowTryAppPanel(true, {
appId: mockTemplateApp.app_id,
app: mockTemplateApp,
}),
},
'Open Preview',
),
)
}
return { default: MockList }
})
vi.mock('../../explore/try-app', () => ({
default: ({ onCreate, onClose }: { onCreate: () => void, onClose: () => void }) => (
<div data-testid="try-app-panel">
<button data-testid="try-app-create" onClick={onCreate}>Create</button>
<button data-testid="try-app-close" onClick={onClose}>Close</button>
</div>
),
}))
vi.mock('../../explore/create-app-modal', () => ({
default: ({ show, onConfirm, onHide }: { show: boolean, onConfirm: (payload: Record<string, string>) => Promise<void>, onHide: () => void }) => show
? (
<div data-testid="create-app-modal">
<button
data-testid="confirm-create"
onClick={() => onConfirm({
name: 'Created App',
icon_type: 'emoji',
icon: '🤖',
icon_background: '#fff',
description: 'created from preview',
})}
>
Confirm
</button>
<button data-testid="hide-create" onClick={onHide}>Hide</button>
</div>
)
: null,
}))
vi.mock('../../app/create-from-dsl-modal/dsl-confirm-modal', () => ({
default: ({ onConfirm }: { onConfirm: () => void }) => (
<button data-testid="confirm-dsl" onClick={onConfirm}>Confirm DSL</button>
),
}))
vi.mock('@/service/explore', () => ({
fetchAppDetail: vi.fn(),
}))
vi.mock('@/utils/create-app-tracking', () => ({
trackCreateApp: (...args: unknown[]) => mockTrackCreateApp(...args),
}))
describe('Apps', () => {
@ -59,6 +158,14 @@ describe('Apps', () => {
vi.clearAllMocks()
documentTitleCalls = []
educationInitCalls = 0
mockFetchAppDetail.mockResolvedValue({
id: 'template-1',
name: 'Sample App',
icon: '🤖',
icon_background: '#fff',
mode: AppModeEnum.CHAT,
export_data: 'yaml-content',
})
})
describe('Rendering', () => {
@ -116,6 +223,25 @@ describe('Apps', () => {
)
expect(screen.getByTestId('apps-list')).toBeInTheDocument()
})
it('should track template preview creation after a successful import', async () => {
mockHandleImportDSL.mockImplementation(async (_payload: unknown, options: { onSuccess?: () => void }) => {
options.onSuccess?.()
})
renderWithClient(<Apps />)
fireEvent.click(screen.getByTestId('open-preview'))
fireEvent.click(await screen.findByTestId('try-app-create'))
fireEvent.click(await screen.findByTestId('confirm-create'))
await waitFor(() => {
expect(mockFetchAppDetail).toHaveBeenCalledWith('template-1')
expect(mockTrackCreateApp).toHaveBeenCalledWith({
appMode: AppModeEnum.CHAT,
})
})
})
})
describe('Styling', () => {

View File

@ -1,7 +1,7 @@
'use client'
import type { CreateAppModalProps } from '../explore/create-app-modal'
import type { TryAppSelection } from '@/types/try-app'
import { useCallback, useState } from 'react'
import { useCallback, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useEducationInit } from '@/app/education-apply/hooks'
import AppListContext from '@/context/app-list-context'
@ -10,6 +10,7 @@ import { useImportDSL } from '@/hooks/use-import-dsl'
import { DSLImportMode } from '@/models/app'
import dynamic from '@/next/dynamic'
import { fetchAppDetail } from '@/service/explore'
import { trackCreateApp } from '@/utils/create-app-tracking'
import List from './list'
const DSLConfirmModal = dynamic(() => import('../app/create-from-dsl-modal/dsl-confirm-modal'), { ssr: false })
@ -23,6 +24,7 @@ const Apps = () => {
useEducationInit()
const [currentTryAppParams, setCurrentTryAppParams] = useState<TryAppSelection | undefined>(undefined)
const currentCreateAppModeRef = useRef<TryAppSelection['app']['app']['mode'] | null>(null)
const currApp = currentTryAppParams?.app
const [isShowTryAppPanel, setIsShowTryAppPanel] = useState(false)
const hideTryAppPanel = useCallback(() => {
@ -40,6 +42,12 @@ const Apps = () => {
const handleShowFromTryApp = useCallback(() => {
setIsShowCreateModal(true)
}, [])
const trackCurrentCreateApp = useCallback(() => {
if (!currentCreateAppModeRef.current)
return
trackCreateApp({ appMode: currentCreateAppModeRef.current })
}, [])
const [controlRefreshList, setControlRefreshList] = useState(0)
const [controlHideCreateFromTemplatePanel, setControlHideCreateFromTemplatePanel] = useState(0)
@ -59,11 +67,14 @@ const Apps = () => {
const onConfirmDSL = useCallback(async () => {
await handleImportDSLConfirm({
onSuccess,
onSuccess: () => {
trackCurrentCreateApp()
onSuccess()
},
})
}, [handleImportDSLConfirm, onSuccess])
}, [handleImportDSLConfirm, onSuccess, trackCurrentCreateApp])
const onCreate: CreateAppModalProps['onConfirm'] = async ({
const onCreate: CreateAppModalProps['onConfirm'] = useCallback(async ({
name,
icon_type,
icon,
@ -72,9 +83,10 @@ const Apps = () => {
}) => {
hideTryAppPanel()
const { export_data } = await fetchAppDetail(
const { export_data, mode } = await fetchAppDetail(
currApp?.app.id as string,
)
currentCreateAppModeRef.current = mode
const payload = {
mode: DSLImportMode.YAML_CONTENT,
yaml_content: export_data,
@ -86,13 +98,14 @@ const Apps = () => {
}
await handleImportDSL(payload, {
onSuccess: () => {
trackCurrentCreateApp()
setIsShowCreateModal(false)
},
onPending: () => {
setShowDSLConfirmModal(true)
},
})
}
}, [currApp?.app.id, handleImportDSL, hideTryAppPanel, trackCurrentCreateApp])
return (
<AppListContext.Provider value={{

View File

@ -5,7 +5,7 @@ import * as amplitude from '@amplitude/analytics-browser'
import { sessionReplayPlugin } from '@amplitude/plugin-session-replay-browser'
import * as React from 'react'
import { useEffect } from 'react'
import { AMPLITUDE_API_KEY, isAmplitudeEnabled } from '@/config'
import { AMPLITUDE_API_KEY } from '@/config'
export type IAmplitudeProps = {
sessionReplaySampleRate?: number
@ -54,8 +54,8 @@ const AmplitudeProvider: FC<IAmplitudeProps> = ({
}) => {
useEffect(() => {
// Only enable in Saas edition with valid API key
if (!isAmplitudeEnabled)
return
// if (!isAmplitudeEnabled)
// return
// Initialize Amplitude
amplitude.init(AMPLITUDE_API_KEY, {

View File

@ -15,6 +15,7 @@ let mockIsLoading = false
let mockIsError = false
const mockHandleImportDSL = vi.fn()
const mockHandleImportDSLConfirm = vi.fn()
const mockTrackCreateApp = vi.fn()
vi.mock('@/service/use-explore', () => ({
useExploreAppList: () => ({
@ -45,6 +46,9 @@ vi.mock('@/hooks/use-import-dsl', () => ({
isFetching: false,
}),
}))
vi.mock('@/utils/create-app-tracking', () => ({
trackCreateApp: (...args: unknown[]) => mockTrackCreateApp(...args),
}))
vi.mock('@/app/components/explore/create-app-modal', () => ({
default: (props: CreateAppModalProps) => {
@ -214,7 +218,7 @@ describe('AppList', () => {
categories: ['Writing'],
allList: [createApp()],
};
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml-content' })
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml-content', mode: AppModeEnum.CHAT })
mockHandleImportDSL.mockImplementation(async (_payload: unknown, options: { onSuccess?: () => void, onPending?: () => void }) => {
options.onPending?.()
})
@ -235,6 +239,9 @@ describe('AppList', () => {
fireEvent.click(screen.getByTestId('dsl-confirm'))
await waitFor(() => {
expect(mockHandleImportDSLConfirm).toHaveBeenCalledTimes(1)
expect(mockTrackCreateApp).toHaveBeenCalledWith({
appMode: AppModeEnum.CHAT,
})
expect(onSuccess).toHaveBeenCalledTimes(1)
})
})
@ -307,7 +314,7 @@ describe('AppList', () => {
categories: ['Writing'],
allList: [createApp()],
};
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml' })
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml', mode: AppModeEnum.CHAT })
renderAppList(true)
fireEvent.click(screen.getByText('explore.appCard.addToWorkspace'))
@ -325,7 +332,7 @@ describe('AppList', () => {
categories: ['Writing'],
allList: [createApp()],
};
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml' })
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml', mode: AppModeEnum.CHAT })
mockHandleImportDSL.mockImplementation(async (_payload: unknown, options: { onSuccess?: () => void }) => {
options.onSuccess?.()
})
@ -337,6 +344,9 @@ describe('AppList', () => {
await waitFor(() => {
expect(screen.queryByTestId('create-app-modal')).not.toBeInTheDocument()
})
expect(mockTrackCreateApp).toHaveBeenCalledWith({
appMode: AppModeEnum.CHAT,
})
})
it('should cancel DSL confirm modal', async () => {
@ -345,7 +355,7 @@ describe('AppList', () => {
categories: ['Writing'],
allList: [createApp()],
};
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml' })
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml', mode: AppModeEnum.CHAT })
mockHandleImportDSL.mockImplementation(async (_payload: unknown, options: { onPending?: () => void }) => {
options.onPending?.()
})
@ -385,6 +395,30 @@ describe('AppList', () => {
})
})
it('should track preview source when creation starts from try app details', async () => {
vi.useRealTimers()
mockExploreData = {
categories: ['Writing'],
allList: [createApp()],
};
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml', mode: AppModeEnum.CHAT })
mockHandleImportDSL.mockImplementation(async (_payload: unknown, options: { onSuccess?: () => void }) => {
options.onSuccess?.()
})
renderAppList(true)
fireEvent.click(screen.getByText('explore.appCard.try'))
fireEvent.click(screen.getByTestId('try-app-create'))
fireEvent.click(await screen.findByTestId('confirm-create'))
await waitFor(() => {
expect(mockTrackCreateApp).toHaveBeenCalledWith({
appMode: AppModeEnum.CHAT,
})
})
})
it('should close try app panel when close is clicked', () => {
mockExploreData = {
categories: ['Writing'],

View File

@ -6,7 +6,7 @@ import type { TryAppSelection } from '@/types/try-app'
import { useDebounceFn } from 'ahooks'
import { useQueryState } from 'nuqs'
import * as React from 'react'
import { useCallback, useMemo, useState } from 'react'
import { useCallback, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import DSLConfirmModal from '@/app/components/app/create-from-dsl-modal/dsl-confirm-modal'
import Button from '@/app/components/base/button'
@ -26,6 +26,7 @@ import { fetchAppDetail } from '@/service/explore'
import { useMembers } from '@/service/use-common'
import { useExploreAppList } from '@/service/use-explore'
import { cn } from '@/utils/classnames'
import { trackCreateApp } from '@/utils/create-app-tracking'
import TryApp from '../try-app'
import s from './style.module.css'
@ -101,6 +102,7 @@ const Apps = ({
const [showDSLConfirmModal, setShowDSLConfirmModal] = useState(false)
const [currentTryApp, setCurrentTryApp] = useState<TryAppSelection | undefined>(undefined)
const currentCreateAppModeRef = useRef<App['app']['mode'] | null>(null)
const isShowTryAppPanel = !!currentTryApp
const hideTryAppPanel = useCallback(() => {
setCurrentTryApp(undefined)
@ -112,8 +114,14 @@ const Apps = ({
setCurrApp(currentTryApp?.app || null)
setIsShowCreateModal(true)
}, [currentTryApp?.app])
const trackCurrentCreateApp = useCallback(() => {
if (!currentCreateAppModeRef.current)
return
const onCreate: CreateAppModalProps['onConfirm'] = async ({
trackCreateApp({ appMode: currentCreateAppModeRef.current })
}, [])
const onCreate: CreateAppModalProps['onConfirm'] = useCallback(async ({
name,
icon_type,
icon,
@ -122,9 +130,10 @@ const Apps = ({
}) => {
hideTryAppPanel()
const { export_data } = await fetchAppDetail(
const { export_data, mode } = await fetchAppDetail(
currApp?.app.id as string,
)
currentCreateAppModeRef.current = mode
const payload = {
mode: DSLImportMode.YAML_CONTENT,
yaml_content: export_data,
@ -136,19 +145,23 @@ const Apps = ({
}
await handleImportDSL(payload, {
onSuccess: () => {
trackCurrentCreateApp()
setIsShowCreateModal(false)
},
onPending: () => {
setShowDSLConfirmModal(true)
},
})
}
}, [currApp?.app.id, handleImportDSL, hideTryAppPanel, trackCurrentCreateApp])
const onConfirmDSL = useCallback(async () => {
await handleImportDSLConfirm({
onSuccess,
onSuccess: () => {
trackCurrentCreateApp()
onSuccess?.()
},
})
}, [handleImportDSLConfirm, onSuccess])
}, [handleImportDSLConfirm, onSuccess, trackCurrentCreateApp])
if (isLoading) {
return (

View File

@ -11,6 +11,7 @@ import { validPassword } from '@/config'
import { useRouter, useSearchParams } from '@/next/navigation'
import { useMailRegister } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import { rememberCreateAppExternalAttribution } from '@/utils/create-app-tracking'
import { sendGAEvent } from '@/utils/gtag'
const parseUtmInfo = () => {
@ -68,6 +69,7 @@ const ChangePasswordForm = () => {
const { result } = res as MailRegisterResponse
if (result === 'success') {
const utmInfo = parseUtmInfo()
rememberCreateAppExternalAttribution({ utmInfo })
trackEvent(utmInfo ? 'user_registration_success_with_utm' : 'user_registration_success', {
method: 'email',
...utmInfo,

View File

@ -0,0 +1,134 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import * as amplitude from '@/app/components/base/amplitude'
import { AppModeEnum } from '@/types/app'
import {
buildCreateAppEventPayload,
extractExternalCreateAppAttribution,
rememberCreateAppExternalAttribution,
trackCreateApp,
} from '../create-app-tracking'
// Unit tests for the create_app analytics helpers. Each test runs against a
// clean sessionStorage and a neutral URL so remembered attribution from one
// case cannot leak into the next.
describe('create-app-tracking', () => {
  beforeEach(() => {
    vi.restoreAllMocks()
    // Stub the analytics sink so no events leave the test process.
    vi.spyOn(amplitude, 'trackEvent').mockImplementation(() => {})
    window.sessionStorage.clear()
    // Reset the URL so window.location.search carries no UTM parameters.
    window.history.replaceState({}, '', '/apps')
  })
  describe('extractExternalCreateAppAttribution', () => {
    it('should map campaign links to external attribution', () => {
      const attribution = extractExternalCreateAppAttribution({
        searchParams: new URLSearchParams('utm_source=x&slug=how-to-build-rag-agent'),
      })
      expect(attribution).toEqual({
        utmSource: 'twitter/x',
        utmCampaign: 'how-to-build-rag-agent',
      })
    })
    it('should map newsletter and blog sources to blog', () => {
      expect(extractExternalCreateAppAttribution({
        searchParams: new URLSearchParams('utm_source=newsletter'),
      })).toEqual({ utmSource: 'blog' })
      // Attribution may also come from a parsed utm_info object (cookie path).
      expect(extractExternalCreateAppAttribution({
        utmInfo: { utm_source: 'dify_blog', slug: 'launch-week' },
      })).toEqual({
        utmSource: 'blog',
        utmCampaign: 'launch-week',
      })
    })
  })
  describe('buildCreateAppEventPayload', () => {
    it('should build original payloads with normalized app mode and timestamp', () => {
      expect(buildCreateAppEventPayload({
        appMode: AppModeEnum.ADVANCED_CHAT,
      }, null, new Date(2026, 3, 13, 14, 5, 9))).toEqual({
        source: 'original',
        app_mode: 'chatflow',
        // Timestamp format is MM-DD-HH:mm:ss (local time).
        time: '04-13-14:05:09',
      })
    })
    it('should map agent mode into the canonical app mode bucket', () => {
      expect(buildCreateAppEventPayload({
        appMode: AppModeEnum.AGENT_CHAT,
      }, null, new Date(2026, 3, 13, 9, 8, 7))).toEqual({
        source: 'original',
        app_mode: 'agent',
        time: '04-13-09:08:07',
      })
    })
    it('should fold legacy non-agent modes into chatflow', () => {
      expect(buildCreateAppEventPayload({
        appMode: AppModeEnum.CHAT,
      }, null, new Date(2026, 3, 13, 8, 0, 1))).toEqual({
        source: 'original',
        app_mode: 'chatflow',
        time: '04-13-08:00:01',
      })
      expect(buildCreateAppEventPayload({
        appMode: AppModeEnum.COMPLETION,
      }, null, new Date(2026, 3, 13, 8, 0, 2))).toEqual({
        source: 'original',
        app_mode: 'chatflow',
        time: '04-13-08:00:02',
      })
    })
    it('should map workflow mode into the workflow bucket', () => {
      expect(buildCreateAppEventPayload({
        appMode: AppModeEnum.WORKFLOW,
      }, null, new Date(2026, 3, 13, 7, 6, 5))).toEqual({
        source: 'original',
        app_mode: 'workflow',
        time: '04-13-07:06:05',
      })
    })
    it('should prefer external attribution when present', () => {
      // When external attribution exists, app mode and timestamp are omitted.
      expect(buildCreateAppEventPayload(
        {
          appMode: AppModeEnum.WORKFLOW,
        },
        {
          utmSource: 'linkedin',
          utmCampaign: 'agent-launch',
        },
      )).toEqual({
        source: 'external',
        utm_source: 'linkedin',
        utm_campaign: 'agent-launch',
      })
    })
  })
  describe('trackCreateApp', () => {
    it('should track remembered external attribution once before falling back to internal source', () => {
      rememberCreateAppExternalAttribution({
        searchParams: new URLSearchParams('utm_source=newsletter&slug=how-to-build-rag-agent'),
      })
      // First event consumes the remembered external attribution…
      trackCreateApp({ appMode: AppModeEnum.WORKFLOW })
      expect(amplitude.trackEvent).toHaveBeenNthCalledWith(1, 'create_app', {
        source: 'external',
        utm_source: 'blog',
        utm_campaign: 'how-to-build-rag-agent',
      })
      // …so the second event reports the default 'original' payload.
      trackCreateApp({ appMode: AppModeEnum.WORKFLOW })
      expect(amplitude.trackEvent).toHaveBeenNthCalledWith(2, 'create_app', {
        source: 'original',
        app_mode: 'workflow',
        time: expect.stringMatching(/^\d{2}-\d{2}-\d{2}:\d{2}:\d{2}$/),
      })
    })
  })
})

View File

@ -0,0 +1,187 @@
import Cookies from 'js-cookie'
import { trackEvent } from '@/app/components/base/amplitude'
import { AppModeEnum } from '@/types/app'
// sessionStorage key under which external (campaign) attribution is remembered
// between landing/registration and the eventual app-creation event.
const CREATE_APP_EXTERNAL_ATTRIBUTION_STORAGE_KEY = 'create_app_external_attribution'
// Recognized raw utm_source values mapped to the canonical buckets reported to
// analytics. Sources not listed here are treated as non-external ('original').
const EXTERNAL_UTM_SOURCE_MAP = {
  blog: 'blog',
  dify_blog: 'blog',
  linkedin: 'linkedin',
  newsletter: 'blog',
  twitter: 'twitter/x',
  x: 'twitter/x',
} as const
// Minimal read-only view of URLSearchParams: anything exposing `get`.
type SearchParamReader = {
  get: (name: string) => string | null
}
// Canonical app-mode buckets used in the 'original' create_app payload.
type OriginalCreateAppMode = 'workflow' | 'chatflow' | 'agent'
type TrackCreateAppParams = {
  appMode: AppModeEnum
}
// External (campaign) attribution attached to a create_app event.
type ExternalCreateAppAttribution = {
  utmSource: typeof EXTERNAL_UTM_SOURCE_MAP[keyof typeof EXTERNAL_UTM_SOURCE_MAP]
  utmCampaign?: string
}
// Trim a possibly-missing string; null/undefined and whitespace-only values
// collapse to undefined so callers can use ?? fallbacks uniformly.
const normalizeString = (value?: string | null) => {
  if (value == null)
    return undefined
  const trimmed = value.trim()
  return trimmed === '' ? undefined : trimmed
}
// Pull a trimmed string out of an unknown value; any non-string (or an
// empty/whitespace-only string) yields undefined.
const getObjectStringValue = (value: unknown) => {
  if (typeof value !== 'string')
    return undefined
  return normalizeString(value)
}
// Read `key` from the search-param reader and normalize it; undefined when the
// reader or key is missing, or when the parameter is absent/blank.
const getSearchParamValue = (searchParams?: SearchParamReader | null, key?: string) => {
  if (!searchParams || !key)
    return undefined
  const raw = searchParams.get(key)
  return normalizeString(raw)
}
// Parse a JSON string into a plain record. Anything that is not valid JSON or
// does not decode to a non-null object (numbers, strings, booleans, null)
// yields null instead of throwing.
const parseJSONRecord = (value?: string | null): Record<string, unknown> | null => {
  if (!value)
    return null
  let parsed: unknown
  try {
    parsed = JSON.parse(value)
  }
  catch {
    return null
  }
  if (parsed !== null && typeof parsed === 'object')
    return parsed as Record<string, unknown>
  return null
}
// Fallback attribution source: reads the 'utm_info' cookie and parses it as a
// JSON object (null when absent or malformed).
// NOTE(review): assumes the cookie mirrors the query-string keys
// (utm_source / slug / utm_campaign) — confirm against whatever sets it.
const getCookieUtmInfo = () => {
  return parseJSONRecord(Cookies.get('utm_info'))
}
// Map a raw utm_source (case-insensitive) to its canonical analytics bucket;
// undefined when the source is missing or not a recognized external channel.
const mapExternalUtmSource = (value?: string) => {
  if (!value)
    return undefined
  const key = value.toLowerCase() as keyof typeof EXTERNAL_UTM_SOURCE_MAP
  return EXTERNAL_UTM_SOURCE_MAP[key]
}
const padTimeValue = (value: number) => String(value).padStart(2, '0')
// Format a Date as 'MM-DD-HH:mm:ss' (local time) for the create_app payload.
const formatCreateAppTime = (date: Date) => {
  const datePart = [date.getMonth() + 1, date.getDate()]
    .map(padTimeValue)
    .join('-')
  const clockPart = [date.getHours(), date.getMinutes(), date.getSeconds()]
    .map(padTimeValue)
    .join(':')
  return `${datePart}-${clockPart}`
}
// Collapse the full AppModeEnum onto the three reporting buckets: workflow,
// agent, and everything else (chat/completion/advanced-chat) as chatflow.
const mapOriginalCreateAppMode = (appMode: AppModeEnum): OriginalCreateAppMode => {
  switch (appMode) {
    case AppModeEnum.WORKFLOW:
      return 'workflow'
    case AppModeEnum.AGENT_CHAT:
      return 'agent'
    default:
      return 'chatflow'
  }
}
// Derive external (campaign) attribution from URL search params and/or a
// parsed utm_info object. Search params win over utmInfo; 'slug' wins over
// 'utm_campaign'. Returns null unless utm_source maps to a known channel.
export const extractExternalCreateAppAttribution = ({
  searchParams,
  utmInfo,
}: {
  searchParams?: SearchParamReader | null
  utmInfo?: Record<string, unknown> | null
}) => {
  const rawSource = getSearchParamValue(searchParams, 'utm_source')
    ?? getObjectStringValue(utmInfo?.utm_source)
  const utmSource = mapExternalUtmSource(rawSource)
  if (!utmSource)
    return null
  // First defined candidate wins, in priority order.
  const campaignCandidates = [
    getSearchParamValue(searchParams, 'slug'),
    getSearchParamValue(searchParams, 'utm_campaign'),
    getObjectStringValue(utmInfo?.slug),
    getObjectStringValue(utmInfo?.utm_campaign),
  ]
  const utmCampaign = campaignCandidates.find(candidate => candidate !== undefined)
  return {
    utmSource,
    ...(utmCampaign ? { utmCampaign } : {}),
  } satisfies ExternalCreateAppAttribution
}
// Load previously remembered external attribution from sessionStorage.
// Returns null outside the browser or when nothing valid is stored.
const readRememberedExternalCreateAppAttribution = (): ExternalCreateAppAttribution | null => {
  if (typeof window === 'undefined')
    return null
  const stored = window.sessionStorage.getItem(CREATE_APP_EXTERNAL_ATTRIBUTION_STORAGE_KEY)
  return parseJSONRecord(stored) as ExternalCreateAppAttribution | null
}
// Persist external attribution in sessionStorage; no-op outside the browser.
const writeRememberedExternalCreateAppAttribution = (attribution: ExternalCreateAppAttribution) => {
  if (typeof window === 'undefined')
    return
  const serialized = JSON.stringify(attribution)
  window.sessionStorage.setItem(CREATE_APP_EXTERNAL_ATTRIBUTION_STORAGE_KEY, serialized)
}
// Drop any remembered external attribution; no-op outside the browser.
const clearRememberedExternalCreateAppAttribution = () => {
  if (typeof window === 'undefined')
    return
  window.sessionStorage.removeItem(CREATE_APP_EXTERNAL_ATTRIBUTION_STORAGE_KEY)
}
// Extract external attribution (from explicit params, or the utm_info cookie
// when no utmInfo is supplied) and, when found, remember it in sessionStorage
// for the eventual create_app event. Returns the attribution (or null).
export const rememberCreateAppExternalAttribution = ({
  searchParams,
  utmInfo,
}: {
  searchParams?: SearchParamReader | null
  utmInfo?: Record<string, unknown> | null
} = {}) => {
  const resolvedUtmInfo = utmInfo ?? getCookieUtmInfo()
  const attribution = extractExternalCreateAppAttribution({
    searchParams,
    utmInfo: resolvedUtmInfo,
  })
  if (attribution !== null)
    writeRememberedExternalCreateAppAttribution(attribution)
  return attribution
}
// Resolve the attribution to report right now: fresh UTM params on the current
// URL take precedence (and are remembered); otherwise fall back to whatever
// was remembered earlier in the session. Null outside the browser.
const resolveCurrentExternalCreateAppAttribution = () => {
  if (typeof window === 'undefined')
    return null
  const fromCurrentUrl = rememberCreateAppExternalAttribution({
    searchParams: new URLSearchParams(window.location.search),
  })
  return fromCurrentUrl ?? readRememberedExternalCreateAppAttribution()
}
// Build the create_app event payload. With external attribution the payload
// reports only the campaign source/campaign; otherwise it reports the
// normalized app mode and a local 'MM-DD-HH:mm:ss' timestamp.
// `currentTime` is injectable for deterministic tests.
export const buildCreateAppEventPayload = (
  params: TrackCreateAppParams,
  externalAttribution?: ExternalCreateAppAttribution | null,
  currentTime = new Date(),
) => {
  if (!externalAttribution) {
    return {
      source: 'original',
      app_mode: mapOriginalCreateAppMode(params.appMode),
      time: formatCreateAppTime(currentTime),
    } satisfies Record<string, string>
  }
  const { utmSource, utmCampaign } = externalAttribution
  return {
    source: 'external',
    utm_source: utmSource,
    ...(utmCampaign ? { utm_campaign: utmCampaign } : {}),
  } satisfies Record<string, string>
}
// Report a create_app analytics event, preferring any current/remembered
// external (campaign) attribution over the default 'original' source.
export const trackCreateApp = (params: TrackCreateAppParams) => {
  const externalAttribution = resolveCurrentExternalCreateAppAttribution()
  const payload = buildCreateAppEventPayload(params, externalAttribution)
  // External attribution is single-use: clear it so the next creation in this
  // session falls back to the 'original' payload (see spec expectations).
  if (externalAttribution)
    clearRememberedExternalCreateAppAttribution()
  trackEvent('create_app', payload)
}