Compare commits

..

10 Commits

Author SHA1 Message Date
45315492ad ci: cache eslint for autofix 2026-03-26 16:32:41 +08:00
e08c06cbc3 fix: import path (#34124)
Co-authored-by: -LAN- <laipz8200@outlook.com>
2026-03-26 16:13:53 +08:00
8ca54ddf94 refactor(web): convert 7 enums to as-const objects (batch 5) (#33960)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-26 15:50:54 +08:00
3e073404cc fix: the menu of multi nodes always display on left top corner (#34120)
Co-authored-by: yyh <yuanyouhuilyz@gmail.com>
2026-03-26 15:49:42 +08:00
0acabf5f73 chore(deps): update picomatch version in nodejs-client and web packages (#34123)
Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com>
2026-03-26 15:49:19 +08:00
38285aa1ac chore: enable no-barrel-files (#34121) 2026-03-26 15:11:25 +08:00
5341cd015b fix: dataset query created_by empty UUID in iteration subgraph (#34004) (#34044)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-26 14:57:19 +08:00
c32eebf57d refactor: use ungh for github api (#34108) 2026-03-26 14:37:17 +08:00
554ba6b8f3 chore(deps): bump pypdf from 6.9.1 to 6.9.2 in /api (#34099)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-26 13:27:04 +09:00
a69b8c1e96 refactor: select in service API dataset document and segment controllers (#34101)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-26 13:24:54 +09:00
53 changed files with 1320 additions and 758 deletions

View File

@@ -98,10 +98,27 @@ jobs:
if: steps.web-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-web
- name: Restore ESLint cache
if: steps.web-changes.outputs.any_changed == 'true'
id: eslint-cache-restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: web/.eslintcache
key: ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'web/pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-${{ github.sha }}
restore-keys: |
${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'web/pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-
- name: ESLint autofix
if: steps.web-changes.outputs.any_changed == 'true'
run: |
cd web
vp exec eslint --concurrency=2 --prune-suppressions --quiet || true
vp exec eslint --cache --concurrency=2 --prune-suppressions --quiet || true
- name: Save ESLint cache
if: steps.web-changes.outputs.any_changed == 'true' && success() && steps.eslint-cache-restore.outputs.cache-hit != 'true'
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: web/.eslintcache
key: ${{ steps.eslint-cache-restore.outputs.cache-primary-key }}
- uses: autofix-ci/action@7a166d7532b277f34e16238930461bf77f9d7ed8 # v1.3.3

View File

@@ -6,7 +6,7 @@ from uuid import UUID
from flask import request, send_file
from flask_restx import marshal
from pydantic import BaseModel, Field, field_validator, model_validator
from sqlalchemy import desc, select
from sqlalchemy import desc, func, select
from werkzeug.exceptions import Forbidden, NotFound
import services
@@ -155,7 +155,9 @@ class DocumentAddByTextApi(DatasetApiResource):
dataset_id = str(dataset_id)
tenant_id = str(tenant_id)
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise ValueError("Dataset does not exist.")
@ -238,7 +240,9 @@ class DocumentUpdateByTextApi(DatasetApiResource):
def post(self, tenant_id: str, dataset_id: UUID, document_id: UUID):
"""Update document by text."""
payload = DocumentTextUpdate.model_validate(service_api_ns.payload or {})
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == str(dataset_id)).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == str(dataset_id)).limit(1)
)
args = payload.model_dump(exclude_none=True)
if not dataset:
raise ValueError("Dataset does not exist.")
@ -315,7 +319,9 @@ class DocumentAddByFileApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id):
"""Create document by upload file."""
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise ValueError("Dataset does not exist.")
@ -425,7 +431,9 @@ class DocumentUpdateByFileApi(DatasetApiResource):
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id):
"""Update document by upload file."""
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise ValueError("Dataset does not exist.")
@ -515,7 +523,9 @@ class DocumentListApi(DatasetApiResource):
dataset_id = str(dataset_id)
tenant_id = str(tenant_id)
query_params = DocumentListQuery.model_validate(request.args.to_dict())
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
@ -609,7 +619,9 @@ class DocumentIndexingStatusApi(DatasetApiResource):
batch = str(batch)
tenant_id = str(tenant_id)
# get dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
# get documents
@ -619,20 +631,23 @@ class DocumentIndexingStatusApi(DatasetApiResource):
documents_status = []
for document in documents:
completed_segments = (
db.session.query(DocumentSegment)
.where(
DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != SegmentStatus.RE_SEGMENT,
db.session.scalar(
select(func.count(DocumentSegment.id)).where(
DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != SegmentStatus.RE_SEGMENT,
)
)
.count()
or 0
)
total_segments = (
db.session.query(DocumentSegment)
.where(
DocumentSegment.document_id == str(document.id), DocumentSegment.status != SegmentStatus.RE_SEGMENT
db.session.scalar(
select(func.count(DocumentSegment.id)).where(
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != SegmentStatus.RE_SEGMENT,
)
)
.count()
or 0
)
# Create a dictionary with document attributes and additional fields
document_dict = {
@ -822,7 +837,9 @@ class DocumentApi(DatasetApiResource):
tenant_id = str(tenant_id)
# get dataset info
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise ValueError("Dataset does not exist.")

View File

@ -3,6 +3,7 @@ from typing import Any
from flask import request
from flask_restx import marshal
from pydantic import BaseModel, Field
from sqlalchemy import select
from werkzeug.exceptions import NotFound
from configs import dify_config
@ -92,7 +93,9 @@ class SegmentApi(DatasetApiResource):
_, current_tenant_id = current_account_with_tenant()
"""Create single segment."""
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
# check document
@ -150,7 +153,9 @@ class SegmentApi(DatasetApiResource):
# check dataset
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
# check document
@ -220,7 +225,9 @@ class DatasetSegmentApi(DatasetApiResource):
def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str):
_, current_tenant_id = current_account_with_tenant()
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
# check user's model setting
@ -254,7 +261,9 @@ class DatasetSegmentApi(DatasetApiResource):
def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str):
_, current_tenant_id = current_account_with_tenant()
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
# check user's model setting
@ -301,7 +310,9 @@ class DatasetSegmentApi(DatasetApiResource):
def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str):
_, current_tenant_id = current_account_with_tenant()
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
# check user's model setting
@ -344,7 +355,9 @@ class ChildChunkApi(DatasetApiResource):
_, current_tenant_id = current_account_with_tenant()
"""Create child chunk."""
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
@ -402,7 +415,9 @@ class ChildChunkApi(DatasetApiResource):
_, current_tenant_id = current_account_with_tenant()
"""Get child chunks."""
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
@ -468,7 +483,9 @@ class DatasetChildChunkApi(DatasetApiResource):
_, current_tenant_id = current_account_with_tenant()
"""Delete child chunk."""
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")
@ -527,7 +544,9 @@ class DatasetChildChunkApi(DatasetApiResource):
_, current_tenant_id = current_account_with_tenant()
"""Update child chunk."""
# check dataset
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise NotFound("Dataset not found.")

View File

@ -137,6 +137,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
workflow=self._workflow,
single_iteration_run=self.application_generate_entity.single_iteration_run,
single_loop_run=self.application_generate_entity.single_loop_run,
user_id=self.application_generate_entity.user_id,
)
else:
inputs = self.application_generate_entity.inputs

View File

@ -106,6 +106,7 @@ class PipelineRunner(WorkflowBasedAppRunner):
workflow=workflow,
single_iteration_run=self.application_generate_entity.single_iteration_run,
single_loop_run=self.application_generate_entity.single_loop_run,
user_id=self.application_generate_entity.user_id,
)
else:
inputs = self.application_generate_entity.inputs

View File

@ -92,6 +92,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
workflow=self._workflow,
single_iteration_run=self.application_generate_entity.single_iteration_run,
single_loop_run=self.application_generate_entity.single_loop_run,
user_id=self.application_generate_entity.user_id,
)
else:
inputs = self.application_generate_entity.inputs

View File

@ -164,6 +164,8 @@ class WorkflowBasedAppRunner:
workflow: Workflow,
single_iteration_run: Any | None = None,
single_loop_run: Any | None = None,
*,
user_id: str,
) -> tuple[Graph, VariablePool, GraphRuntimeState]:
"""
Prepare graph, variable pool, and runtime state for single node execution
@ -200,6 +202,7 @@ class WorkflowBasedAppRunner:
graph_runtime_state=graph_runtime_state,
node_type_filter_key="iteration_id",
node_type_label="iteration",
user_id=user_id,
)
elif single_loop_run:
graph, variable_pool = self._get_graph_and_variable_pool_for_single_node_run(
@ -209,6 +212,7 @@ class WorkflowBasedAppRunner:
graph_runtime_state=graph_runtime_state,
node_type_filter_key="loop_id",
node_type_label="loop",
user_id=user_id,
)
else:
raise ValueError("Neither single_iteration_run nor single_loop_run is specified")
@ -225,6 +229,8 @@ class WorkflowBasedAppRunner:
graph_runtime_state: GraphRuntimeState,
node_type_filter_key: str, # 'iteration_id' or 'loop_id'
node_type_label: str = "node", # 'iteration' or 'loop' for error messages
*,
user_id: str = "",
) -> tuple[Graph, VariablePool]:
"""
Get graph and variable pool for single node execution (iteration or loop).
@ -290,7 +296,7 @@ class WorkflowBasedAppRunner:
run_context=build_dify_run_context(
tenant_id=workflow.tenant_id,
app_id=self._app_id,
user_id="",
user_id=user_id,
user_from=UserFrom.ACCOUNT,
invoke_from=InvokeFrom.DEBUGGER,
),

View File

@ -71,6 +71,7 @@ from graphon.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMU
from graphon.model_runtime.entities.message_entities import PromptMessage, PromptMessageRole, PromptMessageTool
from graphon.model_runtime.entities.model_entities import ModelFeature, ModelType
from graphon.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from libs.helper import parse_uuid_str_or_none
from libs.json_in_md_parser import parse_and_check_json_markdown
from models import UploadFile
from models.dataset import (
@ -1024,8 +1025,13 @@ class DatasetRetrieval:
"""
if not query and not attachment_ids:
return
created_by_role = self._resolve_creator_user_role(user_from)
if created_by_role is None:
created_by = parse_uuid_str_or_none(user_id)
if created_by is None:
logger.debug(
"Skipping dataset query log: empty created_by user_id (user_from=%s, app_id=%s)",
user_from,
app_id,
)
return
dataset_queries = []
for dataset_id in dataset_ids:
@ -1041,8 +1047,8 @@ class DatasetRetrieval:
content=json.dumps(contents),
source=DatasetQuerySource.APP,
source_app_id=app_id,
created_by_role=created_by_role,
created_by=user_id,
created_by_role=CreatorUserRole(user_from),
created_by=created_by,
)
dataset_queries.append(dataset_query)
if dataset_queries:

View File

@ -174,6 +174,18 @@ def normalize_uuid(value: str | UUID) -> str:
raise ValueError("must be a valid UUID") from exc
def parse_uuid_str_or_none(value: str | None) -> str | None:
"""
Return None for missing/empty UUID-like values.
Keep non-empty values unchanged to avoid changing behavior in paths that
currently pass placeholder IDs in tests/mocks.
"""
if value is None or not str(value).strip():
return None
return str(value)
UUIDStrOrEmpty = Annotated[str, AfterValidator(normalize_uuid)]

View File

@ -788,7 +788,7 @@ class TestSegmentApiGet:
"""Test successful segment list retrieval."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock(doc_form=IndexStructureType.PARAGRAPH_INDEX)
mock_seg_svc.get_segments.return_value = ([mock_segment], 1)
mock_marshal.return_value = [{"id": mock_segment.id}]
@ -813,7 +813,7 @@ class TestSegmentApiGet:
"""Test 404 when dataset not found."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
# Act & Assert
with app.test_request_context(
@ -833,7 +833,7 @@ class TestSegmentApiGet:
"""Test 404 when document not found."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = None
# Act & Assert
@ -899,7 +899,7 @@ class TestSegmentApiPost:
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_dataset.indexing_technique = "economy"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc = Mock()
mock_doc.indexing_status = "completed"
@ -950,7 +950,7 @@ class TestSegmentApiPost:
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_dataset.indexing_technique = "economy"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc = Mock()
mock_doc.indexing_status = "completed"
@ -992,7 +992,7 @@ class TestSegmentApiPost:
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc = Mock()
mock_doc.indexing_status = "indexing" # Not completed
@ -1043,7 +1043,7 @@ class TestDatasetSegmentApiDelete:
"""Test successful segment deletion."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc = Mock()
@ -1087,7 +1087,7 @@ class TestDatasetSegmentApiDelete:
"""Test 404 when segment not found."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc = Mock()
mock_doc.indexing_status = "completed"
@ -1129,7 +1129,7 @@ class TestDatasetSegmentApiDelete:
"""Test 404 when dataset not found for delete."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
# Act & Assert
with app.test_request_context(
@ -1163,7 +1163,7 @@ class TestDatasetSegmentApiDelete:
"""Test 404 when document not found for delete."""
# Arrange
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc_svc.get_document.return_value = None
@ -1233,7 +1233,7 @@ class TestDatasetSegmentApiUpdate:
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_dataset.indexing_technique = "economy"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = mock_segment
@ -1280,7 +1280,7 @@ class TestDatasetSegmentApiUpdate:
"""Test 404 when dataset not found for update."""
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/doc-id/segments/seg-id",
@ -1321,7 +1321,7 @@ class TestDatasetSegmentApiUpdate:
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_dataset.indexing_technique = "economy"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = None
@ -1370,7 +1370,7 @@ class TestDatasetSegmentApiGetSingle:
):
"""Test successful single segment retrieval."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc = Mock(doc_form=IndexStructureType.PARAGRAPH_INDEX)
mock_doc_svc.get_document.return_value = mock_doc
@ -1405,7 +1405,7 @@ class TestDatasetSegmentApiGetSingle:
):
"""Test 404 when dataset not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/doc-id/segments/seg-id",
@ -1436,7 +1436,7 @@ class TestDatasetSegmentApiGetSingle:
):
"""Test 404 when document not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc_svc.get_document.return_value = None
@ -1471,7 +1471,7 @@ class TestDatasetSegmentApiGetSingle:
):
"""Test 404 when segment not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset_svc.check_dataset_model_setting.return_value = None
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = None
@ -1515,7 +1515,7 @@ class TestChildChunkApiGet:
):
"""Test successful child chunk list retrieval."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = Mock()
@ -1554,7 +1554,7 @@ class TestChildChunkApiGet:
):
"""Test 404 when dataset not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/doc-id/segments/seg-id/child_chunks",
@ -1583,7 +1583,7 @@ class TestChildChunkApiGet:
):
"""Test 404 when document not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = None
with app.test_request_context(
@ -1615,7 +1615,7 @@ class TestChildChunkApiGet:
):
"""Test 404 when segment not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = None
@ -1676,7 +1676,7 @@ class TestChildChunkApiPost:
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_dataset.indexing_technique = "economy"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = Mock()
mock_child = Mock()
@ -1717,7 +1717,7 @@ class TestChildChunkApiPost:
"""Test 404 when dataset not found."""
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/doc-id/segments/seg-id/child_chunks",
@ -1755,7 +1755,7 @@ class TestChildChunkApiPost:
"""Test 404 when segment not found."""
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
mock_seg_svc.get_segment_by_id.return_value = None
@ -1808,7 +1808,7 @@ class TestDatasetChildChunkApiDelete:
):
"""Test successful child chunk deletion."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc = Mock()
mock_doc_svc.get_document.return_value = mock_doc
@ -1858,7 +1858,7 @@ class TestDatasetChildChunkApiDelete:
):
"""Test 404 when child chunk not found."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
segment_id = str(uuid.uuid4())
@ -1899,7 +1899,7 @@ class TestDatasetChildChunkApiDelete:
):
"""Test 404 when segment does not belong to the document."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
segment_id = str(uuid.uuid4())
@ -1939,7 +1939,7 @@ class TestDatasetChildChunkApiDelete:
):
"""Test 404 when child chunk does not belong to the segment."""
mock_account_fn.return_value = (Mock(), mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = Mock()
segment_id = str(uuid.uuid4())

View File

@ -717,7 +717,7 @@ class TestDocumentApiDelete:
dataset_id = str(uuid.uuid4())
mock_dataset = Mock()
mock_dataset.id = dataset_id
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = mock_document
mock_doc_svc.check_archived.return_value = False
@ -746,7 +746,7 @@ class TestDocumentApiDelete:
document_id = str(uuid.uuid4())
mock_dataset = Mock()
mock_dataset.id = dataset_id
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = None
@ -767,7 +767,7 @@ class TestDocumentApiDelete:
dataset_id = str(uuid.uuid4())
mock_dataset = Mock()
mock_dataset.id = dataset_id
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_document.return_value = mock_document
mock_doc_svc.check_archived.return_value = True
@ -788,7 +788,7 @@ class TestDocumentApiDelete:
# Arrange
dataset_id = str(uuid.uuid4())
document_id = str(uuid.uuid4())
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
# Act & Assert
with app.test_request_context(
@ -809,7 +809,7 @@ class TestDocumentListApi:
def test_list_documents_success(self, mock_db, mock_doc_svc, mock_marshal, app, mock_tenant, mock_dataset):
"""Test successful document list retrieval."""
# Arrange
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_pagination = Mock()
mock_pagination.items = [Mock(), Mock()]
@ -838,7 +838,7 @@ class TestDocumentListApi:
def test_list_documents_dataset_not_found(self, mock_db, app, mock_tenant, mock_dataset):
"""Test 404 when dataset not found."""
# Arrange
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
# Act & Assert
with app.test_request_context(
@ -860,8 +860,6 @@ class TestDocumentIndexingStatusApi:
"""Test successful indexing status retrieval."""
# Arrange
batch_id = "batch_123"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_doc = Mock()
mock_doc.id = str(uuid.uuid4())
mock_doc.is_paused = False
@ -877,8 +875,8 @@ class TestDocumentIndexingStatusApi:
mock_doc_svc.get_batch_documents.return_value = [mock_doc]
# Mock segment count queries
mock_db.session.query.return_value.where.return_value.where.return_value.count.return_value = 5
# scalar() called 3 times: dataset lookup, completed_segments count, total_segments count
mock_db.session.scalar.side_effect = [mock_dataset, 5, 5]
mock_marshal.return_value = {"id": mock_doc.id, "indexing_status": "completed"}
# Act
@ -898,7 +896,7 @@ class TestDocumentIndexingStatusApi:
"""Test 404 when dataset not found."""
# Arrange
batch_id = "batch_123"
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
# Act & Assert
with app.test_request_context(
@ -915,7 +913,7 @@ class TestDocumentIndexingStatusApi:
"""Test 404 when no documents found for batch."""
# Arrange
batch_id = "batch_empty"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_doc_svc.get_batch_documents.return_value = []
# Act & Assert
@ -986,7 +984,7 @@ class TestDocumentAddByTextApi:
# Arrange — neutralise billing decorators
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_dataset.indexing_technique = "economy"
mock_current_user.id = str(uuid.uuid4())
@ -1035,7 +1033,7 @@ class TestDocumentAddByTextApi:
# Arrange — neutralise billing decorators
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
# Act & Assert
with app.test_request_context(
@ -1064,7 +1062,7 @@ class TestDocumentAddByTextApi:
self._setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_dataset.indexing_technique = None
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
# Act & Assert
with app.test_request_context(
@ -1150,7 +1148,7 @@ class TestDocumentUpdateByTextApiPost:
_setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_dataset.indexing_technique = "economy"
mock_dataset.latest_process_rule = Mock()
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_current_user.id = "user-1"
mock_upload = Mock()
@ -1193,7 +1191,7 @@ class TestDocumentUpdateByTextApiPost:
):
"""Test ValueError when dataset not found."""
_setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
doc_id = str(uuid.uuid4())
with app.test_request_context(
@ -1232,7 +1230,7 @@ class TestDocumentAddByFileApiPost:
):
"""Test ValueError when dataset not found."""
_setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
from io import BytesIO
@ -1263,7 +1261,7 @@ class TestDocumentAddByFileApiPost:
"""Test ValueError when dataset is external."""
_setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_dataset.provider = "external"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
from io import BytesIO
@ -1298,7 +1296,7 @@ class TestDocumentAddByFileApiPost:
mock_dataset.provider = "vendor"
mock_dataset.indexing_technique = "economy"
mock_dataset.chunk_structure = None
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
with app.test_request_context(
f"/datasets/{mock_dataset.id}/document/create_by_file",
@ -1328,7 +1326,7 @@ class TestDocumentAddByFileApiPost:
mock_dataset.provider = "vendor"
mock_dataset.indexing_technique = None
mock_dataset.chunk_structure = None
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
from io import BytesIO
@ -1366,7 +1364,7 @@ class TestDocumentUpdateByFileApiPost:
):
"""Test ValueError when dataset not found."""
_setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
from io import BytesIO
@ -1402,7 +1400,7 @@ class TestDocumentUpdateByFileApiPost:
"""Test ValueError when dataset is external."""
_setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
mock_dataset.provider = "external"
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
from io import BytesIO
@ -1450,7 +1448,7 @@ class TestDocumentUpdateByFileApiPost:
mock_dataset.chunk_structure = None
mock_dataset.latest_process_rule = Mock()
mock_dataset.created_by_account = Mock()
mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset
mock_db.session.scalar.return_value = mock_dataset
mock_current_user.id = "user-1"
mock_upload = Mock()

View File

@ -88,7 +88,7 @@ class TestWorkflowBasedAppRunner:
workflow = SimpleNamespace(environment_variables=[], graph_dict={})
with pytest.raises(ValueError, match="Neither single_iteration_run nor single_loop_run"):
runner._prepare_single_node_execution(workflow, None, None)
runner._prepare_single_node_execution(workflow, None, None, user_id="00000000-0000-0000-0000-000000000001")
def test_get_graph_and_variable_pool_for_single_node_run(self, monkeypatch):
runner = WorkflowBasedAppRunner(queue_manager=SimpleNamespace(), app_id="app")
@ -136,6 +136,7 @@ class TestWorkflowBasedAppRunner:
graph_runtime_state=graph_runtime_state,
node_type_filter_key="iteration_id",
node_type_label="iteration",
user_id="00000000-0000-0000-0000-000000000001",
)
assert graph is not None

View File

@ -100,6 +100,7 @@ def test_run_uses_single_node_execution_branch(
workflow=workflow,
single_iteration_run=single_iteration_run,
single_loop_run=single_loop_run,
user_id="user",
)
init_graph.assert_not_called()
@ -158,6 +159,7 @@ def test_single_node_run_validates_target_node_config(monkeypatch) -> None:
graph_runtime_state=graph_runtime_state,
node_type_filter_key="loop_id",
node_type_label="loop",
user_id="00000000-0000-0000-0000-000000000001",
)
assert seen_configs == [workflow.graph_dict["nodes"][0]]

View File

@ -16,8 +16,8 @@ from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.index_processor.constant.index_type import IndexStructureType
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from dify_graph.model_runtime.entities.model_entities import ModelFeature, ModelType
from enums.cloud_plan import CloudPlan
from graphon.model_runtime.entities.model_entities import ModelFeature, ModelType
from models import Account, TenantAccountRole
from models.dataset import (
ChildChunk,

View File

@ -190,7 +190,7 @@ class TestDatasetServiceValidation:
with patch("services.dataset_service.ModelManager") as model_manager_cls:
DatasetService.check_dataset_model_setting(dataset)
model_manager_cls.return_value.get_model_instance.assert_called_once_with(
model_manager_cls.for_tenant.return_value.get_model_instance.assert_called_once_with(
tenant_id=dataset.tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
@ -201,7 +201,7 @@ class TestDatasetServiceValidation:
dataset = DatasetServiceUnitDataFactory.create_dataset_mock(indexing_technique="high_quality")
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.side_effect = LLMBadRequestError()
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = LLMBadRequestError()
with pytest.raises(ValueError, match="No Embedding Model available"):
DatasetService.check_dataset_model_setting(dataset)
@ -210,14 +210,18 @@ class TestDatasetServiceValidation:
dataset = DatasetServiceUnitDataFactory.create_dataset_mock(indexing_technique="high_quality")
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.side_effect = ProviderTokenNotInitError("token missing")
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = ProviderTokenNotInitError(
"token missing"
)
with pytest.raises(ValueError, match="token missing"):
with pytest.raises(ValueError, match="The dataset is unavailable, due to: token missing"):
DatasetService.check_dataset_model_setting(dataset)
def test_check_embedding_model_setting_wraps_provider_token_error_description(self):
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.side_effect = ProviderTokenNotInitError("provider setup")
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = ProviderTokenNotInitError(
"provider setup"
)
with pytest.raises(ValueError, match="provider setup"):
DatasetService.check_embedding_model_setting("tenant-1", "provider", "embedding-model")
@ -226,7 +230,7 @@ class TestDatasetServiceValidation:
with patch("services.dataset_service.ModelManager") as model_manager_cls:
DatasetService.check_reranking_model_setting("tenant-1", "provider", "reranker")
model_manager_cls.return_value.get_model_instance.assert_called_once_with(
model_manager_cls.for_tenant.return_value.get_model_instance.assert_called_once_with(
tenant_id="tenant-1",
provider="provider",
model_type=ModelType.RERANK,
@ -235,7 +239,7 @@ class TestDatasetServiceValidation:
def test_check_reranking_model_setting_wraps_bad_request(self):
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.side_effect = LLMBadRequestError()
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = LLMBadRequestError()
with pytest.raises(ValueError, match="No Rerank Model available"):
DatasetService.check_reranking_model_setting("tenant-1", "provider", "reranker")
@ -251,7 +255,7 @@ class TestDatasetServiceValidation:
)
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.return_value = model_instance
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = model_instance
result = DatasetService.check_is_multimodal_model("tenant-1", "provider", "embedding-model")
@ -268,7 +272,7 @@ class TestDatasetServiceValidation:
)
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.return_value = model_instance
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = model_instance
result = DatasetService.check_is_multimodal_model("tenant-1", "provider", "embedding-model")
@ -284,14 +288,14 @@ class TestDatasetServiceValidation:
)
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.return_value = model_instance
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = model_instance
with pytest.raises(ValueError, match="Model schema not found"):
DatasetService.check_is_multimodal_model("tenant-1", "provider", "embedding-model")
def test_check_is_multimodal_model_wraps_bad_request_error(self):
with patch("services.dataset_service.ModelManager") as model_manager_cls:
model_manager_cls.return_value.get_model_instance.side_effect = LLMBadRequestError()
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = LLMBadRequestError()
with pytest.raises(ValueError, match="No Model available"):
DatasetService.check_is_multimodal_model("tenant-1", "provider", "embedding-model")
@ -323,7 +327,7 @@ class TestDatasetServiceCreationAndUpdate:
patch.object(DatasetService, "check_embedding_model_setting") as check_embedding,
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
model_manager_cls.return_value.get_default_model_instance.return_value = default_embedding_model
model_manager_cls.for_tenant.return_value.get_default_model_instance.return_value = default_embedding_model
dataset = DatasetService.create_empty_dataset(
tenant_id="tenant-1",
@ -337,7 +341,7 @@ class TestDatasetServiceCreationAndUpdate:
assert dataset.embedding_model == "default-embedding"
assert dataset.permission == DatasetPermissionEnum.ONLY_ME
assert dataset.provider == "vendor"
model_manager_cls.return_value.get_default_model_instance.assert_called_once_with(
model_manager_cls.for_tenant.return_value.get_default_model_instance.assert_called_once_with(
tenant_id="tenant-1",
model_type=ModelType.TEXT_EMBEDDING,
)
@ -365,7 +369,7 @@ class TestDatasetServiceCreationAndUpdate:
patch.object(DatasetService, "check_reranking_model_setting") as check_reranking,
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
dataset = DatasetService.create_empty_dataset(
tenant_id="tenant-1",
@ -804,7 +808,7 @@ class TestDatasetServiceCreationAndUpdate:
return_value=SimpleNamespace(id="binding-1"),
),
):
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
DatasetService._configure_embedding_model_for_high_quality(
{"embedding_model_provider": "provider", "embedding_model": "embedding-model"},
@ -836,7 +840,7 @@ class TestDatasetServiceCreationAndUpdate:
patch("services.dataset_service.current_user", current_user),
patch("services.dataset_service.ModelManager") as model_manager_cls,
):
model_manager_cls.return_value.get_model_instance.side_effect = error
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = error
with pytest.raises(ValueError, match=message):
DatasetService._configure_embedding_model_for_high_quality(
@ -967,7 +971,7 @@ class TestDatasetServiceCreationAndUpdate:
return_value=SimpleNamespace(id="binding-2"),
),
):
model_manager_cls.return_value.get_model_instance.return_value = SimpleNamespace(
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = SimpleNamespace(
provider="provider-two",
model_name="embedding-model-two",
)
@ -1002,7 +1006,9 @@ class TestDatasetServiceCreationAndUpdate:
patch("services.dataset_service.current_user", current_user),
patch("services.dataset_service.ModelManager") as model_manager_cls,
):
model_manager_cls.return_value.get_model_instance.side_effect = ProviderTokenNotInitError("token missing")
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = ProviderTokenNotInitError(
"token missing"
)
DatasetService._apply_new_embedding_settings(
dataset,
@ -1067,7 +1073,7 @@ class TestDatasetServiceRagPipelineSettings:
return_value=SimpleNamespace(id="binding-1"),
),
):
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
DatasetService.update_rag_pipeline_dataset_settings(session, dataset, knowledge_configuration)
@ -1161,7 +1167,7 @@ class TestDatasetServiceRagPipelineSettings:
),
patch("services.dataset_service.deal_dataset_index_update_task") as update_task,
):
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
DatasetService.update_rag_pipeline_dataset_settings(
session,
@ -1204,7 +1210,7 @@ class TestDatasetServiceRagPipelineSettings:
),
patch("services.dataset_service.deal_dataset_index_update_task") as update_task,
):
model_manager_cls.return_value.get_model_instance.return_value = SimpleNamespace(
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = SimpleNamespace(
provider="provider-two",
model_name="embedding-model-two",
)
@ -1243,7 +1249,9 @@ class TestDatasetServiceRagPipelineSettings:
patch("services.dataset_service.ModelManager") as model_manager_cls,
patch("services.dataset_service.deal_dataset_index_update_task") as update_task,
):
model_manager_cls.return_value.get_model_instance.side_effect = ProviderTokenNotInitError("token missing")
model_manager_cls.for_tenant.return_value.get_model_instance.side_effect = ProviderTokenNotInitError(
"token missing"
)
DatasetService.update_rag_pipeline_dataset_settings(
session,

View File

@ -1828,7 +1828,7 @@ class TestDocumentServiceSaveDocumentAdditionalBranches:
) as get_binding,
patch.object(DocumentService, "update_document_with_dataset_id", return_value=updated_document),
):
model_manager_cls.return_value.get_default_model_instance.return_value = SimpleNamespace(
model_manager_cls.for_tenant.return_value.get_default_model_instance.return_value = SimpleNamespace(
model_name="default-embedding",
provider="default-provider",
)
@ -1880,7 +1880,7 @@ class TestDocumentServiceSaveDocumentAdditionalBranches:
):
DocumentService.save_document_with_dataset_id(dataset, knowledge_config, account_context)
model_manager_cls.return_value.get_default_model_instance.assert_not_called()
model_manager_cls.for_tenant.return_value.get_default_model_instance.assert_not_called()
get_binding.assert_called_once_with("explicit-provider", "explicit-model")
assert dataset.embedding_model == "explicit-model"
assert dataset.embedding_model_provider == "explicit-provider"

View File

@ -9,6 +9,7 @@ from .dataset_service_test_helpers import (
DocumentSegment,
IndexStructureType,
MagicMock,
ModelType,
SegmentService,
SegmentUpdateArgs,
SimpleNamespace,
@ -459,7 +460,7 @@ class TestSegmentServiceMutations:
patch("services.dataset_service.naive_utc_now", return_value="now"),
):
mock_redis.lock.return_value = _make_lock_context()
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
mock_db.session.query.return_value.where.return_value.scalar.return_value = 1
vector_service.create_segments_vector.side_effect = RuntimeError("vector failed")
@ -571,7 +572,7 @@ class TestSegmentServiceMutations:
patch("services.summary_index_service.SummaryIndexService.update_summary_for_segment") as update_summary,
):
mock_redis.get.return_value = None
model_manager_cls.return_value.get_model_instance.return_value = embedding_model_instance
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model_instance
processing_rule_query = MagicMock()
processing_rule_query.where.return_value.first.return_value = processing_rule
@ -618,7 +619,7 @@ class TestSegmentServiceMutations:
) as generate_summary,
):
mock_redis.get.return_value = None
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = existing_summary
@ -661,7 +662,7 @@ class TestSegmentServiceMutations:
patch("services.summary_index_service.SummaryIndexService.update_summary_for_segment") as update_summary,
):
mock_redis.get.return_value = None
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = existing_summary
@ -900,7 +901,7 @@ class TestSegmentServiceAdditionalRegenerationBranches:
patch("services.dataset_service.naive_utc_now", return_value="now"),
):
mock_redis.get.return_value = None
model_manager_cls.return_value.get_model_instance.return_value = embedding_model
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = None
refreshed_query = MagicMock()
@ -947,7 +948,7 @@ class TestSegmentServiceAdditionalRegenerationBranches:
patch("services.summary_index_service.SummaryIndexService.update_summary_for_segment") as update_summary,
):
mock_redis.get.return_value = None
model_manager_cls.return_value.get_default_model_instance.return_value = embedding_model_instance
model_manager_cls.for_tenant.return_value.get_default_model_instance.return_value = embedding_model_instance
update_summary.side_effect = RuntimeError("summary failed")
processing_rule_query = MagicMock()
@ -966,9 +967,9 @@ class TestSegmentServiceAdditionalRegenerationBranches:
)
assert result is refreshed_segment
model_manager_cls.return_value.get_default_model_instance.assert_called_once_with(
model_manager_cls.for_tenant.return_value.get_default_model_instance.assert_called_once_with(
tenant_id="tenant-1",
model_type="text-embedding",
model_type=ModelType.TEXT_EMBEDDING,
)
vector_service.generate_child_chunks.assert_called_once_with(
segment,

6
api/uv.lock generated
View File

@ -5340,11 +5340,11 @@ wheels = [
[[package]]
name = "pypdf"
version = "6.9.1"
version = "6.9.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/fb/dc2e8cb006e80b0020ed20d8649106fe4274e82d8e756ad3e24ade19c0df/pypdf-6.9.1.tar.gz", hash = "sha256:ae052407d33d34de0c86c5c729be6d51010bf36e03035a8f23ab449bca52377d", size = 5311551, upload-time = "2026-03-17T10:46:07.876Z" }
sdist = { url = "https://files.pythonhosted.org/packages/31/83/691bdb309306232362503083cb15777491045dd54f45393a317dc7d8082f/pypdf-6.9.2.tar.gz", hash = "sha256:7f850faf2b0d4ab936582c05da32c52214c2b089d61a316627b5bfb5b0dab46c", size = 5311837, upload-time = "2026-03-23T14:53:27.983Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f9/f4/75543fa802b86e72f87e9395440fe1a89a6d149887e3e55745715c3352ac/pypdf-6.9.1-py3-none-any.whl", hash = "sha256:f35a6a022348fae47e092a908339a8f3dc993510c026bb39a96718fc7185e89f", size = 333661, upload-time = "2026-03-17T10:46:06.286Z" },
{ url = "https://files.pythonhosted.org/packages/a5/7e/c85f41243086a8fe5d1baeba527cb26a1918158a565932b41e0f7c0b32e9/pypdf-6.9.2-py3-none-any.whl", hash = "sha256:662cf29bcb419a36a1365232449624ab40b7c2d0cfc28e54f42eeecd1fd7e844", size = 333744, upload-time = "2026-03-23T14:53:26.573Z" },
]
[[package]]

View File

@ -70,7 +70,8 @@
"pnpm": {
"overrides": {
"flatted@<=3.4.1": "3.4.2",
"rollup@>=4.0.0,<4.59.0": "4.59.0"
"picomatch@>=4.0.0 <4.0.4": "4.0.4",
"rollup@>=4.0.0 <4.59.0": "4.59.0"
}
}
}

View File

@ -6,7 +6,8 @@ settings:
overrides:
flatted@<=3.4.1: 3.4.2
rollup@>=4.0.0,<4.59.0: 4.59.0
picomatch@>=4.0.0 <4.0.4: 4.0.4
rollup@>=4.0.0 <4.59.0: 4.59.0
importers:
@ -735,7 +736,7 @@ packages:
resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==}
engines: {node: '>=12.0.0'}
peerDependencies:
picomatch: ^3 || ^4
picomatch: 4.0.4
peerDependenciesMeta:
picomatch:
optional: true
@ -963,8 +964,8 @@ packages:
picocolors@1.1.1:
resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
picomatch@4.0.3:
resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==}
picomatch@4.0.4:
resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==}
engines: {node: '>=12'}
pirates@4.0.7:
@ -1829,9 +1830,9 @@ snapshots:
fast-levenshtein@2.0.6: {}
fdir@6.5.0(picomatch@4.0.3):
fdir@6.5.0(picomatch@4.0.4):
optionalDependencies:
picomatch: 4.0.3
picomatch: 4.0.4
file-entry-cache@8.0.0:
dependencies:
@ -2038,7 +2039,7 @@ snapshots:
picocolors@1.1.1: {}
picomatch@4.0.3: {}
picomatch@4.0.4: {}
pirates@4.0.7: {}
@ -2149,8 +2150,8 @@ snapshots:
tinyglobby@0.2.15:
dependencies:
fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3
fdir: 6.5.0(picomatch@4.0.4)
picomatch: 4.0.4
tinyrainbow@3.0.3: {}
@ -2207,8 +2208,8 @@ snapshots:
vite@7.3.1(@types/node@25.4.0):
dependencies:
esbuild: 0.27.3
fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3
fdir: 6.5.0(picomatch@4.0.4)
picomatch: 4.0.4
postcss: 8.5.8
rollup: 4.59.0
tinyglobby: 0.2.15
@ -2230,7 +2231,7 @@ snapshots:
magic-string: 0.30.21
obug: 2.1.1
pathe: 2.0.3
picomatch: 4.0.3
picomatch: 4.0.4
std-env: 3.10.0
tinybench: 2.9.0
tinyexec: 1.0.2

View File

@ -51,8 +51,6 @@ NEXT_PUBLIC_ALLOW_EMBED=
# Allow rendering unsafe URLs which have "data:" scheme.
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME=false
# Github Access Token, used for invoking Github API
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN=
# The maximum number of top-k value for RAG.
NEXT_PUBLIC_TOP_K_MAX_VALUE=10

View File

@ -5,11 +5,9 @@
* upload handling, and task status polling. Verifies the complete plugin
* installation pipeline from source discovery to completion.
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/config', () => ({
GITHUB_ACCESS_TOKEN: '',
}))
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { checkForUpdates, fetchReleases, handleUpload } from '@/app/components/plugins/install-plugin/hooks'
const mockToastNotify = vi.fn()
vi.mock('@/app/components/base/ui/toast', () => ({
@ -30,10 +28,6 @@ vi.mock('@/service/plugins', () => ({
checkTaskStatus: vi.fn(),
}))
const { useGitHubReleases, useGitHubUpload } = await import(
'@/app/components/plugins/install-plugin/hooks',
)
describe('Plugin Installation Flow Integration', () => {
beforeEach(() => {
vi.clearAllMocks()
@ -44,22 +38,22 @@ describe('Plugin Installation Flow Integration', () => {
it('fetches releases, checks for updates, and uploads the new version', async () => {
const mockReleases = [
{
tag_name: 'v2.0.0',
assets: [{ browser_download_url: 'https://github.com/test/v2.difypkg', name: 'plugin-v2.difypkg' }],
tag: 'v2.0.0',
assets: [{ downloadUrl: 'https://github.com/test/v2.difypkg' }],
},
{
tag_name: 'v1.5.0',
assets: [{ browser_download_url: 'https://github.com/test/v1.5.difypkg', name: 'plugin-v1.5.difypkg' }],
tag: 'v1.5.0',
assets: [{ downloadUrl: 'https://github.com/test/v1.5.difypkg' }],
},
{
tag_name: 'v1.0.0',
assets: [{ browser_download_url: 'https://github.com/test/v1.difypkg', name: 'plugin-v1.difypkg' }],
tag: 'v1.0.0',
assets: [{ downloadUrl: 'https://github.com/test/v1.difypkg' }],
},
]
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockReleases),
json: () => Promise.resolve({ releases: mockReleases }),
})
mockUploadGitHub.mockResolvedValue({
@ -67,8 +61,6 @@ describe('Plugin Installation Flow Integration', () => {
unique_identifier: 'test-plugin:2.0.0',
})
const { fetchReleases, checkForUpdates } = useGitHubReleases()
const releases = await fetchReleases('test-org', 'test-repo')
expect(releases).toHaveLength(3)
expect(releases[0].tag_name).toBe('v2.0.0')
@ -77,7 +69,6 @@ describe('Plugin Installation Flow Integration', () => {
expect(needUpdate).toBe(true)
expect(toastProps.message).toContain('v2.0.0')
const { handleUpload } = useGitHubUpload()
const onSuccess = vi.fn()
const result = await handleUpload(
'https://github.com/test-org/test-repo',
@ -104,18 +95,16 @@ describe('Plugin Installation Flow Integration', () => {
it('handles no new version available', async () => {
const mockReleases = [
{
tag_name: 'v1.0.0',
assets: [{ browser_download_url: 'https://github.com/test/v1.difypkg', name: 'plugin-v1.difypkg' }],
tag: 'v1.0.0',
assets: [{ downloadUrl: 'https://github.com/test/v1.difypkg' }],
},
]
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockReleases),
json: () => Promise.resolve({ releases: mockReleases }),
})
const { fetchReleases, checkForUpdates } = useGitHubReleases()
const releases = await fetchReleases('test-org', 'test-repo')
const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
@ -127,11 +116,9 @@ describe('Plugin Installation Flow Integration', () => {
it('handles empty releases', async () => {
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ releases: [] }),
})
const { fetchReleases, checkForUpdates } = useGitHubReleases()
const releases = await fetchReleases('test-org', 'test-repo')
expect(releases).toHaveLength(0)
@ -147,7 +134,6 @@ describe('Plugin Installation Flow Integration', () => {
status: 404,
})
const { fetchReleases } = useGitHubReleases()
const releases = await fetchReleases('nonexistent-org', 'nonexistent-repo')
expect(releases).toEqual([])
@ -159,7 +145,6 @@ describe('Plugin Installation Flow Integration', () => {
it('handles upload failure gracefully', async () => {
mockUploadGitHub.mockRejectedValue(new Error('Upload failed'))
const { handleUpload } = useGitHubUpload()
const onSuccess = vi.fn()
await expect(

View File

@ -1,11 +1,8 @@
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { render, screen, waitFor } from '@testing-library/react'
import nock from 'nock'
import * as React from 'react'
import GithubStar from '../index'
const GITHUB_HOST = 'https://api.github.com'
const GITHUB_PATH = '/repos/langgenius/dify'
const GITHUB_STAR_URL = 'https://ungh.cc/repos/langgenius/dify'
const renderWithQueryClient = () => {
const queryClient = new QueryClient({
@ -18,40 +15,66 @@ const renderWithQueryClient = () => {
)
}
const mockGithubStar = (status: number, body: Record<string, unknown>, delayMs = 0) => {
return nock(GITHUB_HOST).get(GITHUB_PATH).delay(delayMs).reply(status, body)
const createJsonResponse = (body: Record<string, unknown>, status = 200) => {
return new Response(JSON.stringify(body), {
status,
headers: { 'Content-Type': 'application/json' },
})
}
const createDeferred = <T,>() => {
let resolve!: (value: T | PromiseLike<T>) => void
let reject!: (reason?: unknown) => void
const promise = new Promise<T>((res, rej) => {
resolve = res
reject = rej
})
return { promise, resolve, reject }
}
describe('GithubStar', () => {
beforeEach(() => {
nock.cleanAll()
vi.restoreAllMocks()
vi.clearAllMocks()
})
// Shows fetched star count when request succeeds
// Covers the fetched star count shown after a successful request.
it('should render fetched star count', async () => {
mockGithubStar(200, { stargazers_count: 123456 })
const fetchSpy = vi.spyOn(globalThis, 'fetch').mockResolvedValue(
createJsonResponse({ repo: { stars: 123456 } }),
)
renderWithQueryClient()
expect(await screen.findByText('123,456')).toBeInTheDocument()
expect(fetchSpy).toHaveBeenCalledWith(GITHUB_STAR_URL)
})
// Falls back to default star count when request fails
// Covers the fallback star count shown when the request fails.
it('should render default star count on error', async () => {
mockGithubStar(500, {})
vi.spyOn(globalThis, 'fetch').mockResolvedValue(
createJsonResponse({}, 500),
)
renderWithQueryClient()
expect(await screen.findByText('110,918')).toBeInTheDocument()
})
// Renders loader while fetching data
// Covers the loading indicator while the fetch promise is still pending.
it('should show loader while fetching', async () => {
mockGithubStar(200, { stargazers_count: 222222 }, 50)
const deferred = createDeferred<Response>()
vi.spyOn(globalThis, 'fetch').mockReturnValueOnce(deferred.promise)
const { container } = renderWithQueryClient()
expect(container.querySelector('.animate-spin')).toBeInTheDocument()
await waitFor(() => expect(screen.getByText('222,222')).toBeInTheDocument())
deferred.resolve(createJsonResponse({ repo: { stars: 222222 } }))
await waitFor(() => {
expect(screen.getByText('222,222')).toBeInTheDocument()
})
})
})

View File

@ -1,16 +1,19 @@
'use client'
import type { FC } from 'react'
import type { GithubRepo } from '@/models/common'
import { RiLoader2Line } from '@remixicon/react'
import { useQuery } from '@tanstack/react-query'
import { IS_DEV } from '@/config'
const defaultData = {
stargazers_count: 110918,
type GithubStarResponse = {
repo: {
stars: number
}
}
const defaultData: GithubStarResponse = {
repo: { stars: 110918 },
}
const getStar = async () => {
const res = await fetch('https://api.github.com/repos/langgenius/dify')
const res = await fetch('https://ungh.cc/repos/langgenius/dify')
if (!res.ok)
throw new Error('Failed to fetch github star')
@ -19,21 +22,20 @@ const getStar = async () => {
}
const GithubStar: FC<{ className: string }> = (props) => {
const { isFetching, isError, data } = useQuery<GithubRepo>({
const { isFetching, isError, data } = useQuery<GithubStarResponse>({
queryKey: ['github-star'],
queryFn: getStar,
enabled: !IS_DEV,
retry: false,
placeholderData: defaultData,
})
if (isFetching)
return <RiLoader2Line className="size-3 shrink-0 animate-spin text-text-tertiary" />
return <span className="i-ri-loader-2-line size-3 shrink-0 animate-spin text-text-tertiary" />
if (isError)
return <span {...props}>{defaultData.stargazers_count.toLocaleString()}</span>
return <span {...props}>{defaultData.repo.stars.toLocaleString()}</span>
return <span {...props}>{data?.stargazers_count.toLocaleString()}</span>
return <span {...props}>{data?.repo.stars.toLocaleString()}</span>
}
export default GithubStar

View File

@ -1,6 +1,5 @@
import { renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useGitHubReleases, useGitHubUpload } from '../hooks'
import { checkForUpdates, fetchReleases, handleUpload } from '../hooks'
const mockNotify = vi.fn()
vi.mock('@/app/components/base/ui/toast', () => ({
@ -15,10 +14,6 @@ vi.mock('@/app/components/base/ui/toast', () => ({
}),
}))
vi.mock('@/config', () => ({
GITHUB_ACCESS_TOKEN: '',
}))
const mockUploadGitHub = vi.fn()
vi.mock('@/service/plugins', () => ({
uploadGitHub: (...args: unknown[]) => mockUploadGitHub(...args),
@ -37,17 +32,17 @@ describe('install-plugin/hooks', () => {
it('fetches releases from GitHub API and formats them', async () => {
mockFetch.mockResolvedValue({
ok: true,
json: () => Promise.resolve([
{
tag_name: 'v1.0.0',
assets: [{ browser_download_url: 'https://example.com/v1.zip', name: 'plugin.zip' }],
body: 'Release notes',
},
]),
json: () => Promise.resolve({
releases: [
{
tag: 'v1.0.0',
assets: [{ downloadUrl: 'https://example.com/plugin.zip' }],
},
],
}),
})
const { result } = renderHook(() => useGitHubReleases())
const releases = await result.current.fetchReleases('owner', 'repo')
const releases = await fetchReleases('owner', 'repo')
expect(releases).toHaveLength(1)
expect(releases[0].tag_name).toBe('v1.0.0')
@ -60,8 +55,7 @@ describe('install-plugin/hooks', () => {
ok: false,
})
const { result } = renderHook(() => useGitHubReleases())
const releases = await result.current.fetchReleases('owner', 'repo')
const releases = await fetchReleases('owner', 'repo')
expect(releases).toEqual([])
expect(mockNotify).toHaveBeenCalledWith('Failed to fetch repository releases')
@ -70,29 +64,26 @@ describe('install-plugin/hooks', () => {
describe('checkForUpdates', () => {
it('detects newer version available', () => {
const { result } = renderHook(() => useGitHubReleases())
const releases = [
{ tag_name: 'v1.0.0', assets: [] },
{ tag_name: 'v2.0.0', assets: [] },
]
const { needUpdate, toastProps } = result.current.checkForUpdates(releases, 'v1.0.0')
const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
expect(needUpdate).toBe(true)
expect(toastProps.message).toContain('v2.0.0')
})
it('returns no update when current is latest', () => {
const { result } = renderHook(() => useGitHubReleases())
const releases = [
{ tag_name: 'v1.0.0', assets: [] },
]
const { needUpdate, toastProps } = result.current.checkForUpdates(releases, 'v1.0.0')
const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
expect(needUpdate).toBe(false)
expect(toastProps.type).toBe('info')
})
it('returns error for empty releases', () => {
const { result } = renderHook(() => useGitHubReleases())
const { needUpdate, toastProps } = result.current.checkForUpdates([], 'v1.0.0')
const { needUpdate, toastProps } = checkForUpdates([], 'v1.0.0')
expect(needUpdate).toBe(false)
expect(toastProps.type).toBe('error')
expect(toastProps.message).toContain('empty')
@ -109,8 +100,7 @@ describe('install-plugin/hooks', () => {
})
const onSuccess = vi.fn()
const { result } = renderHook(() => useGitHubUpload())
const pkg = await result.current.handleUpload(
const pkg = await handleUpload(
'https://github.com/owner/repo',
'v1.0.0',
'plugin.difypkg',
@ -132,9 +122,8 @@ describe('install-plugin/hooks', () => {
it('shows toast on upload error', async () => {
mockUploadGitHub.mockRejectedValue(new Error('Upload failed'))
const { result } = renderHook(() => useGitHubUpload())
await expect(
result.current.handleUpload('url', 'v1', 'pkg'),
handleUpload('url', 'v1', 'pkg'),
).rejects.toThrow('Upload failed')
expect(mockNotify).toHaveBeenCalledWith('Error uploading package')
})

View File

@ -1,101 +1,87 @@
import type { GitHubRepoReleaseResponse } from '../types'
import { toast } from '@/app/components/base/ui/toast'
import { GITHUB_ACCESS_TOKEN } from '@/config'
import { uploadGitHub } from '@/service/plugins'
import { compareVersion, getLatestVersion } from '@/utils/semver'
const normalizeAssetName = (downloadUrl: string) => {
const parts = downloadUrl.split('/')
return parts[parts.length - 1]
}
const formatReleases = (releases: any) => {
return releases.map((release: any) => ({
tag_name: release.tag_name,
tag_name: release.tag,
assets: release.assets.map((asset: any) => ({
browser_download_url: asset.browser_download_url,
name: asset.name,
browser_download_url: asset.downloadUrl,
name: normalizeAssetName(asset.downloadUrl),
})),
}))
}
export const useGitHubReleases = () => {
const fetchReleases = async (owner: string, repo: string) => {
try {
if (!GITHUB_ACCESS_TOKEN) {
// Fetch releases without authentication from client
const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/releases`)
if (!res.ok)
throw new Error('Failed to fetch repository releases')
const data = await res.json()
return formatReleases(data)
}
else {
// Fetch releases with authentication from server
const res = await fetch(`/repos/${owner}/${repo}/releases`)
const bodyJson = await res.json()
if (bodyJson.status !== 200)
throw new Error(bodyJson.data.message)
return formatReleases(bodyJson.data)
}
}
catch (error) {
if (error instanceof Error) {
toast.error(error.message)
}
else {
toast.error('Failed to fetch repository releases')
}
return []
}
export const fetchReleases = async (owner: string, repo: string) => {
try {
// Fetch releases without authentication from client
const res = await fetch(`https://ungh.cc/repos/${owner}/${repo}/releases`)
if (!res.ok)
throw new Error('Failed to fetch repository releases')
const data = await res.json()
return formatReleases(data.releases)
}
catch (error) {
if (error instanceof Error) {
toast.error(error.message)
}
else {
toast.error('Failed to fetch repository releases')
}
return []
}
}
const checkForUpdates = (fetchedReleases: GitHubRepoReleaseResponse[], currentVersion: string) => {
let needUpdate = false
const toastProps: { type?: 'success' | 'error' | 'info' | 'warning', message: string } = {
type: 'info',
message: 'No new version available',
}
if (fetchedReleases.length === 0) {
toastProps.type = 'error'
toastProps.message = 'Input releases is empty'
return { needUpdate, toastProps }
}
const versions = fetchedReleases.map(release => release.tag_name)
const latestVersion = getLatestVersion(versions)
try {
needUpdate = compareVersion(latestVersion, currentVersion) === 1
if (needUpdate)
toastProps.message = `New version available: ${latestVersion}`
}
catch {
needUpdate = false
toastProps.type = 'error'
toastProps.message = 'Fail to compare versions, please check the version format'
}
export const checkForUpdates = (fetchedReleases: GitHubRepoReleaseResponse[], currentVersion: string) => {
let needUpdate = false
const toastProps: { type?: 'success' | 'error' | 'info' | 'warning', message: string } = {
type: 'info',
message: 'No new version available',
}
if (fetchedReleases.length === 0) {
toastProps.type = 'error'
toastProps.message = 'Input releases is empty'
return { needUpdate, toastProps }
}
return { fetchReleases, checkForUpdates }
}
export const useGitHubUpload = () => {
const handleUpload = async (
repoUrl: string,
selectedVersion: string,
selectedPackage: string,
onSuccess?: (GitHubPackage: { manifest: any, unique_identifier: string }) => void,
) => {
try {
const response = await uploadGitHub(repoUrl, selectedVersion, selectedPackage)
const GitHubPackage = {
manifest: response.manifest,
unique_identifier: response.unique_identifier,
}
if (onSuccess)
onSuccess(GitHubPackage)
return GitHubPackage
}
catch (error) {
toast.error('Error uploading package')
throw error
}
const versions = fetchedReleases.map(release => release.tag_name)
const latestVersion = getLatestVersion(versions)
try {
needUpdate = compareVersion(latestVersion, currentVersion) === 1
if (needUpdate)
toastProps.message = `New version available: ${latestVersion}`
}
catch {
needUpdate = false
toastProps.type = 'error'
toastProps.message = 'Fail to compare versions, please check the version format'
}
return { needUpdate, toastProps }
}
export const handleUpload = async (
repoUrl: string,
selectedVersion: string,
selectedPackage: string,
onSuccess?: (GitHubPackage: { manifest: any, unique_identifier: string }) => void,
) => {
try {
const response = await uploadGitHub(repoUrl, selectedVersion, selectedPackage)
const GitHubPackage = {
manifest: response.manifest,
unique_identifier: response.unique_identifier,
}
if (onSuccess)
onSuccess(GitHubPackage)
return GitHubPackage
}
catch (error) {
toast.error('Error uploading package')
throw error
}
return { handleUpload }
}

View File

@ -74,10 +74,16 @@ vi.mock('@/app/components/plugins/install-plugin/base/use-get-icon', () => ({
default: () => ({ getIconUrl: mockGetIconUrl }),
}))
const mockFetchReleases = vi.fn()
vi.mock('../../hooks', () => ({
useGitHubReleases: () => ({ fetchReleases: mockFetchReleases }),
const { mockFetchReleases } = vi.hoisted(() => ({
mockFetchReleases: vi.fn(),
}))
vi.mock('../../hooks', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../hooks')>()
return {
...actual,
fetchReleases: mockFetchReleases,
}
})
const mockRefreshPluginList = vi.fn()
vi.mock('../../hooks/use-refresh-plugin-list', () => ({

View File

@ -12,7 +12,7 @@ import useGetIcon from '@/app/components/plugins/install-plugin/base/use-get-ico
import { cn } from '@/utils/classnames'
import { InstallStepFromGitHub } from '../../types'
import Installed from '../base/installed'
import { useGitHubReleases } from '../hooks'
import { fetchReleases } from '../hooks'
import useHideLogic from '../hooks/use-hide-logic'
import useRefreshPluginList from '../hooks/use-refresh-plugin-list'
import { convertRepoToUrl, parseGitHubUrl } from '../utils'
@ -31,7 +31,6 @@ type InstallFromGitHubProps = {
const InstallFromGitHub: React.FC<InstallFromGitHubProps> = ({ updatePayload, onClose, onSuccess }) => {
const { t } = useTranslation()
const { getIconUrl } = useGetIcon()
const { fetchReleases } = useGitHubReleases()
const { refreshPluginList } = useRefreshPluginList()
const {

View File

@ -5,11 +5,17 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PluginCategoryEnum } from '../../../../types'
import SelectPackage from '../selectPackage'
// Mock the useGitHubUpload hook
const mockHandleUpload = vi.fn()
vi.mock('../../../hooks', () => ({
useGitHubUpload: () => ({ handleUpload: mockHandleUpload }),
// Mock upload helper from hooks module
const { mockHandleUpload } = vi.hoisted(() => ({
mockHandleUpload: vi.fn(),
}))
vi.mock('../../../hooks', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../../hooks')>()
return {
...actual,
handleUpload: mockHandleUpload,
}
})
// Factory functions
const createMockManifest = (): PluginDeclaration => ({

View File

@ -6,7 +6,7 @@ import * as React from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { PortalSelect } from '@/app/components/base/select'
import { useGitHubUpload } from '../../hooks'
import { handleUpload } from '../../hooks'
const i18nPrefix = 'installFromGitHub'
@ -43,7 +43,6 @@ const SelectPackage: React.FC<SelectPackageProps> = ({
const { t } = useTranslation()
const isEdit = Boolean(updatePayload)
const [isUploading, setIsUploading] = React.useState(false)
const { handleUpload } = useGitHubUpload()
const handleUploadPackage = async () => {
if (isUploading)

View File

@ -103,12 +103,14 @@ vi.mock('@/service/use-tools', () => ({
useInvalidateAllToolProviders: () => mockInvalidateAllToolProviders,
}))
vi.mock('../../install-plugin/hooks', () => ({
useGitHubReleases: () => ({
vi.mock('../../install-plugin/hooks', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../install-plugin/hooks')>()
return {
...actual,
checkForUpdates: mockCheckForUpdates,
fetchReleases: mockFetchReleases,
}),
}))
}
})
// Auto upgrade settings mock
let mockAutoUpgradeInfo: {

View File

@ -72,12 +72,14 @@ vi.mock('@/service/use-tools', () => ({
useInvalidateAllToolProviders: () => mockInvalidateAllToolProviders,
}))
vi.mock('../../../../install-plugin/hooks', () => ({
useGitHubReleases: () => ({
vi.mock('../../../../install-plugin/hooks', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../../../install-plugin/hooks')>()
return {
...actual,
checkForUpdates: mockCheckForUpdates,
fetchReleases: mockFetchReleases,
}),
}))
}
})
const createPluginDetail = (overrides: Partial<PluginDetail> = {}): PluginDetail => ({
id: 'test-id',

View File

@ -11,7 +11,7 @@ import { useProviderContext } from '@/context/provider-context'
import { uninstallPlugin } from '@/service/plugins'
import { useInvalidateCheckInstalled } from '@/service/use-plugins'
import { useInvalidateAllToolProviders } from '@/service/use-tools'
import { useGitHubReleases } from '../../../install-plugin/hooks'
import { checkForUpdates, fetchReleases } from '../../../install-plugin/hooks'
import { PluginCategoryEnum, PluginSource } from '../../../types'
type UsePluginOperationsParams = {
@ -39,7 +39,6 @@ export const usePluginOperations = ({
onUpdate,
}: UsePluginOperationsParams): UsePluginOperationsReturn => {
const { t } = useTranslation()
const { checkForUpdates, fetchReleases } = useGitHubReleases()
const { setShowUpdatePluginModal } = useModalContext()
const { refreshModelProviders } = useProviderContext()
const invalidateCheckInstalled = useInvalidateCheckInstalled()

View File

@ -46,13 +46,15 @@ vi.mock('@/service/plugins', () => ({
uninstallPlugin: (id: string) => mockUninstallPlugin(id),
}))
// Mock GitHub releases hook
vi.mock('../../install-plugin/hooks', () => ({
useGitHubReleases: () => ({
// Mock GitHub release helpers
vi.mock('../../install-plugin/hooks', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../install-plugin/hooks')>()
return {
...actual,
fetchReleases: mockFetchReleases,
checkForUpdates: mockCheckForUpdates,
}),
}))
}
})
// Mock modal context
vi.mock('@/context/modal-context', () => ({

View File

@ -14,7 +14,7 @@ import { useInvalidateInstalledPluginList } from '@/service/use-plugins'
import ActionButton from '../../base/action-button'
import Confirm from '../../base/confirm'
import Tooltip from '../../base/tooltip'
import { useGitHubReleases } from '../install-plugin/hooks'
import { checkForUpdates, fetchReleases } from '../install-plugin/hooks'
import PluginInfo from '../plugin-page/plugin-info'
import { PluginSource } from '../types'
@ -54,7 +54,6 @@ const Action: FC<Props> = ({
setTrue: showDeleting,
setFalse: hideDeleting,
}] = useBoolean(false)
const { checkForUpdates, fetchReleases } = useGitHubReleases()
const { setShowUpdatePluginModal } = useModalContext()
const invalidateInstalledPluginList = useInvalidateInstalledPluginList()

View File

@ -26,7 +26,8 @@ export type QAChunks = {
export type ChunkInfo = GeneralChunks | ParentChildChunks | QAChunks
export enum QAItemType {
Question = 'question',
Answer = 'answer',
}
export const QAItemType = {
Question: 'question',
Answer: 'answer',
} as const
export type QAItemType = typeof QAItemType[keyof typeof QAItemType]

View File

@ -1,9 +1,10 @@
import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
export enum TestRunStep {
dataSource = 'dataSource',
documentProcessing = 'documentProcessing',
}
export const TestRunStep = {
dataSource: 'dataSource',
documentProcessing: 'documentProcessing',
} as const
export type TestRunStep = typeof TestRunStep[keyof typeof TestRunStep]
export type DataSourceOption = {
label: string

View File

@ -1,13 +1,14 @@
import type { CommonNodeType } from '../types'
export enum NoteTheme {
blue = 'blue',
cyan = 'cyan',
green = 'green',
yellow = 'yellow',
pink = 'pink',
violet = 'violet',
}
export const NoteTheme = {
blue: 'blue',
cyan: 'cyan',
green: 'green',
yellow: 'yellow',
pink: 'pink',
violet: 'violet',
} as const
export type NoteTheme = typeof NoteTheme[keyof typeof NoteTheme]
export type NoteNodeType = CommonNodeType & {
text: string

View File

@ -174,7 +174,7 @@ const Right = ({
{currentNodeVar?.var && (
<>
{
[VarInInspectType.environment, VarInInspectType.conversation, VarInInspectType.system].includes(currentNodeVar.nodeType as VarInInspectType) && (
([VarInInspectType.environment, VarInInspectType.conversation, VarInInspectType.system] as VarInInspectType[]).includes(currentNodeVar.nodeType as VarInInspectType) && (
<VariableIconWithColor
variableCategory={currentNodeVar.nodeType as VarInInspectType}
className="size-4"

View File

@ -2,12 +2,14 @@ export const EVENT_WORKFLOW_STOP = 'WORKFLOW_STOP'
export const CHUNK_SCHEMA_TYPES = ['general_structure', 'parent_child_structure', 'qa_structure']
export enum ViewMode {
Code = 'code',
Preview = 'preview',
}
export const ViewMode = {
Code: 'code',
Preview: 'preview',
} as const
export type ViewMode = typeof ViewMode[keyof typeof ViewMode]
export enum PreviewType {
Markdown = 'markdown',
Chunks = 'chunks',
}
export const PreviewType = {
Markdown: 'markdown',
Chunks: 'chunks',
} as const
export type PreviewType = typeof PreviewType[keyof typeof PreviewType]

View File

@ -1,37 +0,0 @@
import type { NextRequest } from '@/next/server'
import { Octokit } from '@octokit/core'
import { RequestError } from '@octokit/request-error'
import { GITHUB_ACCESS_TOKEN } from '@/config'
import { NextResponse } from '@/next/server'
type Params = {
owner: string
repo: string
}
const octokit = new Octokit({
auth: GITHUB_ACCESS_TOKEN,
})
export async function GET(
request: NextRequest,
{ params }: { params: Promise<Params> },
) {
const { owner, repo } = (await params)
try {
const releasesRes = await octokit.request('GET /repos/{owner}/{repo}/releases', {
owner,
repo,
headers: {
'X-GitHub-Api-Version': '2022-11-28',
},
})
return NextResponse.json(releasesRes)
}
catch (error) {
if (error instanceof RequestError)
return NextResponse.json(error.response)
else
throw error
}
}

View File

@ -292,9 +292,6 @@ export const resetHITLInputReg = () => HITL_INPUT_REG.lastIndex = 0
export const DISABLE_UPLOAD_IMAGE_AS_ICON = env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON
export const GITHUB_ACCESS_TOKEN
= env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN
export const SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS = '.difypkg,.difybndl'
export const FULL_DOC_PREVIEW_LENGTH = 50

View File

@ -283,16 +283,6 @@ Reserve snapshots for static, deterministic fragments (icons, badges, layout chr
**Note**: Dify is a desktop application. **No need for** responsive/mobile testing.
### 12. Mock API
Use Nock to mock API calls. Example:
```ts
const mockGithubStar = (status: number, body: Record<string, unknown>, delayMs = 0) => {
return nock(GITHUB_HOST).get(GITHUB_PATH).delay(delayMs).reply(status, body)
}
```
## Code Style
### Example Structure

View File

@ -66,10 +66,6 @@ const clientSchema = {
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL: coercedBoolean.default(true),
NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER: coercedBoolean.default(true),
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL: coercedBoolean.default(false),
/**
* Github Access Token, used for invoking Github API
*/
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN: z.string().optional(),
/**
* The maximum number of tokens for segmentation
*/
@ -171,7 +167,6 @@ export const env = createEnv({
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL : getRuntimeEnvFromBody('enableWebsiteFirecrawl'),
NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER : getRuntimeEnvFromBody('enableWebsiteJinareader'),
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL : getRuntimeEnvFromBody('enableWebsiteWatercrawl'),
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN: isServer ? process.env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN : getRuntimeEnvFromBody('githubAccessToken'),
NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: isServer ? process.env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH : getRuntimeEnvFromBody('indexingMaxSegmentationTokensLength'),
NEXT_PUBLIC_IS_MARKETPLACE: isServer ? process.env.NEXT_PUBLIC_IS_MARKETPLACE : getRuntimeEnvFromBody('isMarketplace'),
NEXT_PUBLIC_LOOP_NODE_MAX_COUNT: isServer ? process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT : getRuntimeEnvFromBody('loopNodeMaxCount'),

File diff suppressed because it is too large Load Diff

View File

@ -7,6 +7,7 @@ import md from 'eslint-markdown'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import hyoban from 'eslint-plugin-hyoban'
import markdownPreferences from 'eslint-plugin-markdown-preferences'
import noBarrelFiles from 'eslint-plugin-no-barrel-files'
import { reactRefresh } from 'eslint-plugin-react-refresh'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
@ -30,12 +31,17 @@ const plugins = pluginReact.configs.all.plugins
export default antfu(
{
react: false,
nextjs: true,
nextjs: {
overrides: {
'next/no-img-element': 'off',
},
},
ignores: ['public', 'types/doc-paths.ts', 'eslint-suppressions.json'],
typescript: {
overrides: {
'ts/consistent-type-definitions': ['error', 'type'],
'ts/no-explicit-any': 'error',
'ts/no-redeclare': 'off',
},
erasableOnly: true,
},
@ -66,12 +72,23 @@ export default antfu(
...pluginReact.configs['recommended-typescript'].rules,
'react/prefer-namespace-import': 'error',
'react/set-state-in-effect': 'error',
'react/no-unnecessary-use-prefix': 'error',
},
},
{
files: [...GLOB_TESTS, GLOB_MARKDOWN_CODE, 'vitest.setup.ts', 'test/i18n-mock.ts'],
rules: {
'react/component-hook-factories': 'off',
'react/no-unnecessary-use-prefix': 'off',
},
},
{
plugins: {
'no-barrel-files': noBarrelFiles,
},
ignores: ['next/**'],
rules: {
'no-barrel-files/no-barrel-files': 'error',
},
},
reactRefresh.configs.next(),
@ -98,7 +115,6 @@ export default antfu(
{
rules: {
'node/prefer-global/process': 'off',
'next/no-img-element': 'off',
},
},
{
@ -160,7 +176,7 @@ export default antfu(
},
},
{
files: ['**/package.json'],
files: ['package.json'],
rules: {
'hyoban/no-dependency-version-prefix': 'error',
},

View File

@ -220,10 +220,6 @@ export type DataSources = {
sources: DataSourceItem[]
}
export type GithubRepo = {
stargazers_count: number
}
export type PluginProvider = {
tool_name: string
is_enabled: boolean

View File

@ -1,2 +0,0 @@
export { NextResponse } from 'next/server'
export type { NextRequest } from 'next/server'

View File

@ -74,8 +74,6 @@
"@lexical/text": "0.42.0",
"@lexical/utils": "0.42.0",
"@monaco-editor/react": "4.7.0",
"@octokit/core": "7.0.6",
"@octokit/request-error": "7.1.0",
"@orpc/client": "1.13.9",
"@orpc/contract": "1.13.9",
"@orpc/openapi-client": "1.13.9",
@ -217,6 +215,7 @@
"eslint-plugin-better-tailwindcss": "4.3.2",
"eslint-plugin-hyoban": "0.14.1",
"eslint-plugin-markdown-preferences": "0.40.3",
"eslint-plugin-no-barrel-files": "1.2.2",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.5.2",
"eslint-plugin-sonarjs": "4.0.2",
@ -228,7 +227,6 @@
"jsdom-testing-mocks": "1.16.0",
"knip": "6.0.2",
"lint-staged": "16.4.0",
"nock": "14.0.11",
"postcss": "8.5.8",
"postcss-js": "5.1.0",
"react-server-dom-webpack": "19.2.4",
@ -278,6 +276,8 @@
"object.values": "npm:@nolyfill/object.values@^1.0.44",
"pbkdf2": "~3.1.5",
"pbkdf2@<3.1.3": "3.1.3",
"picomatch@<2.3.2": "2.3.2",
"picomatch@>=4.0.0 <4.0.4": "4.0.4",
"prismjs": "~1.30",
"prismjs@<1.30.0": "1.30.0",
"rollup@>=4.0.0 <4.59.0": "4.59.0",
@ -285,6 +285,7 @@
"safe-regex-test": "npm:@nolyfill/safe-regex-test@^1.0.44",
"safer-buffer": "npm:@nolyfill/safer-buffer@^1.0.44",
"side-channel": "npm:@nolyfill/side-channel@^1.0.44",
"smol-toml@<1.6.1": "1.6.1",
"solid-js": "1.9.11",
"string-width": "~8.2.0",
"string.prototype.includes": "npm:@nolyfill/string.prototype.includes@^1.0.44",
@ -298,6 +299,7 @@
"vite": "npm:@voidzero-dev/vite-plus-core@0.1.13",
"vitest": "npm:@voidzero-dev/vite-plus-test@0.1.13",
"which-typed-array": "npm:@nolyfill/which-typed-array@^1.0.44",
"yaml@>=2.0.0 <2.8.3": "2.8.3",
"yauzl@<3.2.1": "3.2.1"
},
"ignoredBuiltDependencies": [

434
web/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@ -1,9 +1,11 @@
import type { NextRequest } from '@/next/server'
// eslint-disable-next-line no-restricted-imports
import type { NextRequest } from 'next/server'
import { Buffer } from 'node:buffer'
// eslint-disable-next-line no-restricted-imports
import { NextResponse } from 'next/server'
import { env } from '@/env'
import { NextResponse } from '@/next/server'
const NECESSARY_DOMAIN = '*.sentry.io http://localhost:* http://127.0.0.1:* https://analytics.google.com googletagmanager.com *.googletagmanager.com https://www.google-analytics.com https://api.github.com https://api2.amplitude.com *.amplitude.com'
const NECESSARY_DOMAIN = '*.sentry.io http://localhost:* http://127.0.0.1:* https://analytics.google.com googletagmanager.com *.googletagmanager.com https://www.google-analytics.com https://ungh.cc https://api2.amplitude.com *.amplitude.com'
const wrapResponseWithXFrameOptions = (response: NextResponse, pathname: string) => {
// prevent clickjacking: https://owasp.org/www-community/attacks/Clickjacking

View File

@ -2,12 +2,13 @@
* Model provider quota types - shared type definitions for API responses
* These represent the provider identifiers that support paid/trial quotas
*/
export enum ModelProviderQuotaGetPaid {
ANTHROPIC = 'langgenius/anthropic/anthropic',
OPENAI = 'langgenius/openai/openai',
// AZURE_OPENAI = 'langgenius/azure_openai/azure_openai',
GEMINI = 'langgenius/gemini/google',
X = 'langgenius/x/x',
DEEPSEEK = 'langgenius/deepseek/deepseek',
TONGYI = 'langgenius/tongyi/tongyi',
}
export const ModelProviderQuotaGetPaid = {
ANTHROPIC: 'langgenius/anthropic/anthropic',
OPENAI: 'langgenius/openai/openai',
// AZURE_OPENAI: 'langgenius/azure_openai/azure_openai',
GEMINI: 'langgenius/gemini/google',
X: 'langgenius/x/x',
DEEPSEEK: 'langgenius/deepseek/deepseek',
TONGYI: 'langgenius/tongyi/tongyi',
} as const
export type ModelProviderQuotaGetPaid = typeof ModelProviderQuotaGetPaid[keyof typeof ModelProviderQuotaGetPaid]

View File

@ -455,12 +455,13 @@ export type PanelProps = {
export type NodeRunResult = NodeTracing
// Var Inspect
export enum VarInInspectType {
conversation = 'conversation',
environment = 'env',
node = 'node',
system = 'sys',
}
export const VarInInspectType = {
conversation: 'conversation',
environment: 'env',
node: 'node',
system: 'sys',
} as const
export type VarInInspectType = typeof VarInInspectType[keyof typeof VarInInspectType]
export type FullContent = {
size_bytes: number