Compare commits


30 Commits
1.1.2 ... 1.1.3

SHA1 Message Date
1be0d26c1f fix metadata filter not taking effect in keyword-search and fulltext-search (#16644) 2025-03-24 18:35:16 +08:00
c167a1f4f4 chore: bump the package version to 1.1.3 (#16612)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-03-24 17:59:54 +08:00
5eb0ca9b9d fix: fix inner API workspace Account.query error. (#16630) 2025-03-24 17:52:50 +08:00
6e26ed2bb7 fix: update retrieval configuration to correctly handle reranking mod… (#16641) 2025-03-24 17:47:56 +08:00
058d9c3525 chore: update release trigger to include all tags in build-push workflow (#16631)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-03-24 17:17:55 +08:00
b247fbb2ef Fix: style of sidebar in mobile (#16629) 2025-03-24 16:30:23 +08:00
bc6f122364 Fix: style issue of app detail panel in jp (#16620) 2025-03-24 15:54:00 +08:00
815d77856d Fix: show feedback status in conversation (#16615) 2025-03-24 15:20:18 +08:00
05eaef84bb fix: cancel marketplace debounced search when clear search keywords (#16614) 2025-03-24 15:17:12 +08:00
770c461a8f feat: add openGauss PQ acceleration feature (#16432)
Co-authored-by: chenhuan <huan.chen0728@foxmail>
2025-03-24 15:16:40 +08:00
16b6ffd915 fix: sanitize svg to avoid xss (#16606) 2025-03-24 14:36:07 +08:00
9701b573e0 feat: add datasets detail context and provider for improved data vali… (#16451) 2025-03-24 14:30:26 +08:00
83cd14104d feat: datasets openapi list segments supports paged response (#16603) 2025-03-24 14:27:31 +08:00
e2988acc2f Fix: web app sidebar cannot close when long title conversation existed (#16593) 2025-03-24 12:51:21 +08:00
cea4669b76 fix: transition in simple select causes page crash (#16587) 2025-03-24 11:07:16 +08:00
17b4d4c7b2 fix: workflow if-else node variable tag style (#16583) 2025-03-24 11:06:10 +08:00
e95f0fcceb chore: enable eslint cache (#16570) 2025-03-24 10:17:54 +08:00
6d5d6f0f24 fix: update app mode display text for advanced-chat type (#16578) 2025-03-24 10:17:04 +08:00
7ce8faf176 fix: fix variable-aggregator cannot pass node check in group mode (#16439)
Co-authored-by: crazywoola <427733928@qq.com>
2025-03-24 09:49:33 +08:00
f31e3313b0 feat: Make the logic of APP filtering and creation the same (#16079)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-03-23 09:40:15 +08:00
f6ac98a37d fix: fix the app max_active_requests been overwritten bug (#16513) 2025-03-23 09:34:23 +08:00
f8e7e301cd fix: update chatbot doc link at create app page (#16479) 2025-03-23 09:27:16 +08:00
35bafb3235 fix: the error occurring when passing inputs on iOS devices and some … (#16534) 2025-03-23 09:26:35 +08:00
ae5d2ecf48 fix: weight_type missing when creating a document in a dataset (#16503) 2025-03-22 20:21:57 +08:00
1907d2a90a fix: optimize query for expired workflow runs by adding date filter and limiting results (#16491) 2025-03-22 11:17:21 +08:00
4448a54cc1 use REDIS_PORT to replace 6379 in celery config (#16182) 2025-03-21 21:34:07 +08:00
bfc0d606dc feat: cleanup free tenants expired data like messages/conversations/workflow_runs/workflow_node_executions (#16490) 2025-03-21 21:30:35 +08:00
3306228840 fix: workflow file add related-id in iteration node (#16255)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-03-21 20:57:02 +08:00
d7e00ae691 feat: Skip Redis operations if RateLimit is disabled (#12226) 2025-03-21 19:55:27 +08:00
0e2e2db3fa refactor: add OpikDataTrace instance builder. (#16444)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-03-21 18:09:39 +08:00
56 changed files with 832 additions and 175 deletions

View File

@ -6,8 +6,8 @@ on:
- "main"
- "deploy/dev"
- "deploy/enterprise"
release:
types: [published]
tags:
- "*"
concurrency:
group: build-push-${{ github.head_ref || github.run_id }}

View File

@ -26,9 +26,6 @@ ACCESS_TOKEN_EXPIRE_MINUTES=60
# Refresh token expiration time in days
REFRESH_TOKEN_EXPIRE_DAYS=30
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
# redis configuration
REDIS_HOST=localhost
REDIS_PORT=6379
@ -50,6 +47,9 @@ REDIS_USE_CLUSTERS=false
REDIS_CLUSTERS=
REDIS_CLUSTERS_PASSWORD=
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
# PostgreSQL database configuration
DB_USERNAME=postgres
DB_PASSWORD=difyai123456

View File

@ -26,6 +26,7 @@ from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.provider import Provider, ProviderModel
from services.account_service import RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
@ -792,3 +793,23 @@ def install_plugins(input_file: str, output_file: str, workers: int):
PluginMigration.install_plugins(input_file, output_file, workers)
click.echo(click.style("Install plugins completed.", fg="green"))
@click.command("clear-free-plan-tenant-expired-logs", help="Clear free plan tenant expired logs.")
@click.option("--days", prompt=True, help="The days to clear free plan tenant expired logs.", default=30)
@click.option("--batch", prompt=True, help="The batch size to clear free plan tenant expired logs.", default=100)
@click.option(
"--tenant_ids",
prompt=True,
multiple=True,
help="The tenant ids to clear free plan tenant expired logs.",
)
def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[str]):
"""
Clear free plan tenant expired logs.
"""
click.echo(click.style("Starting clear free plan tenant expired logs.", fg="white"))
ClearFreePlanTenantExpiredLogs.process(days, batch, tenant_ids)
click.echo(click.style("Clear free plan tenant expired logs completed.", fg="green"))

View File

@ -43,3 +43,8 @@ class OpenGaussConfig(BaseSettings):
description="Max connection of the OpenGauss database",
default=5,
)
OPENGAUSS_ENABLE_PQ: bool = Field(
description="Enable openGauss PQ acceleration feature",
default=False,
)

View File

@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
CURRENT_VERSION: str = Field(
description="Dify version",
default="1.1.2",
default="1.1.3",
)
COMMIT_SHA: str = Field(

View File

@ -50,7 +50,15 @@ class AppListApi(Resource):
parser.add_argument(
"mode",
type=str,
choices=["chat", "workflow", "agent-chat", "channel", "all"],
choices=[
"completion",
"chat",
"advanced-chat",
"workflow",
"agent-chat",
"channel",
"all",
],
default="all",
location="args",
required=False,
@ -130,7 +138,6 @@ class AppApi(Resource):
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
parser.add_argument("max_active_requests", type=int, location="json")
parser.add_argument("use_icon_as_answer_icon", type=bool, location="json")
args = parser.parse_args()

View File

@ -6,6 +6,7 @@ from controllers.console.wraps import setup_required
from controllers.inner_api import api
from controllers.inner_api.wraps import enterprise_inner_api_only
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from models.account import Account
from services.account_service import TenantService
@ -19,7 +20,7 @@ class EnterpriseWorkspace(Resource):
parser.add_argument("owner_email", type=str, required=True, location="json")
args = parser.parse_args()
account = Account.query.filter_by(email=args["owner_email"]).first()
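# Account.query raised an error in the inner API workspace context (#16630); querying through db.session avoids it: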
account = db.session.query(Account).filter_by(email=args["owner_email"]).first()
if account is None:
return {"message": "owner account not found."}, 404

View File

@ -1,3 +1,4 @@
from flask import request
from flask_login import current_user # type: ignore
from flask_restful import marshal, reqparse # type: ignore
from werkzeug.exceptions import NotFound
@ -74,6 +75,8 @@ class SegmentApi(DatasetApiResource):
# check dataset
dataset_id = str(dataset_id)
tenant_id = str(tenant_id)
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
if not dataset:
raise NotFound("Dataset not found.")
@ -118,8 +121,25 @@ class SegmentApi(DatasetApiResource):
query = query.where(DocumentSegment.content.ilike(f"%{keyword}%"))
total = query.count()
segments = query.order_by(DocumentSegment.position).all()
return {"data": marshal(segments, segment_fields), "doc_form": document.doc_form, "total": total}, 200
query = query.order_by(DocumentSegment.position)
paginated_segments = query.paginate(
page=page,
per_page=limit,
max_per_page=100,
error_out=False,
)
segments = paginated_segments.items
response = {
"data": marshal(segments, segment_fields),
"doc_form": document.doc_form,
"total": total,
"has_more": len(segments) == limit,
"limit": limit,
"page": page,
}
return response, 200
class DatasetSegmentApi(DatasetApiResource):

View File

@ -27,6 +27,9 @@ class RateLimit:
def __init__(self, client_id: str, max_active_requests: int):
self.max_active_requests = max_active_requests
# must be called after max_active_requests is set
if self.disabled():
return
if hasattr(self, "initialized"):
return
self.initialized = True
@ -37,6 +40,8 @@ class RateLimit:
self.flush_cache(use_local_value=True)
def flush_cache(self, use_local_value=False):
if self.disabled():
return
self.last_recalculate_time = time.time()
# flush max active requests
if use_local_value or not redis_client.exists(self.max_active_requests_key):
@ -59,18 +64,18 @@ class RateLimit:
redis_client.hdel(self.active_requests_key, *timeout_requests)
def enter(self, request_id: Optional[str] = None) -> str:
if self.disabled():
return RateLimit._UNLIMITED_REQUEST_ID
if time.time() - self.last_recalculate_time > RateLimit._ACTIVE_REQUESTS_COUNT_FLUSH_INTERVAL:
self.flush_cache()
if self.max_active_requests <= 0:
return RateLimit._UNLIMITED_REQUEST_ID
if not request_id:
request_id = RateLimit.gen_request_key()
active_requests_count = redis_client.hlen(self.active_requests_key)
if active_requests_count >= self.max_active_requests:
raise AppInvokeQuotaExceededError(
"Too many requests. Please try again later. The current maximum "
"concurrent requests allowed is {}.".format(self.max_active_requests)
f"Too many requests. Please try again later. The current maximum concurrent requests allowed "
f"for {self.client_id} is {self.max_active_requests}."
)
redis_client.hset(self.active_requests_key, request_id, str(time.time()))
return request_id
@ -80,6 +85,9 @@ class RateLimit:
return
redis_client.hdel(self.active_requests_key, request_id)
def disabled(self):
return self.max_active_requests <= 0
@staticmethod
def gen_request_key() -> str:
return str(uuid.uuid4())
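In effect, a disabled limiter never touches Redis; a minimal sketch of the new fast path:
```python
# max_active_requests <= 0 means disabled(): __init__, flush_cache and
# enter() all return early without any Redis round-trip.
rate_limit = RateLimit(client_id="app-id", max_active_requests=0)

request_id = rate_limit.enter()  # returns RateLimit._UNLIMITED_REQUEST_ID
rate_limit.exit(request_id)      # exit() is likewise a no-op for the unlimited id
```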

View File

@ -49,6 +49,7 @@ class FileAttribute(StrEnum):
TRANSFER_METHOD = "transfer_method"
URL = "url"
EXTENSION = "extension"
RELATED_ID = "related_id"
class ArrayFileAttribute(StrEnum):

View File

@ -34,6 +34,8 @@ def get_attr(*, file: File, attr: FileAttribute):
return file.remote_url
case FileAttribute.EXTENSION:
return file.extension
case FileAttribute.RELATED_ID:
return file.related_id
def to_prompt_message_content(
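This pairs with the `related_id` sub-variable exposed to workflow file variables later in this diff; a one-line sketch of reading it through the helper, where `file` is assumed to be a populated `File` instance:
```python
related_id = get_attr(file=file, attr=FileAttribute.RELATED_ID)
```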

View File

@ -33,7 +33,6 @@ from core.ops.entities.trace_entity import (
)
from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace
from core.ops.langsmith_trace.langsmith_trace import LangSmithDataTrace
from core.ops.opik_trace.opik_trace import OpikDataTrace
from core.ops.utils import get_message_data
from extensions.ext_database import db
from extensions.ext_storage import storage
@ -41,6 +40,13 @@ from models.model import App, AppModelConfig, Conversation, Message, MessageFile
from models.workflow import WorkflowAppLog, WorkflowRun
from tasks.ops_trace_task import process_trace_tasks
def build_opik_trace_instance(config: OpikConfig):
from core.ops.opik_trace.opik_trace import OpikDataTrace
return OpikDataTrace(config)
provider_config_map: dict[str, dict[str, Any]] = {
TracingProviderEnum.LANGFUSE.value: {
"config_class": LangfuseConfig,
@ -58,7 +64,7 @@ provider_config_map: dict[str, dict[str, Any]] = {
"config_class": OpikConfig,
"secret_keys": ["api_key"],
"other_keys": ["project", "url", "workspace"],
"trace_instance": OpikDataTrace,
"trace_instance": lambda config: build_opik_trace_instance(config),
},
}
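The lambda defers the `opik` import until a trace instance is actually built, keeping the provider map import-light. A sketch of how the registry entry is consumed, with hypothetical credentials:
```python
entry = provider_config_map[TracingProviderEnum.OPIK.value]
config = entry["config_class"](          # OpikConfig
    api_key="opik-api-key",              # secret key, per `secret_keys`
    project="demo-project",
    url="https://comet.example.com/opik/api/",
    workspace="demo-workspace",
)
trace_instance = entry["trace_instance"](config)  # opik is imported only here
```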

View File

@ -97,6 +97,7 @@ class RetrievalService:
all_documents=all_documents,
retrieval_method=retrieval_method,
exceptions=exceptions,
document_ids_filter=document_ids_filter,
)
)
concurrent.futures.wait(futures, timeout=30, return_when=concurrent.futures.ALL_COMPLETED)
@ -222,6 +223,7 @@ class RetrievalService:
all_documents: list,
retrieval_method: str,
exceptions: list,
document_ids_filter: Optional[list[str]] = None,
):
with flask_app.app_context():
try:
@ -231,7 +233,9 @@ class RetrievalService:
vector_processor = Vector(dataset=dataset)
documents = vector_processor.search_by_full_text(cls.escape_query_for_search(query), top_k=top_k)
documents = vector_processor.search_by_full_text(
cls.escape_query_for_search(query), top_k=top_k, document_ids_filter=document_ids_filter
)
if documents:
if (
reranking_model

View File

@ -25,6 +25,7 @@ class OpenGaussConfig(BaseModel):
database: str
min_connection: int
max_connection: int
enable_pq: bool = False # Enable PQ acceleration
@model_validator(mode="before")
@classmethod
@ -57,6 +58,11 @@ CREATE TABLE IF NOT EXISTS {table_name} (
);
"""
SQL_CREATE_INDEX_PQ = """
CREATE INDEX IF NOT EXISTS embedding_{table_name}_pq_idx ON {table_name}
USING hnsw (embedding vector_cosine_ops) WITH (m = 16, ef_construction = 64, enable_pq=on, pq_m={pq_m});
"""
SQL_CREATE_INDEX = """
CREATE INDEX IF NOT EXISTS embedding_cosine_{table_name}_idx ON {table_name}
USING hnsw (embedding vector_cosine_ops) WITH (m = 16, ef_construction = 64);
@ -68,6 +74,7 @@ class OpenGauss(BaseVector):
super().__init__(collection_name)
self.pool = self._create_connection_pool(config)
self.table_name = f"embedding_{collection_name}"
self.pq_enabled = config.enable_pq
def get_type(self) -> str:
return VectorType.OPENGAUSS
@ -97,7 +104,26 @@ class OpenGauss(BaseVector):
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
dimension = len(embeddings[0])
self._create_collection(dimension)
return self.add_texts(texts, embeddings)
self.add_texts(texts, embeddings)
self._create_index(dimension)
def _create_index(self, dimension: int):
index_cache_key = f"vector_index_{self._collection_name}"
lock_name = f"{index_cache_key}_lock"
with redis_client.lock(lock_name, timeout=60):
index_exist_cache_key = f"vector_index_{self._collection_name}"
if redis_client.get(index_exist_cache_key):
return
with self._get_cursor() as cur:
if dimension <= 2000:
if self.pq_enabled:
cur.execute(SQL_CREATE_INDEX_PQ.format(table_name=self.table_name, pq_m=int(dimension / 4)))
cur.execute("SET hnsw_earlystop_threshold = 320")
if not self.pq_enabled:
cur.execute(SQL_CREATE_INDEX.format(table_name=self.table_name))
redis_client.set(index_exist_cache_key, 1, ex=3600)
def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
values = []
@ -211,8 +237,6 @@ class OpenGauss(BaseVector):
with self._get_cursor() as cur:
cur.execute(SQL_CREATE_TABLE.format(table_name=self.table_name, dimension=dimension))
if dimension <= 2000:
cur.execute(SQL_CREATE_INDEX.format(table_name=self.table_name))
redis_client.set(collection_exist_cache_key, 1, ex=3600)
@ -236,5 +260,6 @@ class OpenGaussFactory(AbstractVectorFactory):
database=dify_config.OPENGAUSS_DATABASE or "dify",
min_connection=dify_config.OPENGAUSS_MIN_CONNECTION,
max_connection=dify_config.OPENGAUSS_MAX_CONNECTION,
enable_pq=dify_config.OPENGAUSS_ENABLE_PQ or False,
),
)
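A sketch of turning the feature on via the factory config; connection fields outside the shown hunks are assumptions. With `enable_pq=True` and `dimension <= 2000`, `_create_index` builds the hnsw index with `enable_pq=on` and `pq_m = dimension / 4` after the first batch of texts is inserted:
```python
# Hypothetical connection values; only `enable_pq` is new in this release.
config = OpenGaussConfig(
    host="localhost",
    port=6600,
    user="postgres",
    password="Dify@123",
    database="dify",
    min_connection=1,
    max_connection=5,
    enable_pq=True,
)
```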

View File

@ -610,7 +610,11 @@ class DatasetRetrieval:
if dataset.indexing_technique == "economy":
# use keyword table query
documents = RetrievalService.retrieve(
retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=top_k
retrieval_method="keyword_search",
dataset_id=dataset.id,
query=query,
top_k=top_k,
document_ids_filter=document_ids_filter,
)
if documents:
all_documents.extend(documents)
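Per the top commit in this compare (#16644), the metadata-derived document id filter previously never reached keyword or full-text retrieval; a sketch of the now-honored argument, with placeholder ids:
```python
# The same document_ids_filter now flows into keyword search (as above) and
# into Vector.search_by_full_text for the full-text path.
documents = RetrievalService.retrieve(
    retrieval_method="keyword_search",
    dataset_id=dataset.id,
    query=query,
    top_k=top_k,
    document_ids_filter=["doc-id-1", "doc-id-2"],
)
```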

View File

@ -4,6 +4,7 @@ from dify_app import DifyApp
def init_app(app: DifyApp):
from commands import (
add_qdrant_index,
clear_free_plan_tenant_expired_logs,
convert_to_agent_apps,
create_tenant,
extract_plugins,
@ -34,6 +35,7 @@ def init_app(app: DifyApp):
extract_unique_plugins,
install_plugins,
old_metadata_migration,
clear_free_plan_tenant_expired_logs,
]
for cmd in cmds_to_register:
app.cli.add_command(cmd)

View File

@ -24,6 +24,7 @@ vector_setting_fields = {
}
weighted_score_fields = {
"weight_type": fields.String,
"keyword_setting": fields.Nested(keyword_setting_fields),
"vector_setting": fields.Nested(vector_setting_fields),
}

View File

@ -838,6 +838,33 @@ class Conversation(db.Model): # type: ignore[name-defined]
def in_debug_mode(self):
return self.override_model_configs is not None
def to_dict(self):
return {
"id": self.id,
"app_id": self.app_id,
"app_model_config_id": self.app_model_config_id,
"model_provider": self.model_provider,
"override_model_configs": self.override_model_configs,
"model_id": self.model_id,
"mode": self.mode,
"name": self.name,
"summary": self.summary,
"inputs": self.inputs,
"introduction": self.introduction,
"system_instruction": self.system_instruction,
"system_instruction_tokens": self.system_instruction_tokens,
"status": self.status,
"invoke_from": self.invoke_from,
"from_source": self.from_source,
"from_end_user_id": self.from_end_user_id,
"from_account_id": self.from_account_id,
"read_at": self.read_at,
"read_account_id": self.read_account_id,
"dialogue_count": self.dialogue_count,
"created_at": self.created_at,
"updated_at": self.updated_at,
}
class Message(db.Model): # type: ignore[name-defined]
__tablename__ = "messages"

View File

@ -9,7 +9,6 @@ from flask_sqlalchemy.pagination import Pagination
from configs import dify_config
from constants.model_template import default_app_templates
from core.agent.entities import AgentToolEntity
from core.app.features.rate_limiting import RateLimit
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
@ -37,9 +36,13 @@ class AppService:
filters = [App.tenant_id == tenant_id, App.is_universal == False]
if args["mode"] == "workflow":
filters.append(App.mode.in_([AppMode.WORKFLOW.value, AppMode.COMPLETION.value]))
filters.append(App.mode == AppMode.WORKFLOW.value)
elif args["mode"] == "completion":
filters.append(App.mode == AppMode.COMPLETION.value)
elif args["mode"] == "chat":
filters.append(App.mode.in_([AppMode.CHAT.value, AppMode.ADVANCED_CHAT.value]))
filters.append(App.mode == AppMode.CHAT.value)
elif args["mode"] == "advanced-chat":
filters.append(App.mode == AppMode.ADVANCED_CHAT.value)
elif args["mode"] == "agent-chat":
filters.append(App.mode == AppMode.AGENT_CHAT.value)
elif args["mode"] == "channel":
@ -222,7 +225,6 @@ class AppService:
"""
app.name = args.get("name")
app.description = args.get("description", "")
app.max_active_requests = args.get("max_active_requests")
app.icon_type = args.get("icon_type", "emoji")
app.icon = args.get("icon")
app.icon_background = args.get("icon_background")
@ -231,9 +233,6 @@ class AppService:
app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
if app.max_active_requests is not None:
rate_limit = RateLimit(app.id, app.max_active_requests)
rate_limit.flush_cache(use_local_value=True)
return app
def update_app_name(self, app: App, name: str) -> App:

View File

@ -0,0 +1,313 @@
import datetime
import json
import logging
import time
from concurrent.futures import ThreadPoolExecutor
import click
from flask import Flask, current_app
from sqlalchemy.orm import Session
from configs import dify_config
from core.model_runtime.utils.encoders import jsonable_encoder
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.account import Tenant
from models.model import App, Conversation, Message
from models.workflow import WorkflowNodeExecution, WorkflowRun
from services.billing_service import BillingService
logger = logging.getLogger(__name__)
class ClearFreePlanTenantExpiredLogs:
@classmethod
def process_tenant(cls, flask_app: Flask, tenant_id: str, days: int, batch: int):
with flask_app.app_context():
apps = db.session.query(App).filter(App.tenant_id == tenant_id).all()
app_ids = [app.id for app in apps]
while True:
with Session(db.engine).no_autoflush as session:
messages = (
session.query(Message)
.filter(
Message.app_id.in_(app_ids),
Message.created_at < datetime.datetime.now() - datetime.timedelta(days=days),
)
.limit(batch)
.all()
)
if len(messages) == 0:
break
storage.save(
f"free_plan_tenant_expired_logs/"
f"{tenant_id}/messages/{datetime.datetime.now().strftime('%Y-%m-%d')}"
f"-{time.time()}.json",
json.dumps(
jsonable_encoder(
[message.to_dict() for message in messages],
),
).encode("utf-8"),
)
message_ids = [message.id for message in messages]
# delete messages
session.query(Message).filter(
Message.id.in_(message_ids),
).delete(synchronize_session=False)
session.commit()
click.echo(
click.style(
f"[{datetime.datetime.now()}] Processed {len(message_ids)} messages for tenant {tenant_id} "
)
)
while True:
with Session(db.engine).no_autoflush as session:
conversations = (
session.query(Conversation)
.filter(
Conversation.app_id.in_(app_ids),
Conversation.updated_at < datetime.datetime.now() - datetime.timedelta(days=days),
)
.limit(batch)
.all()
)
if len(conversations) == 0:
break
storage.save(
f"free_plan_tenant_expired_logs/"
f"{tenant_id}/conversations/{datetime.datetime.now().strftime('%Y-%m-%d')}"
f"-{time.time()}.json",
json.dumps(
jsonable_encoder(
[conversation.to_dict() for conversation in conversations],
),
).encode("utf-8"),
)
conversation_ids = [conversation.id for conversation in conversations]
session.query(Conversation).filter(
Conversation.id.in_(conversation_ids),
).delete(synchronize_session=False)
session.commit()
click.echo(
click.style(
f"[{datetime.datetime.now()}] Processed {len(conversation_ids)}"
f" conversations for tenant {tenant_id}"
)
)
while True:
with Session(db.engine).no_autoflush as session:
workflow_node_executions = (
session.query(WorkflowNodeExecution)
.filter(
WorkflowNodeExecution.tenant_id == tenant_id,
WorkflowNodeExecution.created_at < datetime.datetime.now() - datetime.timedelta(days=days),
)
.limit(batch)
.all()
)
if len(workflow_node_executions) == 0:
break
# save workflow node executions
storage.save(
f"free_plan_tenant_expired_logs/"
f"{tenant_id}/workflow_node_executions/{datetime.datetime.now().strftime('%Y-%m-%d')}"
f"-{time.time()}.json",
json.dumps(
jsonable_encoder(workflow_node_executions),
).encode("utf-8"),
)
workflow_node_execution_ids = [
workflow_node_execution.id for workflow_node_execution in workflow_node_executions
]
# delete workflow node executions
session.query(WorkflowNodeExecution).filter(
WorkflowNodeExecution.id.in_(workflow_node_execution_ids),
).delete(synchronize_session=False)
session.commit()
click.echo(
click.style(
f"[{datetime.datetime.now()}] Processed {len(workflow_node_execution_ids)}"
f" workflow node executions for tenant {tenant_id}"
)
)
while True:
with Session(db.engine).no_autoflush as session:
workflow_runs = (
session.query(WorkflowRun)
.filter(
WorkflowRun.tenant_id == tenant_id,
WorkflowRun.created_at < datetime.datetime.now() - datetime.timedelta(days=days),
)
.limit(batch)
.all()
)
if len(workflow_runs) == 0:
break
# save workflow runs
storage.save(
f"free_plan_tenant_expired_logs/"
f"{tenant_id}/workflow_runs/{datetime.datetime.now().strftime('%Y-%m-%d')}"
f"-{time.time()}.json",
json.dumps(
jsonable_encoder(
[workflow_run.to_dict() for workflow_run in workflow_runs],
),
).encode("utf-8"),
)
workflow_run_ids = [workflow_run.id for workflow_run in workflow_runs]
# delete workflow runs
session.query(WorkflowRun).filter(
WorkflowRun.id.in_(workflow_run_ids),
).delete(synchronize_session=False)
session.commit()
@classmethod
def process(cls, days: int, batch: int, tenant_ids: list[str]):
"""
Clear free plan tenant expired logs.
"""
click.echo(click.style("Clearing free plan tenant expired logs", fg="white"))
ended_at = datetime.datetime.now()
started_at = datetime.datetime(2023, 4, 3, 8, 59, 24)
current_time = started_at
with Session(db.engine) as session:
total_tenant_count = session.query(Tenant.id).count()
click.echo(click.style(f"Total tenant count: {total_tenant_count}", fg="white"))
handled_tenant_count = 0
thread_pool = ThreadPoolExecutor(max_workers=10)
def process_tenant(flask_app: Flask, tenant_id: str) -> None:
try:
if (
not dify_config.BILLING_ENABLED
or BillingService.get_info(tenant_id)["subscription"]["plan"] == "sandbox"
):
# only process sandbox tenant
cls.process_tenant(flask_app, tenant_id, days, batch)
except Exception:
logger.exception(f"Failed to process tenant {tenant_id}")
finally:
nonlocal handled_tenant_count
handled_tenant_count += 1
if handled_tenant_count % 100 == 0:
click.echo(
click.style(
f"[{datetime.datetime.now()}] "
f"Processed {handled_tenant_count} tenants "
f"({(handled_tenant_count / total_tenant_count) * 100:.1f}%), "
f"{handled_tenant_count}/{total_tenant_count}",
fg="green",
)
)
futures = []
if tenant_ids:
for tenant_id in tenant_ids:
futures.append(
thread_pool.submit(
process_tenant,
current_app._get_current_object(), # type: ignore[attr-defined]
tenant_id,
)
)
else:
while current_time < ended_at:
click.echo(
click.style(f"Current time: {current_time}, Started at: {datetime.datetime.now()}", fg="white")
)
# Initial interval of 1 day, will be dynamically adjusted based on tenant count
interval = datetime.timedelta(days=1)
# Process tenants in this batch
with Session(db.engine) as session:
# Calculate tenant count in next batch with current interval
# Try different intervals until we find one with a reasonable tenant count
test_intervals = [
datetime.timedelta(days=1),
datetime.timedelta(hours=12),
datetime.timedelta(hours=6),
datetime.timedelta(hours=3),
datetime.timedelta(hours=1),
]
for test_interval in test_intervals:
tenant_count = (
session.query(Tenant.id)
.filter(Tenant.created_at.between(current_time, current_time + test_interval))
.count()
)
if tenant_count <= 100:
interval = test_interval
break
else:
# If all intervals have too many tenants, use minimum interval
interval = datetime.timedelta(hours=1)
# Adjust interval to target ~100 tenants per batch
if tenant_count > 0:
# Scale interval based on ratio to target count
interval = min(
datetime.timedelta(days=1), # Max 1 day
max(
datetime.timedelta(hours=1), # Min 1 hour
interval * (100 / tenant_count), # Scale to target 100
),
)
batch_end = min(current_time + interval, ended_at)
rs = (
session.query(Tenant.id)
.filter(Tenant.created_at.between(current_time, batch_end))
.order_by(Tenant.created_at)
)
tenants = []
for row in rs:
tenant_id = str(row.id)
try:
tenants.append(tenant_id)
except Exception:
logger.exception(f"Failed to process tenant {tenant_id}")
continue
futures.append(
thread_pool.submit(
process_tenant,
current_app._get_current_object(), # type: ignore[attr-defined]
tenant_id,
)
)
current_time = batch_end
# wait for all threads to finish
for future in futures:
future.result()

View File

@ -563,6 +563,7 @@ OPENGAUSS_PASSWORD=Dify@123
OPENGAUSS_DATABASE=dify
OPENGAUSS_MIN_CONNECTION=1
OPENGAUSS_MAX_CONNECTION=5
OPENGAUSS_ENABLE_PQ=false
# Upstash Vector configuration, only available when VECTOR_STORE is `upstash`
UPSTASH_VECTOR_URL=https://xxx-vector.upstash.io

View File

@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.1.2
image: langgenius/dify-api:1.1.3
restart: always
environment:
# Use the shared environment variables.
@ -29,7 +29,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:1.1.2
image: langgenius/dify-api:1.1.3
restart: always
environment:
# Use the shared environment variables.
@ -53,7 +53,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.1.2
image: langgenius/dify-web:1.1.3
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -110,7 +110,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.10
image: langgenius/dify-sandbox:0.2.11
restart: always
environment:
# The DifySandbox configurations

View File

@ -43,7 +43,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.10
image: langgenius/dify-sandbox:0.2.11
restart: always
environment:
# The DifySandbox configurations

View File

@ -259,6 +259,7 @@ x-shared-env: &shared-api-worker-env
OPENGAUSS_DATABASE: ${OPENGAUSS_DATABASE:-dify}
OPENGAUSS_MIN_CONNECTION: ${OPENGAUSS_MIN_CONNECTION:-1}
OPENGAUSS_MAX_CONNECTION: ${OPENGAUSS_MAX_CONNECTION:-5}
OPENGAUSS_ENABLE_PQ: ${OPENGAUSS_ENABLE_PQ:-false}
UPSTASH_VECTOR_URL: ${UPSTASH_VECTOR_URL:-https://xxx-vector.upstash.io}
UPSTASH_VECTOR_TOKEN: ${UPSTASH_VECTOR_TOKEN:-dify}
UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15}
@ -432,7 +433,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.1.2
image: langgenius/dify-api:1.1.3
restart: always
environment:
# Use the shared environment variables.
@ -459,7 +460,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:1.1.2
image: langgenius/dify-api:1.1.3
restart: always
environment:
# Use the shared environment variables.
@ -483,7 +484,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.1.2
image: langgenius/dify-web:1.1.3
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -540,7 +541,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.10
image: langgenius/dify-sandbox:0.2.11
restart: always
environment:
# The DifySandbox configurations

View File

@ -8,6 +8,7 @@ import { useDebounceFn } from 'ahooks'
import {
RiApps2Line,
RiExchange2Line,
RiFile4Line,
RiMessage3Line,
RiRobot3Line,
} from '@remixicon/react'
@ -81,6 +82,8 @@ const Apps = () => {
{ value: 'all', text: t('app.types.all'), icon: <RiApps2Line className='mr-1 h-[14px] w-[14px]' /> },
{ value: 'chat', text: t('app.types.chatbot'), icon: <RiMessage3Line className='mr-1 h-[14px] w-[14px]' /> },
{ value: 'agent-chat', text: t('app.types.agent'), icon: <RiRobot3Line className='mr-1 h-[14px] w-[14px]' /> },
{ value: 'completion', text: t('app.types.completion'), icon: <RiFile4Line className='mr-1 h-[14px] w-[14px]' /> },
{ value: 'advanced-chat', text: t('app.types.advanced'), icon: <RiMessage3Line className='mr-1 h-[14px] w-[14px]' /> },
{ value: 'workflow', text: t('app.types.workflow'), icon: <RiExchange2Line className='mr-1 h-[14px] w-[14px]' /> },
]

View File

@ -961,6 +961,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
<Property name='status' type='string' key='status'>
Search status, completed
</Property>
<Property name='page' type='string' key='page'>
Page number (optional)
</Property>
<Property name='limit' type='string' key='limit'>
Number of items returned, default 20, range 1-100 (optional)
</Property>
</Properties>
</Col>
<Col sticky>
@ -1004,7 +1010,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
"error": null,
"stopped_at": null
}],
"doc_form": "text_model"
"doc_form": "text_model",
"has_more": false,
"limit": 20,
"total": 9,
"page": 1
}
```
</CodeGroup>
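A quick sketch of exercising the new pagination parameters; the path and placeholder ids follow the dataset API docs, and the base URL is an assumption:
```python
import requests

# Placeholder ids and API key.
resp = requests.get(
    "https://api.dify.ai/v1/datasets/{dataset_id}/documents/{document_id}/segments",
    headers={"Authorization": "Bearer {api_key}"},
    params={"page": 1, "limit": 20},  # limit defaults to 20 and is capped at 100
)
body = resp.json()
print(body["total"], body["page"], body["has_more"])
```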

View File

@ -961,6 +961,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
<Property name='status' type='string' key='status'>
Search status, completed
</Property>
<Property name='page' type='string' key='page'>
Page number (optional)
</Property>
<Property name='limit' type='string' key='limit'>
Number of items returned, default 20, range 1-100 (optional)
</Property>
</Properties>
</Col>
<Col sticky>
@ -1004,7 +1010,11 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
"error": null,
"stopped_at": null
}],
"doc_form": "text_model"
"doc_form": "text_model",
"has_more": false,
"limit": 20,
"total": 9,
"page": 1
}
```
</CodeGroup>

View File

@ -212,7 +212,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => {
<div className='flex w-full'>
<div className='system-md-semibold truncate text-text-secondary'>{appDetail.name}</div>
</div>
<div className='system-2xs-medium-uppercase text-text-tertiary'>{appDetail.mode === 'advanced-chat' ? t('app.types.chatbot') : appDetail.mode === 'agent-chat' ? t('app.types.agent') : appDetail.mode === 'chat' ? t('app.types.chatbot') : appDetail.mode === 'completion' ? t('app.types.completion') : t('app.types.workflow')}</div>
<div className='system-2xs-medium-uppercase text-text-tertiary'>{appDetail.mode === 'advanced-chat' ? t('app.types.advanced') : appDetail.mode === 'agent-chat' ? t('app.types.agent') : appDetail.mode === 'chat' ? t('app.types.chatbot') : appDetail.mode === 'completion' ? t('app.types.completion') : t('app.types.workflow')}</div>
</div>
)
}
@ -234,7 +234,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => {
/>
<div className='flex w-full grow flex-col items-start justify-center'>
<div className='system-md-semibold w-full truncate text-text-secondary'>{appDetail.name}</div>
<div className='system-2xs-medium-uppercase text-text-tertiary'>{appDetail.mode === 'advanced-chat' ? t('app.types.chatbot') : appDetail.mode === 'agent-chat' ? t('app.types.agent') : appDetail.mode === 'chat' ? t('app.types.chatbot') : appDetail.mode === 'completion' ? t('app.types.completion') : t('app.types.workflow')}</div>
<div className='system-2xs-medium-uppercase text-text-tertiary'>{appDetail.mode === 'advanced-chat' ? t('app.types.advanced') : appDetail.mode === 'agent-chat' ? t('app.types.agent') : appDetail.mode === 'chat' ? t('app.types.chatbot') : appDetail.mode === 'completion' ? t('app.types.completion') : t('app.types.workflow')}</div>
</div>
</div>
{/* description */}
@ -242,7 +242,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => {
<div className='system-xs-regular text-text-tertiary'>{appDetail.description}</div>
)}
{/* operations */}
<div className='flex items-center gap-1 self-stretch'>
<div className='flex flex-wrap items-center gap-1 self-stretch'>
<Button
size={'small'}
variant={'secondary'}

View File

@ -651,7 +651,13 @@ const Configuration: FC = () => {
syncToPublishedConfig(config)
setPublishedConfig(config)
const retrievalConfig = getMultipleRetrievalConfig(modelConfig.dataset_configs, datasets, datasets, {
const retrievalConfig = getMultipleRetrievalConfig({
...modelConfig.dataset_configs,
reranking_model: modelConfig.dataset_configs.reranking_model && {
provider: modelConfig.dataset_configs.reranking_model.reranking_provider_name,
model: modelConfig.dataset_configs.reranking_model.reranking_model_name,
},
}, datasets, datasets, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})
@ -661,8 +667,8 @@ const Configuration: FC = () => {
...retrievalConfig,
...(retrievalConfig.reranking_model ? {
reranking_model: {
...retrievalConfig.reranking_model,
reranking_provider_name: correctModelProvider(modelConfig.dataset_configs.reranking_model.reranking_provider_name),
reranking_model_name: retrievalConfig.reranking_model.model,
reranking_provider_name: correctModelProvider(retrievalConfig.reranking_model.provider),
},
} : {}),
})

View File

@ -313,7 +313,7 @@ function AppPreview({ mode }: { mode: AppMode }) {
'chat': {
title: t('app.types.chatbot'),
description: t('app.newApp.chatbotUserDescription'),
link: 'https://docs.dify.ai/guides/application-orchestrate/conversation-application?fallback=true',
link: 'https://docs.dify.ai/guides/application-orchestrate#application_type',
},
'advanced-chat': {
title: t('app.types.advanced'),

View File

@ -78,7 +78,7 @@ const Sidebar = ({ isPanel }: Props) => {
return (
<div className={cn(
'flex grow flex-col',
'flex w-full grow flex-col',
isPanel && 'rounded-xl border-[0.5px] border-components-panel-border-subtle bg-components-panel-bg shadow-lg',
)}>
<div className={cn(

View File

@ -81,13 +81,13 @@ const Operation: FC<OperationProps> = ({
const operationWidth = useMemo(() => {
let width = 0
if (!isOpeningStatement)
width += 28
width += 26
if (!isOpeningStatement && showPromptLog)
width += 102 + 8
width += 28 + 8
if (!isOpeningStatement && config?.text_to_speech?.enabled)
width += 33
width += 26
if (!isOpeningStatement && config?.supportAnnotation && config?.annotation_reply?.enabled)
width += 56 + 8
width += 26
if (config?.supportFeedback && !localFeedback?.rating && onFeedback && !isOpeningStatement)
width += 60 + 8
if (config?.supportFeedback && localFeedback?.rating && onFeedback && !isOpeningStatement)
@ -140,7 +140,7 @@ const Operation: FC<OperationProps> = ({
)}
</div>
)}
{!isOpeningStatement && config?.supportFeedback && onFeedback && (
{!isOpeningStatement && config?.supportFeedback && !localFeedback?.rating && onFeedback && (
<div className='ml-1 hidden items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm group-hover:flex'>
{!localFeedback?.rating && (
<>
@ -152,6 +152,10 @@ const Operation: FC<OperationProps> = ({
</ActionButton>
</>
)}
</div>
)}
{!isOpeningStatement && config?.supportFeedback && localFeedback?.rating && onFeedback && (
<div className='ml-1 flex items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm'>
{localFeedback?.rating === 'like' && (
<ActionButton state={ActionButtonState.Active} onClick={() => handleFeedback(null)}>
<RiThumbUpLine className='h-4 w-4' />

View File

@ -13,8 +13,9 @@ async function decodeBase64AndDecompress(base64String: string) {
async function getProcessedInputsFromUrlParams(): Promise<Record<string, any>> {
const urlParams = new URLSearchParams(window.location.search)
const inputs: Record<string, any> = {}
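// URLSearchParams.entries() returns an iterator; .map() on it needs Iterator Helpers, which older iOS WebKit lacks, so materialize an array first (#16534)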
const entriesArray = Array.from(urlParams.entries())
await Promise.all(
urlParams.entries().map(async ([key, value]) => {
entriesArray.map(async ([key, value]) => {
inputs[key] = await decodeBase64AndDecompress(decodeURIComponent(value))
}),
)

View File

@ -27,6 +27,7 @@ import ThinkBlock from '@/app/components/base/markdown-blocks/think-block'
import { Theme } from '@/types/app'
import useTheme from '@/hooks/use-theme'
import cn from '@/utils/classnames'
import SVGRenderer from './svg-gallery'
// Available language https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/AVAILABLE_LANGUAGES_HLJS.MD
const capitalizationLanguageNameMap: Record<string, string> = {
@ -136,14 +137,13 @@ const CodeBlock: any = memo(({ inline, className, children, ...props }: any) =>
</div>
)
}
// Attention: SVGRenderer has xss vulnerability
// else if (language === 'svg' && isSVG) {
// return (
// <ErrorBoundary>
// <SVGRenderer content={content} />
// </ErrorBoundary>
// )
// }
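// Re-enabled: SVGRenderer now sanitizes its input with DOMPurify (see svg-gallery below), closing the XSS hole noted above (#16606)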
else if (language === 'svg' && isSVG) {
return (
<ErrorBoundary>
<SVGRenderer content={content} />
</ErrorBoundary>
)
}
else {
return (
<SyntaxHighlighter
@ -240,19 +240,11 @@ const Link = ({ node, ...props }: any) => {
}
}
function escapeSVGTags(htmlString: string): string {
return htmlString.replace(/(<svg[\s\S]*?>)([\s\S]*?)(<\/svg>)/gi, (match: string, openTag: string, innerContent: string, closeTag: string): string => {
return openTag.replace(/</g, '&lt;').replace(/>/g, '&gt;')
+ innerContent.replace(/</g, '&lt;').replace(/>/g, '&gt;')
+ closeTag.replace(/</g, '&lt;').replace(/>/g, '&gt;')
})
}
export function Markdown(props: { content: string; className?: string; customDisallowedElements?: string[] }) {
const latexContent = flow([
preprocessThinkTag,
preprocessLaTeX,
])(escapeSVGTags(props.content))
])(props.content)
return (
<div className={cn('markdown-body', '!text-text-primary', props.className)}>

View File

@ -1,7 +1,7 @@
'use client'
import type { FC } from 'react'
import React, { Fragment, useEffect, useState } from 'react'
import { Combobox, ComboboxButton, ComboboxInput, ComboboxOption, ComboboxOptions, Listbox, ListboxButton, ListboxOption, ListboxOptions, Transition } from '@headlessui/react'
import React, { useEffect, useState } from 'react'
import { Combobox, ComboboxButton, ComboboxInput, ComboboxOption, ComboboxOptions, Listbox, ListboxButton, ListboxOption, ListboxOptions } from '@headlessui/react'
import { ChevronDownIcon, ChevronUpIcon, XMarkIcon } from '@heroicons/react/20/solid'
import Badge from '../badge/index'
import { RiCheckLine } from '@remixicon/react'
@ -238,48 +238,40 @@ const SimpleSelect: FC<ISelectProps> = ({
)}
{!disabled && (
<Transition
as={Fragment}
leave="transition ease-in duration-100"
leaveFrom="opacity-100"
leaveTo="opacity-0"
>
<ListboxOptions className={classNames('absolute z-10 mt-1 px-1 max-h-60 w-full overflow-auto rounded-md bg-components-panel-bg-blur backdrop-blur-sm py-1 text-base shadow-lg border-components-panel-border border-[0.5px] focus:outline-none sm:text-sm', optionWrapClassName)}>
{items.map((item: Item) => (
<ListboxOption
key={item.value}
className={
classNames(
'relative cursor-pointer select-none py-2 pl-3 pr-9 rounded-lg hover:bg-state-base-hover text-text-secondary',
optionClassName,
)
}
value={item}
disabled={disabled}
>
{({ /* active, */ selected }) => (
<>
{renderOption
? renderOption({ item, selected })
: (<>
<span className={classNames('block', selected && 'font-normal')}>{item.name}</span>
{selected && !hideChecked && (
<span
className={classNames(
'absolute inset-y-0 right-0 flex items-center pr-4 text-text-accent',
)}
>
<RiCheckLine className="h-4 w-4" aria-hidden="true" />
</span>
)}
</>)}
</>
)}
</ListboxOption>
))}
</ListboxOptions>
</Transition>
<ListboxOptions className={classNames('absolute z-10 mt-1 px-1 max-h-60 w-full overflow-auto rounded-md bg-components-panel-bg-blur backdrop-blur-sm py-1 text-base shadow-lg border-components-panel-border border-[0.5px] focus:outline-none sm:text-sm', optionWrapClassName)}>
{items.map((item: Item) => (
<ListboxOption
key={item.value}
className={
classNames(
'relative cursor-pointer select-none py-2 pl-3 pr-9 rounded-lg hover:bg-state-base-hover text-text-secondary',
optionClassName,
)
}
value={item}
disabled={disabled}
>
{({ /* active, */ selected }) => (
<>
{renderOption
? renderOption({ item, selected })
: (<>
<span className={classNames('block', selected && 'font-normal')}>{item.name}</span>
{selected && !hideChecked && (
<span
className={classNames(
'absolute inset-y-0 right-0 flex items-center pr-4 text-text-accent',
)}
>
<RiCheckLine className="h-4 w-4" aria-hidden="true" />
</span>
)}
</>)}
</>
)}
</ListboxOption>
))}
</ListboxOptions>
)}
</div>
</Listbox>

View File

@ -1,6 +1,7 @@
import { useEffect, useRef, useState } from 'react'
import { SVG } from '@svgdotjs/svg.js'
import ImagePreview from '@/app/components/base/image-uploader/image-preview'
import DOMPurify from 'dompurify'
export const SVGRenderer = ({ content }: { content: string }) => {
const svgRef = useRef<HTMLDivElement>(null)
@ -44,7 +45,7 @@ export const SVGRenderer = ({ content }: { content: string }) => {
svgRef.current.style.width = `${Math.min(originalWidth, 298)}px`
const rootElement = draw.svg(content)
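// Sanitize the untrusted SVG markup so embedded scripts and event handlers are stripped before rendering (#16606)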
const rootElement = draw.svg(DOMPurify.sanitize(content))
rootElement.click(() => {
setImagePreview(svgToDataURL(svgElement as Element))

View File

@ -143,6 +143,7 @@ export const MarketplaceContextProvider = ({
resetPlugins,
queryPlugins,
queryPluginsWithDebounced,
cancelQueryPluginsWithDebounced,
isLoading: isPluginsLoading,
} = useMarketplacePlugins()
@ -209,12 +210,13 @@ export const MarketplaceContextProvider = ({
const handleQuery = useCallback((debounced?: boolean) => {
if (!searchPluginTextRef.current && !filterPluginTagsRef.current.length) {
cancelQueryPluginsWithDebounced()
handleQueryMarketplaceCollectionsAndPlugins()
return
}
handleQueryPlugins(debounced)
}, [handleQueryMarketplaceCollectionsAndPlugins, handleQueryPlugins])
}, [handleQueryMarketplaceCollectionsAndPlugins, handleQueryPlugins, cancelQueryPluginsWithDebounced])
const handleSearchPluginTextChange = useCallback((text: string) => {
setSearchPluginText(text)

View File

@ -89,7 +89,7 @@ export const useMarketplacePlugins = () => {
handleUpdatePlugins(pluginsSearchParams)
}, [handleUpdatePlugins])
const { run: queryPluginsWithDebounced } = useDebounceFn((pluginsSearchParams: PluginsSearchParams) => {
const { run: queryPluginsWithDebounced, cancel: cancelQueryPluginsWithDebounced } = useDebounceFn((pluginsSearchParams: PluginsSearchParams) => {
handleUpdatePlugins(pluginsSearchParams)
}, {
wait: 500,
@ -101,6 +101,7 @@ export const useMarketplacePlugins = () => {
resetPlugins,
queryPlugins,
queryPluginsWithDebounced,
cancelQueryPluginsWithDebounced,
isLoading: isPending,
}
}

View File

@ -556,6 +556,10 @@ export const FILE_STRUCT: Var[] = [
variable: 'url',
type: VarType.string,
},
{
variable: 'related_id',
type: VarType.string,
},
]
export const DEFAULT_FILE_UPLOAD_SETTING = {

View File

@ -0,0 +1,53 @@
import type { FC } from 'react'
import { createContext, useCallback, useEffect, useRef } from 'react'
import { createDatasetsDetailStore } from './store'
import type { CommonNodeType, Node } from '../types'
import { BlockEnum } from '../types'
import type { KnowledgeRetrievalNodeType } from '../nodes/knowledge-retrieval/types'
import { fetchDatasets } from '@/service/datasets'
type DatasetsDetailStoreApi = ReturnType<typeof createDatasetsDetailStore>
type DatasetsDetailContextType = DatasetsDetailStoreApi | undefined
export const DatasetsDetailContext = createContext<DatasetsDetailContextType>(undefined)
type DatasetsDetailProviderProps = {
nodes: Node[]
children: React.ReactNode
}
const DatasetsDetailProvider: FC<DatasetsDetailProviderProps> = ({
nodes,
children,
}) => {
const storeRef = useRef<DatasetsDetailStoreApi>()
if (!storeRef.current)
storeRef.current = createDatasetsDetailStore()
const updateDatasetsDetail = useCallback(async (datasetIds: string[]) => {
const { data: datasetsDetail } = await fetchDatasets({ url: '/datasets', params: { page: 1, ids: datasetIds } })
if (datasetsDetail && datasetsDetail.length > 0)
storeRef.current!.getState().updateDatasetsDetail(datasetsDetail)
}, [])
useEffect(() => {
if (!storeRef.current) return
const knowledgeRetrievalNodes = nodes.filter(node => node.data.type === BlockEnum.KnowledgeRetrieval)
const allDatasetIds = knowledgeRetrievalNodes.reduce<string[]>((acc, node) => {
return Array.from(new Set([...acc, ...(node.data as CommonNodeType<KnowledgeRetrievalNodeType>).dataset_ids]))
}, [])
if (allDatasetIds.length === 0) return
updateDatasetsDetail(allDatasetIds)
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
return (
<DatasetsDetailContext.Provider value={storeRef.current!}>
{children}
</DatasetsDetailContext.Provider>
)
}
export default DatasetsDetailProvider

View File

@ -0,0 +1,38 @@
import { useContext } from 'react'
import { createStore, useStore } from 'zustand'
import type { DataSet } from '@/models/datasets'
import { DatasetsDetailContext } from './provider'
import produce from 'immer'
type DatasetsDetailStore = {
datasetsDetail: Record<string, DataSet>
updateDatasetsDetail: (datasetsDetail: DataSet[]) => void
}
export const createDatasetsDetailStore = () => {
return createStore<DatasetsDetailStore>((set, get) => ({
datasetsDetail: {},
updateDatasetsDetail: (datasets: DataSet[]) => {
const oldDatasetsDetail = get().datasetsDetail
const datasetsDetail = datasets.reduce<Record<string, DataSet>>((acc, dataset) => {
acc[dataset.id] = dataset
return acc
}, {})
// Merge new datasets detail into old one
const newDatasetsDetail = produce(oldDatasetsDetail, (draft) => {
Object.entries(datasetsDetail).forEach(([key, value]) => {
draft[key] = value
})
})
set({ datasetsDetail: newDatasetsDetail })
},
}))
}
export const useDatasetsDetailStore = <T>(selector: (state: DatasetsDetailStore) => T): T => {
const store = useContext(DatasetsDetailContext)
if (!store)
throw new Error('Missing DatasetsDetailContext.Provider in the tree')
return useStore(store, selector)
}

View File

@ -160,7 +160,7 @@ const Header: FC = () => {
const { mutateAsync: publishWorkflow } = usePublishWorkflow(appID!)
const onPublish = useCallback(async (params?: PublishWorkflowParams) => {
if (handleCheckBeforePublish()) {
if (await handleCheckBeforePublish()) {
const res = await publishWorkflow({
title: params?.title || '',
releaseNotes: params?.releaseNotes || '',

View File

@ -1,10 +1,12 @@
import {
useCallback,
useMemo,
useRef,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useStoreApi } from 'reactflow'
import type {
CommonNodeType,
Edge,
Node,
} from '../types'
@ -27,6 +29,10 @@ import { useGetLanguage } from '@/context/i18n'
import type { AgentNodeType } from '../nodes/agent/types'
import { useStrategyProviders } from '@/service/use-strategy'
import { canFindTool } from '@/utils'
import { useDatasetsDetailStore } from '../datasets-detail-store/store'
import type { KnowledgeRetrievalNodeType } from '../nodes/knowledge-retrieval/types'
import type { DataSet } from '@/models/datasets'
import { fetchDatasets } from '@/service/datasets'
export const useChecklist = (nodes: Node[], edges: Edge[]) => {
const { t } = useTranslation()
@ -37,6 +43,24 @@ export const useChecklist = (nodes: Node[], edges: Edge[]) => {
const customTools = useStore(s => s.customTools)
const workflowTools = useStore(s => s.workflowTools)
const { data: strategyProviders } = useStrategyProviders()
const datasetsDetail = useDatasetsDetailStore(s => s.datasetsDetail)
const getCheckData = useCallback((data: CommonNodeType<{}>) => {
let checkData = data
if (data.type === BlockEnum.KnowledgeRetrieval) {
const datasetIds = (data as CommonNodeType<KnowledgeRetrievalNodeType>).dataset_ids
const _datasets = datasetIds.reduce<DataSet[]>((acc, id) => {
if (datasetsDetail[id])
acc.push(datasetsDetail[id])
return acc
}, [])
checkData = {
...data,
_datasets,
} as CommonNodeType<KnowledgeRetrievalNodeType>
}
return checkData
}, [datasetsDetail])
const needWarningNodes = useMemo(() => {
const list = []
@ -75,7 +99,8 @@ export const useChecklist = (nodes: Node[], edges: Edge[]) => {
}
if (node.type === CUSTOM_NODE) {
const { errorMessage } = nodesExtraData[node.data.type].checkValid(node.data, t, moreDataForCheckValid)
const checkData = getCheckData(node.data)
const { errorMessage } = nodesExtraData[node.data.type].checkValid(checkData, t, moreDataForCheckValid)
if (errorMessage || !validNodes.find(n => n.id === node.id)) {
list.push({
@ -109,7 +134,7 @@ export const useChecklist = (nodes: Node[], edges: Edge[]) => {
}
return list
}, [nodes, edges, isChatMode, buildInTools, customTools, workflowTools, language, nodesExtraData, t, strategyProviders])
}, [nodes, edges, isChatMode, buildInTools, customTools, workflowTools, language, nodesExtraData, t, strategyProviders, getCheckData])
return needWarningNodes
}
@ -125,8 +150,31 @@ export const useChecklistBeforePublish = () => {
const store = useStoreApi()
const nodesExtraData = useNodesExtraData()
const { data: strategyProviders } = useStrategyProviders()
const updateDatasetsDetail = useDatasetsDetailStore(s => s.updateDatasetsDetail)
const updateTime = useRef(0)
const handleCheckBeforePublish = useCallback(() => {
const getCheckData = useCallback((data: CommonNodeType<{}>, datasets: DataSet[]) => {
let checkData = data
if (data.type === BlockEnum.KnowledgeRetrieval) {
const datasetIds = (data as CommonNodeType<KnowledgeRetrievalNodeType>).dataset_ids
const datasetsDetail = datasets.reduce<Record<string, DataSet>>((acc, dataset) => {
acc[dataset.id] = dataset
return acc
}, {})
const _datasets = datasetIds.reduce<DataSet[]>((acc, id) => {
if (datasetsDetail[id])
acc.push(datasetsDetail[id])
return acc
}, [])
checkData = {
...data,
_datasets,
} as CommonNodeType<KnowledgeRetrievalNodeType>
}
return checkData
}, [])
const handleCheckBeforePublish = useCallback(async () => {
const {
getNodes,
edges,
@ -141,6 +189,24 @@ export const useChecklistBeforePublish = () => {
notify({ type: 'error', message: t('workflow.common.maxTreeDepth', { depth: MAX_TREE_DEPTH }) })
return false
}
// Before publish, we need to fetch datasets detail, in case of the settings of datasets have been changed
const knowledgeRetrievalNodes = nodes.filter(node => node.data.type === BlockEnum.KnowledgeRetrieval)
const allDatasetIds = knowledgeRetrievalNodes.reduce<string[]>((acc, node) => {
return Array.from(new Set([...acc, ...(node.data as CommonNodeType<KnowledgeRetrievalNodeType>).dataset_ids]))
}, [])
let datasets: DataSet[] = []
if (allDatasetIds.length > 0) {
updateTime.current = updateTime.current + 1
const currUpdateTime = updateTime.current
const { data: datasetsDetail } = await fetchDatasets({ url: '/datasets', params: { page: 1, ids: allDatasetIds } })
if (datasetsDetail && datasetsDetail.length > 0) {
// avoid old data to overwrite the new data
if (currUpdateTime < updateTime.current)
return false
datasets = datasetsDetail
updateDatasetsDetail(datasetsDetail)
}
}
for (let i = 0; i < nodes.length; i++) {
const node = nodes[i]
@ -161,7 +227,8 @@ export const useChecklistBeforePublish = () => {
}
}
const { errorMessage } = nodesExtraData[node.data.type as BlockEnum].checkValid(node.data, t, moreDataForCheckValid)
const checkData = getCheckData(node.data, datasets)
const { errorMessage } = nodesExtraData[node.data.type as BlockEnum].checkValid(checkData, t, moreDataForCheckValid)
if (errorMessage) {
notify({ type: 'error', message: `[${node.data.title}] ${errorMessage}` })
@ -185,7 +252,7 @@ export const useChecklistBeforePublish = () => {
}
return true
}, [store, isChatMode, notify, t, buildInTools, customTools, workflowTools, language, nodesExtraData, strategyProviders])
}, [store, isChatMode, notify, t, buildInTools, customTools, workflowTools, language, nodesExtraData, strategyProviders, updateDatasetsDetail, getCheckData])
return {
handleCheckBeforePublish,

View File

@ -99,6 +99,7 @@ import { useEventEmitterContextContext } from '@/context/event-emitter'
import Confirm from '@/app/components/base/confirm'
import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants'
import { fetchFileUploadConfig } from '@/service/common'
import DatasetsDetailProvider from './datasets-detail-store/provider'
const nodeTypes = {
[CUSTOM_NODE]: CustomNode,
@ -448,11 +449,13 @@ const WorkflowWrap = memo(() => {
nodes={nodesData}
edges={edgesData} >
<FeaturesProvider features={initialFeatures}>
<Workflow
nodes={nodesData}
edges={edgesData}
viewport={data?.graph.viewport}
/>
<DatasetsDetailProvider nodes={nodesData}>
<Workflow
nodes={nodesData}
edges={edgesData}
viewport={data?.graph.viewport}
/>
</DatasetsDetailProvider>
</FeaturesProvider>
</WorkflowHistoryProvider>
</ReactFlowProvider>

View File

@ -100,5 +100,5 @@ export const TRANSFER_METHOD = [
{ value: TransferMethod.remote_url, i18nKey: 'url' },
]
export const SUB_VARIABLES = ['type', 'size', 'name', 'url', 'extension', 'mime_type', 'transfer_method']
export const SUB_VARIABLES = ['type', 'size', 'name', 'url', 'extension', 'mime_type', 'transfer_method', 'related_id']
export const OUTPUT_FILE_SUB_VARIABLES = SUB_VARIABLES.filter(key => key !== 'transfer_method')

View File

@ -32,8 +32,8 @@ const ConditionVarSelector = ({
crossAxis: 0,
}}
>
<PortalToFollowElemTrigger onClick={() => onOpenChange(!open)}>
<div className="cursor-pointer">
<PortalToFollowElemTrigger asChild onClick={() => onOpenChange(!open)}>
<div className="w-full cursor-pointer">
<VariableTag
valueSelector={valueSelector}
varType={varType}

View File

@ -78,11 +78,11 @@ const ConditionValue = ({
<div className='flex h-6 items-center rounded-md bg-workflow-block-parma-bg px-1'>
{!isEnvVar && !isChatVar && <Variable02 className={cn('mr-1 h-3.5 w-3.5 shrink-0 text-text-accent', isException && 'text-text-warning')} />}
{isEnvVar && <Env className='mr-1 h-3.5 w-3.5 shrink-0 text-util-colors-violet-violet-600' />}
{isChatVar && <BubbleX className='h-3.5 w-3.5 text-util-colors-teal-teal-700' />}
{isChatVar && <BubbleX className='h-3.5 w-3.5 shrink-0 text-util-colors-teal-teal-700' />}
<div
className={cn(
'ml-0.5 shrink-0 truncate text-xs font-medium text-text-accent',
'ml-0.5 shrink-[2] truncate text-xs font-medium text-text-accent',
!notHasValue && 'max-w-[70px]',
isException && 'text-text-warning',
)}
@ -98,7 +98,7 @@ const ConditionValue = ({
</div>
{
!notHasValue && (
<div className='truncate text-xs text-text-secondary' title={formatValue}>{isSelect ? selectName : formatValue}</div>
<div className='shrink-[3] truncate text-xs text-text-secondary' title={formatValue}>{isSelect ? selectName : formatValue}</div>
)
}
</div>

View File
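The `shrink-[2]`/`shrink-[3]` pair is Tailwind's arbitrary-value syntax for `flex-shrink`: when the condition row overflows, the formatted value gives up space faster than the variable name, so both truncate instead of one collapsing entirely. The same idea in isolation:

```tsx
// On overflow the right span shrinks roughly 3 units for every 2 the left
// span shrinks (weighted by base size), and both truncate gracefully.
const Row = () => (
  <div className='flex w-48 items-center'>
    <span className='shrink-[2] truncate'>variable_name</span>
    <span className='shrink-[3] truncate'>a long formatted condition value</span>
  </div>
)
```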

@@ -1,33 +1,30 @@
-import { type FC, useEffect, useRef, useState } from 'react'
+import { type FC, useEffect, useState } from 'react'
import React from 'react'
import type { KnowledgeRetrievalNodeType } from './types'
import { Folder } from '@/app/components/base/icons/src/vender/solid/files'
import type { NodeProps } from '@/app/components/workflow/types'
-import { fetchDatasets } from '@/service/datasets'
import type { DataSet } from '@/models/datasets'
+import { useDatasetsDetailStore } from '../../datasets-detail-store/store'
const Node: FC<NodeProps<KnowledgeRetrievalNodeType>> = ({
data,
}) => {
const [selectedDatasets, setSelectedDatasets] = useState<DataSet[]>([])
-  const updateTime = useRef(0)
-  useEffect(() => {
-    (async () => {
-      updateTime.current = updateTime.current + 1
-      const currUpdateTime = updateTime.current
-      if (data.dataset_ids?.length > 0) {
-        const { data: dataSetsWithDetail } = await fetchDatasets({ url: '/datasets', params: { page: 1, ids: data.dataset_ids } })
-        // avoid old data overwrite new data
-        if (currUpdateTime < updateTime.current)
-          return
-        setSelectedDatasets(dataSetsWithDetail)
-      }
-      else {
-        setSelectedDatasets([])
-      }
-    })()
-  }, [data.dataset_ids])
+  const datasetsDetail = useDatasetsDetailStore(s => s.datasetsDetail)
+  useEffect(() => {
+    if (data.dataset_ids?.length > 0) {
+      const dataSetsWithDetail = data.dataset_ids.reduce<DataSet[]>((acc, id) => {
+        if (datasetsDetail[id])
+          acc.push(datasetsDetail[id])
+        return acc
+      }, [])
+      setSelectedDatasets(dataSetsWithDetail)
+    }
+    else {
+      setSelectedDatasets([])
+    }
+  }, [data.dataset_ids, datasetsDetail])
if (!selectedDatasets.length)
return null

View File
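The removed `updateTime` ref existed only to keep a slow, stale `fetchDatasets` response from overwriting a newer one; reading synchronously from the shared store eliminates both the fetch and the race. The same lookup could be factored into a reusable hook. A hedged sketch, with only the store import taken from this diff:

```ts
import { useMemo } from 'react'
import type { DataSet } from '@/models/datasets'
import { useDatasetsDetailStore } from '../../datasets-detail-store/store'

// Resolve a node's dataset ids against whatever the store currently holds,
// skipping ids whose details have not been loaded yet.
export const useSelectedDatasets = (datasetIds: string[] = []): DataSet[] => {
  const datasetsDetail = useDatasetsDetailStore(s => s.datasetsDetail)
  return useMemo(
    () => datasetIds.flatMap(id => (datasetsDetail[id] ? [datasetsDetail[id]] : [])),
    [datasetIds, datasetsDetail],
  )
}
```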

@@ -10,6 +10,7 @@ import type {
DataSet,
MetadataInDoc,
RerankingModeEnum,
+  WeightedScoreEnum,
} from '@/models/datasets'
export type MultipleRetrievalConfig = {
@@ -21,6 +22,7 @@
}
reranking_mode?: RerankingModeEnum
weights?: {
+    weight_type: WeightedScoreEnum
vector_setting: {
vector_weight: number
embedding_provider_name: string

View File
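With `weight_type` in the type, a weighted-score configuration records which preset produced it. An illustrative value; the `Customized` member name and the `keyword_setting` counterpart to the visible `vector_setting` are assumptions beyond what this hunk shows:

```ts
import { WeightedScoreEnum } from '@/models/datasets'
import type { MultipleRetrievalConfig } from './types'

const weights: MultipleRetrievalConfig['weights'] = {
  weight_type: WeightedScoreEnum.Customized, // assumed member name
  vector_setting: {
    vector_weight: 0.7,
    embedding_provider_name: 'openai',       // illustrative values
    embedding_model_name: 'text-embedding-3-large',
  },
  keyword_setting: { keyword_weight: 0.3 },  // assumed field, mirroring vector_setting
}
```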

@@ -41,6 +41,7 @@ import useOneStepRun from '@/app/components/workflow/nodes/_base/hooks/use-one-s
import { useCurrentProviderAndModel, useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import useAvailableVarList from '@/app/components/workflow/nodes/_base/hooks/use-available-var-list'
+import { useDatasetsDetailStore } from '../../datasets-detail-store/store'
const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
const { nodesReadOnly: readOnly } = useNodesReadOnly()
@@ -49,6 +50,7 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
const startNode = getBeforeNodesInSameBranch(id).find(node => node.data.type === BlockEnum.Start)
const startNodeId = startNode?.id
const { inputs, setInputs: doSetInputs } = useNodeCrud<KnowledgeRetrievalNodeType>(id, payload)
+  const updateDatasetsDetail = useDatasetsDetailStore(s => s.updateDatasetsDetail)
const inputRef = useRef(inputs)
@@ -218,15 +220,12 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
(async () => {
const inputs = inputRef.current
const datasetIds = inputs.dataset_ids
-      let _datasets = selectedDatasets
if (datasetIds?.length > 0) {
const { data: dataSetsWithDetail } = await fetchDatasets({ url: '/datasets', params: { page: 1, ids: datasetIds } as any })
-        _datasets = dataSetsWithDetail
setSelectedDatasets(dataSetsWithDetail)
}
const newInputs = produce(inputs, (draft) => {
draft.dataset_ids = datasetIds
-        draft._datasets = _datasets
})
setInputs(newInputs)
setSelectedDatasetsLoaded(true)
@@ -256,7 +255,6 @@
} = getSelectedDatasetsMode(newDatasets)
const newInputs = produce(inputs, (draft) => {
draft.dataset_ids = newDatasets.map(d => d.id)
-      draft._datasets = newDatasets
if (payload.retrieval_mode === RETRIEVE_TYPE.multiWay && newDatasets.length > 0) {
const multipleRetrievalConfig = draft.multiple_retrieval_config
@@ -266,6 +264,7 @@
})
}
})
+    updateDatasetsDetail(newDatasets)
setInputs(newInputs)
setSelectedDatasets(newDatasets)
@@ -275,7 +274,7 @@
|| allExternal
)
setRerankModelOpen(true)
-  }, [inputs, setInputs, payload.retrieval_mode, selectedDatasets, currentRerankModel, currentRerankProvider])
+  }, [inputs, setInputs, payload.retrieval_mode, selectedDatasets, currentRerankModel, currentRerankProvider, updateDatasetsDetail])
const filterVar = useCallback((varPayload: Var) => {
return varPayload.type === VarType.string

View File
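Net effect of this hunk: node inputs persist only `dataset_ids` (the `_datasets` field is gone), while full `DataSet` objects go to the shared store, keeping serialized node data small and giving all consumers one source of truth. A hypothetical condensation of that flow:

```ts
import type { DataSet } from '@/models/datasets'

// Node data keeps ids only; the store keeps the details.
const syncDatasetSelection = (
  newDatasets: DataSet[],
  setDatasetIds: (ids: string[]) => void,
  updateDatasetsDetail: (datasets: DataSet[]) => void,
) => {
  setDatasetIds(newDatasets.map(d => d.id))
  updateDatasetsDetail(newDatasets)
}
```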

@@ -83,7 +83,7 @@ export const TRANSFER_METHOD = [
{ value: TransferMethod.remote_url, i18nKey: 'url' },
]
-export const SUB_VARIABLES = ['type', 'size', 'name', 'url', 'extension', 'mime_type', 'transfer_method']
+export const SUB_VARIABLES = ['type', 'size', 'name', 'url', 'extension', 'mime_type', 'transfer_method', 'related_id']
export const OUTPUT_FILE_SUB_VARIABLES = SUB_VARIABLES.filter(key => key !== 'transfer_method')
export default nodeDefault

View File

@@ -22,16 +22,33 @@ const nodeDefault: NodeDefault<VariableAssignerNodeType> = {
},
checkValid(payload: VariableAssignerNodeType, t: any) {
let errorMessages = ''
-    const { variables } = payload
-    if (!variables || variables.length === 0)
-      errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.nodes.variableAssigner.title`) })
-    if (!errorMessages) {
+    const { variables, advanced_settings } = payload
+    const { group_enabled = false, groups = [] } = advanced_settings || {}
+    // enable group
+    const validateVariables = (variables: any[], field: string) => {
variables.forEach((variable) => {
if (!variable || variable.length === 0)
-          errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.errorMsg.fields.variableValue`) })
+          errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(field) })
})
}
+    if (group_enabled) {
+      if (!groups || groups.length === 0) {
+        errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.nodes.variableAssigner.title`) })
+      }
+      else if (!errorMessages) {
+        groups.forEach((group) => {
+          validateVariables(group.variables || [], `${i18nPrefix}.errorMsg.fields.variableValue`)
+        })
+      }
+    }
+    else {
+      if (!variables || variables.length === 0)
+        errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.nodes.variableAssigner.title`) })
+      else if (!errorMessages)
+        validateVariables(variables, `${i18nPrefix}.errorMsg.fields.variableValue`)
+    }
return {
isValid: !errorMessages,
errorMessage: errorMessages,

View File
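Two payloads against the refactored `checkValid`, one per branch. Each group's `variables` member follows the destructuring in the hunk; the selector contents are illustrative:

```ts
// Grouped mode: each group's variables are validated individually; the empty
// selector in the second group should produce a fieldRequired error.
const groupedPayload = {
  variables: [],
  advanced_settings: {
    group_enabled: true,
    groups: [
      { variables: [['start', 'query']] }, // ok: non-empty value selector
      { variables: [[]] },                 // triggers the error
    ],
  },
}

// Ungrouped mode: falls back to the original top-level variables check.
const ungroupedPayload = {
  variables: [['start', 'query']],
  advanced_settings: { group_enabled: false, groups: [] },
}
```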

@@ -339,6 +339,7 @@ export type RunFile = {
transfer_method: TransferMethod[]
url?: string
upload_file_id?: string
+  related_id?: string
}
export type WorkflowRunningData = {

View File

@@ -1,6 +1,7 @@
import type { AgentStrategy, ModelModeType, RETRIEVE_TYPE, ToolItem, TtsAutoPlay } from '@/types/app'
import type {
RerankingModeEnum,
+  WeightedScoreEnum,
} from '@/models/datasets'
import type { FileUpload } from '@/app/components/base/features/types'
import type {
@@ -165,6 +166,7 @@ export type DatasetConfigs = {
}
reranking_mode?: RerankingModeEnum
weights?: {
+    weight_type: WeightedScoreEnum
vector_setting: {
vector_weight: number
embedding_provider_name: string

View File

@@ -1,6 +1,6 @@
{
"name": "dify-web",
"version": "1.1.2",
"version": "1.1.3",
"private": true,
"engines": {
"node": ">=18.18.0"
@@ -9,10 +9,10 @@
"dev": "cross-env NODE_OPTIONS='--inspect' next dev",
"build": "next build",
"start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",
"lint": "pnpm eslint",
"lint": "pnpm eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"fix": "next lint --fix",
"eslint-fix": "eslint --fix",
"eslint-fix-only-show-error": "eslint --fix --quiet",
"eslint-fix": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix",
"eslint-fix-only-show-error": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix --quiet",
"prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky",
"gen-icons": "node ./app/components/base/icons/script.mjs",
"uglify-embed": "node ./bin/uglify-embed",
@@ -61,6 +61,7 @@
"crypto-js": "^4.2.0",
"dayjs": "^1.11.13",
"decimal.js": "^10.4.3",
"dompurify": "^3.2.4",
"echarts": "^5.5.1",
"echarts-for-react": "^3.0.2",
"elkjs": "^0.9.3",
@@ -132,10 +133,10 @@
},
"devDependencies": {
"@antfu/eslint-config": "^4.1.1",
"@eslint/js": "^9.20.0",
"@chromatic-com/storybook": "^3.1.0",
"@eslint-react/eslint-plugin": "^1.15.0",
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.20.0",
"@faker-js/faker": "^9.0.3",
"@next/eslint-plugin-next": "^15.2.3",
"@rgrove/parse-xml": "^4.1.0",
@@ -174,11 +175,11 @@
"code-inspector-plugin": "^0.18.1",
"cross-env": "^7.0.3",
"eslint": "^9.20.1",
"eslint-config-next": "^15.0.0",
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
"eslint-plugin-storybook": "^0.11.2",
"eslint-plugin-tailwindcss": "^3.18.0",
"eslint-config-next": "^15.0.0",
"husky": "^9.1.6",
"jest": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",

web/pnpm-lock.yaml (generated)
View File

@@ -121,6 +121,9 @@ importers:
decimal.js:
specifier: ^10.4.3
version: 10.4.3
+      dompurify:
+        specifier: ^3.2.4
+        version: 3.2.4
echarts:
specifier: ^5.5.1
version: 5.5.1
@@ -4299,8 +4302,8 @@
resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==}
engines: {node: '>= 4'}
-  dompurify@3.2.3:
-    resolution: {integrity: sha512-U1U5Hzc2MO0oW3DF+G9qYN0aT7atAou4AgI0XjWz061nyBPbdxkfdhfy5uMgGn6+oLFCfn44ZGbdDqCzVmlOWA==}
+  dompurify@3.2.4:
+    resolution: {integrity: sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==}
domutils@2.8.0:
resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==}
@@ -13070,7 +13073,7 @@
dependencies:
domelementtype: 2.3.0
-  dompurify@3.2.3:
+  dompurify@3.2.4:
optionalDependencies:
'@types/trusted-types': 2.0.7
@@ -15688,7 +15691,7 @@
d3-sankey: 0.12.3
dagre-d3-es: 7.0.11
dayjs: 1.11.13
-      dompurify: 3.2.3
+      dompurify: 3.2.4
katex: 0.16.21
khroma: 2.1.0
lodash-es: 4.17.21