Mirror of https://github.com/langgenius/dify.git (synced 2026-04-25 05:06:15 +08:00)

Merge branch 'main' into feat/mcp

28  .github/workflows/deploy-rag-dev.yml  (vendored, new file)
@@ -0,0 +1,28 @@
name: Deploy RAG Dev

permissions:
  contents: read

on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
      - "deploy/rag-dev"
    types:
      - completed

jobs:
  deploy:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'deploy/rag-dev'
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v0.1.8
        with:
          host: ${{ secrets.RAG_SSH_HOST }}
          username: ${{ secrets.SSH_USER }}
          key: ${{ secrets.SSH_PRIVATE_KEY }}
          script: |
            ${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }}
1  .github/workflows/expose_service_ports.sh  (vendored)
@@ -10,6 +10,7 @@ yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-com
yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
yq eval '.services.oceanbase.ports += ["2881:2881"]' -i docker/docker-compose.yaml
yq eval '.services.opengauss.ports += ["6600:6600"]' -i docker/docker-compose.yaml

echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss"
12  .github/workflows/vdb-tests.yml  (vendored)
@@ -31,6 +31,13 @@ jobs:
        with:
          persist-credentials: false

      - name: Free Disk Space
        uses: endersonmenezes/free-disk-space@v2
        with:
          remove_dotnet: true
          remove_haskell: true
          remove_tool_cache: true

      - name: Setup UV and Python
        uses: ./.github/actions/setup-uv
        with:
@@ -59,7 +66,7 @@ jobs:
            tidb
            tiflash

      - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
      - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
        uses: hoverkraft-tech/compose-action@v2.0.2
        with:
          compose-file: |
@@ -75,8 +82,9 @@ jobs:
            pgvector
            chroma
            elasticsearch
            oceanbase

      - name: Check TiDB Ready
      - name: Check VDB Ready (TiDB)
        run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py

      - name: Test Vector Stores
4  .gitignore  (vendored)
@@ -179,6 +179,7 @@ docker/volumes/pgvecto_rs/data/*
docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
!docker/volumes/oceanbase/init.d

docker/nginx/conf.d/default.conf
@@ -210,3 +211,6 @@ mise.toml

# Next.js build output
.next/

# AI Assistant
.roo/
@@ -137,7 +137,7 @@ WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*

# Vector database configuration
# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore
# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore, matrixone
VECTOR_STORE=weaviate

# Weaviate configuration
@@ -294,6 +294,13 @@ VIKINGDB_SCHEMA=http
VIKINGDB_CONNECTION_TIMEOUT=30
VIKINGDB_SOCKET_TIMEOUT=30

# Matrixone configuration
MATRIXONE_HOST=127.0.0.1
MATRIXONE_PORT=6001
MATRIXONE_USER=dump
MATRIXONE_PASSWORD=111
MATRIXONE_DATABASE=dify

# Lindorm configuration
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
@@ -332,9 +339,11 @@ PROMPT_GENERATION_MAX_TOKENS=512
CODE_GENERATION_MAX_TOKENS=1024
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false

# Mail configuration, support: resend, smtp
# Mail configuration, support: resend, smtp, sendgrid
MAIL_TYPE=
# If using SendGrid, use the 'from' field for authentication if necessary.
MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
# resend configuration
RESEND_API_KEY=
RESEND_API_URL=https://api.resend.com
# smtp configuration
@@ -344,7 +353,8 @@ SMTP_USERNAME=123
SMTP_PASSWORD=abc
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false

# SendGrid configuration
SENDGRID_API_KEY=
# Sentry configuration
SENTRY_DSN=

@@ -27,7 +27,7 @@ from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, D
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.provider import Provider, ProviderModel
from services.account_service import RegisterService, TenantService
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
@@ -68,6 +68,7 @@ def reset_password(email, new_password, password_confirm):
    account.password = base64_password_hashed
    account.password_salt = base64_salt
    db.session.commit()
    AccountService.reset_login_error_rate_limit(email)
    click.echo(click.style("Password reset successfully.", fg="green"))

@@ -280,6 +281,7 @@ def migrate_knowledge_vector_database():
        VectorType.ELASTICSEARCH,
        VectorType.OPENGAUSS,
        VectorType.TABLESTORE,
        VectorType.MATRIXONE,
    }
    lower_collection_vector_types = {
        VectorType.ANALYTICDB,
@@ -609,7 +609,7 @@ class MailConfig(BaseSettings):
    """

    MAIL_TYPE: Optional[str] = Field(
        description="Email service provider type ('smtp' or 'resend'), default to None.",
        description="Email service provider type ('smtp' or 'resend' or 'sendgrid'), default to None.",
        default=None,
    )

@@ -663,6 +663,11 @@ class MailConfig(BaseSettings):
        default=50,
    )

    SENDGRID_API_KEY: Optional[str] = Field(
        description="API key for SendGrid service",
        default=None,
    )


class RagEtlConfig(BaseSettings):
    """
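For orientation, a minimal sketch of how a SendGrid-backed sender could consume the new SENDGRID_API_KEY setting. It uses the official sendgrid Python package; the wiring shown here is an assumption for illustration, since the actual mail extension changes are not part of this diff.

import sendgrid
from sendgrid.helpers.mail import Mail

sg = sendgrid.SendGridAPIClient(api_key="SG.xxxx")  # value would come from SENDGRID_API_KEY
message = Mail(
    from_email="no-reply@dify.ai",  # corresponds to MAIL_DEFAULT_SEND_FROM
    to_emails="user@example.com",
    subject="Hello",
    html_content="<p>Hello from Dify</p>",
)
response = sg.send(message)  # SendGrid returns 202 Accepted on success
print(response.status_code)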
@@ -24,6 +24,7 @@ from .vdb.couchbase_config import CouchbaseConfig
from .vdb.elasticsearch_config import ElasticsearchConfig
from .vdb.huawei_cloud_config import HuaweiCloudConfig
from .vdb.lindorm_config import LindormConfig
from .vdb.matrixone_config import MatrixoneConfig
from .vdb.milvus_config import MilvusConfig
from .vdb.myscale_config import MyScaleConfig
from .vdb.oceanbase_config import OceanBaseVectorConfig
@@ -323,5 +324,6 @@ class MiddlewareConfig(
    OpenGaussConfig,
    TableStoreConfig,
    DatasetQueueMonitorConfig,
    MatrixoneConfig,
):
    pass
14  api/configs/middleware/vdb/matrixone_config.py  (new file)
@@ -0,0 +1,14 @@
from pydantic import BaseModel, Field


class MatrixoneConfig(BaseModel):
    """Matrixone vector database configuration."""

    MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")
    MATRIXONE_PORT: int = Field(default=6001, description="Port number of the Matrixone server")
    MATRIXONE_USER: str = Field(default="dump", description="Username for authenticating with Matrixone")
    MATRIXONE_PASSWORD: str = Field(default="111", description="Password for authenticating with Matrixone")
    MATRIXONE_DATABASE: str = Field(default="dify", description="Name of the Matrixone database to connect to")
    MATRIXONE_METRIC: str = Field(
        default="l2", description="Distance metric type for vector similarity search (cosine or l2)"
    )
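Since every field above carries a default, the class resolves a usable configuration even with no input. A small illustrative check (this instantiation is not from the commit; in Dify the values arrive through MiddlewareConfig/dify_config):

cfg = MatrixoneConfig()  # all defaults: localhost:6001, user "dump", database "dify", metric "l2"
assert cfg.MATRIXONE_PORT == 6001
custom = MatrixoneConfig(MATRIXONE_HOST="10.0.0.5", MATRIXONE_METRIC="cosine")  # explicit overrides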
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):

    CURRENT_VERSION: str = Field(
        description="Dify version",
        default="1.4.2",
        default="1.4.3",
    )

    COMMIT_SHA: str = Field(
@@ -208,7 +208,7 @@ class AnnotationBatchImportApi(Resource):
        if len(request.files) > 1:
            raise TooManyFilesError()
        # check file type
        if not file.filename or not file.filename.endswith(".csv"):
        if not file.filename or not file.filename.lower().endswith(".csv"):
            raise ValueError("Invalid file type. Only CSV files are allowed")
        return AppAnnotationService.batch_import_app_annotations(app_id, file)
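A quick worked example of why the added .lower() matters in this CSV check (and the identical one further down):

"DATA.CSV".endswith(".csv")          # False -- the old check rejected upper-case filenames
"DATA.CSV".lower().endswith(".csv")  # True  -- accepted after this change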
@@ -34,6 +34,20 @@ class WorkflowAppLogApi(Resource):
        parser.add_argument(
            "created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
        )
        parser.add_argument(
            "created_by_end_user_session_id",
            type=str,
            location="args",
            required=False,
            default=None,
        )
        parser.add_argument(
            "created_by_account",
            type=str,
            location="args",
            required=False,
            default=None,
        )
        parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
        parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
        args = parser.parse_args()
@@ -57,6 +71,8 @@ class WorkflowAppLogApi(Resource):
            created_at_after=args.created_at__after,
            page=args.page,
            limit=args.limit,
            created_by_end_user_session_id=args.created_by_end_user_session_id,
            created_by_account=args.created_by_account,
        )

        return workflow_app_log_pagination
@@ -686,6 +686,7 @@ class DatasetRetrievalSettingApi(Resource):
                | VectorType.TABLESTORE
                | VectorType.HUAWEI_CLOUD
                | VectorType.TENCENT
                | VectorType.MATRIXONE
            ):
                return {
                    "retrieval_method": [
@@ -733,6 +734,7 @@ class DatasetRetrievalSettingMockApi(Resource):
                | VectorType.TABLESTORE
                | VectorType.TENCENT
                | VectorType.HUAWEI_CLOUD
                | VectorType.MATRIXONE
            ):
                return {
                    "retrieval_method": [
@@ -374,7 +374,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
        if len(request.files) > 1:
            raise TooManyFilesError()
        # check file type
        if not file.filename or not file.filename.endswith(".csv"):
        if not file.filename or not file.filename.lower().endswith(".csv"):
            raise ValueError("Invalid file type. Only CSV files are allowed")

        try:
@@ -15,7 +15,7 @@ class LoadBalancingCredentialsValidateApi(Resource):
    @login_required
    @account_initialization_required
    def post(self, provider: str):
        if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role):
        if not TenantAccountRole.is_privileged_role(current_user.current_role):
            raise Forbidden()

        tenant_id = current_user.current_tenant_id
@@ -64,7 +64,7 @@ class LoadBalancingConfigCredentialsValidateApi(Resource):
    @login_required
    @account_initialization_required
    def post(self, provider: str, config_id: str):
        if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role):
        if not TenantAccountRole.is_privileged_role(current_user.current_role):
            raise Forbidden()

        tenant_id = current_user.current_tenant_id
@@ -47,7 +47,13 @@ class AppInfoApi(Resource):
    def get(self, app_model: App):
        """Get app information"""
        tags = [tag.name for tag in app_model.tags]
        return {"name": app_model.name, "description": app_model.description, "tags": tags, "mode": app_model.mode}
        return {
            "name": app_model.name,
            "description": app_model.description,
            "tags": tags,
            "mode": app_model.mode,
            "author_name": app_model.author_name,
        }


api.add_resource(AppParameterApi, "/parameters")
@@ -135,6 +135,20 @@ class WorkflowAppLogApi(Resource):
        parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
        parser.add_argument("created_at__before", type=str, location="args")
        parser.add_argument("created_at__after", type=str, location="args")
        parser.add_argument(
            "created_by_end_user_session_id",
            type=str,
            location="args",
            required=False,
            default=None,
        )
        parser.add_argument(
            "created_by_account",
            type=str,
            location="args",
            required=False,
            default=None,
        )
        parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
        parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
        args = parser.parse_args()
@@ -158,6 +172,8 @@ class WorkflowAppLogApi(Resource):
            created_at_after=args.created_at__after,
            page=args.page,
            limit=args.limit,
            created_by_end_user_session_id=args.created_by_end_user_session_id,
            created_by_account=args.created_by_account,
        )

        return workflow_app_log_pagination
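A hypothetical client-side call exercising the two new filters on the service-API log endpoint; the host and path below are assumptions for illustration, not part of this diff.

import requests

resp = requests.get(
    "https://api.dify.example/v1/workflows/logs",  # assumed base URL and path
    headers={"Authorization": "Bearer app-xxxxxxxx"},
    params={
        "created_by_end_user_session_id": "session-123",  # new filter in this commit
        "created_by_account": "user@example.com",         # new filter in this commit
        "page": 1,
        "limit": 20,
    },
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["data"])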
@@ -5,7 +5,11 @@ from werkzeug.exceptions import Forbidden, NotFound
import services.dataset_service
from controllers.service_api import api
from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError
from controllers.service_api.wraps import DatasetApiResource, validate_dataset_token
from controllers.service_api.wraps import (
    DatasetApiResource,
    cloud_edition_billing_rate_limit_check,
    validate_dataset_token,
)
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
@@ -70,6 +74,7 @@ class DatasetListApi(DatasetApiResource):
        response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
        return response, 200

    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id):
        """Resource for creating datasets."""
        parser = reqparse.RequestParser()
@@ -193,6 +198,7 @@ class DatasetApi(DatasetApiResource):

        return data, 200

    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def patch(self, _, dataset_id):
        dataset_id_str = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id_str)
@@ -293,6 +299,7 @@ class DatasetApi(DatasetApiResource):

        return result_data, 200

    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def delete(self, _, dataset_id):
        """
        Deletes a dataset given its ID.
@@ -19,7 +19,11 @@ from controllers.service_api.dataset.error import (
    ArchivedDocumentImmutableError,
    DocumentIndexingError,
)
from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_resource_check
from controllers.service_api.wraps import (
    DatasetApiResource,
    cloud_edition_billing_rate_limit_check,
    cloud_edition_billing_resource_check,
)
from core.errors.error import ProviderTokenNotInitError
from extensions.ext_database import db
from fields.document_fields import document_fields, document_status_fields
@@ -35,6 +39,7 @@ class DocumentAddByTextApi(DatasetApiResource):

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_resource_check("documents", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id):
        """Create document by text."""
        parser = reqparse.RequestParser()
@@ -99,6 +104,7 @@ class DocumentUpdateByTextApi(DatasetApiResource):
    """Resource for update documents."""

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id, document_id):
        """Update document by text."""
        parser = reqparse.RequestParser()
@@ -158,6 +164,7 @@ class DocumentAddByFileApi(DatasetApiResource):

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_resource_check("documents", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id):
        """Create document by upload file."""
        args = {}
@@ -232,6 +239,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
    """Resource for update documents."""

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id, document_id):
        """Update document by upload file."""
        args = {}
@@ -302,6 +310,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):


class DocumentDeleteApi(DatasetApiResource):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def delete(self, tenant_id, dataset_id, document_id):
        """Delete document."""
        document_id = str(document_id)
@@ -1,9 +1,10 @@
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.service_api import api
from controllers.service_api.wraps import DatasetApiResource
from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check


class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id):
        dataset_id_str = str(dataset_id)
@@ -3,7 +3,7 @@ from flask_restful import marshal, reqparse
from werkzeug.exceptions import NotFound

from controllers.service_api import api
from controllers.service_api.wraps import DatasetApiResource
from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check
from fields.dataset_fields import dataset_metadata_fields
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.knowledge_entities import (
@@ -14,6 +14,7 @@ from services.metadata_service import MetadataService


class DatasetMetadataCreateServiceApi(DatasetApiResource):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id):
        parser = reqparse.RequestParser()
        parser.add_argument("type", type=str, required=True, nullable=True, location="json")
@@ -39,6 +40,7 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource):


class DatasetMetadataServiceApi(DatasetApiResource):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def patch(self, tenant_id, dataset_id, metadata_id):
        parser = reqparse.RequestParser()
        parser.add_argument("name", type=str, required=True, nullable=True, location="json")
@@ -54,6 +56,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
        metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name"))
        return marshal(metadata, dataset_metadata_fields), 200

    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def delete(self, tenant_id, dataset_id, metadata_id):
        dataset_id_str = str(dataset_id)
        metadata_id_str = str(metadata_id)
@@ -73,6 +76,7 @@ class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource):


class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id, action):
        dataset_id_str = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id_str)
@@ -88,6 +92,7 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):


class DocumentMetadataEditServiceApi(DatasetApiResource):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id):
        dataset_id_str = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id_str)
@@ -8,6 +8,7 @@ from controllers.service_api.app.error import ProviderNotInitializeError
from controllers.service_api.wraps import (
    DatasetApiResource,
    cloud_edition_billing_knowledge_limit_check,
    cloud_edition_billing_rate_limit_check,
    cloud_edition_billing_resource_check,
)
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
@@ -35,6 +36,7 @@ class SegmentApi(DatasetApiResource):

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id, document_id):
        """Create single segment."""
        # check dataset
@@ -139,6 +141,7 @@ class SegmentApi(DatasetApiResource):


class DatasetSegmentApi(DatasetApiResource):
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def delete(self, tenant_id, dataset_id, document_id, segment_id):
        # check dataset
        dataset_id = str(dataset_id)
@@ -162,6 +165,7 @@ class DatasetSegmentApi(DatasetApiResource):
        return 204

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id, document_id, segment_id):
        # check dataset
        dataset_id = str(dataset_id)
@@ -236,6 +240,7 @@ class ChildChunkApi(DatasetApiResource):

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id, document_id, segment_id):
        """Create child chunk."""
        # check dataset
@@ -332,6 +337,7 @@ class DatasetChildChunkApi(DatasetApiResource):
    """Resource for updating child chunks."""

    @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def delete(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id):
        """Delete child chunk."""
        # check dataset
@@ -370,6 +376,7 @@ class DatasetChildChunkApi(DatasetApiResource):

    @cloud_edition_billing_resource_check("vector_space", "dataset")
    @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def patch(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id):
        """Update child chunk."""
        # check dataset
@@ -163,7 +163,7 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded:
    )
    db.session.add(end_user)
    db.session.commit()
    exp_dt = datetime.now(UTC) + timedelta(hours=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES * 24)
    exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
    exp = int(exp_dt.timestamp())
    payload = {
        "iss": site.id,
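A worked check of the magnitude of this fix; the setting value of 60 is an assumption for illustration.

from datetime import UTC, datetime, timedelta

ACCESS_TOKEN_EXPIRE_MINUTES = 60  # assumed value, not from the commit
now = datetime.now(UTC)
old_exp = now + timedelta(hours=ACCESS_TOKEN_EXPIRE_MINUTES * 24)  # 1440 hours = 60 days
new_exp = now + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)     # 60 minutes
print((old_exp - now).days)           # 60 -- tokens lived ~60 days before the fix
print((new_exp - now).seconds // 60)  # 60 -- minutes, as the setting name intends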
@@ -138,15 +138,12 @@ class DatasetConfigManager:
        if not config.get("dataset_configs"):
            config["dataset_configs"] = {"retrieval_model": "single"}

        if not isinstance(config["dataset_configs"], dict):
            raise ValueError("dataset_configs must be of object type")

        if not config["dataset_configs"].get("datasets"):
            config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []}

        if not isinstance(config["dataset_configs"], dict):
            raise ValueError("dataset_configs must be of object type")

        if not isinstance(config["dataset_configs"], dict):
            raise ValueError("dataset_configs must be of object type")

        need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get(
            "datasets", {}
        ).get("datasets")
@@ -5,7 +5,7 @@ import uuid
from collections.abc import Generator, Mapping
from typing import Any, Literal, Optional, Union, overload

from flask import Flask, copy_current_request_context, current_app, has_request_context
from flask import Flask, current_app
from pydantic import ValidationError
from sqlalchemy.orm import sessionmaker

@@ -31,6 +31,7 @@ from core.workflow.repositories.workflow_execution_repository import WorkflowExe
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from extensions.ext_database import db
from factories import file_factory
from libs.flask_utils import preserve_flask_contexts
from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
from models.enums import WorkflowRunTriggeredFrom
from services.conversation_service import ConversationService
@@ -366,6 +367,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        :param user: account or end user
        :param invoke_from: invoke from source
        :param application_generate_entity: application generate entity
        :param workflow_execution_repository: repository for workflow execution
        :param workflow_node_execution_repository: repository for workflow node execution
        :param conversation: conversation
        :param stream: is stream
@@ -399,20 +401,17 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        # new thread with request context and contextvars
        context = contextvars.copy_context()

        @copy_current_request_context
        def worker_with_context():
            # Run the worker within the copied context
            return context.run(
                self._generate_worker,
                flask_app=current_app._get_current_object(),  # type: ignore
                application_generate_entity=application_generate_entity,
                queue_manager=queue_manager,
                conversation_id=conversation.id,
                message_id=message.id,
                context=context,
            )

        worker_thread = threading.Thread(target=worker_with_context)
        worker_thread = threading.Thread(
            target=self._generate_worker,
            kwargs={
                "flask_app": current_app._get_current_object(),  # type: ignore
                "application_generate_entity": application_generate_entity,
                "queue_manager": queue_manager,
                "conversation_id": conversation.id,
                "message_id": message.id,
                "context": context,
            },
        )

        worker_thread.start()

@@ -449,24 +448,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        :param message_id: message ID
        :return:
        """
        for var, val in context.items():
            var.set(val)

        # FIXME(-LAN-): Save current user before entering new app context
        from flask import g

        saved_user = None
        if has_request_context() and hasattr(g, "_login_user"):
            saved_user = g._login_user

        with flask_app.app_context():
        with preserve_flask_contexts(flask_app, context_vars=context):
            try:
                # Restore user in new app context
                if saved_user is not None:
                    from flask import g

                    g._login_user = saved_user

                # get conversation and message
                conversation = self._get_conversation(conversation_id)
                message = self._get_message(message_id)
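The helper these hunks adopt is not shown in this diff; below is a hedged sketch of what libs.flask_utils.preserve_flask_contexts plausibly does, inferred from the code the hunks delete: replay copied contextvars, push an app context, and carry the logged-in user (flask.g._login_user) across the thread boundary. Names inside the body are assumptions.

from contextlib import contextmanager
from contextvars import Context
from typing import Optional

from flask import Flask, g, has_request_context


@contextmanager
def preserve_flask_contexts(flask_app: Flask, context_vars: Optional[Context] = None):
    # Replay contextvars captured in the request thread (sketch).
    if context_vars:
        for var, val in context_vars.items():
            var.set(val)

    # Capture the logged-in user if we are still inside a request context.
    saved_user = None
    if has_request_context() and hasattr(g, "_login_user"):
        saved_user = g._login_user

    with flask_app.app_context():
        # Restore the user inside the fresh app context.
        if saved_user is not None:
            g._login_user = saved_user
        yield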
@@ -5,7 +5,7 @@ import uuid
from collections.abc import Generator, Mapping
from typing import Any, Literal, Union, overload

from flask import Flask, copy_current_request_context, current_app, has_request_context
from flask import Flask, current_app
from pydantic import ValidationError

from configs import dify_config
@@ -23,6 +23,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from extensions.ext_database import db
from factories import file_factory
from libs.flask_utils import preserve_flask_contexts
from models import Account, App, EndUser
from services.conversation_service import ConversationService
from services.errors.message import MessageNotExistsError
@@ -182,20 +183,17 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
        # new thread with request context and contextvars
        context = contextvars.copy_context()

        @copy_current_request_context
        def worker_with_context():
            # Run the worker within the copied context
            return context.run(
                self._generate_worker,
                flask_app=current_app._get_current_object(),  # type: ignore
                context=context,
                application_generate_entity=application_generate_entity,
                queue_manager=queue_manager,
                conversation_id=conversation.id,
                message_id=message.id,
            )

        worker_thread = threading.Thread(target=worker_with_context)
        worker_thread = threading.Thread(
            target=self._generate_worker,
            kwargs={
                "flask_app": current_app._get_current_object(),  # type: ignore
                "context": context,
                "application_generate_entity": application_generate_entity,
                "queue_manager": queue_manager,
                "conversation_id": conversation.id,
                "message_id": message.id,
            },
        )

        worker_thread.start()

@@ -229,24 +227,9 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
        :param message_id: message ID
        :return:
        """
        for var, val in context.items():
            var.set(val)

        # FIXME(-LAN-): Save current user before entering new app context
        from flask import g

        saved_user = None
        if has_request_context() and hasattr(g, "_login_user"):
            saved_user = g._login_user

        with flask_app.app_context():
        with preserve_flask_contexts(flask_app, context_vars=context):
            try:
                # Restore user in new app context
                if saved_user is not None:
                    from flask import g

                    g._login_user = saved_user

                # get conversation and message
                conversation = self._get_conversation(conversation_id)
                message = self._get_message(message_id)
@@ -1,3 +1,4 @@
import logging
import time
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional, Union
@@ -33,6 +34,8 @@ from models.model import App, AppMode, Message, MessageAnnotation
if TYPE_CHECKING:
    from core.file.models import File

_logger = logging.getLogger(__name__)


class AppRunner:
    def get_pre_calculate_rest_tokens(
@@ -298,7 +301,7 @@ class AppRunner:
        )

    def _handle_invoke_result_stream(
        self, invoke_result: Generator, queue_manager: AppQueueManager, agent: bool
        self, invoke_result: Generator[LLMResultChunk, None, None], queue_manager: AppQueueManager, agent: bool
    ) -> None:
        """
        Handle invoke result
@@ -317,18 +320,28 @@ class AppRunner:
            else:
                queue_manager.publish(QueueAgentMessageEvent(chunk=result), PublishFrom.APPLICATION_MANAGER)

            text += result.delta.message.content
            message = result.delta.message
            if isinstance(message.content, str):
                text += message.content
            elif isinstance(message.content, list):
                for content in message.content:
                    if not isinstance(content, str):
                        # TODO(QuantumGhost): Add multimodal output support for easy ui.
                        _logger.warning("received multimodal output, type=%s", type(content))
                        text += content.data
                    else:
                        text += content  # fallback to str

            if not model:
                model = result.model

            if not prompt_messages:
                prompt_messages = result.prompt_messages
                prompt_messages = list(result.prompt_messages)

            if result.delta.usage:
                usage = result.delta.usage

        if not usage:
        if usage is None:
            usage = LLMUsage.empty_usage()

        llm_result = LLMResult(
@@ -5,7 +5,7 @@ import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Literal, Optional, Union, overload

from flask import Flask, copy_current_request_context, current_app, has_request_context
from flask import Flask, current_app
from pydantic import ValidationError
from sqlalchemy.orm import sessionmaker

@@ -29,6 +29,7 @@ from core.workflow.repositories.workflow_execution_repository import WorkflowExe
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from extensions.ext_database import db
from factories import file_factory
from libs.flask_utils import preserve_flask_contexts
from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom
from models.enums import WorkflowRunTriggeredFrom

@@ -194,6 +195,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        :param user: account or end user
        :param application_generate_entity: application generate entity
        :param invoke_from: invoke from source
        :param workflow_execution_repository: repository for workflow execution
        :param workflow_node_execution_repository: repository for workflow node execution
        :param streaming: is stream
        :param workflow_thread_pool_id: workflow thread pool id
@@ -209,19 +211,16 @@ class WorkflowAppGenerator(BaseAppGenerator):
        # new thread with request context and contextvars
        context = contextvars.copy_context()

        @copy_current_request_context
        def worker_with_context():
            # Run the worker within the copied context
            return context.run(
                self._generate_worker,
                flask_app=current_app._get_current_object(),  # type: ignore
                application_generate_entity=application_generate_entity,
                queue_manager=queue_manager,
                context=context,
                workflow_thread_pool_id=workflow_thread_pool_id,
            )

        worker_thread = threading.Thread(target=worker_with_context)
        worker_thread = threading.Thread(
            target=self._generate_worker,
            kwargs={
                "flask_app": current_app._get_current_object(),  # type: ignore
                "application_generate_entity": application_generate_entity,
                "queue_manager": queue_manager,
                "context": context,
                "workflow_thread_pool_id": workflow_thread_pool_id,
            },
        )

        worker_thread.start()

@@ -408,24 +407,9 @@ class WorkflowAppGenerator(BaseAppGenerator):
        :param workflow_thread_pool_id: workflow thread pool id
        :return:
        """
        for var, val in context.items():
            var.set(val)

        # FIXME(-LAN-): Save current user before entering new app context
        from flask import g

        saved_user = None
        if has_request_context() and hasattr(g, "_login_user"):
            saved_user = g._login_user

        with flask_app.app_context():
        with preserve_flask_contexts(flask_app, context_vars=context):
            try:
                # Restore user in new app context
                if saved_user is not None:
                    from flask import g

                    g._login_user = saved_user

                # workflow app
                runner = WorkflowAppRunner(
                    application_generate_entity=application_generate_entity,
@@ -48,6 +48,7 @@ from core.model_manager import ModelInstance
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
    AssistantPromptMessage,
    TextPromptMessageContent,
)
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.ops.entities.trace_entity import TraceTaskName
@@ -309,6 +310,23 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
                delta_text = chunk.delta.message.content
                if delta_text is None:
                    continue
                if isinstance(chunk.delta.message.content, list):
                    delta_text = ""
                    for content in chunk.delta.message.content:
                        logger.debug(
                            "The content type %s in LLM chunk delta message content.: %r", type(content), content
                        )
                        if isinstance(content, TextPromptMessageContent):
                            delta_text += content.data
                        elif isinstance(content, str):
                            delta_text += content  # fallback to str
                        else:
                            logger.warning(
                                "Unsupported content type %s in LLM chunk delta message content.: %r",
                                type(content),
                                content,
                            )
                            continue

                if not self._task_state.llm_result.prompt_messages:
                    self._task_state.llm_result.prompt_messages = chunk.prompt_messages
@@ -542,8 +542,6 @@ class LBModelManager:

                return config

        return None

    def cooldown(self, config: ModelLoadBalancingConfiguration, expire: int = 60) -> None:
        """
        Cooldown model load balancing config
@@ -251,7 +251,7 @@ class OpsTraceManager:
            provider_config_map[tracing_provider]["trace_instance"],
            provider_config_map[tracing_provider]["config_class"],
        )
        decrypt_trace_config_key = str(decrypt_trace_config)
        decrypt_trace_config_key = json.dumps(decrypt_trace_config, sort_keys=True)
        tracing_instance = cls.ops_trace_instances_cache.get(decrypt_trace_config_key)
        if tracing_instance is None:
            # create new tracing_instance and update the cache if it is absent
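Why the cache-key change matters (illustrative): two configs with the same contents but different key order stringify differently, so str() fragments the cache, while sorted JSON canonicalizes them.

import json

a = {"endpoint": "https://x", "api_key": "k"}
b = {"api_key": "k", "endpoint": "https://x"}
print(str(a) == str(b))                                                 # False -- insertion order leaks in
print(json.dumps(a, sort_keys=True) == json.dumps(b, sort_keys=True))   # True  -- one key per logical config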
@@ -157,9 +157,23 @@ class PluginInstallTaskStartResponse(BaseModel):
    task_id: str = Field(description="The ID of the install task.")


class PluginUploadResponse(BaseModel):
class PluginVerification(BaseModel):
    """
    Verification of the plugin.
    """

    class AuthorizedCategory(StrEnum):
        Langgenius = "langgenius"
        Partner = "partner"
        Community = "community"

    authorized_category: AuthorizedCategory = Field(description="The authorized category of the plugin.")


class PluginDecodeResponse(BaseModel):
    unique_identifier: str = Field(description="The unique identifier of the plugin.")
    manifest: PluginDeclaration
    verification: Optional[PluginVerification] = Field(default=None, description="Basic verification information")


class PluginOAuthAuthorizationUrlResponse(BaseModel):
@@ -10,10 +10,10 @@ from core.plugin.entities.plugin import (
    PluginInstallationSource,
)
from core.plugin.entities.plugin_daemon import (
    PluginDecodeResponse,
    PluginInstallTask,
    PluginInstallTaskStartResponse,
    PluginListResponse,
    PluginUploadResponse,
)
from core.plugin.impl.base import BasePluginClient

@@ -53,7 +53,7 @@ class PluginInstaller(BasePluginClient):
        tenant_id: str,
        pkg: bytes,
        verify_signature: bool = False,
    ) -> PluginUploadResponse:
    ) -> PluginDecodeResponse:
        """
        Upload a plugin package and return the plugin unique identifier.
        """
@@ -68,7 +68,7 @@ class PluginInstaller(BasePluginClient):
        return self._request_with_plugin_daemon_response(
            "POST",
            f"plugin/{tenant_id}/management/install/upload/package",
            PluginUploadResponse,
            PluginDecodeResponse,
            files=body,
            data=data,
        )
@@ -176,6 +176,18 @@ class PluginInstaller(BasePluginClient):
            params={"plugin_unique_identifier": plugin_unique_identifier},
        )

    def decode_plugin_from_identifier(self, tenant_id: str, plugin_unique_identifier: str) -> PluginDecodeResponse:
        """
        Decode a plugin from an identifier.
        """
        return self._request_with_plugin_daemon_response(
            "GET",
            f"plugin/{tenant_id}/management/decode/from_identifier",
            PluginDecodeResponse,
            data={"plugin_unique_identifier": plugin_unique_identifier},
            headers={"Content-Type": "application/json"},
        )

    def fetch_plugin_installation_by_ids(
        self, tenant_id: str, plugin_ids: Sequence[str]
    ) -> Sequence[PluginInstallation]:
0  api/core/rag/datasource/vdb/matrixone/__init__.py  (new file)
233  api/core/rag/datasource/vdb/matrixone/matrixone_vector.py  (new file)
@@ -0,0 +1,233 @@
import json
import logging
import uuid
from functools import wraps
from typing import Any, Optional

from mo_vector.client import MoVectorClient  # type: ignore
from pydantic import BaseModel, model_validator

from configs import dify_config
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.embedding.embedding_base import Embeddings
from core.rag.models.document import Document
from extensions.ext_redis import redis_client
from models.dataset import Dataset

logger = logging.getLogger(__name__)


class MatrixoneConfig(BaseModel):
    host: str = "localhost"
    port: int = 6001
    user: str = "dump"
    password: str = "111"
    database: str = "dify"
    metric: str = "l2"

    @model_validator(mode="before")
    @classmethod
    def validate_config(cls, values: dict) -> dict:
        if not values["host"]:
            raise ValueError("config host is required")
        if not values["port"]:
            raise ValueError("config port is required")
        if not values["user"]:
            raise ValueError("config user is required")
        if not values["password"]:
            raise ValueError("config password is required")
        if not values["database"]:
            raise ValueError("config database is required")
        return values


def ensure_client(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.client is None:
            self.client = self._get_client(None, False)
        return func(self, *args, **kwargs)

    return wrapper


class MatrixoneVector(BaseVector):
    """
    Matrixone vector storage implementation.
    """

    def __init__(self, collection_name: str, config: MatrixoneConfig):
        super().__init__(collection_name)
        self.config = config
        self.collection_name = collection_name.lower()
        self.client = None

    @property
    def collection_name(self):
        return self._collection_name

    @collection_name.setter
    def collection_name(self, value):
        self._collection_name = value

    def get_type(self) -> str:
        return VectorType.MATRIXONE

    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
        if self.client is None:
            self.client = self._get_client(len(embeddings[0]), True)
        return self.add_texts(texts, embeddings)

    def _get_client(self, dimension: Optional[int] = None, create_table: bool = False) -> MoVectorClient:
        """
        Create a new client for the collection.

        The collection will be created if it doesn't exist.
        """
        lock_name = f"vector_indexing_lock_{self._collection_name}"
        with redis_client.lock(lock_name, timeout=20):
            client = MoVectorClient(
                connection_string=f"mysql+pymysql://{self.config.user}:{self.config.password}@{self.config.host}:{self.config.port}/{self.config.database}",
                table_name=self.collection_name,
                vector_dimension=dimension,
                create_table=create_table,
            )
            collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(collection_exist_cache_key):
                return client
            try:
                client.create_full_text_index()
            except Exception as e:
                logger.exception("Failed to create full text index")
            redis_client.set(collection_exist_cache_key, 1, ex=3600)
            return client

    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
        if self.client is None:
            self.client = self._get_client(len(embeddings[0]), True)
        assert self.client is not None
        ids = []
        for _, doc in enumerate(documents):
            if doc.metadata is not None:
                doc_id = doc.metadata.get("doc_id", str(uuid.uuid4()))
                ids.append(doc_id)
        self.client.insert(
            texts=[doc.page_content for doc in documents],
            embeddings=embeddings,
            metadatas=[doc.metadata for doc in documents],
            ids=ids,
        )
        return ids

    @ensure_client
    def text_exists(self, id: str) -> bool:
        assert self.client is not None
        result = self.client.get(ids=[id])
        return len(result) > 0

    @ensure_client
    def delete_by_ids(self, ids: list[str]) -> None:
        assert self.client is not None
        if not ids:
            return
        self.client.delete(ids=ids)

    @ensure_client
    def get_ids_by_metadata_field(self, key: str, value: str):
        assert self.client is not None
        results = self.client.query_by_metadata(filter={key: value})
        return [result.id for result in results]

    @ensure_client
    def delete_by_metadata_field(self, key: str, value: str) -> None:
        assert self.client is not None
        self.client.delete(filter={key: value})

    @ensure_client
    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
        assert self.client is not None
        top_k = kwargs.get("top_k", 5)
        document_ids_filter = kwargs.get("document_ids_filter")
        filter = None
        if document_ids_filter:
            filter = {"document_id": {"$in": document_ids_filter}}

        results = self.client.query(
            query_vector=query_vector,
            k=top_k,
            filter=filter,
        )

        docs = []
        # TODO: add the score threshold to the query
        for result in results:
            metadata = result.metadata
            docs.append(
                Document(
                    page_content=result.document,
                    metadata=metadata,
                )
            )
        return docs

    @ensure_client
    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
        assert self.client is not None
        top_k = kwargs.get("top_k", 5)
        document_ids_filter = kwargs.get("document_ids_filter")
        filter = None
        if document_ids_filter:
            filter = {"document_id": {"$in": document_ids_filter}}
        score_threshold = float(kwargs.get("score_threshold", 0.0))

        results = self.client.full_text_query(
            keywords=[query],
            k=top_k,
            filter=filter,
        )

        docs = []
        for result in results:
            metadata = result.metadata
            if isinstance(metadata, str):
                import json

                metadata = json.loads(metadata)
            score = 1 - result.distance
            if score >= score_threshold:
                metadata["score"] = score
                docs.append(
                    Document(
                        page_content=result.document,
                        metadata=metadata,
                    )
                )
        return docs

    @ensure_client
    def delete(self) -> None:
        assert self.client is not None
        self.client.delete()


class MatrixoneVectorFactory(AbstractVectorFactory):
    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> MatrixoneVector:
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            collection_name = class_prefix
        else:
            dataset_id = dataset.id
            collection_name = Dataset.gen_collection_name_by_id(dataset_id)
            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.MATRIXONE, collection_name))

        config = MatrixoneConfig(
            host=dify_config.MATRIXONE_HOST or "localhost",
            port=dify_config.MATRIXONE_PORT or 6001,
            user=dify_config.MATRIXONE_USER or "dump",
            password=dify_config.MATRIXONE_PASSWORD or "111",
            database=dify_config.MATRIXONE_DATABASE or "dify",
            metric=dify_config.MATRIXONE_METRIC or "l2",
        )
        return MatrixoneVector(collection_name=collection_name, config=config)
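An illustrative end-to-end use of the new class; the collection name and vectors are stand-ins, and a reachable Matrixone server plus Redis (used for locking above) are assumed.

docs = [Document(page_content="hello matrixone", metadata={"doc_id": "d1", "document_id": "doc-1"})]
vector = MatrixoneVector(
    collection_name="Vector_index_1234_Node",  # lowercased by __init__
    config=MatrixoneConfig(host="127.0.0.1", port=6001, user="dump", password="111", database="dify"),
)
vector.create(docs, embeddings=[[0.1, 0.2, 0.3]])         # creates the table on first use
hits = vector.search_by_vector([0.1, 0.2, 0.3], top_k=5)
print([d.page_content for d in hits])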
@@ -80,6 +80,23 @@ class OceanBaseVector(BaseVector):

            self.delete()

        vals = []
        params = self._client.perform_raw_text_sql("SHOW PARAMETERS LIKE '%ob_vector_memory_limit_percentage%'")
        for row in params:
            val = int(row[6])
            vals.append(val)
        if len(vals) == 0:
            raise ValueError("ob_vector_memory_limit_percentage not found in parameters.")
        if any(val == 0 for val in vals):
            try:
                self._client.perform_raw_text_sql("ALTER SYSTEM SET ob_vector_memory_limit_percentage = 30")
            except Exception as e:
                raise Exception(
                    "Failed to set ob_vector_memory_limit_percentage. "
                    + "Maybe the database user has insufficient privilege.",
                    e,
                )

        cols = [
            Column("id", String(36), primary_key=True, autoincrement=False),
            Column("vector", VECTOR(self._vec_dim)),
@@ -110,22 +127,6 @@ class OceanBaseVector(BaseVector):
                + "to support fulltext index and vector index in the same table",
                e,
            )
        vals = []
        params = self._client.perform_raw_text_sql("SHOW PARAMETERS LIKE '%ob_vector_memory_limit_percentage%'")
        for row in params:
            val = int(row[6])
            vals.append(val)
        if len(vals) == 0:
            raise ValueError("ob_vector_memory_limit_percentage not found in parameters.")
        if any(val == 0 for val in vals):
            try:
                self._client.perform_raw_text_sql("ALTER SYSTEM SET ob_vector_memory_limit_percentage = 30")
            except Exception as e:
                raise Exception(
                    "Failed to set ob_vector_memory_limit_percentage. "
                    + "Maybe the database user has insufficient privilege.",
                    e,
                )
        redis_client.set(collection_exist_cache_key, 1, ex=3600)

    def _check_hybrid_search_support(self) -> bool:
@@ -164,6 +164,10 @@ class Vector:
                from core.rag.datasource.vdb.huawei.huawei_cloud_vector import HuaweiCloudVectorFactory

                return HuaweiCloudVectorFactory
            case VectorType.MATRIXONE:
                from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneVectorFactory

                return MatrixoneVectorFactory
            case _:
                raise ValueError(f"Vector store {vector_type} is not supported.")
@@ -29,3 +29,4 @@ class VectorType(StrEnum):
    OPENGAUSS = "opengauss"
    TABLESTORE = "tablestore"
    HUAWEI_CLOUD = "huawei_cloud"
    MATRIXONE = "matrixone"
@@ -41,6 +41,13 @@ class WeaviateVector(BaseVector):

        weaviate.connect.connection.has_grpc = False

        # Fix to minimize the performance impact of the deprecation check in weaviate-client 3.24.0,
        # by changing the connection timeout to pypi.org from 1 second to 0.001 seconds.
        # TODO: This can be removed once weaviate-client is updated to 3.26.7 or higher,
        # which does not contain the deprecation check.
        if hasattr(weaviate.connect.connection, "PYPI_TIMEOUT"):
            weaviate.connect.connection.PYPI_TIMEOUT = 0.001

        try:
            client = weaviate.Client(
                url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None
@ -22,6 +22,7 @@ class FirecrawlApp:
            "formats": ["markdown"],
            "onlyMainContent": True,
            "timeout": 30000,
            "integration": "dify",
        }
        if params:
            json_data.update(params)

@ -39,7 +40,7 @@ class FirecrawlApp:
    def crawl_url(self, url, params=None) -> str:
        # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/crawl-post
        headers = self._prepare_headers()
        json_data = {"url": url}
        json_data = {"url": url, "integration": "dify"}
        if params:
            json_data.update(params)
        response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers)

@ -49,7 +50,6 @@ class FirecrawlApp:
            return cast(str, job_id)
        else:
            self._handle_error(response, "start crawl job")
            # FIXME: unreachable code for mypy
            return ""  # unreachable

    def check_crawl_status(self, job_id) -> dict[str, Any]:

@ -82,7 +82,6 @@ class FirecrawlApp:
            )
        else:
            self._handle_error(response, "check crawl status")
            # FIXME: unreachable code for mypy
            return {}  # unreachable

    def _format_crawl_status_response(

@ -126,4 +125,31 @@ class FirecrawlApp:

    def _handle_error(self, response, action) -> None:
        error_message = response.json().get("error", "Unknown error occurred")
        raise Exception(f"Failed to {action}. Status code: {response.status_code}. Error: {error_message}")
        raise Exception(f"Failed to {action}. Status code: {response.status_code}. Error: {error_message}")  # type: ignore[return]

    def search(self, query: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
        # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/search
        headers = self._prepare_headers()
        json_data = {
            "query": query,
            "limit": 5,
            "lang": "en",
            "country": "us",
            "timeout": 60000,
            "ignoreInvalidURLs": False,
            "scrapeOptions": {},
            "integration": "dify",
        }
        if params:
            json_data.update(params)
        response = self._post_request(f"{self.base_url}/v1/search", json_data, headers)
        if response.status_code == 200:
            response_data = response.json()
            if not response_data.get("success"):
                raise Exception(f"Search failed. Error: {response_data.get('warning', 'Unknown error')}")
            return cast(dict[str, Any], response_data)
        elif response.status_code in {402, 409, 500, 429, 408}:
            self._handle_error(response, "perform search")
            return {}  # Avoid additional exception after handling error
        else:
            raise Exception(f"Failed to perform search. Status code: {response.status_code}")
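Reviewer note: the change above threads a default `"integration": "dify"` tag into every Firecrawl payload while still letting callers override defaults. A minimal, self-contained sketch of that merge order (the values are invented for illustration):

```python
# Defaults mirror the search() payload above; update() runs last, so caller
# params win on conflict while untouched defaults (like the integration tag) remain.
json_data = {
    "query": "dify workflows",
    "limit": 5,
    "integration": "dify",
}
params = {"limit": 10}
json_data.update(params)
assert json_data == {"query": "dify workflows", "limit": 10, "integration": "dify"}
```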
@ -79,55 +79,71 @@ class NotionExtractor(BaseExtractor):
    def _get_notion_database_data(self, database_id: str, query_dict: dict[str, Any] = {}) -> list[Document]:
        """Get all the pages from a Notion database."""
        assert self._notion_access_token is not None, "Notion access token is required"
        res = requests.post(
            DATABASE_URL_TMPL.format(database_id=database_id),
            headers={
                "Authorization": "Bearer " + self._notion_access_token,
                "Content-Type": "application/json",
                "Notion-Version": "2022-06-28",
            },
            json=query_dict,
        )

        data = res.json()

        database_content = []
        if "results" not in data or data["results"] is None:
        next_cursor = None
        has_more = True

        while has_more:
            current_query = query_dict.copy()
            if next_cursor:
                current_query["start_cursor"] = next_cursor

            res = requests.post(
                DATABASE_URL_TMPL.format(database_id=database_id),
                headers={
                    "Authorization": "Bearer " + self._notion_access_token,
                    "Content-Type": "application/json",
                    "Notion-Version": "2022-06-28",
                },
                json=current_query,
            )

            response_data = res.json()

            if "results" not in response_data or response_data["results"] is None:
                break

            for result in response_data["results"]:
                properties = result["properties"]
                data = {}
                value: Any
                for property_name, property_value in properties.items():
                    type = property_value["type"]
                    if type == "multi_select":
                        value = []
                        multi_select_list = property_value[type]
                        for multi_select in multi_select_list:
                            value.append(multi_select["name"])
                    elif type in {"rich_text", "title"}:
                        if len(property_value[type]) > 0:
                            value = property_value[type][0]["plain_text"]
                        else:
                            value = ""
                    elif type in {"select", "status"}:
                        if property_value[type]:
                            value = property_value[type]["name"]
                        else:
                            value = ""
                    else:
                        value = property_value[type]
                    data[property_name] = value
                row_dict = {k: v for k, v in data.items() if v}
                row_content = ""
                for key, value in row_dict.items():
                    if isinstance(value, dict):
                        value_dict = {k: v for k, v in value.items() if v}
                        value_content = "".join(f"{k}:{v} " for k, v in value_dict.items())
                        row_content = row_content + f"{key}:{value_content}\n"
                    else:
                        row_content = row_content + f"{key}:{value}\n"
                database_content.append(row_content)

            has_more = response_data.get("has_more", False)
            next_cursor = response_data.get("next_cursor")

        if not database_content:
            return []
        for result in data["results"]:
            properties = result["properties"]
            data = {}
            value: Any
            for property_name, property_value in properties.items():
                type = property_value["type"]
                if type == "multi_select":
                    value = []
                    multi_select_list = property_value[type]
                    for multi_select in multi_select_list:
                        value.append(multi_select["name"])
                elif type in {"rich_text", "title"}:
                    if len(property_value[type]) > 0:
                        value = property_value[type][0]["plain_text"]
                    else:
                        value = ""
                elif type in {"select", "status"}:
                    if property_value[type]:
                        value = property_value[type]["name"]
                    else:
                        value = ""
                else:
                    value = property_value[type]
                data[property_name] = value
            row_dict = {k: v for k, v in data.items() if v}
            row_content = ""
            for key, value in row_dict.items():
                if isinstance(value, dict):
                    value_dict = {k: v for k, v in value.items() if v}
                    value_content = "".join(f"{k}:{v} " for k, v in value_dict.items())
                    row_content = row_content + f"{key}:{value_content}\n"
                else:
                    row_content = row_content + f"{key}:{value}\n"
            database_content.append(row_content)

        return [Document(page_content="\n".join(database_content))]
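Reviewer note: the rewritten extractor walks Notion's `has_more`/`next_cursor` pagination instead of reading only the first page. A self-contained sketch of the loop shape, with the HTTP call stubbed out:

```python
# fetch_page stands in for the POST to the Notion database query endpoint.
def fetch_page(cursor=None):
    pages = {
        None: {"results": [1, 2], "has_more": True, "next_cursor": "c1"},
        "c1": {"results": [3], "has_more": False, "next_cursor": None},
    }
    return pages[cursor]

collected, cursor, has_more = [], None, True
while has_more:
    data = fetch_page(cursor)
    collected.extend(data.get("results") or [])
    has_more = data.get("has_more", False)
    cursor = data.get("next_cursor")

assert collected == [1, 2, 3]
```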
@ -104,7 +104,7 @@ class QAIndexProcessor(BaseIndexProcessor):

    def format_by_template(self, file: FileStorage, **kwargs) -> list[Document]:
        # check file type
        if not file.filename or not file.filename.endswith(".csv"):
        if not file.filename or not file.filename.lower().endswith(".csv"):
            raise ValueError("Invalid file type. Only CSV files are allowed")

        try:
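Reviewer note: a quick check of why the `.lower()` matters; previously an upload named `QA.CSV` was rejected:

```python
for name in ("qa.csv", "QA.CSV", "qa.txt"):
    print(name, name.lower().endswith(".csv"))  # True, True, False
```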
@ -496,6 +496,8 @@ class DatasetRetrieval:
                all_documents = self.calculate_keyword_score(query, all_documents, top_k)
            elif index_type == "high_quality":
                all_documents = self.calculate_vector_score(all_documents, top_k, score_threshold)
            else:
                all_documents = all_documents[:top_k] if top_k else all_documents

        self._on_query(query, dataset_ids, app_id, user_from, user_id)
@ -6,7 +6,7 @@ import json
import logging
from typing import Optional, Union

from sqlalchemy import select
from sqlalchemy import func, select
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker

@ -151,11 +151,11 @@ class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository):
            existing = session.scalar(select(WorkflowRun).where(WorkflowRun.id == domain_model.id_))
            if not existing:
                # For new records, get the next sequence number
                stmt = select(WorkflowRun.sequence_number).where(
                stmt = select(func.max(WorkflowRun.sequence_number)).where(
                    WorkflowRun.app_id == self._app_id,
                    WorkflowRun.tenant_id == self._tenant_id,
                )
                max_sequence = session.scalar(stmt.order_by(WorkflowRun.sequence_number.desc()))
                max_sequence = session.scalar(stmt)
                db_model.sequence_number = (max_sequence or 0) + 1
            else:
                # For updates, keep the existing sequence number
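Reviewer note: the new statement asks the database for the aggregate directly instead of ordering all rows and taking the first scalar. A runnable sketch against a throwaway SQLite schema (the model here is a stand-in, not Dify's `WorkflowRun`):

```python
from sqlalchemy import Column, Integer, String, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Run(Base):
    __tablename__ = "runs"
    id = Column(Integer, primary_key=True)
    app_id = Column(String)
    sequence_number = Column(Integer)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(Run(app_id="a", sequence_number=n) for n in (1, 2, 3))
    # MAX() is computed server-side; no ORDER BY over the whole table is needed.
    stmt = select(func.max(Run.sequence_number)).where(Run.app_id == "a")
    max_sequence = session.scalar(stmt)
    assert (max_sequence or 0) + 1 == 4
```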
@ -9,7 +9,7 @@ from copy import copy, deepcopy
from datetime import UTC, datetime
from typing import Any, Optional, cast

from flask import Flask, current_app, has_request_context
from flask import Flask, current_app

from configs import dify_config
from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError

@ -53,6 +53,7 @@ from core.workflow.nodes.end.end_stream_processor import EndStreamProcessor
from core.workflow.nodes.enums import ErrorStrategy, FailBranchSourceHandle
from core.workflow.nodes.event import RunCompletedEvent, RunRetrieverResourceEvent, RunStreamChunkEvent
from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING
from libs.flask_utils import preserve_flask_contexts
from models.enums import UserFrom
from models.workflow import WorkflowType

@ -537,24 +538,9 @@ class GraphEngine:
        """
        Run parallel nodes
        """
        for var, val in context.items():
            var.set(val)

        # FIXME(-LAN-): Save current user before entering new app context
        from flask import g

        saved_user = None
        if has_request_context() and hasattr(g, "_login_user"):
            saved_user = g._login_user

        with flask_app.app_context():
        with preserve_flask_contexts(flask_app, context_vars=context):
            try:
                # Restore user in new app context
                if saved_user is not None:
                    from flask import g

                    g._login_user = saved_user

                q.put(
                    ParallelBranchRunStartedEvent(
                        parallel_id=parallel_id,

@ -653,26 +639,19 @@ class GraphEngine:
                retry_start_at = datetime.now(UTC).replace(tzinfo=None)
                # yield control to other threads
                time.sleep(0.001)
                generator = node_instance.run()
                for item in generator:
                    if isinstance(item, GraphEngineEvent):
                        if isinstance(item, BaseIterationEvent):
                            # add parallel info to iteration event
                            item.parallel_id = parallel_id
                            item.parallel_start_node_id = parallel_start_node_id
                            item.parent_parallel_id = parent_parallel_id
                            item.parent_parallel_start_node_id = parent_parallel_start_node_id
                        elif isinstance(item, BaseLoopEvent):
                            # add parallel info to loop event
                            item.parallel_id = parallel_id
                            item.parallel_start_node_id = parallel_start_node_id
                            item.parent_parallel_id = parent_parallel_id
                            item.parent_parallel_start_node_id = parent_parallel_start_node_id

                        yield item
                event_stream = node_instance.run()
                for event in event_stream:
                    if isinstance(event, GraphEngineEvent):
                        # add parallel info to iteration event
                        if isinstance(event, BaseIterationEvent | BaseLoopEvent):
                            event.parallel_id = parallel_id
                            event.parallel_start_node_id = parallel_start_node_id
                            event.parent_parallel_id = parent_parallel_id
                            event.parent_parallel_start_node_id = parent_parallel_start_node_id
                        yield event
                    else:
                        if isinstance(item, RunCompletedEvent):
                            run_result = item.run_result
                        if isinstance(event, RunCompletedEvent):
                            run_result = event.run_result
                            if run_result.status == WorkflowNodeExecutionStatus.FAILED:
                                if (
                                    retries == max_retries

@ -708,7 +687,7 @@ class GraphEngine:
                                    # if run failed, handle error
                                    run_result = self._handle_continue_on_error(
                                        node_instance,
                                        item.run_result,
                                        event.run_result,
                                        self.graph_runtime_state.variable_pool,
                                        handle_exceptions=handle_exceptions,
                                    )

@ -811,28 +790,28 @@ class GraphEngine:
                                    should_continue_retry = False

                                break
                        elif isinstance(item, RunStreamChunkEvent):
                        elif isinstance(event, RunStreamChunkEvent):
                            yield NodeRunStreamChunkEvent(
                                id=node_instance.id,
                                node_id=node_instance.node_id,
                                node_type=node_instance.node_type,
                                node_data=node_instance.node_data,
                                chunk_content=item.chunk_content,
                                from_variable_selector=item.from_variable_selector,
                                chunk_content=event.chunk_content,
                                from_variable_selector=event.from_variable_selector,
                                route_node_state=route_node_state,
                                parallel_id=parallel_id,
                                parallel_start_node_id=parallel_start_node_id,
                                parent_parallel_id=parent_parallel_id,
                                parent_parallel_start_node_id=parent_parallel_start_node_id,
                            )
                        elif isinstance(item, RunRetrieverResourceEvent):
                        elif isinstance(event, RunRetrieverResourceEvent):
                            yield NodeRunRetrieverResourceEvent(
                                id=node_instance.id,
                                node_id=node_instance.node_id,
                                node_type=node_instance.node_type,
                                node_data=node_instance.node_data,
                                retriever_resources=item.retriever_resources,
                                context=item.context,
                                retriever_resources=event.retriever_resources,
                                context=event.context,
                                route_node_state=route_node_state,
                                parallel_id=parallel_id,
                                parallel_start_node_id=parallel_start_node_id,
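Reviewer note: the deleted save/restore boilerplate is exactly what the new `preserve_flask_contexts` helper (added in `api/libs/flask_utils.py` below) encapsulates. A minimal sketch of the calling pattern from a worker thread, assuming the repo's `libs` package is importable:

```python
import contextvars
import threading

from flask import Flask

from libs.flask_utils import preserve_flask_contexts

flask_app = Flask(__name__)
context = contextvars.copy_context()
results = []

def worker():
    # One call replaces the manual g._login_user save/restore plus app_context().
    with preserve_flask_contexts(flask_app, context_vars=context):
        results.append("ran with app context and preserved context vars")

t = threading.Thread(target=worker)
t.start()
t.join()
assert results
```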
@ -219,7 +219,7 @@ class AgentNode(ToolNode):
            )
            if tool_runtime.entity.description:
                tool_runtime.entity.description.llm = (
                    extra.get("descrption", "") or tool_runtime.entity.description.llm
                    extra.get("description", "") or tool_runtime.entity.description.llm
                )
            for tool_runtime_params in tool_runtime.entity.parameters:
                tool_runtime_params.form = (
@ -57,7 +57,6 @@ class StreamProcessor(ABC):

                    # The branch_identify parameter is added to ensure that
                    # only nodes in the correct logical branch are included.
                    reachable_node_ids.append(edge.target_node_id)
                    ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id, run_result.edge_source_handle)
                    reachable_node_ids.extend(ids)
                else:

@ -74,6 +73,8 @@ class StreamProcessor(ABC):
            self._remove_node_ids_in_unreachable_branch(node_id, reachable_node_ids)

    def _fetch_node_ids_in_reachable_branch(self, node_id: str, branch_identify: Optional[str] = None) -> list[str]:
        if node_id not in self.rest_node_ids:
            self.rest_node_ids.append(node_id)
        node_ids = []
        for edge in self.graph.edge_mapping.get(node_id, []):
            if edge.target_node_id == self.graph.root_node_id:
@ -6,7 +6,6 @@ from pydantic import BaseModel, Field
from core.model_runtime.entities.llm_entities import LLMUsage
from core.rag.entities.citation_metadata import RetrievalSourceMetadata
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus


class RunCompletedEvent(BaseModel):

@ -39,11 +38,3 @@ class RunRetryEvent(BaseModel):
    error: str = Field(..., description="error")
    retry_index: int = Field(..., description="Retry attempt number")
    start_at: datetime = Field(..., description="Retry start time")


class SingleStepRetryEvent(NodeRunResult):
    """Single step retry event"""

    status: WorkflowNodeExecutionStatus = WorkflowNodeExecutionStatus.RETRY

    elapsed_time: float = Field(..., description="elapsed time")
@ -7,7 +7,7 @@ from datetime import UTC, datetime
from queue import Empty, Queue
from typing import TYPE_CHECKING, Any, Optional, cast

from flask import Flask, current_app, has_request_context
from flask import Flask, current_app

from configs import dify_config
from core.variables import ArrayVariable, IntegerVariable, NoneVariable

@ -37,6 +37,7 @@ from core.workflow.nodes.base import BaseNode
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.event import NodeEvent, RunCompletedEvent
from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData
from libs.flask_utils import preserve_flask_contexts

from .exc import (
    InvalidIteratorValueError,

@ -583,23 +584,8 @@ class IterationNode(BaseNode[IterationNodeData]):
        """
        run single iteration in parallel mode
        """
        for var, val in context.items():
            var.set(val)

        # FIXME(-LAN-): Save current user before entering new app context
        from flask import g

        saved_user = None
        if has_request_context() and hasattr(g, "_login_user"):
            saved_user = g._login_user

        with flask_app.app_context():
            # Restore user in new app context
            if saved_user is not None:
                from flask import g

                g._login_user = saved_user

        with preserve_flask_contexts(flask_app, context_vars=context):
            parallel_mode_run_id = uuid.uuid4().hex
            graph_engine_copy = graph_engine.create_copy()
            variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool
@ -525,6 +525,8 @@ class LLMNode(BaseNode[LLMNodeData]):
        # Set appropriate response format based on model capabilities
        self._set_response_format(completion_params, model_schema.parameter_rules)
        model_config_with_cred.parameters = completion_params
        # NOTE(-LAN-): This line modifies `self.node_data.model`, which is used in `_invoke_llm()`.
        node_data_model.completion_params = completion_params
        return model, model_config_with_cred

    def _fetch_prompt_messages(
@ -8,4 +8,5 @@ EMPTY_VALUE_MAPPING = {
    SegmentType.ARRAY_STRING: [],
    SegmentType.ARRAY_NUMBER: [],
    SegmentType.ARRAY_OBJECT: [],
    SegmentType.ARRAY_FILE: [],
}

@ -1,5 +1,6 @@
from typing import Any

from core.file import File
from core.variables import SegmentType

from .enums import Operation

@ -85,6 +86,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va
            return isinstance(value, int | float)
        case SegmentType.ARRAY_OBJECT if operation == Operation.APPEND:
            return isinstance(value, dict)
        case SegmentType.ARRAY_FILE if operation == Operation.APPEND:
            return isinstance(value, File)

        # Array & Extend / Overwrite
        case SegmentType.ARRAY_ANY if operation in {Operation.EXTEND, Operation.OVER_WRITE}:

@ -95,6 +98,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va
            return isinstance(value, list) and all(isinstance(item, int | float) for item in value)
        case SegmentType.ARRAY_OBJECT if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
            return isinstance(value, list) and all(isinstance(item, dict) for item in value)
        case SegmentType.ARRAY_FILE if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
            return isinstance(value, list) and all(isinstance(item, File) for item in value)

        case _:
            return False
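Reviewer note: the guard-clause `match` style above extends cleanly per segment type. A self-contained sketch of the same validation shape (the enum and type names are stand-ins, not the repo's):

```python
from enum import Enum

class Op(Enum):
    APPEND = "append"
    EXTEND = "extend"

def is_valid(kind: str, op: Op, value) -> bool:
    match kind:
        case "array[object]" if op is Op.APPEND:
            return isinstance(value, dict)
        case "array[object]" if op is Op.EXTEND:
            return isinstance(value, list) and all(isinstance(v, dict) for v in value)
        case _:
            return False

assert is_valid("array[object]", Op.APPEND, {"k": 1})
assert not is_valid("array[object]", Op.EXTEND, [1, 2])
```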
@ -54,6 +54,15 @@ class Mail:
                    use_tls=dify_config.SMTP_USE_TLS,
                    opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS,
                )
            case "sendgrid":
                from libs.sendgrid import SendGridClient

                if not dify_config.SENDGRID_API_KEY:
                    raise ValueError("SENDGRID_API_KEY is required for SendGrid mail type")

                self._client = SendGridClient(
                    sendgrid_api_key=dify_config.SENDGRID_API_KEY, _from=dify_config.MAIL_DEFAULT_SEND_FROM or ""
                )
            case _:
                raise ValueError("Unsupported mail type {}".format(mail_type))
@ -39,10 +39,6 @@ from core.variables.variables import (
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID


class InvalidSelectorError(ValueError):
    pass


class UnsupportedSegmentTypeError(Exception):
    pass

@ -105,6 +101,8 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen
            result = ArrayNumberVariable.model_validate(mapping)
        case SegmentType.ARRAY_OBJECT if isinstance(value, list):
            result = ArrayObjectVariable.model_validate(mapping)
        case SegmentType.ARRAY_FILE if isinstance(value, list):
            result = ArrayFileVariable.model_validate(mapping)
        case _:
            raise VariableError(f"not supported value type {value_type}")
    if result.size > dify_config.MAX_VARIABLE_SIZE:
65
api/libs/flask_utils.py
Normal file
@ -0,0 +1,65 @@
import contextvars
from collections.abc import Iterator
from contextlib import contextmanager
from typing import TypeVar

from flask import Flask, g, has_request_context

T = TypeVar("T")


@contextmanager
def preserve_flask_contexts(
    flask_app: Flask,
    context_vars: contextvars.Context,
) -> Iterator[None]:
    """
    A context manager that handles:
    1. flask-login's UserProxy copy
    2. ContextVars copy
    3. flask_app.app_context()

    This context manager ensures that the Flask application context is properly set up,
    the current user is preserved across context boundaries, and any provided context variables
    are set within the new context.

    Note:
        This manager allows using current_user across threads and app contexts,
        but this is not the recommended usage; prefer passing the user directly as a parameter.

    Args:
        flask_app: The Flask application instance
        context_vars: contextvars.Context object containing context variables to be set in the new context

    Yields:
        None

    Example:
        ```python
        with preserve_flask_contexts(flask_app, context_vars=context_vars):
            # Code that needs Flask app context and context variables
            # Current user will be preserved if available
        ```
    """
    # Set context variables if provided
    if context_vars:
        for var, val in context_vars.items():
            var.set(val)

    # Save current user before entering new app context
    saved_user = None
    if has_request_context() and hasattr(g, "_login_user"):
        saved_user = g._login_user

    # Enter Flask app context
    with flask_app.app_context():
        try:
            # Restore user in new app context if it was saved
            if saved_user is not None:
                g._login_user = saved_user

            # Yield control back to the caller
            yield
        finally:
            # Any cleanup can be added here if needed
            pass
42
api/libs/sendgrid.py
Normal file
@ -0,0 +1,42 @@
import logging

import sendgrid  # type: ignore
from python_http_client.exceptions import ForbiddenError, UnauthorizedError
from sendgrid.helpers.mail import Content, Email, Mail, To  # type: ignore


class SendGridClient:
    def __init__(self, sendgrid_api_key: str, _from: str):
        self.sendgrid_api_key = sendgrid_api_key
        self._from = _from

    def send(self, mail: dict):
        logging.debug("Sending email with SendGrid")

        try:
            _to = mail["to"]

            if not _to:
                raise ValueError("SendGridClient: Cannot send email: recipient address is missing.")

            sg = sendgrid.SendGridAPIClient(api_key=self.sendgrid_api_key)
            from_email = Email(self._from)
            to_email = To(_to)
            subject = mail["subject"]
            content = Content("text/html", mail["html"])
            mail = Mail(from_email, to_email, subject, content)
            mail_json = mail.get()  # type: ignore
            response = sg.client.mail.send.post(request_body=mail_json)
            logging.debug(response.status_code)
            logging.debug(response.body)
            logging.debug(response.headers)

        except TimeoutError as e:
            logging.exception("SendGridClient Timeout occurred while sending email")
            raise
        except (UnauthorizedError, ForbiddenError) as e:
            logging.exception("SendGridClient Authentication failed. Verify that your credentials and the 'from")
            raise
        except Exception as e:
            logging.exception(f"SendGridClient Unexpected error occurred while sending email to {_to}")
            raise
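Reviewer note: a hypothetical wiring of the new client; the key and addresses are placeholders, and `send()` performs a real network call, raising on auth failures as shown above:

```python
from libs.sendgrid import SendGridClient

client = SendGridClient(sendgrid_api_key="SG.xxxx", _from="noreply@example.com")
client.send(
    {
        "to": "user@example.com",
        "subject": "Hello from Dify",
        "html": "<p>It works.</p>",
    }
)
```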
@ -10,7 +10,6 @@ from core.plugin.entities.plugin import GenericProviderID
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.signature import sign_tool_file
from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from services.plugin.plugin_service import PluginService

if TYPE_CHECKING:
    from models.workflow import Workflow

@ -169,6 +168,7 @@ class App(Base):
    @property
    def deleted_tools(self) -> list:
        from core.tools.tool_manager import ToolManager
        from services.plugin.plugin_service import PluginService

        # get agent mode tools
        app_model_config = self.app_model_config
@ -18,4 +18,3 @@ ignore_missing_imports=True

[mypy-flask_restful.inputs]
ignore_missing_imports=True

@ -83,6 +83,7 @@ dependencies = [
    "webvtt-py~=0.5.1",
    "sseclient-py>=1.8.0",
    "httpx-sse>=0.4.0",
    "sendgrid~=6.12.3",
]
# Before adding a new dependency, consider placing it in
# alphabetical order (a-z) and in a suitable group.

@ -204,4 +205,5 @@ vdb = [
    "volcengine-compat~=1.0.0",
    "weaviate-client~=3.24.0",
    "xinference-client~=1.2.2",
    "mo-vector~=0.1.13",
]
@ -421,7 +421,7 @@ class AppDslService:

        # Set icon type
        icon_type_value = icon_type or app_data.get("icon_type")
        if icon_type_value in ["emoji", "link"]:
        if icon_type_value in ["emoji", "link", "image"]:
            icon_type = icon_type_value
        else:
            icon_type = "emoji"
@ -1402,16 +1402,16 @@ class DocumentService:
                    knowledge_config.embedding_model,  # type: ignore
                )
                dataset_collection_binding_id = dataset_collection_binding.id
                if knowledge_config.retrieval_model:
                    retrieval_model = knowledge_config.retrieval_model
                else:
                    retrieval_model = RetrievalModel(
                        search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
                        reranking_enable=False,
                        reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
                        top_k=2,
                        score_threshold_enabled=False,
                    )
            if knowledge_config.retrieval_model:
                retrieval_model = knowledge_config.retrieval_model
            else:
                retrieval_model = RetrievalModel(
                    search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
                    reranking_enable=False,
                    reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
                    top_k=2,
                    score_threshold_enabled=False,
                )
            # save dataset
            dataset = Dataset(
                tenant_id=tenant_id,
@ -101,7 +101,7 @@ class WeightModel(BaseModel):


class RetrievalModel(BaseModel):
    search_method: Literal["hybrid_search", "semantic_search", "full_text_search"]
    search_method: Literal["hybrid_search", "semantic_search", "full_text_search", "keyword_search"]
    reranking_enable: bool
    reranking_model: Optional[RerankingModel] = None
    reranking_mode: Optional[str] = None
@ -4,7 +4,6 @@ from . import (
    app_model_config,
    audio,
    base,
    completion,
    conversation,
    dataset,
    document,

@ -19,7 +18,6 @@ __all__ = [
    "app_model_config",
    "audio",
    "base",
    "completion",
    "conversation",
    "dataset",
    "document",

@ -55,7 +55,3 @@ class MemberNotInTenantError(BaseServiceError):

class RoleAlreadyAssignedError(BaseServiceError):
    pass


class RateLimitExceededError(BaseServiceError):
    pass

@ -1,5 +1,5 @@
from services.errors.base import BaseServiceError


class CompletionStoppedError(BaseServiceError):
class PluginInstallationForbiddenError(BaseServiceError):
    pass
@ -88,6 +88,26 @@ class WebAppAuthModel(BaseModel):
    allow_email_password_login: bool = False


class PluginInstallationScope(StrEnum):
    NONE = "none"
    OFFICIAL_ONLY = "official_only"
    OFFICIAL_AND_SPECIFIC_PARTNERS = "official_and_specific_partners"
    ALL = "all"


class PluginInstallationPermissionModel(BaseModel):
    # Plugin installation scope – possible values:
    # none: prohibit all plugin installations
    # official_only: allow only Dify official plugins
    # official_and_specific_partners: allow official and specific partner plugins
    # all: allow installation of all plugins
    plugin_installation_scope: PluginInstallationScope = PluginInstallationScope.ALL

    # If True, restrict plugin installation to the marketplace only
    # Equivalent to ForceEnablePluginVerification
    restrict_to_marketplace_only: bool = False


class FeatureModel(BaseModel):
    billing: BillingModel = BillingModel()
    education: EducationModel = EducationModel()

@ -128,6 +148,7 @@ class SystemFeatureModel(BaseModel):
    license: LicenseModel = LicenseModel()
    branding: BrandingModel = BrandingModel()
    webapp_auth: WebAppAuthModel = WebAppAuthModel()
    plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel()


class FeatureService:

@ -291,3 +312,12 @@ class FeatureService:
        features.license.workspaces.enabled = license_info["workspaces"]["enabled"]
        features.license.workspaces.limit = license_info["workspaces"]["limit"]
        features.license.workspaces.size = license_info["workspaces"]["used"]

        if "PluginInstallationPermission" in enterprise_info:
            plugin_installation_info = enterprise_info["PluginInstallationPermission"]
            features.plugin_installation_permission.plugin_installation_scope = plugin_installation_info[
                "pluginInstallationScope"
            ]
            features.plugin_installation_permission.restrict_to_marketplace_only = plugin_installation_info[
                "restrictToMarketplaceOnly"
            ]
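Reviewer note: the enterprise payload keys are camelCase while the model fields are snake_case, so `FeatureService` copies them across explicitly. A sketch of that mapping with an invented payload, assuming the `PluginInstallationScope` and `PluginInstallationPermissionModel` classes defined above (Pydantic coerces the string into the `StrEnum`):

```python
enterprise_info = {
    "PluginInstallationPermission": {
        "pluginInstallationScope": "official_only",
        "restrictToMarketplaceOnly": True,
    }
}

info = enterprise_info["PluginInstallationPermission"]
perm = PluginInstallationPermissionModel(
    plugin_installation_scope=info["pluginInstallationScope"],
    restrict_to_marketplace_only=info["restrictToMarketplaceOnly"],
)
assert perm.plugin_installation_scope is PluginInstallationScope.OFFICIAL_ONLY
assert perm.restrict_to_marketplace_only
```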
@ -3,7 +3,7 @@ import logging

import click

from core.entities import DEFAULT_PLUGIN_ID
from core.plugin.entities.plugin import GenericProviderID, ModelProviderID, ToolProviderID
from models.engine import db

logger = logging.getLogger(__name__)

@ -12,17 +12,17 @@ logger = logging.getLogger(__name__)
class PluginDataMigration:
    @classmethod
    def migrate(cls) -> None:
        cls.migrate_db_records("providers", "provider_name")  # large table
        cls.migrate_db_records("provider_models", "provider_name")
        cls.migrate_db_records("provider_orders", "provider_name")
        cls.migrate_db_records("tenant_default_models", "provider_name")
        cls.migrate_db_records("tenant_preferred_model_providers", "provider_name")
        cls.migrate_db_records("provider_model_settings", "provider_name")
        cls.migrate_db_records("load_balancing_model_configs", "provider_name")
        cls.migrate_db_records("providers", "provider_name", ModelProviderID)  # large table
        cls.migrate_db_records("provider_models", "provider_name", ModelProviderID)
        cls.migrate_db_records("provider_orders", "provider_name", ModelProviderID)
        cls.migrate_db_records("tenant_default_models", "provider_name", ModelProviderID)
        cls.migrate_db_records("tenant_preferred_model_providers", "provider_name", ModelProviderID)
        cls.migrate_db_records("provider_model_settings", "provider_name", ModelProviderID)
        cls.migrate_db_records("load_balancing_model_configs", "provider_name", ModelProviderID)
        cls.migrate_datasets()
        cls.migrate_db_records("embeddings", "provider_name")  # large table
        cls.migrate_db_records("dataset_collection_bindings", "provider_name")
        cls.migrate_db_records("tool_builtin_providers", "provider")
        cls.migrate_db_records("embeddings", "provider_name", ModelProviderID)  # large table
        cls.migrate_db_records("dataset_collection_bindings", "provider_name", ModelProviderID)
        cls.migrate_db_records("tool_builtin_providers", "provider_name", ToolProviderID)

    @classmethod
    def migrate_datasets(cls) -> None:

@ -66,9 +66,10 @@ limit 1000"""
                            fg="white",
                        )
                    )
                    retrieval_model["reranking_model"]["reranking_provider_name"] = (
                        f"{DEFAULT_PLUGIN_ID}/{retrieval_model['reranking_model']['reranking_provider_name']}/{retrieval_model['reranking_model']['reranking_provider_name']}"
                    )
                    # update google to langgenius/gemini/google etc.
                    retrieval_model["reranking_model"]["reranking_provider_name"] = ModelProviderID(
                        retrieval_model["reranking_model"]["reranking_provider_name"]
                    ).to_string()
                    retrieval_model_changed = True

            click.echo(

@ -86,9 +87,11 @@ limit 1000"""
                    update_retrieval_model_sql = ", retrieval_model = :retrieval_model"
                    params["retrieval_model"] = json.dumps(retrieval_model)

                params["provider_name"] = ModelProviderID(provider_name).to_string()

                sql = f"""update {table_name}
                set {provider_column_name} =
                    concat('{DEFAULT_PLUGIN_ID}/', {provider_column_name}, '/', {provider_column_name})
                    :provider_name
                {update_retrieval_model_sql}
                where id = :record_id"""
                conn.execute(db.text(sql), params)

@ -122,7 +125,9 @@ limit 1000"""
        )

    @classmethod
    def migrate_db_records(cls, table_name: str, provider_column_name: str) -> None:
    def migrate_db_records(
        cls, table_name: str, provider_column_name: str, provider_cls: type[GenericProviderID]
    ) -> None:
        click.echo(click.style(f"Migrating [{table_name}] data for plugin", fg="white"))

        processed_count = 0

@ -166,7 +171,8 @@ limit 1000"""
                )

                try:
                    updated_value = f"{DEFAULT_PLUGIN_ID}/{provider_name}/{provider_name}"
                    # update jina to langgenius/jina_tool/jina etc.
                    updated_value = provider_cls(provider_name).to_string()
                    batch_updates.append((updated_value, record_id))
                except Exception as e:
                    failed_ids.append(record_id)
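Reviewer note: the point of passing a provider-ID class is that `to_string()` centralizes the `organization/plugin/provider` expansion, including alias cases the inline f-string could not handle (`google` → `langgenius/gemini/google` for models, `jina` → `langgenius/jina_tool/jina` for tools, per the comments above). A toy equivalent with invented alias tables; the real mappings live inside `ModelProviderID`/`ToolProviderID`:

```python
DEFAULT_PLUGIN_ID = "langgenius"
MODEL_ALIASES = {"google": "gemini"}  # illustrative only
TOOL_ALIASES = {"jina": "jina_tool"}  # illustrative only

def to_string(provider_name: str, aliases: dict[str, str]) -> str:
    plugin = aliases.get(provider_name, provider_name)
    return f"{DEFAULT_PLUGIN_ID}/{plugin}/{provider_name}"

assert to_string("google", MODEL_ALIASES) == "langgenius/gemini/google"
assert to_string("jina", TOOL_ALIASES) == "langgenius/jina_tool/jina"
assert to_string("openai", MODEL_ALIASES) == "langgenius/openai/openai"
```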
@ -17,11 +17,18 @@ from core.plugin.entities.plugin import (
    PluginInstallation,
    PluginInstallationSource,
)
from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginListResponse, PluginUploadResponse
from core.plugin.entities.plugin_daemon import (
    PluginDecodeResponse,
    PluginInstallTask,
    PluginListResponse,
    PluginVerification,
)
from core.plugin.impl.asset import PluginAssetManager
from core.plugin.impl.debugging import PluginDebuggingClient
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
from services.errors.plugin import PluginInstallationForbiddenError
from services.feature_service import FeatureService, PluginInstallationScope

logger = logging.getLogger(__name__)

@ -86,6 +93,42 @@ class PluginService:
            logger.exception("failed to fetch latest plugin version")
        return result

    @staticmethod
    def _check_marketplace_only_permission():
        """
        Check if the marketplace only permission is enabled
        """
        features = FeatureService.get_system_features()
        if features.plugin_installation_permission.restrict_to_marketplace_only:
            raise PluginInstallationForbiddenError("Plugin installation is restricted to marketplace only")

    @staticmethod
    def _check_plugin_installation_scope(plugin_verification: Optional[PluginVerification]):
        """
        Check the plugin installation scope
        """
        features = FeatureService.get_system_features()

        match features.plugin_installation_permission.plugin_installation_scope:
            case PluginInstallationScope.OFFICIAL_ONLY:
                if (
                    plugin_verification is None
                    or plugin_verification.authorized_category != PluginVerification.AuthorizedCategory.Langgenius
                ):
                    raise PluginInstallationForbiddenError("Plugin installation is restricted to official only")
            case PluginInstallationScope.OFFICIAL_AND_SPECIFIC_PARTNERS:
                if plugin_verification is None or plugin_verification.authorized_category not in [
                    PluginVerification.AuthorizedCategory.Langgenius,
                    PluginVerification.AuthorizedCategory.Partner,
                ]:
                    raise PluginInstallationForbiddenError(
                        "Plugin installation is restricted to official and specific partners"
                    )
            case PluginInstallationScope.NONE:
                raise PluginInstallationForbiddenError("Installing plugins is not allowed")
            case PluginInstallationScope.ALL:
                pass

    @staticmethod
    def get_debugging_key(tenant_id: str) -> str:
        """

@ -208,6 +251,8 @@ class PluginService:
        # check if plugin pkg is already downloaded
        manager = PluginInstaller()

        features = FeatureService.get_system_features()

        try:
            manager.fetch_plugin_manifest(tenant_id, new_plugin_unique_identifier)
            # already downloaded, skip, and record install event

@ -215,7 +260,14 @@ class PluginService:
        except Exception:
            # plugin not installed, download and upload pkg
            pkg = download_plugin_pkg(new_plugin_unique_identifier)
            manager.upload_pkg(tenant_id, pkg, verify_signature=False)
            response = manager.upload_pkg(
                tenant_id,
                pkg,
                verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
            )

            # check if the plugin is available to install
            PluginService._check_plugin_installation_scope(response.verification)

        return manager.upgrade_plugin(
            tenant_id,

@ -239,6 +291,7 @@ class PluginService:
        """
        Upgrade plugin with github
        """
        PluginService._check_marketplace_only_permission()
        manager = PluginInstaller()
        return manager.upgrade_plugin(
            tenant_id,

@ -253,33 +306,43 @@ class PluginService:
        )

    @staticmethod
    def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginUploadResponse:
    def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginDecodeResponse:
        """
        Upload plugin package files

        returns: plugin_unique_identifier
        """
        PluginService._check_marketplace_only_permission()
        manager = PluginInstaller()
        return manager.upload_pkg(tenant_id, pkg, verify_signature)
        features = FeatureService.get_system_features()
        response = manager.upload_pkg(
            tenant_id,
            pkg,
            verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
        )
        return response

    @staticmethod
    def upload_pkg_from_github(
        tenant_id: str, repo: str, version: str, package: str, verify_signature: bool = False
    ) -> PluginUploadResponse:
    ) -> PluginDecodeResponse:
        """
        Install plugin from github release package files,
        returns plugin_unique_identifier
        """
        PluginService._check_marketplace_only_permission()
        pkg = download_with_size_limit(
            f"https://github.com/{repo}/releases/download/{version}/{package}", dify_config.PLUGIN_MAX_PACKAGE_SIZE
        )
        features = FeatureService.get_system_features()

        manager = PluginInstaller()
        return manager.upload_pkg(
        response = manager.upload_pkg(
            tenant_id,
            pkg,
            verify_signature,
            verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
        )
        return response

    @staticmethod
    def upload_bundle(

@ -289,11 +352,15 @@ class PluginService:
        Upload a plugin bundle and return the dependencies.
        """
        manager = PluginInstaller()
        PluginService._check_marketplace_only_permission()
        return manager.upload_bundle(tenant_id, bundle, verify_signature)

    @staticmethod
    def install_from_local_pkg(tenant_id: str, plugin_unique_identifiers: Sequence[str]):
        PluginService._check_marketplace_only_permission()

        manager = PluginInstaller()

        return manager.install_from_identifiers(
            tenant_id,
            plugin_unique_identifiers,

@ -307,6 +374,8 @@ class PluginService:
        Install plugin from github release package files,
        returns plugin_unique_identifier
        """
        PluginService._check_marketplace_only_permission()

        manager = PluginInstaller()
        return manager.install_from_identifiers(
            tenant_id,

@ -322,28 +391,33 @@ class PluginService:
        )

    @staticmethod
    def fetch_marketplace_pkg(
        tenant_id: str, plugin_unique_identifier: str, verify_signature: bool = False
    ) -> PluginDeclaration:
    def fetch_marketplace_pkg(tenant_id: str, plugin_unique_identifier: str) -> PluginDeclaration:
        """
        Fetch marketplace package
        """
        if not dify_config.MARKETPLACE_ENABLED:
            raise ValueError("marketplace is not enabled")

        features = FeatureService.get_system_features()

        manager = PluginInstaller()
        try:
            declaration = manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier)
        except Exception:
            pkg = download_plugin_pkg(plugin_unique_identifier)
            declaration = manager.upload_pkg(tenant_id, pkg, verify_signature).manifest
            response = manager.upload_pkg(
                tenant_id,
                pkg,
                verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
            )
            # check if the plugin is available to install
            PluginService._check_plugin_installation_scope(response.verification)
            declaration = response.manifest

        return declaration

    @staticmethod
    def install_from_marketplace_pkg(
        tenant_id: str, plugin_unique_identifiers: Sequence[str], verify_signature: bool = False
    ):
    def install_from_marketplace_pkg(tenant_id: str, plugin_unique_identifiers: Sequence[str]):
        """
        Install plugin from marketplace package files,
        returns installation task id

@ -353,15 +427,26 @@ class PluginService:

        manager = PluginInstaller()

        features = FeatureService.get_system_features()

        # check if already downloaded
        for plugin_unique_identifier in plugin_unique_identifiers:
            try:
                manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier)
                plugin_decode_response = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier)
                # check if the plugin is available to install
                PluginService._check_plugin_installation_scope(plugin_decode_response.verification)
                # already downloaded, skip
            except Exception:
                # plugin not installed, download and upload pkg
                pkg = download_plugin_pkg(plugin_unique_identifier)
                manager.upload_pkg(tenant_id, pkg, verify_signature)
                response = manager.upload_pkg(
                    tenant_id,
                    pkg,
                    verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only,
                )
                # check if the plugin is available to install
                PluginService._check_plugin_installation_scope(response.verification)

        return manager.install_from_identifiers(
            tenant_id,
@ -7,7 +7,7 @@ from sqlalchemy.orm import Session
from configs import dify_config
from core.helper.position_helper import is_filtered
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import GenericProviderID, ToolProviderID
from core.plugin.entities.plugin import ToolProviderID
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity

@ -290,7 +290,7 @@ class BuiltinToolManageService:
    def _fetch_builtin_provider(provider_name: str, tenant_id: str) -> BuiltinToolProvider | None:
        try:
            full_provider_name = provider_name
            provider_id_entity = GenericProviderID(provider_name)
            provider_id_entity = ToolProviderID(provider_name)
            provider_name = provider_id_entity.provider_name
            if provider_id_entity.organization != "langgenius":
                provider_obj = (

@ -315,7 +315,7 @@ class BuiltinToolManageService:
            if provider_obj is None:
                return None

            provider_obj.provider = GenericProviderID(provider_obj.provider).to_string()
            provider_obj.provider = ToolProviderID(provider_obj.provider).to_string()
            return provider_obj
        except Exception:
            # it's an old provider without organization
@ -5,7 +5,7 @@ from sqlalchemy import and_, func, or_, select
from sqlalchemy.orm import Session

from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
from models import App, EndUser, WorkflowAppLog, WorkflowRun
from models import Account, App, EndUser, WorkflowAppLog, WorkflowRun
from models.enums import CreatorUserRole


@ -21,6 +21,8 @@ class WorkflowAppService:
        created_at_after: datetime | None = None,
        page: int = 1,
        limit: int = 20,
        created_by_end_user_session_id: str | None = None,
        created_by_account: str | None = None,
    ) -> dict:
        """
        Get paginated workflow app logs using SQLAlchemy 2.0 style

@ -32,6 +34,8 @@ class WorkflowAppService:
        :param created_at_after: filter logs created after this timestamp
        :param page: page number
        :param limit: items per page
        :param created_by_end_user_session_id: filter by end user session id
        :param created_by_account: filter by account email
        :return: Pagination object
        """
        # Build base statement using SQLAlchemy 2.0 style

@ -71,6 +75,26 @@ class WorkflowAppService:
        if created_at_after:
            stmt = stmt.where(WorkflowAppLog.created_at >= created_at_after)

        # Filter by end user session id or account email
        if created_by_end_user_session_id:
            stmt = stmt.join(
                EndUser,
                and_(
                    WorkflowAppLog.created_by == EndUser.id,
                    WorkflowAppLog.created_by_role == CreatorUserRole.END_USER,
                    EndUser.session_id == created_by_end_user_session_id,
                ),
            )
        if created_by_account:
            stmt = stmt.join(
                Account,
                and_(
                    WorkflowAppLog.created_by == Account.id,
                    WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT,
                    Account.email == created_by_account,
                ),
            )

        stmt = stmt.order_by(WorkflowAppLog.created_at.desc())

        # Get total count using the same filters
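Reviewer note: each filter joins its table only when the corresponding argument is supplied, so the default query stays a plain select. The same shape on stand-in tables:

```python
from sqlalchemy import Column, Integer, String, and_, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Log(Base):
    __tablename__ = "logs"
    id = Column(Integer, primary_key=True)
    created_by = Column(Integer)

class Account(Base):
    __tablename__ = "accounts"
    id = Column(Integer, primary_key=True)
    email = Column(String)

def build(created_by_account: str | None):
    stmt = select(Log)
    if created_by_account:  # join only when the filter is requested
        stmt = stmt.join(
            Account,
            and_(Log.created_by == Account.id, Account.email == created_by_account),
        )
    return stmt

print(build("a@example.com"))  # emits the JOIN; build(None) stays a plain SELECT
```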
@ -30,11 +30,11 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
        logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red"))
        db.session.close()
        return

    tenant_id = dataset.tenant_id
    for document_id in document_ids:
        retry_indexing_cache_key = "document_{}_is_retried".format(document_id)
        # check document limit
        features = FeatureService.get_features(dataset.tenant_id)
        features = FeatureService.get_features(tenant_id)
        try:
            if features.billing.enabled:
                vector_space = features.vector_space
25
api/tests/integration_tests/vdb/matrixone/test_matrixone.py
Normal file
@ -0,0 +1,25 @@
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from tests.integration_tests.vdb.test_vector_store import (
    AbstractVectorTest,
    get_example_text,
    setup_mock_redis,
)


class MatrixoneVectorTest(AbstractVectorTest):
    def __init__(self):
        super().__init__()
        self.vector = MatrixoneVector(
            collection_name=self.collection_name,
            config=MatrixoneConfig(
                host="localhost", port=6001, user="dump", password="111", database="dify", metric="l2"
            ),
        )

    def get_ids_by_metadata_field(self):
        ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id)
        assert len(ids) == 1


def test_matrixone_vector(setup_mock_redis):
    MatrixoneVectorTest().run_all_tests()
@ -1,15 +1,11 @@
from unittest.mock import MagicMock, patch

import pytest

from core.rag.datasource.vdb.oceanbase.oceanbase_vector import (
    OceanBaseVector,
    OceanBaseVectorConfig,
)
from tests.integration_tests.vdb.__mock.tcvectordb import setup_tcvectordb_mock
from tests.integration_tests.vdb.test_vector_store import (
    AbstractVectorTest,
    get_example_text,
    setup_mock_redis,
)

@ -20,10 +16,11 @@ def oceanbase_vector():
        "dify_test_collection",
        config=OceanBaseVectorConfig(
            host="127.0.0.1",
            port="2881",
            user="root@test",
            port=2881,
            user="root",
            database="test",
            password="test",
            password="difyai123456",
            enable_hybrid_search=True,
        ),
    )

@ -33,39 +30,13 @@ class OceanBaseVectorTest(AbstractVectorTest):
        super().__init__()
        self.vector = vector

    def search_by_vector(self):
        hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding)
        assert len(hits_by_vector) == 0

    def search_by_full_text(self):
        hits_by_full_text = self.vector.search_by_full_text(query=get_example_text())
        assert len(hits_by_full_text) == 0

    def text_exists(self):
        exist = self.vector.text_exists(self.example_doc_id)
        assert exist == True

    def get_ids_by_metadata_field(self):
        ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id)
        assert len(ids) == 0
        assert len(ids) == 1


@pytest.fixture
def setup_mock_oceanbase_client():
    with patch("core.rag.datasource.vdb.oceanbase.oceanbase_vector.ObVecClient", new_callable=MagicMock) as mock_client:
        yield mock_client


@pytest.fixture
def setup_mock_oceanbase_vector(oceanbase_vector):
    with patch.object(oceanbase_vector, "_client"):
        yield oceanbase_vector


def test_oceanbase_vector(
    setup_mock_redis,
    setup_mock_oceanbase_client,
    setup_mock_oceanbase_vector,
    oceanbase_vector,
):
    OceanBaseVectorTest(oceanbase_vector).run_all_tests()
124
api/tests/unit_tests/libs/test_flask_utils.py
Normal file
@ -0,0 +1,124 @@
import contextvars
import threading
from typing import Optional

import pytest
from flask import Flask
from flask_login import LoginManager, UserMixin, current_user, login_user

from libs.flask_utils import preserve_flask_contexts


class User(UserMixin):
    """Simple User class for testing."""

    def __init__(self, id: str):
        self.id = id

    def get_id(self) -> str:
        return self.id


@pytest.fixture
def login_app(app: Flask) -> Flask:
    """Set up a Flask app with flask-login."""
    # Set a secret key for the app
    app.config["SECRET_KEY"] = "test-secret-key"

    login_manager = LoginManager()
    login_manager.init_app(app)

    @login_manager.user_loader
    def load_user(user_id: str) -> Optional[User]:
        if user_id == "test_user":
            return User("test_user")
        return None

    return app


@pytest.fixture
def test_user() -> User:
    """Create a test user."""
    return User("test_user")


def test_current_user_not_accessible_across_threads(login_app: Flask, test_user: User):
    """
    Test that current_user is not accessible in a different thread without preserve_flask_contexts.

    This test demonstrates that without preserve_flask_contexts, we cannot access
    current_user in a different thread, even with app_context.
    """
    # Log in the user in the main thread
    with login_app.test_request_context():
        login_user(test_user)
        assert current_user.is_authenticated
        assert current_user.id == "test_user"

    # Store the result of the thread execution
    result = {"user_accessible": True, "error": None}

    # Define a function to run in a separate thread
    def check_user_in_thread():
        try:
            # Try to access current_user in a different thread with app_context
            with login_app.app_context():
                # This should fail because current_user is not accessible across threads
                # without preserve_flask_contexts
                result["user_accessible"] = current_user.is_authenticated
        except Exception as e:
            result["error"] = str(e)  # type: ignore

    # Run the function in a separate thread
    thread = threading.Thread(target=check_user_in_thread)
    thread.start()
    thread.join()

    # Verify that we got an error or current_user is not authenticated
    assert result["error"] is not None or (result["user_accessible"] is not None and not result["user_accessible"])


def test_current_user_accessible_with_preserve_flask_contexts(login_app: Flask, test_user: User):
    """
    Test that current_user is accessible in a different thread with preserve_flask_contexts.

    This test demonstrates that with preserve_flask_contexts, we can access
    current_user in a different thread.
    """
    # Log in the user in the main thread
    with login_app.test_request_context():
        login_user(test_user)
        assert current_user.is_authenticated
        assert current_user.id == "test_user"

        # Save the context variables
        context_vars = contextvars.copy_context()

    # Store the result of the thread execution
    result = {"user_accessible": False, "user_id": None, "error": None}

    # Define a function to run in a separate thread
    def check_user_in_thread_with_manager():
        try:
            # Use preserve_flask_contexts to access current_user in a different thread
            with preserve_flask_contexts(login_app, context_vars):
                from flask_login import current_user

                if current_user:
                    result["user_accessible"] = True
                    result["user_id"] = current_user.id
                else:
                    result["user_accessible"] = False
        except Exception as e:
            result["error"] = str(e)  # type: ignore

    # Run the function in a separate thread
    thread = threading.Thread(target=check_user_in_thread_with_manager)
    thread.start()
    thread.join()

    # Verify that current_user is accessible and has the correct ID
    assert result["error"] is None
    assert result["user_accessible"] is True
    assert result["user_id"] == "test_user"
4436
api/uv.lock
generated
File diff suppressed because it is too large
@ -399,7 +399,7 @@ SUPABASE_URL=your-server-url
|
||||
# ------------------------------
|
||||
|
||||
# The type of vector store to use.
|
||||
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`.
|
||||
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
|
||||
VECTOR_STORE=weaviate
|
||||
|
||||
# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
|
||||
@ -490,6 +490,13 @@ TIDB_VECTOR_USER=
|
||||
TIDB_VECTOR_PASSWORD=
|
||||
TIDB_VECTOR_DATABASE=dify
|
||||
|
||||
# Matrixone vector configurations.
|
||||
MATRIXONE_HOST=matrixone
|
||||
MATRIXONE_PORT=6001
|
||||
MATRIXONE_USER=dump
|
||||
MATRIXONE_PASSWORD=111
|
||||
MATRIXONE_DATABASE=dify
|
||||
|
||||
# Tidb on qdrant configuration, only available when VECTOR_STORE is `tidb_on_qdrant`
|
||||
TIDB_ON_QDRANT_URL=http://127.0.0.1
|
||||
TIDB_ON_QDRANT_API_KEY=dify
|
||||
@ -719,10 +726,11 @@ NOTION_INTERNAL_SECRET=
# Mail related configuration
# ------------------------------

# Mail type, support: resend, smtp
# Mail type, support: resend, smtp, sendgrid
MAIL_TYPE=resend

# Default send from email address, if not specified
# If using SendGrid, use the 'from' field for authentication if necessary.
MAIL_DEFAULT_SEND_FROM=

# API-Key for the Resend email provider, used when MAIL_TYPE is `resend`.
@ -738,6 +746,9 @@ SMTP_PASSWORD=
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false

# Sendgrid configuration
SENDGRID_API_KEY=

# ------------------------------
# Others Configuration
# ------------------------------
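The new sendgrid mail type only needs SENDGRID_API_KEY and a verified sender. As a hedged illustration (this calls SendGrid's public v3 Mail Send endpoint directly, not Dify's internal mail client), a TypeScript smoke test could look like:

// Hypothetical smoke test for MAIL_TYPE=sendgrid via SendGrid's v3 API.
async function sendTestMail(apiKey: string, from: string, to: string): Promise<void> {
  const res = await fetch('https://api.sendgrid.com/v3/mail/send', {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      personalizations: [{ to: [{ email: to }] }],
      from: { email: from }, // must be a verified sender in SendGrid
      subject: 'Dify mail configuration test',
      content: [{ type: 'text/plain', value: 'MAIL_TYPE=sendgrid works.' }],
    }),
  })
  if (!res.ok) // SendGrid answers 202 on success
    throw new Error(`SendGrid rejected the request: ${res.status}`)
}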
@ -815,7 +826,8 @@ TEXT_GENERATION_TIMEOUT_MS=60000
# Environment Variables for db Service
# ------------------------------

PGUSER=${DB_USERNAME}
# The name of the default postgres user.
POSTGRES_USER=${DB_USERNAME}
# The password for the default postgres user.
POSTGRES_PASSWORD=${DB_PASSWORD}
# The name of the default postgres database.
@ -1067,6 +1079,7 @@ PLUGIN_MEDIA_CACHE_PATH=assets
# Plugin oss bucket
PLUGIN_STORAGE_OSS_BUCKET=
# Plugin oss s3 credentials
PLUGIN_S3_USE_AWS=false
PLUGIN_S3_USE_AWS_MANAGED_IAM=false
PLUGIN_S3_ENDPOINT=
PLUGIN_S3_USE_PATH_STYLE=false
@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
  # API service
  api:
    image: langgenius/dify-api:1.4.2
    image: langgenius/dify-api:1.4.3
    restart: always
    environment:
      # Use the shared environment variables.
@ -31,7 +31,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:1.4.2
    image: langgenius/dify-api:1.4.3
    restart: always
    environment:
      # Use the shared environment variables.
@ -57,7 +57,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.4.2
    image: langgenius/dify-web:1.4.3
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -84,7 +84,7 @@ services:
    image: postgres:15-alpine
    restart: always
    environment:
      PGUSER: ${PGUSER:-postgres}
      POSTGRES_USER: ${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
      POSTGRES_DB: ${POSTGRES_DB:-dify}
      PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -168,6 +168,7 @@ services:
      PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
      PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
      S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
      S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
      S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
      S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
      AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
@ -434,7 +435,7 @@ services:

  # OceanBase vector database
  oceanbase:
    image: oceanbase/oceanbase-ce:4.3.5.1-101000042025031818
    image: oceanbase/oceanbase-ce:4.3.5-lts
    container_name: oceanbase
    profiles:
      - oceanbase
@ -449,9 +450,15 @@ services:
      OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
      OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
      OB_SERVER_IP: 127.0.0.1
      MODE: MINI
      MODE: mini
    ports:
      - "${OCEANBASE_VECTOR_PORT:-2881}:2881"
    healthcheck:
      test: [ 'CMD-SHELL', 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"' ]
      interval: 10s
      retries: 30
      start_period: 30s
      timeout: 10s

  # Oracle vector database
  oracle:
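The healthcheck added above runs obclient inside the container; OceanBase also speaks the MySQL protocol on 2881, so the same probe can be made from the host. A sketch under that assumption, again with the mysql2 package ('root@test' targets the test tenant, matching the obclient flags):

// Host-side equivalent of the container healthcheck; an illustrative sketch,
// not part of the compose file.
import mysql from 'mysql2/promise'

async function oceanbaseReady(): Promise<boolean> {
  try {
    const conn = await mysql.createConnection({
      host: '127.0.0.1',
      port: Number(process.env.OCEANBASE_VECTOR_PORT ?? 2881),
      user: 'root@test',
      password: process.env.OCEANBASE_VECTOR_PASSWORD ?? 'difyai123456',
    })
    await conn.query('SELECT 1')
    await conn.end()
    return true
  }
  catch {
    return false
  }
}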
@ -610,6 +617,18 @@ services:
    ports:
      - ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}

  # Matrixone vector store.
  matrixone:
    hostname: matrixone
    image: matrixorigin/matrixone:2.1.1
    profiles:
      - matrixone
    restart: always
    volumes:
      - ./volumes/matrixone/data:/mo-data
    ports:
      - ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}

  # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
  # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
  elasticsearch:
@ -104,6 +104,7 @@ services:
      PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
      PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
      PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
      S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
      S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
      S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
      S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
@ -195,6 +195,11 @@ x-shared-env: &shared-api-worker-env
  TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-}
  TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-}
  TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify}
  MATRIXONE_HOST: ${MATRIXONE_HOST:-matrixone}
  MATRIXONE_PORT: ${MATRIXONE_PORT:-6001}
  MATRIXONE_USER: ${MATRIXONE_USER:-dump}
  MATRIXONE_PASSWORD: ${MATRIXONE_PASSWORD:-111}
  MATRIXONE_DATABASE: ${MATRIXONE_DATABASE:-dify}
  TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1}
  TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify}
  TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20}
@ -322,6 +327,7 @@ x-shared-env: &shared-api-worker-env
  SMTP_PASSWORD: ${SMTP_PASSWORD:-}
  SMTP_USE_TLS: ${SMTP_USE_TLS:-true}
  SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false}
  SENDGRID_API_KEY: ${SENDGRID_API_KEY:-}
  INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000}
  INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72}
  RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5}
@ -356,7 +362,7 @@ x-shared-env: &shared-api-worker-env
  MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
  MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
  TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
  PGUSER: ${PGUSER:-${DB_USERNAME}}
  POSTGRES_USER: ${POSTGRES_USER:-${DB_USERNAME}}
  POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}}
  POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}}
  PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -467,6 +473,7 @@ x-shared-env: &shared-api-worker-env
  PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
  PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
  PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
  PLUGIN_S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
  PLUGIN_S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
  PLUGIN_S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
  PLUGIN_S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
@ -508,7 +515,7 @@ x-shared-env: &shared-api-worker-env
services:
  # API service
  api:
    image: langgenius/dify-api:1.4.2
    image: langgenius/dify-api:1.4.3
    restart: always
    environment:
      # Use the shared environment variables.
@ -537,7 +544,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:1.4.2
    image: langgenius/dify-api:1.4.3
    restart: always
    environment:
      # Use the shared environment variables.
@ -563,7 +570,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.4.2
    image: langgenius/dify-web:1.4.3
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -590,7 +597,7 @@ services:
    image: postgres:15-alpine
    restart: always
    environment:
      PGUSER: ${PGUSER:-postgres}
      POSTGRES_USER: ${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456}
      POSTGRES_DB: ${POSTGRES_DB:-dify}
      PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -674,6 +681,7 @@ services:
      PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
      PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
      S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
      S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-}
      S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
      S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
      AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
@ -940,7 +948,7 @@ services:

  # OceanBase vector database
  oceanbase:
    image: oceanbase/oceanbase-ce:4.3.5.1-101000042025031818
    image: oceanbase/oceanbase-ce:4.3.5-lts
    container_name: oceanbase
    profiles:
      - oceanbase
@ -955,9 +963,15 @@ services:
      OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
      OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
      OB_SERVER_IP: 127.0.0.1
      MODE: MINI
      MODE: mini
    ports:
      - "${OCEANBASE_VECTOR_PORT:-2881}:2881"
    healthcheck:
      test: [ 'CMD-SHELL', 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"' ]
      interval: 10s
      retries: 30
      start_period: 30s
      timeout: 10s

  # Oracle vector database
  oracle:
@ -1116,6 +1130,18 @@ services:
    ports:
      - ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}

  # Matrixone vector store.
  matrixone:
    hostname: matrixone
    image: matrixorigin/matrixone:2.1.1
    profiles:
      - matrixone
    restart: always
    volumes:
      - ./volumes/matrixone/data:/mo-data
    ports:
      - ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}

  # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
  # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
  elasticsearch:
@ -1,7 +1,7 @@
# ------------------------------
# Environment Variables for db Service
# ------------------------------
PGUSER=postgres
POSTGRES_USER=postgres
# The password for the default postgres user.
POSTGRES_PASSWORD=difyai123456
# The name of the default postgres database.
@ -133,6 +133,7 @@ PLUGIN_MEDIA_CACHE_PATH=assets
PLUGIN_STORAGE_OSS_BUCKET=
# Plugin oss s3 credentials
PLUGIN_S3_USE_AWS_MANAGED_IAM=false
PLUGIN_S3_USE_AWS=false
PLUGIN_S3_ENDPOINT=
PLUGIN_S3_USE_PATH_STYLE=false
PLUGIN_AWS_ACCESS_KEY=
@ -15,7 +15,7 @@ const Overview = async (props: IDevelopProps) => {
  } = params

  return (
    <div className="h-full overflow-scroll bg-chatbot-bg px-4 py-6 sm:px-12">
    <div className="h-full overflow-y-auto bg-chatbot-bg px-4 py-6 sm:px-12">
      <ApikeyInfoPanel />
      <ChartView
        appId={appId}
@ -9,6 +9,7 @@ import { useTranslation } from 'react-i18next'
import { useDebounceFn } from 'ahooks'
import {
  RiApps2Line,
  RiDragDropLine,
  RiExchange2Line,
  RiFile4Line,
  RiMessage3Line,
@ -16,7 +17,8 @@ import {
} from '@remixicon/react'
import AppCard from './AppCard'
import NewAppCard from './NewAppCard'
import useAppsQueryState from './hooks/useAppsQueryState'
import useAppsQueryState from './hooks/use-apps-query-state'
import { useDSLDragDrop } from './hooks/use-dsl-drag-drop'
import type { AppListResponse } from '@/models/app'
import { fetchAppList } from '@/service/apps'
import { useAppContext } from '@/context/app-context'
@ -29,6 +31,7 @@ import { useStore as useTagStore } from '@/app/components/base/tag-management/st
import TagManagementModal from '@/app/components/base/tag-management'
import TagFilter from '@/app/components/base/tag-management/filter'
import CheckboxWithLabel from '@/app/components/datasets/create/website/base/checkbox-with-label'
import CreateFromDSLModal from '@/app/components/app/create-from-dsl-modal'

const getKey = (
  pageIndex: number,
@ -67,6 +70,9 @@ const Apps = () => {
  const [tagFilterValue, setTagFilterValue] = useState<string[]>(tagIDs)
  const [searchKeywords, setSearchKeywords] = useState(keywords)
  const newAppCardRef = useRef<HTMLDivElement>(null)
  const containerRef = useRef<HTMLDivElement>(null)
  const [showCreateFromDSLModal, setShowCreateFromDSLModal] = useState(false)
  const [droppedDSLFile, setDroppedDSLFile] = useState<File | undefined>()
  const setKeywords = useCallback((keywords: string) => {
    setQuery(prev => ({ ...prev, keywords }))
  }, [setQuery])
@ -74,6 +80,17 @@ const Apps = () => {
    setQuery(prev => ({ ...prev, tagIDs }))
  }, [setQuery])

  const handleDSLFileDropped = useCallback((file: File) => {
    setDroppedDSLFile(file)
    setShowCreateFromDSLModal(true)
  }, [])

  const { dragging } = useDSLDragDrop({
    onDSLFileDropped: handleDSLFileDropped,
    containerRef,
    enabled: isCurrentWorkspaceEditor,
  })

  const { data, isLoading, error, setSize, mutate } = useSWRInfinite(
    (pageIndex: number, previousPageData: AppListResponse) => getKey(pageIndex, previousPageData, activeTab, isCreatedByMe, tagIDs, searchKeywords),
    fetchAppList,
@ -151,47 +168,81 @@ const Apps = () => {

  return (
    <>
      <div className='sticky top-0 z-10 flex flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]'>
        <TabSliderNew
          value={activeTab}
          onChange={setActiveTab}
          options={options}
        />
        <div className='flex items-center gap-2'>
          <CheckboxWithLabel
            className='mr-2'
            label={t('app.showMyCreatedAppsOnly')}
            isChecked={isCreatedByMe}
            onChange={handleCreatedByMeChange}
          />
          <TagFilter type='app' value={tagFilterValue} onChange={handleTagsChange} />
          <Input
            showLeftIcon
            showClearIcon
            wrapperClassName='w-[200px]'
            value={keywords}
            onChange={e => handleKeywordsChange(e.target.value)}
            onClear={() => handleKeywordsChange('')}
    <div ref={containerRef} className='relative flex h-0 shrink-0 grow flex-col overflow-y-auto bg-background-body'>
      {dragging && (
        <div className="absolute inset-0 z-50 m-0.5 rounded-2xl border-2 border-dashed border-components-dropzone-border-accent bg-[rgba(21,90,239,0.14)] p-2">
        </div>
      )}

      <div className='sticky top-0 z-10 flex flex-wrap items-center justify-between gap-y-2 bg-background-body px-12 pb-2 pt-4 leading-[56px]'>
        <TabSliderNew
          value={activeTab}
          onChange={setActiveTab}
          options={options}
        />
        <div className='flex items-center gap-2'>
          <CheckboxWithLabel
            className='mr-2'
            label={t('app.showMyCreatedAppsOnly')}
            isChecked={isCreatedByMe}
            onChange={handleCreatedByMeChange}
          />
          <TagFilter type='app' value={tagFilterValue} onChange={handleTagsChange} />
          <Input
            showLeftIcon
            showClearIcon
            wrapperClassName='w-[200px]'
            value={keywords}
            onChange={e => handleKeywordsChange(e.target.value)}
            onClear={() => handleKeywordsChange('')}
          />
        </div>
      </div>
      {(data && data[0].total > 0)
        ? <div className='relative grid grow grid-cols-1 content-start gap-4 px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
          {isCurrentWorkspaceEditor
            && <NewAppCard ref={newAppCardRef} onSuccess={mutate} />}
          {data.map(({ data: apps }) => apps.map(app => (
            <AppCard key={app.id} app={app} onRefresh={mutate} />
          )))}
        </div>
        : <div className='relative grid grow grid-cols-1 content-start gap-4 overflow-hidden px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
          {isCurrentWorkspaceEditor
            && <NewAppCard ref={newAppCardRef} className='z-10' onSuccess={mutate} />}
          <NoAppsFound />
        </div>}

      {isCurrentWorkspaceEditor && (
        <div
          className={`flex items-center justify-center gap-2 py-4 ${dragging ? 'text-text-accent' : 'text-text-quaternary'}`}
          role="region"
          aria-label={t('app.newApp.dropDSLToCreateApp')}
        >
          <RiDragDropLine className="h-4 w-4" />
          <span className="system-xs-regular">{t('app.newApp.dropDSLToCreateApp')}</span>
        </div>
      )}
      <CheckModal />
      <div ref={anchorRef} className='h-0'> </div>
      {showTagManagementModal && (
        <TagManagementModal type='app' show={showTagManagementModal} />
      )}
    </div>
    {(data && data[0].total > 0)
      ? <div className='relative grid grow grid-cols-1 content-start gap-4 px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
        {isCurrentWorkspaceEditor
          && <NewAppCard ref={newAppCardRef} onSuccess={mutate} />}
        {data.map(({ data: apps }) => apps.map(app => (
          <AppCard key={app.id} app={app} onRefresh={mutate} />
        )))}
      </div>
      : <div className='relative grid grow grid-cols-1 content-start gap-4 overflow-hidden px-12 pt-2 sm:grid-cols-1 md:grid-cols-2 xl:grid-cols-4 2xl:grid-cols-5 2k:grid-cols-6'>
        {isCurrentWorkspaceEditor
          && <NewAppCard ref={newAppCardRef} className='z-10' onSuccess={mutate} />}
        <NoAppsFound />
      </div>}
    <CheckModal />
    <div ref={anchorRef} className='h-0'> </div>
    {showTagManagementModal && (
      <TagManagementModal type='app' show={showTagManagementModal} />

    {showCreateFromDSLModal && (
      <CreateFromDSLModal
        show={showCreateFromDSLModal}
        onClose={() => {
          setShowCreateFromDSLModal(false)
          setDroppedDSLFile(undefined)
        }}
        onSuccess={() => {
          setShowCreateFromDSLModal(false)
          setDroppedDSLFile(undefined)
          mutate()
        }}
        droppedFile={droppedDSLFile}
      />
    )}
    </>
  )
72
web/app/(commonLayout)/apps/hooks/use-dsl-drag-drop.ts
Normal file
@ -0,0 +1,72 @@
import { useEffect, useState } from 'react'

type DSLDragDropHookProps = {
  onDSLFileDropped: (file: File) => void
  containerRef: React.RefObject<HTMLDivElement>
  enabled?: boolean
}

export const useDSLDragDrop = ({ onDSLFileDropped, containerRef, enabled = true }: DSLDragDropHookProps) => {
  const [dragging, setDragging] = useState(false)

  const handleDragEnter = (e: DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
    if (e.dataTransfer?.types.includes('Files'))
      setDragging(true)
  }

  const handleDragOver = (e: DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
  }

  const handleDragLeave = (e: DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
    if (e.relatedTarget === null || !containerRef.current?.contains(e.relatedTarget as Node))
      setDragging(false)
  }

  const handleDrop = (e: DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
    setDragging(false)

    if (!e.dataTransfer)
      return

    const files = [...e.dataTransfer.files]
    if (files.length === 0)
      return

    const file = files[0]
    if (file.name.toLowerCase().endsWith('.yaml') || file.name.toLowerCase().endsWith('.yml'))
      onDSLFileDropped(file)
  }

  useEffect(() => {
    if (!enabled)
      return

    const current = containerRef.current
    if (current) {
      current.addEventListener('dragenter', handleDragEnter)
      current.addEventListener('dragover', handleDragOver)
      current.addEventListener('dragleave', handleDragLeave)
      current.addEventListener('drop', handleDrop)
    }
    return () => {
      if (current) {
        current.removeEventListener('dragenter', handleDragEnter)
        current.removeEventListener('dragover', handleDragOver)
        current.removeEventListener('dragleave', handleDragLeave)
        current.removeEventListener('drop', handleDrop)
      }
    }
  }, [containerRef, enabled])

  return {
    dragging: enabled ? dragging : false,
  }
}
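A minimal consumer sketch for the hook above; only useDSLDragDrop and its props come from this diff, the component itself is illustrative:

// Hypothetical consumer of useDSLDragDrop.
import { useCallback, useRef, useState } from 'react'
import { useDSLDragDrop } from './use-dsl-drag-drop'

const DslDropZone = () => {
  const containerRef = useRef<HTMLDivElement>(null)
  const [file, setFile] = useState<File>()
  const onDSLFileDropped = useCallback((f: File) => setFile(f), [])

  // dragging is true while a file is held over containerRef's element
  const { dragging } = useDSLDragDrop({ onDSLFileDropped, containerRef, enabled: true })

  return (
    <div ref={containerRef}>
      {dragging ? 'Drop your .yaml / .yml DSL here' : (file?.name ?? 'Drag a DSL file in')}
    </div>
  )
}

export default DslDropZone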
@ -25,9 +25,8 @@ import Loading from '@/app/components/base/loading'
import DatasetDetailContext from '@/context/dataset-detail'
import { DataSourceType } from '@/models/datasets'
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import { LanguagesSupported } from '@/i18n/language'
import { useStore } from '@/app/components/app/store'
import { getLocaleOnClient } from '@/i18n'
import { useDocLink } from '@/context/i18n'
import { useAppContext } from '@/context/app-context'
import Tooltip from '@/app/components/base/tooltip'
import LinkedAppsPanel from '@/app/components/base/linked-apps-panel'
@ -45,9 +44,9 @@ type IExtraInfoProps = {
}

const ExtraInfo = ({ isMobile, relatedApps, expand }: IExtraInfoProps) => {
  const locale = getLocaleOnClient()
  const [isShowTips, { toggle: toggleTips, set: setShowTips }] = useBoolean(!isMobile)
  const { t } = useTranslation()
  const docLink = useDocLink()

  const hasRelatedApps = relatedApps?.data && relatedApps?.data?.length > 0
  const relatedAppsTotal = relatedApps?.data?.length || 0
@ -97,11 +96,7 @@ const ExtraInfo = ({ isMobile, relatedApps, expand }: IExtraInfoProps) => {
          <div className='my-2 text-xs text-text-tertiary'>{t('common.datasetMenus.emptyTip')}</div>
          <a
            className='mt-2 inline-flex cursor-pointer items-center text-xs text-text-accent'
            href={
              locale === LanguagesSupported[1]
                ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
                : 'https://docs.dify.ai/guides/knowledge-base/integrate-knowledge-within-application'
            }
            href={docLink('/guides/knowledge-base/integrate-knowledge-within-application')}
            target='_blank' rel='noopener noreferrer'
          >
            <RiBookOpenLine className='mr-1 text-text-accent' />
@ -81,7 +81,7 @@ const Datasets = ({
      currentContainer?.removeEventListener('scroll', onScroll)
      onScroll.cancel()
    }
  }, [onScroll])
  }, [containerRef, onScroll])

  return (
    <nav className='grid shrink-0 grow grid-cols-1 content-start gap-4 px-12 pt-2 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4'>
@ -5,34 +5,34 @@ import {
  RiAddLine,
  RiArrowRightLine,
} from '@remixicon/react'
import Link from 'next/link'

const CreateAppCard = (
  {
    ref,
    ..._
  },
) => {
type CreateAppCardProps = {
  ref?: React.Ref<HTMLAnchorElement>
}

const CreateAppCard = ({ ref }: CreateAppCardProps) => {
  const { t } = useTranslation()

  return (
    <div className='bg-background-default-dimm flex min-h-[160px] flex-col rounded-xl border-[0.5px]
      border-components-panel-border transition-all duration-200 ease-in-out'
    >
      <a ref={ref} className='group flex grow cursor-pointer items-start p-4' href={`${basePath}/datasets/create`}>
      <Link ref={ref} className='group flex grow cursor-pointer items-start p-4' href={`${basePath}/datasets/create`}>
        <div className='flex items-center gap-3'>
          <div className='flex h-10 w-10 items-center justify-center rounded-lg border border-dashed border-divider-regular bg-background-default-lighter
            p-2 group-hover:border-solid group-hover:border-effects-highlight group-hover:bg-background-default-dodge'
          >
            <RiAddLine className='h-4 w-4 text-text-tertiary group-hover:text-text-accent'/>
            <RiAddLine className='h-4 w-4 text-text-tertiary group-hover:text-text-accent' />
          </div>
          <div className='system-md-semibold text-text-secondary group-hover:text-text-accent'>{t('dataset.createDataset')}</div>
        </div>
      </a>
      </Link>
      <div className='system-xs-regular p-4 pt-0 text-text-tertiary'>{t('dataset.createDatasetIntro')}</div>
      <a className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={`${basePath}/datasets/connect`}>
      <Link className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={`${basePath}/datasets/connect`}>
        <div className='system-xs-medium text-text-tertiary group-hover:text-text-accent'>{t('dataset.connectDataset')}</div>
        <RiArrowRightLine className='h-3.5 w-3.5 text-text-tertiary group-hover:text-text-accent' />
      </a>
      </Link>
    </div>
  )
}
@ -1,9 +1,25 @@
'use client'

import Loading from '@/app/components/base/loading'
import { useAppContext } from '@/context/app-context'
import { ExternalApiPanelProvider } from '@/context/external-api-panel-context'
import { ExternalKnowledgeApiProvider } from '@/context/external-knowledge-api-context'
import { useRouter } from 'next/navigation'
import { useEffect } from 'react'

export default function DatasetsLayout({ children }: { children: React.ReactNode }) {
  const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, currentWorkspace, isLoadingCurrentWorkspace } = useAppContext()
  const router = useRouter()

  useEffect(() => {
    if (isLoadingCurrentWorkspace || !currentWorkspace.id)
      return
    if (!(isCurrentWorkspaceEditor || isCurrentWorkspaceDatasetOperator))
      router.replace('/apps')
  }, [isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, isLoadingCurrentWorkspace, currentWorkspace, router])

  if (isLoadingCurrentWorkspace || !(isCurrentWorkspaceEditor || isCurrentWorkspaceDatasetOperator))
    return <Loading type='app' />
  return (
    <ExternalKnowledgeApiProvider>
      <ExternalApiPanelProvider>
@ -54,7 +54,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='indexing_technique' type='string' key='indexing_technique'>
    Index mode
    - <code>high_quality</code> High quality: embedding using embedding model, built as vector database index
    - <code>high_quality</code> High quality: Embedding using embedding model, built as vector database index
    - <code>economy</code> Economy: Build using inverted index of keyword table index
  </Property>
  <Property name='doc_form' type='string' key='doc_form'>

@ -55,7 +55,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  <Property name='indexing_technique' type='string' key='indexing_technique'>
    索引方式
    - <code>high_quality</code> 高质量:使用 embedding 模型进行嵌入,构建为向量数据库索引
    - <code>high_quality</code> 高质量:使用 Embedding 模型进行嵌入,构建为向量数据库索引
    - <code>economy</code> 经济:使用 keyword table index 的倒排索引进行构建
  </Property>
  <Property name='doc_form' type='string' key='doc_form'>
@ -19,7 +19,7 @@ const Layout: FC<{
  const [isLoading, setIsLoading] = useState(true)
  useEffect(() => {
    (async () => {
      if (!systemFeatures.webapp_auth.enabled) {
      if (!isGlobalPending && !systemFeatures.webapp_auth.enabled) {
        setIsLoading(false)
        return
      }
@ -37,7 +37,7 @@ const Layout: FC<{
      setWebAppAccessMode(ret?.accessMode || AccessMode.PUBLIC)
      setIsLoading(false)
    })()
  }, [pathname, redirectUrl, setWebAppAccessMode])
  }, [pathname, redirectUrl, setWebAppAccessMode, isGlobalPending, systemFeatures.webapp_auth.enabled])
  if (isLoading || isGlobalPending) {
    return <div className='flex h-full w-full items-center justify-center'>
      <Loading />
@ -314,10 +314,10 @@ const AppPublisher = ({
            {!isAppAccessSet && <p className='system-xs-regular mt-1 text-text-warning'>{t('app.publishApp.notSetDesc')}</p>}
          </div>}
          <div className='flex flex-col gap-y-1 border-t-[0.5px] border-t-divider-regular p-4 pt-3'>
            <Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
            <Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || appDetail?.access_mode === AccessMode.EXTERNAL_MEMBERS || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
              <SuggestedAction
                className='flex-1'
                disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && !userCanAccessApp?.result)}
                disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && appDetail?.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result)}
                link={appURL}
                icon={<RiPlayCircleLine className='h-4 w-4' />}
              >
@ -326,10 +326,10 @@ const AppPublisher = ({
            </Tooltip>
            {appDetail?.mode === 'workflow' || appDetail?.mode === 'completion'
              ? (
                <Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
                <Tooltip triggerClassName='flex' disabled={!systemFeatures.webapp_auth.enabled || appDetail.access_mode === AccessMode.EXTERNAL_MEMBERS || userCanAccessApp?.result} popupContent={t('app.noAccessPermission')} asChild={false}>
                  <SuggestedAction
                    className='flex-1'
                    disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && !userCanAccessApp?.result)}
                    disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result)}
                    link={`${appURL}${appURL.includes('?') ? '&' : '?'}mode=batch`}
                    icon={<RiPlayList2Line className='h-4 w-4' />}
                  >
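Both disabled expressions above encode the same rule: a published app is runnable when webapp auth is off, when the app is shared with external members, or when the current user passes the access check. A standalone restatement (type and the AccessMode literal are hypothetical):

// Hypothetical restatement of the gating rule shared by both SuggestedAction buttons.
type RunGate = {
  publishedAt?: number
  webappAuthEnabled: boolean
  accessMode?: string // e.g. AccessMode.EXTERNAL_MEMBERS
  userCanAccess?: boolean
}

const isRunDisabled = ({ publishedAt, webappAuthEnabled, accessMode, userCanAccess }: RunGate): boolean =>
  !publishedAt
  || (webappAuthEnabled
    && accessMode !== 'external_members' // EXTERNAL_MEMBERS apps skip the per-user check
    && !userCanAccess)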
@ -1,13 +1,11 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import { useContext } from 'use-context-selector'
import { useTranslation } from 'react-i18next'
import OperationBtn from '@/app/components/app/configuration/base/operation-btn'
import Panel from '@/app/components/app/configuration/base/feature-panel'
import { MessageClockCircle } from '@/app/components/base/icons/src/vender/solid/general'
import I18n from '@/context/i18n'
import { LanguagesSupported } from '@/i18n/language'
import { useDocLink } from '@/context/i18n'

type Props = {
  showWarning: boolean
@ -19,7 +17,7 @@ const HistoryPanel: FC<Props> = ({
  onShowEditModal,
}) => {
  const { t } = useTranslation()
  const { locale } = useContext(I18n)
  const docLink = useDocLink()

  return (
    <Panel
@ -45,9 +43,8 @@ const HistoryPanel: FC<Props> = ({
      {showWarning && (
        <div className='flex justify-between rounded-b-xl bg-background-section-burn px-3 py-2 text-xs text-text-secondary'>
          <div>{t('appDebug.feature.conversationHistory.tip')}
            <a href={`${locale === LanguagesSupported[1]
              ? 'https://docs.dify.ai/zh-hans/learn-more/extended-reading/prompt-engineering/README'
              : 'https://docs.dify.ai/en/features/prompt-engineering'}`}
            <a href={docLink('/learn-more/extended-reading/what-is-llmops',
              { 'zh-Hans': '/learn-more/extended-reading/prompt-engineering/README' })}
            target='_blank' rel='noopener noreferrer'
            className='text-[#155EEF]'>{t('appDebug.feature.conversationHistory.learnMore')}
            </a>
@ -31,6 +31,7 @@ import {
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { fetchMembers } from '@/service/common'
import type { Member } from '@/models/common'
import { useDocLink } from '@/context/i18n'

type SettingsModalProps = {
  currentDataset: DataSet
@ -58,6 +59,7 @@ const SettingsModal: FC<SettingsModalProps> = ({
    currentModel: isRerankDefaultModelValid,
  } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
  const { t } = useTranslation()
  const docLink = useDocLink()
  const { notify } = useToastContext()
  const ref = useRef(null)
  const isExternal = currentDataset.provider === 'external'
@ -328,7 +330,7 @@ const SettingsModal: FC<SettingsModalProps> = ({
        <div>
          <div className='system-sm-semibold text-text-secondary'>{t('datasetSettings.form.retrievalSetting.title')}</div>
          <div className='text-xs font-normal leading-[18px] text-text-tertiary'>
            <a target='_blank' rel='noopener noreferrer' href='https://docs.dify.ai/guides/knowledge-base/create-knowledge-and-upload-documents#id-4-retrieval-settings' className='text-text-accent'>{t('datasetSettings.form.retrievalSetting.learnMore')}</a>
            <a target='_blank' rel='noopener noreferrer' href={docLink('/guides/knowledge-base/create-knowledge-and-upload-documents/setting-indexing-methods#setting-the-retrieval-setting')} className='text-text-accent'>{t('datasetSettings.form.retrievalSetting.learnMore')}</a>
            {t('datasetSettings.form.retrievalSetting.description')}
          </div>
        </div>
@ -156,12 +156,11 @@ const Debug: FC<IDebug> = ({
    }
    let hasEmptyInput = ''
    const requiredVars = modelConfig.configs.prompt_variables.filter(({ key, name, required, type }) => {
      if (type !== 'string' && type !== 'paragraph' && type !== 'select')
      if (type !== 'string' && type !== 'paragraph' && type !== 'select' && type !== 'number')
        return false
      const res = (!key || !key.trim()) || (!name || !name.trim()) || (required || required === undefined || required === null)
      return res
    }) // compatible with old version
    // debugger
    requiredVars.forEach(({ key, name }) => {
      if (hasEmptyInput)
        return
@ -2,9 +2,7 @@
import type { FC } from 'react'
import React from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import I18n from '@/context/i18n'
import { LanguagesSupported } from '@/i18n/language'
import { useDocLink } from '@/context/i18n'
type Props = {
  onReturnToSimpleMode: () => void
}
@ -13,7 +11,7 @@ const AdvancedModeWarning: FC<Props> = ({
  onReturnToSimpleMode,
}) => {
  const { t } = useTranslation()
  const { locale } = useContext(I18n)
  const docLink = useDocLink()
  const [show, setShow] = React.useState(true)
  if (!show)
    return null
@ -25,7 +23,7 @@ const AdvancedModeWarning: FC<Props> = ({
        <span className='text-gray-700'>{t('appDebug.promptMode.advancedWarning.description')}</span>
        <a
          className='font-medium text-[#155EEF]'
          href={`https://docs.dify.ai/${locale === LanguagesSupported[1] ? '/guides/features/prompt-engineering' : 'features/prompt-engineering'}`}
          href={docLink('/guides/features/prompt-engineering')}
          target='_blank' rel='noopener noreferrer'
        >
          {t('appDebug.promptMode.advancedWarning.learnMore')}
@ -20,6 +20,7 @@ import type {
import { useToastContext } from '@/app/components/base/toast'
import AppIcon from '@/app/components/base/app-icon'
import { noop } from 'lodash-es'
import { useDocLink } from '@/context/i18n'

const systemTypes = ['api']
type ExternalDataToolModalProps = {
@ -40,6 +41,7 @@ const ExternalDataToolModal: FC<ExternalDataToolModalProps> = ({
  onValidateBeforeSave,
}) => {
  const { t } = useTranslation()
  const docLink = useDocLink()
  const { notify } = useToastContext()
  const { locale } = useContext(I18n)
  const [localeData, setLocaleData] = useState(data.type ? data : { ...data, type: 'api' })
@ -243,7 +245,7 @@ const ExternalDataToolModal: FC<ExternalDataToolModalProps> = ({
        <div className='flex h-9 items-center justify-between text-sm font-medium text-gray-900'>
          {t('common.apiBasedExtension.selector.title')}
          <a
            href={t('common.apiBasedExtension.linkUrl') || '/'}
            href={docLink('/guides/extension/api-based-extension/README')}
            target='_blank' rel='noopener noreferrer'
            className='group flex items-center text-xs font-normal text-gray-500 hover:text-primary-600'
          >
@ -29,6 +29,7 @@ import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { getRedirection } from '@/utils/app-redirection'
import FullScreenModal from '@/app/components/base/fullscreen-modal'
import useTheme from '@/hooks/use-theme'
import { useDocLink } from '@/context/i18n'

type CreateAppProps = {
  onSuccess: () => void
@ -303,31 +304,41 @@ function AppTypeCard({ icon, title, description, active, onClick }: AppTypeCardP

function AppPreview({ mode }: { mode: AppMode }) {
  const { t } = useTranslation()
  const docLink = useDocLink()
  const modeToPreviewInfoMap = {
    'chat': {
      title: t('app.types.chatbot'),
      description: t('app.newApp.chatbotUserDescription'),
      link: 'https://docs.dify.ai/guides/application-orchestrate/readme',
      link: docLink('/guides/application-orchestrate/chatbot-application'),
    },
    'advanced-chat': {
      title: t('app.types.advanced'),
      description: t('app.newApp.advancedUserDescription'),
      link: 'https://docs.dify.ai/en/guides/workflow/README',
      link: docLink('/guides/workflow/README', {
        'zh-Hans': '/guides/workflow/readme',
        'ja-JP': '/guides/workflow/concepts',
      }),
    },
    'agent-chat': {
      title: t('app.types.agent'),
      description: t('app.newApp.agentUserDescription'),
      link: 'https://docs.dify.ai/en/guides/application-orchestrate/agent',
      link: docLink('/guides/application-orchestrate/agent'),
    },
    'completion': {
      title: t('app.newApp.completeApp'),
      description: t('app.newApp.completionUserDescription'),
      link: null,
      link: docLink('/guides/application-orchestrate/text-generator', {
        'zh-Hans': '/guides/application-orchestrate/readme',
        'ja-JP': '/guides/application-orchestrate/README',
      }),
    },
    'workflow': {
      title: t('app.types.workflow'),
      description: t('app.newApp.workflowUserDescription'),
      link: 'https://docs.dify.ai/en/guides/workflow/README',
      link: docLink('/guides/workflow/README', {
        'zh-Hans': '/guides/workflow/readme',
        'ja-JP': '/guides/workflow/concepts',
      }),
    },
  }
  const previewInfo = modeToPreviewInfoMap[mode]
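useDocLink's implementation is not part of this diff; from the call sites one can infer a helper taking a default docs path plus optional per-locale overrides. A speculative sketch of that shape (the URL scheme is assumed, not confirmed by the source):

// Speculative shape of docLink inferred from call sites; the real helper
// lives in '@/context/i18n' and is not shown in this diff.
type DocLocale = 'en-US' | 'zh-Hans' | 'ja-JP'
type LocaleOverrides = Partial<Record<Exclude<DocLocale, 'en-US'>, string>>

const makeDocLink = (locale: DocLocale) =>
  (path: string, overrides?: LocaleOverrides): string => {
    const localizedPath = locale !== 'en-US' ? (overrides?.[locale] ?? path) : path
    const segment = locale === 'en-US' ? 'en' : locale.toLowerCase() // assumed URL scheme
    return `https://docs.dify.ai/${segment}${localizedPath}`
  }

// makeDocLink('zh-Hans')('/guides/workflow/README', { 'zh-Hans': '/guides/workflow/readme' })
// → 'https://docs.dify.ai/zh-hans/guides/workflow/readme'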
@ -1,7 +1,7 @@
'use client'

import type { MouseEventHandler } from 'react'
import { useMemo, useRef, useState } from 'react'
import { useEffect, useMemo, useRef, useState } from 'react'
import { useRouter } from 'next/navigation'
import { useContext } from 'use-context-selector'
import { useTranslation } from 'react-i18next'
@ -35,6 +35,7 @@ type CreateFromDSLModalProps = {
  onClose: () => void
  activeTab?: string
  dslUrl?: string
  droppedFile?: File
}

export enum CreateFromDSLModalTab {
@ -42,11 +43,11 @@ export enum CreateFromDSLModalTab {
  FROM_URL = 'from-url',
}

const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDSLModalTab.FROM_FILE, dslUrl = '' }: CreateFromDSLModalProps) => {
const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDSLModalTab.FROM_FILE, dslUrl = '', droppedFile }: CreateFromDSLModalProps) => {
  const { push } = useRouter()
  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)
  const [currentFile, setDSLFile] = useState<File>()
  const [currentFile, setDSLFile] = useState<File | undefined>(droppedFile)
  const [fileContent, setFileContent] = useState<string>()
  const [currentTab, setCurrentTab] = useState(activeTab)
  const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
@ -78,6 +79,11 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS

  const isCreatingRef = useRef(false)

  useEffect(() => {
    if (droppedFile)
      handleFile(droppedFile)
  }, [droppedFile])

  const onCreate: MouseEventHandler = async () => {
    if (currentTab === CreateFromDSLModalTab.FROM_FILE && !currentFile)
      return
Some files were not shown because too many files have changed in this diff