Compare commits


3 Commits

Author SHA1 Message Date
a14c980e32 Merge branch 'main' into feat/storage-path-prefix 2026-05-07 22:00:32 -07:00
04472cbd58 [autofix.ci] apply automated fixes 2026-04-16 09:31:23 +00:00
62d25cbffb feat: support STORAGE_PATH_PREFIX for global storage key namespacing
Add STORAGE_PATH_PREFIX config field and apply prefix in Storage wrapper
for all operations (save, load, download, exists, delete, scan).
Scan strips prefix from results to prevent double-prefixing.
Aliyun OSS disables its own folder prefix when STORAGE_PATH_PREFIX is set.
2026-04-16 02:25:47 -07:00
40 changed files with 2647 additions and 334 deletions

View File

@ -74,11 +74,13 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd dify/docker
cp .env.example .env
docker compose up -d
cd dify
cd docker
./dify-compose up -d
```
On Windows PowerShell, run `.\dify-compose.ps1 up -d` from the `docker` directory.
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
#### Seeking help
@ -136,7 +138,7 @@ Star Dify on GitHub and be instantly notified of new releases.
### Custom configurations
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
If you need to customize the configuration, add only the values you want to override to `docker/.env`. The default values live in [`docker/.env.default`](docker/.env.default), and the full reference remains in [`docker/.env.example`](docker/.env.example). After making any changes, re-run `./dify-compose up -d` or `.\dify-compose.ps1 up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
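For example, a minimal `docker/.env` might carry only the handful of values that differ from `docker/.env.default` (the overrides below are purely illustrative):
```bash
# docker/.env — keep only your overrides here; SECRET_KEY is generated on first run
EXPOSE_NGINX_PORT=8080
DB_PASSWORD=change-me-please
VECTOR_STORE=milvus
```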
### Metrics Monitoring with Grafana

View File

@ -74,6 +74,11 @@ class StorageConfig(BaseSettings):
default="opendal",
)
STORAGE_PATH_PREFIX: str = Field(
description="Global path prefix prepended to all storage object keys.",
default="",
)
STORAGE_LOCAL_PATH: str = Field(
description="Path for local storage when STORAGE_TYPE is set to 'local'.",
default="storage",

View File

@ -19,7 +19,7 @@
"name": "Website Generator"
},
"app_id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
"categories": ["Programming"],
"category": "Programming",
"copyright": null,
"description": null,
"is_listed": true,
@ -35,7 +35,7 @@
"name": "Investment Analysis Report Copilot"
},
"app_id": "a23b57fa-85da-49c0-a571-3aff375976c1",
"categories": ["Agent"],
"category": "Agent",
"copyright": "Dify.AI",
"description": "Welcome to your personalized Investment Analysis Copilot service, where we delve into the depths of stock analysis to provide you with comprehensive insights. \n",
"is_listed": true,
@ -51,7 +51,7 @@
"name": "Workflow Planning Assistant "
},
"app_id": "f3303a7d-a81c-404e-b401-1f8711c998c1",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "An assistant that helps you plan and select the right node for a workflow (V0.6.0). ",
"is_listed": true,
@ -67,7 +67,7 @@
"name": "Automated Email Reply "
},
"app_id": "e9d92058-7d20-4904-892f-75d90bef7587",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Reply emails using Gmail API. It will automatically retrieve email in your inbox and create a response in Gmail. \nConfigure your Gmail API in Google Cloud Console. ",
"is_listed": true,
@ -83,7 +83,7 @@
"name": "Book Translation "
},
"app_id": "98b87f88-bd22-4d86-8b74-86beba5e0ed4",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "A workflow designed to translate a full book up to 15000 tokens per run. Uses Code node to separate text into chunks and Iteration to translate each chunk. ",
"is_listed": true,
@ -99,7 +99,7 @@
"name": "Python bug fixer"
},
"app_id": "cae337e6-aec5-4c7b-beca-d6f1a808bd5e",
"categories": ["Programming"],
"category": "Programming",
"copyright": null,
"description": null,
"is_listed": true,
@ -115,7 +115,7 @@
"name": "Code Interpreter"
},
"app_id": "d077d587-b072-4f2c-b631-69ed1e7cdc0f",
"categories": ["Programming"],
"category": "Programming",
"copyright": "Copyright 2023 Dify",
"description": "Code interpreter, clarifying the syntax and semantics of the code.",
"is_listed": true,
@ -131,7 +131,7 @@
"name": "SVG Logo Design "
},
"app_id": "73fbb5f1-c15d-4d74-9cc8-46d9db9b2cca",
"categories": ["Agent"],
"category": "Agent",
"copyright": "Dify.AI",
"description": "Hello, I am your creative partner in bringing ideas to vivid life! I can assist you in creating stunning designs by leveraging abilities of DALL·E 3. ",
"is_listed": true,
@ -147,7 +147,7 @@
"name": "Long Story Generator (Iteration) "
},
"app_id": "5efb98d7-176b-419c-b6ef-50767391ab62",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "A workflow demonstrating how to use Iteration node to generate long article that is longer than the context length of LLMs. ",
"is_listed": true,
@ -163,7 +163,7 @@
"name": "Text Summarization Workflow"
},
"app_id": "f00c4531-6551-45ee-808f-1d7903099515",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Based on users' choice, retrieve external knowledge to more accurately summarize articles.",
"is_listed": true,
@ -179,7 +179,7 @@
"name": "YouTube Channel Data Analysis"
},
"app_id": "be591209-2ca8-410f-8f3b-ca0e530dd638",
"categories": ["Agent"],
"category": "Agent",
"copyright": "Dify.AI",
"description": "I am a YouTube Channel Data Analysis Copilot, I am here to provide expert data analysis tailored to your needs. ",
"is_listed": true,
@ -195,7 +195,7 @@
"name": "Article Grading Bot"
},
"app_id": "a747f7b4-c48b-40d6-b313-5e628232c05f",
"categories": ["Writing"],
"category": "Writing",
"copyright": null,
"description": "Assess the quality of articles and text based on user defined criteria. ",
"is_listed": true,
@ -211,7 +211,7 @@
"name": "SEO Blog Generator"
},
"app_id": "18f3bd03-524d-4d7a-8374-b30dbe7c69d5",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Workflow for retrieving information from the internet, followed by segmented generation of SEO blogs.",
"is_listed": true,
@ -227,7 +227,7 @@
"name": "SQL Creator"
},
"app_id": "050ef42e-3e0c-40c1-a6b6-a64f2c49d744",
"categories": ["Programming"],
"category": "Programming",
"copyright": "Copyright 2023 Dify",
"description": "Write SQL from natural language by pasting in your schema with the request.Please describe your query requirements in natural language and select the target database type.",
"is_listed": true,
@ -243,7 +243,7 @@
"name": "Sentiment Analysis "
},
"app_id": "f06bf86b-d50c-4895-a942-35112dbe4189",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Batch sentiment analysis of text, followed by JSON output of sentiment classification along with scores.",
"is_listed": true,
@ -259,7 +259,7 @@
"name": "Strategic Consulting Expert"
},
"app_id": "7e8ca1ae-02f2-4b5f-979e-62d19133bee2",
"categories": ["Assistant"],
"category": "Assistant",
"copyright": "Copyright 2023 Dify",
"description": "I can answer your questions related to strategic marketing.",
"is_listed": true,
@ -275,7 +275,7 @@
"name": "Code Converter"
},
"app_id": "4006c4b2-0735-4f37-8dbb-fb1a8c5bd87a",
"categories": ["Programming"],
"category": "Programming",
"copyright": "Copyright 2023 Dify",
"description": "This is an application that provides the ability to convert code snippets in multiple programming languages. You can input the code you wish to convert, select the target programming language, and get the desired output.",
"is_listed": true,
@ -291,7 +291,7 @@
"name": "Question Classifier + Knowledge + Chatbot "
},
"app_id": "d9f6b733-e35d-4a40-9f38-ca7bbfa009f7",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Basic Workflow Template, a chatbot capable of identifying intents alongside with a knowledge base.",
"is_listed": true,
@ -307,7 +307,7 @@
"name": "AI Front-end interviewer"
},
"app_id": "127efead-8944-4e20-ba9d-12402eb345e0",
"categories": ["HR"],
"category": "HR",
"copyright": "Copyright 2023 Dify",
"description": "A simulated front-end interviewer that tests the skill level of front-end development through questioning.",
"is_listed": true,
@ -323,7 +323,7 @@
"name": "Knowledge Retrieval + Chatbot "
},
"app_id": "e9870913-dd01-4710-9f06-15d4180ca1ce",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Basic Workflow Template, A chatbot with a knowledge base. ",
"is_listed": true,
@ -339,7 +339,7 @@
"name": "Email Assistant Workflow "
},
"app_id": "dd5b6353-ae9b-4bce-be6a-a681a12cf709",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "A multifunctional email assistant capable of summarizing, replying, composing, proofreading, and checking grammar.",
"is_listed": true,
@ -355,7 +355,7 @@
"name": "Customer Review Analysis Workflow "
},
"app_id": "9c0cd31f-4b62-4005-adf5-e3888d08654a",
"categories": ["Workflow"],
"category": "Workflow",
"copyright": null,
"description": "Utilize LLM (Large Language Models) to classify customer reviews and forward them to the internal system.",
"is_listed": true,

View File

@ -52,7 +52,7 @@ class RecommendedAppResponse(ResponseModel):
copyright: str | None = None
privacy_policy: str | None = None
custom_disclaimer: str | None = None
categories: list[str] = Field(default_factory=list)
category: str | None = None
position: int | None = None
is_listed: bool | None = None
can_trial: bool | None = None

View File

@ -876,10 +876,10 @@ class ToolBuiltinProviderSetDefaultApi(Resource):
@login_required
@account_initialization_required
def post(self, provider):
_, current_tenant_id = current_account_with_tenant()
current_user, current_tenant_id = current_account_with_tenant()
payload = BuiltinProviderDefaultCredentialPayload.model_validate(console_ns.payload or {})
return BuiltinToolManageService.set_default_provider(
tenant_id=current_tenant_id, provider=provider, id=payload.id
tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=payload.id
)

View File

@ -1,4 +1,5 @@
import logging
import posixpath
from collections.abc import Callable, Generator
from typing import Literal, Union, overload
@ -17,6 +18,15 @@ class Storage:
storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE)
with app.app_context():
self.storage_runner = storage_factory()
prefix = dify_config.STORAGE_PATH_PREFIX.strip("/") if dify_config.STORAGE_PATH_PREFIX else ""
if prefix and ".." in prefix.split("/"):
raise ValueError(f"STORAGE_PATH_PREFIX must not contain '..': {dify_config.STORAGE_PATH_PREFIX}")
self._path_prefix = prefix
def _prefix(self, filename: str) -> str:
if not self._path_prefix:
return filename
return posixpath.join(self._path_prefix, filename)
@staticmethod
def get_storage_factory(storage_type: str) -> Callable[[], BaseStorage]:
@ -86,7 +96,7 @@ class Storage:
raise ValueError(f"unsupported storage type {storage_type}")
def save(self, filename: str, data: bytes):
self.storage_runner.save(filename, data)
self.storage_runner.save(self._prefix(filename), data)
@overload
def load(self, filename: str, /, *, stream: Literal[False] = False) -> bytes: ...
@ -105,22 +115,26 @@ class Storage:
return self.load_once(filename)
def load_once(self, filename: str) -> bytes:
return self.storage_runner.load_once(filename)
return self.storage_runner.load_once(self._prefix(filename))
def load_stream(self, filename: str) -> Generator:
return self.storage_runner.load_stream(filename)
return self.storage_runner.load_stream(self._prefix(filename))
def download(self, filename, target_filepath):
self.storage_runner.download(filename, target_filepath)
self.storage_runner.download(self._prefix(filename), target_filepath)
def exists(self, filename):
return self.storage_runner.exists(filename)
return self.storage_runner.exists(self._prefix(filename))
def delete(self, filename: str):
return self.storage_runner.delete(filename)
return self.storage_runner.delete(self._prefix(filename))
def scan(self, path: str, files: bool = True, directories: bool = False) -> list[str]:
return self.storage_runner.scan(path, files=files, directories=directories)
results = self.storage_runner.scan(self._prefix(path), files=files, directories=directories)
if not self._path_prefix:
return results
prefix_with_slash = self._path_prefix + "/"
return [r.removeprefix(prefix_with_slash) for r in results]
storage = Storage()

View File

@ -13,7 +13,7 @@ class AliyunOssStorage(BaseStorage):
def __init__(self):
super().__init__()
self.bucket_name = dify_config.ALIYUN_OSS_BUCKET_NAME
self.folder = dify_config.ALIYUN_OSS_PATH
self.folder = None if dify_config.STORAGE_PATH_PREFIX else dify_config.ALIYUN_OSS_PATH
oss_auth_method = aliyun_s3.Auth
region = None
if dify_config.ALIYUN_OSS_AUTH_VERSION == "v4":
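In other words, the global prefix and the OSS folder setting are not stacked: when `STORAGE_PATH_PREFIX` is set, the OSS-specific folder is skipped and the wrapper-level prefix applies instead. A hedged configuration sketch with illustrative values:
```bash
STORAGE_TYPE=aliyun-oss
ALIYUN_OSS_PATH=dify             # ignored once STORAGE_PATH_PREFIX is set
STORAGE_PATH_PREFIX=team-a/dify  # keys are namespaced under this prefix instead
```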

View File

@ -1,26 +0,0 @@
"""add recommended app categories
Revision ID: a4f2d8c9b731
Revises: 227822d22895
Create Date: 2026-04-29 12:00:00.000000
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "a4f2d8c9b731"
down_revision = "227822d22895"
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table("recommended_apps", schema=None) as batch_op:
batch_op.add_column(sa.Column("categories", sa.JSON(), nullable=True))
def downgrade():
with op.batch_alter_table("recommended_apps", schema=None) as batch_op:
batch_op.drop_column("categories")

View File

@ -878,7 +878,6 @@ class RecommendedApp(TypeBase):
copyright: Mapped[str] = mapped_column(String(255), nullable=False)
privacy_policy: Mapped[str] = mapped_column(String(255), nullable=False)
category: Mapped[str] = mapped_column(String(255), nullable=False)
categories: Mapped[list[str] | None] = mapped_column(sa.JSON, nullable=True, default=None)
custom_disclaimer: Mapped[str] = mapped_column(LongText, default="")
position: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
is_listed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=True)

View File

@ -1,49 +0,0 @@
"""Apply Redis-backed category ordering for DB-backed Explore apps."""
import json
import logging
from collections.abc import Collection
from typing import Any
from extensions.ext_redis import redis_client
logger = logging.getLogger(__name__)
EXPLORE_APP_CATEGORY_ORDER_KEY_PREFIX = "explore:apps:category_order"
def _category_order_key(language: str) -> str:
return f"{EXPLORE_APP_CATEGORY_ORDER_KEY_PREFIX}:{language}"
def get_explore_app_category_order(language: str) -> list[str]:
try:
raw_categories = redis_client.get(_category_order_key(language))
except Exception:
logger.exception("Failed to read explore app category order from Redis.")
return []
if not raw_categories:
return []
if isinstance(raw_categories, bytes):
raw_categories = raw_categories.decode("utf-8")
try:
categories: Any = json.loads(raw_categories)
except (TypeError, json.JSONDecodeError):
logger.warning("Invalid explore app category order payload for language %s.", language)
return []
if not isinstance(categories, list):
return []
return [category for category in categories if isinstance(category, str)]
def order_categories(categories: Collection[str], language: str) -> list[str]:
configured_order = get_explore_app_category_order(language)
if configured_order:
return configured_order
return sorted(categories)

View File

@ -6,7 +6,6 @@ from constants.languages import languages
from extensions.ext_database import db
from models.model import App, RecommendedApp
from services.app_dsl_service import AppDslService
from services.recommend_app.category_order import order_categories
from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase
from services.recommend_app.recommend_app_type import RecommendAppType
@ -19,7 +18,7 @@ class RecommendedAppItemDict(TypedDict):
copyright: Any
privacy_policy: Any
custom_disclaimer: str
categories: list[str]
category: str
position: int
is_listed: bool
@ -81,7 +80,6 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase):
if not site:
continue
app_categories = recommended_app.categories or []
recommended_app_result: RecommendedAppItemDict = {
"id": recommended_app.id,
"app": recommended_app.app,
@ -90,18 +88,15 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase):
"copyright": site.copyright,
"privacy_policy": site.privacy_policy,
"custom_disclaimer": site.custom_disclaimer,
"categories": app_categories,
"category": recommended_app.category,
"position": recommended_app.position,
"is_listed": recommended_app.is_listed,
}
recommended_apps_result.append(recommended_app_result)
categories.update(app_categories)
categories.add(recommended_app.category)
return RecommendedAppsResultDict(
recommended_apps=recommended_apps_result,
categories=order_categories(categories, language),
)
return RecommendedAppsResultDict(recommended_apps=recommended_apps_result, categories=sorted(categories))
@classmethod
def fetch_recommended_app_detail_from_db(cls, app_id: str) -> RecommendedAppDetailDict | None:

View File

@ -408,7 +408,7 @@ class BuiltinToolManageService:
return {"result": "success"}
@staticmethod
def set_default_provider(tenant_id: str, provider: str, id: str):
def set_default_provider(tenant_id: str, user_id: str, provider: str, id: str):
"""
set default provider
"""
@ -422,11 +422,12 @@ class BuiltinToolManageService:
if target_provider is None:
raise ValueError("provider not found")
# clear default provider (tenant-scoped: only one default per provider per workspace)
# clear default provider
session.execute(
update(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id,
BuiltinToolProvider.user_id == user_id,
BuiltinToolProvider.provider == provider,
BuiltinToolProvider.is_default.is_(True),
)

View File

@ -47,7 +47,6 @@ def _create_recommended_app(
*,
app_id: str,
category: str = "chat",
categories: list[str] | None = None,
language: str = "en-US",
is_listed: bool = True,
position: int = 1,
@ -58,7 +57,6 @@ def _create_recommended_app(
copyright="copy",
privacy_policy="pp",
category=category,
categories=[category] if categories is None else categories,
language=language,
is_listed=is_listed,
position=position,
@ -115,53 +113,6 @@ class TestFetchRecommendedAppsFromDb:
assert "assistant" in result["categories"]
assert "writing" in result["categories"]
def test_returns_multiple_categories_for_one_app(
self, flask_app_with_containers, db_session_with_containers: Session
):
tenant_id = str(uuid4())
created_app = _create_app(db_session_with_containers, tenant_id=tenant_id)
_create_site(db_session_with_containers, app_id=created_app.id)
_create_recommended_app(
db_session_with_containers,
app_id=created_app.id,
category="writing",
categories=["writing", "assistant"],
)
db_session_with_containers.expire_all()
result = DatabaseRecommendAppRetrieval.fetch_recommended_apps_from_db("en-US")
recommended_app = next(item for item in result["recommended_apps"] if item["app_id"] == created_app.id)
assert recommended_app["categories"] == ["writing", "assistant"]
assert "writing" in result["categories"]
assert "assistant" in result["categories"]
def test_ignores_legacy_category_when_categories_are_empty(
self,
flask_app_with_containers,
db_session_with_containers: Session,
):
legacy_category = f"legacy-empty-{uuid4()}"
tenant_id = str(uuid4())
created_app = _create_app(db_session_with_containers, tenant_id=tenant_id)
_create_site(db_session_with_containers, app_id=created_app.id)
_create_recommended_app(
db_session_with_containers,
app_id=created_app.id,
category=legacy_category,
categories=[],
)
db_session_with_containers.expire_all()
result = DatabaseRecommendAppRetrieval.fetch_recommended_apps_from_db("en-US")
recommended_app = next(item for item in result["recommended_apps"] if item["app_id"] == created_app.id)
assert "category" not in recommended_app
assert recommended_app["categories"] == []
assert legacy_category not in result["categories"]
def test_falls_back_to_default_language_when_empty(
self, flask_app_with_containers, db_session_with_containers: Session
):

View File

@ -126,7 +126,7 @@ class TestRecommendedAppResponseModels:
},
"app_id": "app-1",
"description": "desc",
"categories": ["cat", "other"],
"category": "cat",
"position": 1,
"is_listed": True,
"can_trial": False,
@ -137,5 +137,4 @@ class TestRecommendedAppResponseModels:
).model_dump(mode="json")
assert response["recommended_apps"][0]["app_id"] == "app-1"
assert response["recommended_apps"][0]["categories"] == ["cat", "other"]
assert response["categories"] == ["cat"]

View File

@ -1,26 +0,0 @@
import json
from unittest.mock import patch
from services.recommend_app.category_order import get_explore_app_category_order, order_categories
@patch("services.recommend_app.category_order.redis_client.get")
def test_get_explore_app_category_order_returns_redis_list(mock_get):
mock_get.return_value = json.dumps(["C", "A", "B"]).encode()
assert get_explore_app_category_order("en-US") == ["C", "A", "B"]
mock_get.assert_called_once_with("explore:apps:category_order:en-US")
@patch("services.recommend_app.category_order.redis_client.get")
def test_order_categories_uses_redis_order_as_source_of_truth(mock_get):
mock_get.return_value = json.dumps(["C", "A", "B"]).encode()
assert order_categories({"A", "B", "C", "D"}, "en-US") == ["C", "A", "B"]
@patch("services.recommend_app.category_order.redis_client.get")
def test_order_categories_falls_back_to_sorted_categories_without_redis_order(mock_get):
mock_get.return_value = None
assert order_categories({"B", "A", "C"}, "en-US") == ["A", "B", "C"]

View File

@ -180,7 +180,7 @@ class TestSetDefaultProvider:
session.scalar.return_value = None
with pytest.raises(ValueError, match="provider not found"):
BuiltinToolManageService.set_default_provider("t", "p", "id")
BuiltinToolManageService.set_default_provider("t", "u", "p", "id")
@patch(f"{MODULE}.sessionmaker")
@patch(f"{MODULE}.db")
@ -189,29 +189,11 @@ class TestSetDefaultProvider:
target = MagicMock()
session.scalar.return_value = target
result = BuiltinToolManageService.set_default_provider("t", "p", "id")
result = BuiltinToolManageService.set_default_provider("t", "u", "p", "id")
assert result == {"result": "success"}
assert target.is_default is True
@patch(f"{MODULE}.sessionmaker")
@patch(f"{MODULE}.db")
def test_clear_default_is_tenant_scoped_not_user_scoped(self, mock_db, mock_sm_cls):
# Regression: clearing prior defaults must NOT filter by user_id, otherwise
# two workspace members can each leave their own credential as default at
# the same time (the default flag is tenant-scoped, not per-user).
session = _mock_sessionmaker(mock_sm_cls)
session.scalar.return_value = MagicMock()
BuiltinToolManageService.set_default_provider("tenant-1", "google", "cred-id")
session.execute.assert_called_once()
update_stmt = session.execute.call_args.args[0]
compiled = str(update_stmt.compile(compile_kwargs={"literal_binds": True}))
assert "user_id" not in compiled
assert "tenant_id" in compiled
assert "provider" in compiled
class TestUpdateBuiltinToolProvider:
@patch(f"{MODULE}.sessionmaker")

51
docker/.env.default Normal file
View File

@ -0,0 +1,51 @@
# ------------------------------------------------------------------
# Minimal defaults for Docker Compose deployments.
#
# Keep local changes in .env. Use .env.example as the full reference
# for advanced and service-specific settings.
# ------------------------------------------------------------------
# Public URLs used when Dify generates links. Change these together when
# exposing Dify under another hostname, IP address, or port.
CONSOLE_WEB_URL=http://localhost
SERVICE_API_URL=http://localhost
APP_WEB_URL=http://localhost
FILES_URL=http://localhost
INTERNAL_FILES_URL=http://api:5001
TRIGGER_URL=http://localhost
ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
NEXT_PUBLIC_SOCKET_URL=ws://localhost
EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
EXPOSE_PLUGIN_DEBUGGING_PORT=5003
# Built-in metadata database defaults.
DB_TYPE=postgresql
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
DB_PORT=5432
DB_DATABASE=dify
# Built-in Redis defaults.
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=difyai123456
# Default file storage.
STORAGE_TYPE=opendal
OPENDAL_SCHEME=fs
OPENDAL_FS_ROOT=storage
# Default vector database.
VECTOR_STORE=weaviate
# Internal service authentication. Paired values must match.
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
# Host ports.
EXPOSE_NGINX_PORT=80
EXPOSE_NGINX_SSL_PORT=443
# Docker Compose profiles for bundled services.
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
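Because `COMPOSE_PROFILES` is derived from `VECTOR_STORE` and `DB_TYPE`, switching the bundled vector database should only need a single override in `docker/.env`, for example:
```bash
# docker/.env — the matching Compose profile is picked up on the next ./dify-compose up -d
VECTOR_STORE=milvus
```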

View File

@ -7,28 +7,28 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
- **Certbot Container**: `docker-compose.yaml` now contains `certbot` for managing SSL certificates. This container automatically renews certificates and ensures secure HTTPS connections.\
For more information, refer to `docker/certbot/README.md`.
- **Persistent Environment Variables**: Environment variables are now managed through a `.env` file, ensuring that your configurations persist across deployments.
- **Persistent Environment Variables**: Default environment variables are managed through `.env.default`, while local overrides are stored in `.env`, ensuring that your configurations persist across deployments.
> What is `.env`? <br/> <br/>
> The `.env` file is a crucial component in Docker and Docker Compose environments, serving as a centralized configuration file where you can define environment variables that are accessible to the containers at runtime. This file simplifies the management of environment settings across different stages of development, testing, and production, providing consistency and ease of configuration to deployments.
> The `.env` file is a local override file. Keep it small by adding only the values that differ from `.env.default`. Use `.env.example` as the full reference when you need advanced configuration.
- **Unified Vector Database Services**: All vector database services are now managed from a single Docker Compose file `docker-compose.yaml`. You can switch between different vector databases by setting the `VECTOR_STORE` environment variable in your `.env` file.
- **Mandatory .env File**: A `.env` file is now required to run `docker compose up`. This file is crucial for configuring your deployment and for any custom settings to persist through upgrades.
- **Local .env Overrides**: The `dify-compose` and `dify-compose.ps1` wrappers create `.env` if it is missing and generate a persistent `SECRET_KEY` for this deployment.
### How to Deploy Dify with `docker-compose.yaml`
1. **Prerequisites**: Ensure Docker and Docker Compose are installed on your system.
1. **Environment Setup**:
- Navigate to the `docker` directory.
- Copy the `.env.example` file to a new file named `.env` by running `cp .env.example .env`.
- Customize the `.env` file as needed. Refer to the `.env.example` file for detailed configuration options.
- **Optional (Recommended for upgrades)**:
You may use the environment synchronization tool to help keep your `.env` file aligned with the latest `.env.example` updates, while preserving your custom settings.
This is especially useful when upgrading Dify or managing a large, customized `.env` file.
- No copy step is required. The `dify-compose` wrappers create `.env` if it is missing and write a generated `SECRET_KEY` to it.
- When prompted on first run, press Enter to use the default deployment, or answer `y` to stop and edit `.env` first.
- Customize `.env` only when you need to override defaults from `.env.default`. Refer to `.env.example` for the full list of available variables.
- **Optional (for advanced deployments)**:
If you maintain a full `.env` file copied from `.env.example`, you may use the environment synchronization tool to keep it aligned with the latest `.env.example` updates while preserving your custom settings.
See the [Environment Variables Synchronization](#environment-variables-synchronization) section below.
1. **Running the Services**:
- Execute `docker compose up` from the `docker` directory to start the services.
- Execute `./dify-compose up -d` from the `docker` directory to start the services. On Windows PowerShell, run `.\dify-compose.ps1 up -d`.
- To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`.
1. **SSL Certificate Setup**:
- Refer to `docker/certbot/README.md` to set up SSL certificates using Certbot.
@ -58,7 +58,13 @@ For users migrating from the `docker-legacy` setup:
1. **Data Migration**:
- Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary.
### Overview of `.env`
### Overview of `.env.default`, `.env`, and `.env.example`
- `.env.default` contains the minimal default configuration for Docker Compose deployments.
- `.env` contains the generated `SECRET_KEY` plus any local overrides.
- `.env.example` is the full reference for advanced configuration.
The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary environment file, append paired internal service keys when needed, and remove the temporary file after Docker Compose starts.
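Under that model, a typical override workflow might look like the following (the port value is illustrative):
```bash
cd docker
echo "EXPOSE_NGINX_PORT=8080" >> .env   # keep only overrides in .env
./dify-compose up -d                    # merges .env.default + .env, then runs Docker Compose
```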
#### Key Modules and Customization
@ -118,9 +124,11 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w
### Environment Variables Synchronization
When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.example`.
When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.default` or `.env.example`.
To help keep your existing `.env` file up to date **without losing your custom values**, an optional environment variables synchronization tool is provided.
If you use the default override-only workflow, review `.env.default` and add only the values you need to override to `.env`.
If you maintain a full `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.
> This tool performs a **one-way synchronization** from `.env.example` to `.env`.
> Existing values in `.env` are never overwritten automatically.
@ -143,9 +151,9 @@ Before synchronization, the current `.env` file is saved to the `env-backup/` di
**When to use**
- After upgrading Dify to a newer version
- After upgrading Dify to a newer version with a full `.env` file
- When `.env.example` has been updated with new environment variables
- When managing a large or heavily customized `.env` file
- When managing a large or heavily customized `.env` file copied from `.env.example`
**Usage**

334
docker/dify-compose Executable file
View File

@ -0,0 +1,334 @@
#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
DEFAULT_ENV_FILE=".env.default"
USER_ENV_FILE=".env"
log() {
printf '%s\n' "$*" >&2
}
die() {
printf 'Error: %s\n' "$*" >&2
exit 1
}
detect_compose() {
if docker compose version >/dev/null 2>&1; then
COMPOSE_CMD=(docker compose)
return
fi
if command -v docker-compose >/dev/null 2>&1; then
COMPOSE_CMD=(docker-compose)
return
fi
die "Docker Compose is not available. Install Docker Compose, then run this command again."
}
generate_secret_key() {
if command -v openssl >/dev/null 2>&1; then
openssl rand -base64 42
return
fi
if command -v dd >/dev/null 2>&1 && command -v base64 >/dev/null 2>&1; then
dd if=/dev/urandom bs=42 count=1 2>/dev/null | base64 | tr -d '\n'
printf '\n'
return
fi
return 1
}
ensure_env_files() {
[[ -f "$DEFAULT_ENV_FILE" ]] || die "$DEFAULT_ENV_FILE is missing."
if [[ -f "$USER_ENV_FILE" ]]; then
return
fi
: >"$USER_ENV_FILE"
if [[ ! -t 0 ]]; then
log "Created $USER_ENV_FILE for local overrides."
return
fi
printf 'Created %s for local overrides.\n' "$USER_ENV_FILE"
printf 'Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N] '
read -r answer
case "${answer:-}" in
y | Y | yes | YES | Yes)
cat <<'EOF'
Edit .env with the settings you want to override, using .env.example as the full reference.
Run ./dify-compose up -d again when you are ready.
EOF
exit 0
;;
esac
}
user_env_value() {
local key="$1"
awk -F= -v target="$key" '
/^[[:space:]]*#/ || !/=/{ next }
{
key = $1
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
if (key == target) {
value = substr($0, index($0, "=") + 1)
gsub(/^[[:space:]]+|[[:space:]]+$/, "", value)
if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) {
value = substr(value, 2, length(value) - 2)
}
result = value
}
}
END { print result }
' "$USER_ENV_FILE"
}
set_user_env_value() {
local key="$1"
local value="$2"
local temp_file
temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-env.XXXXXX")"
awk -F= -v target="$key" -v replacement="$key=$value" '
BEGIN { replaced = 0 }
/^[[:space:]]*#/ || !/=/{ print; next }
{
key = $1
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
if (key == target) {
if (!replaced) {
print replacement
replaced = 1
}
next
}
print
}
END {
if (!replaced) {
print replacement
}
}
' "$USER_ENV_FILE" >"$temp_file"
mv "$temp_file" "$USER_ENV_FILE"
}
ensure_secret_key() {
local current_secret_key
local secret_key
current_secret_key="$(user_env_value SECRET_KEY)"
if [[ -n "$current_secret_key" ]]; then
return
fi
secret_key="$(generate_secret_key)" || die "Unable to generate SECRET_KEY. Install openssl or configure SECRET_KEY in .env."
set_user_env_value SECRET_KEY "$secret_key"
log "Generated SECRET_KEY in $USER_ENV_FILE."
}
env_value() {
local key="$1"
awk -F= -v target="$key" '
/^[[:space:]]*#/ || !/=/{ next }
{
key = $1
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
if (key == target) {
value = substr($0, index($0, "=") + 1)
gsub(/^[[:space:]]+|[[:space:]]+$/, "", value)
if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) {
value = substr(value, 2, length(value) - 2)
}
result = value
}
}
END { print result }
' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE"
}
user_overrides() {
local key="$1"
grep -Eq "^[[:space:]]*${key}[[:space:]]*=" "$USER_ENV_FILE"
}
write_merged_env() {
awk '
function trim(s) {
sub(/^[[:space:]]+/, "", s)
sub(/[[:space:]]+$/, "", s)
return s
}
/^[[:space:]]*#/ || !/=/{ next }
{
key = $0
sub(/=.*/, "", key)
key = trim(key)
if (key == "") {
next
}
value = substr($0, index($0, "=") + 1)
value = trim(value)
if (!(key in seen)) {
order[++count] = key
seen[key] = 1
}
values[key] = value
}
END {
for (i = 1; i <= count; i++) {
key = order[i]
print key "=" values[key]
}
}
' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE" >"$MERGED_ENV_FILE"
}
set_merged_env_value() {
local key="$1"
local value="$2"
local temp_file
temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-compose-env.XXXXXX")"
awk -F= -v target="$key" -v replacement="$key=$value" '
BEGIN { replaced = 0 }
/^[[:space:]]*#/ || !/=/{ print; next }
{
key = $1
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
if (key == target) {
if (!replaced) {
print replacement
replaced = 1
}
next
}
print
}
END {
if (!replaced) {
print replacement
}
}
' "$MERGED_ENV_FILE" >"$temp_file"
mv "$temp_file" "$MERGED_ENV_FILE"
}
set_if_not_overridden() {
local key="$1"
local value="$2"
if user_overrides "$key"; then
return
fi
set_merged_env_value "$key" "$value"
}
metadata_db_host() {
case "$1" in
mysql) printf 'db_mysql' ;;
postgresql | '') printf 'db_postgres' ;;
*) printf '%s' "$(env_value DB_HOST)" ;;
esac
}
metadata_db_port() {
case "$1" in
mysql) printf '3306' ;;
postgresql | '') printf '5432' ;;
*) printf '%s' "$(env_value DB_PORT)" ;;
esac
}
metadata_db_user() {
case "$1" in
mysql) printf 'root' ;;
postgresql | '') printf 'postgres' ;;
*) printf '%s' "$(env_value DB_USERNAME)" ;;
esac
}
build_merged_env() {
MERGED_ENV_FILE="$(mktemp "${TMPDIR:-/tmp}/dify-compose.XXXXXX")"
trap 'rm -f "$MERGED_ENV_FILE"' EXIT
write_merged_env
local db_type
local redis_host
local redis_port
local redis_username
local redis_password
local redis_auth
local code_execution_api_key
local weaviate_api_key
db_type="$(env_value DB_TYPE)"
set_if_not_overridden DB_HOST "$(metadata_db_host "$db_type")"
set_if_not_overridden DB_PORT "$(metadata_db_port "$db_type")"
set_if_not_overridden DB_USERNAME "$(metadata_db_user "$db_type")"
if ! user_overrides CELERY_BROKER_URL; then
redis_host="$(env_value REDIS_HOST)"
redis_port="$(env_value REDIS_PORT)"
redis_username="$(env_value REDIS_USERNAME)"
redis_password="$(env_value REDIS_PASSWORD)"
redis_auth=""
if [[ -n "$redis_username" && -n "$redis_password" ]]; then
redis_auth="${redis_username}:${redis_password}@"
elif [[ -n "$redis_password" ]]; then
redis_auth=":${redis_password}@"
elif [[ -n "$redis_username" ]]; then
redis_auth="${redis_username}@"
fi
set_merged_env_value CELERY_BROKER_URL "redis://${redis_auth}${redis_host:-redis}:${redis_port:-6379}/1"
fi
if ! user_overrides SANDBOX_API_KEY; then
code_execution_api_key="$(env_value CODE_EXECUTION_API_KEY)"
set_if_not_overridden SANDBOX_API_KEY "${code_execution_api_key:-dify-sandbox}"
fi
if ! user_overrides WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS; then
weaviate_api_key="$(env_value WEAVIATE_API_KEY)"
set_if_not_overridden WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS \
"${weaviate_api_key:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}"
fi
}
main() {
detect_compose
ensure_env_files
ensure_secret_key
build_merged_env
if [[ "$#" -eq 0 ]]; then
set -- up -d
fi
"${COMPOSE_CMD[@]}" --env-file "$MERGED_ENV_FILE" "$@"
}
main "$@"

317
docker/dify-compose.ps1 Normal file
View File

@ -0,0 +1,317 @@
$ErrorActionPreference = "Stop"
Set-StrictMode -Version Latest
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
Set-Location $ScriptDir
$DefaultEnvFile = ".env.default"
$UserEnvFile = ".env"
$MergedEnvFile = $null
$Utf8NoBom = New-Object System.Text.UTF8Encoding -ArgumentList $false
function Write-Info {
param([string]$Message)
[Console]::Error.WriteLine($Message)
}
function Fail {
param([string]$Message)
[Console]::Error.WriteLine("Error: $Message")
exit 1
}
function Test-CommandSuccess {
param([string[]]$Command)
try {
$Executable = $Command[0]
$CommandArgs = @()
if ($Command.Length -gt 1) {
$CommandArgs = @($Command[1..($Command.Length - 1)])
}
& $Executable @CommandArgs *> $null
return $LASTEXITCODE -eq 0
}
catch {
return $false
}
}
function Get-ComposeCommand {
if (Test-CommandSuccess @("docker", "compose", "version")) {
return @("docker", "compose")
}
if ((Get-Command "docker-compose" -ErrorAction SilentlyContinue) -and (Test-CommandSuccess @("docker-compose", "version"))) {
return @("docker-compose")
}
Fail "Docker Compose is not available. Install Docker Compose, then run this command again."
}
function New-SecretKey {
$Bytes = New-Object byte[] 42
$Generator = [System.Security.Cryptography.RandomNumberGenerator]::Create()
try {
$Generator.GetBytes($Bytes)
}
finally {
$Generator.Dispose()
}
return [Convert]::ToBase64String($Bytes)
}
function Ensure-EnvFiles {
if (-not (Test-Path $DefaultEnvFile -PathType Leaf)) {
Fail "$DefaultEnvFile is missing."
}
if (Test-Path $UserEnvFile -PathType Leaf) {
return
}
New-Item -ItemType File -Path $UserEnvFile | Out-Null
if ([Console]::IsInputRedirected) {
Write-Info "Created $UserEnvFile for local overrides."
return
}
Write-Info "Created $UserEnvFile for local overrides."
$Answer = Read-Host "Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N]"
if ($Answer -match "^(y|yes)$") {
Write-Output "Edit .env with the settings you want to override, using .env.example as the full reference."
Write-Output "Run .\dify-compose.ps1 up -d again when you are ready."
exit 0
}
}
function Read-EnvFile {
param([string]$Path)
$Values = [ordered]@{}
if (-not (Test-Path $Path -PathType Leaf)) {
return $Values
}
foreach ($Line in Get-Content -Path $Path) {
if ($Line -match "^\s*#" -or $Line -notmatch "=") {
continue
}
$SeparatorIndex = $Line.IndexOf("=")
$Key = $Line.Substring(0, $SeparatorIndex).Trim()
$Value = $Line.Substring($SeparatorIndex + 1).Trim()
if (($Value.StartsWith('"') -and $Value.EndsWith('"')) -or ($Value.StartsWith("'") -and $Value.EndsWith("'"))) {
$Value = $Value.Substring(1, $Value.Length - 2)
}
if ($Key.Length -gt 0) {
$Values[$Key] = $Value
}
}
return $Values
}
function Set-UserEnvValue {
param(
[string]$Key,
[string]$Value
)
$Path = [string](Resolve-Path $UserEnvFile)
$Lines = [System.IO.File]::ReadAllLines($Path, [System.Text.Encoding]::UTF8)
$Output = New-Object System.Collections.Generic.List[string]
$Replaced = $false
foreach ($Line in $Lines) {
if ($Line -match "^\s*#" -or $Line -notmatch "=") {
$Output.Add($Line)
continue
}
$SeparatorIndex = $Line.IndexOf("=")
$CurrentKey = $Line.Substring(0, $SeparatorIndex).Trim()
if ($CurrentKey -eq $Key) {
if (-not $Replaced) {
$Output.Add("$Key=$Value")
$Replaced = $true
}
continue
}
$Output.Add($Line)
}
if (-not $Replaced) {
$Output.Add("$Key=$Value")
}
[System.IO.File]::WriteAllLines($Path, $Output, $Utf8NoBom)
}
function Ensure-SecretKey {
$Values = Read-EnvFile $UserEnvFile
if ($Values.Contains("SECRET_KEY") -and $Values["SECRET_KEY"]) {
return
}
Set-UserEnvValue "SECRET_KEY" (New-SecretKey)
Write-Info "Generated SECRET_KEY in $UserEnvFile."
}
function Merge-EnvValues {
$Values = [ordered]@{}
foreach ($Entry in (Read-EnvFile $DefaultEnvFile).GetEnumerator()) {
$Values[$Entry.Key] = $Entry.Value
}
foreach ($Entry in (Read-EnvFile $UserEnvFile).GetEnumerator()) {
$Values[$Entry.Key] = $Entry.Value
}
return $Values
}
function User-Overrides {
param([string]$Key)
if (-not (Test-Path $UserEnvFile -PathType Leaf)) {
return $false
}
return [bool](Select-String -Path $UserEnvFile -Pattern "^\s*$([regex]::Escape($Key))\s*=" -Quiet)
}
function Metadata-DbHost {
param([string]$DbType, $Values)
switch ($DbType) {
"mysql" { return "db_mysql" }
"postgresql" { return "db_postgres" }
"" { return "db_postgres" }
default { return $Values["DB_HOST"] }
}
}
function Metadata-DbPort {
param([string]$DbType, $Values)
switch ($DbType) {
"mysql" { return "3306" }
"postgresql" { return "5432" }
"" { return "5432" }
default { return $Values["DB_PORT"] }
}
}
function Metadata-DbUser {
param([string]$DbType, $Values)
switch ($DbType) {
"mysql" { return "root" }
"postgresql" { return "postgres" }
"" { return "postgres" }
default { return $Values["DB_USERNAME"] }
}
}
function Write-MergedEnv {
param($Values)
$Output = New-Object System.Collections.Generic.List[string]
foreach ($Entry in $Values.GetEnumerator()) {
$Output.Add("$($Entry.Key)=$($Entry.Value)")
}
[System.IO.File]::WriteAllLines($MergedEnvFile, $Output, $Utf8NoBom)
}
function Build-MergedEnv {
$Values = Merge-EnvValues
$script:MergedEnvFile = [System.IO.Path]::GetTempFileName()
$DbType = if ($Values.Contains("DB_TYPE")) { $Values["DB_TYPE"] } else { "postgresql" }
if (-not (User-Overrides "DB_HOST")) {
$Values["DB_HOST"] = Metadata-DbHost $DbType $Values
}
if (-not (User-Overrides "DB_PORT")) {
$Values["DB_PORT"] = Metadata-DbPort $DbType $Values
}
if (-not (User-Overrides "DB_USERNAME")) {
$Values["DB_USERNAME"] = Metadata-DbUser $DbType $Values
}
if (-not (User-Overrides "CELERY_BROKER_URL")) {
$RedisHost = if ($Values.Contains("REDIS_HOST") -and $Values["REDIS_HOST"]) { $Values["REDIS_HOST"] } else { "redis" }
$RedisPort = if ($Values.Contains("REDIS_PORT") -and $Values["REDIS_PORT"]) { $Values["REDIS_PORT"] } else { "6379" }
$RedisUsername = if ($Values.Contains("REDIS_USERNAME")) { $Values["REDIS_USERNAME"] } else { "" }
$RedisPassword = if ($Values.Contains("REDIS_PASSWORD")) { $Values["REDIS_PASSWORD"] } else { "" }
$RedisAuth = ""
if ($RedisUsername -and $RedisPassword) {
$RedisAuth = "${RedisUsername}:${RedisPassword}@"
}
elseif ($RedisPassword) {
$RedisAuth = ":${RedisPassword}@"
}
elseif ($RedisUsername) {
$RedisAuth = "${RedisUsername}@"
}
$Values["CELERY_BROKER_URL"] = "redis://$RedisAuth${RedisHost}:${RedisPort}/1"
}
if (-not (User-Overrides "SANDBOX_API_KEY")) {
$CodeExecutionApiKey = if ($Values.Contains("CODE_EXECUTION_API_KEY") -and $Values["CODE_EXECUTION_API_KEY"]) { $Values["CODE_EXECUTION_API_KEY"] } else { "dify-sandbox" }
$Values["SANDBOX_API_KEY"] = $CodeExecutionApiKey
}
if (-not (User-Overrides "WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS")) {
$WeaviateApiKey = if ($Values.Contains("WEAVIATE_API_KEY") -and $Values["WEAVIATE_API_KEY"]) { $Values["WEAVIATE_API_KEY"] } else { "WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih" }
$Values["WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS"] = $WeaviateApiKey
}
Write-MergedEnv $Values
}
$ComposeCommand = Get-ComposeCommand
try {
Ensure-EnvFiles
Ensure-SecretKey
Build-MergedEnv
$ComposeArgs = @($args)
if ($ComposeArgs.Count -eq 0) {
$ComposeArgs = @("up", "-d")
}
$ComposeCommandArgs = @()
if ($ComposeCommand.Length -gt 1) {
$ComposeCommandArgs = @($ComposeCommand[1..($ComposeCommand.Length - 1)])
}
$ComposeExecutable = $ComposeCommand[0]
& $ComposeExecutable @ComposeCommandArgs --env-file $MergedEnvFile @ComposeArgs
exit $LASTEXITCODE
}
finally {
if ($MergedEnvFile -and (Test-Path $MergedEnvFile -PathType Leaf)) {
Remove-Item -Force $MergedEnvFile
}
}

View File

@ -0,0 +1,953 @@
x-shared-env: &shared-api-worker-env
services:
# Init container to fix permissions
init_permissions:
image: busybox:latest
command:
- sh
- -c
- |
FLAG_FILE="/app/api/storage/.init_permissions"
if [ -f "$${FLAG_FILE}" ]; then
echo "Permissions already initialized. Exiting."
exit 0
fi
echo "Initializing permissions for /app/api/storage"
chown -R 1001:1001 /app/api/storage && touch "$${FLAG_FILE}"
echo "Permissions initialized. Exiting."
volumes:
- ./volumes/app/storage:/app/api/storage
restart: "no"
# API service
api:
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
# Startup mode, 'api' starts the API server.
MODE: api
SENTRY_DSN: ${API_SENTRY_DSN:-}
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
PLUGIN_REMOTE_INSTALL_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost}
PLUGIN_REMOTE_INSTALL_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}
PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
PLUGIN_DAEMON_TIMEOUT: ${PLUGIN_DAEMON_TIMEOUT:-600.0}
INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
depends_on:
init_permissions:
condition: service_completed_successfully
db_postgres:
condition: service_healthy
required: false
db_mysql:
condition: service_healthy
required: false
oceanbase:
condition: service_healthy
required: false
seekdb:
condition: service_healthy
required: false
redis:
condition: service_started
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
networks:
- ssrf_proxy_network
- default
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
# Startup mode, 'worker' starts the Celery worker for processing all queues.
MODE: worker
SENTRY_DSN: ${API_SENTRY_DSN:-}
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
depends_on:
init_permissions:
condition: service_completed_successfully
db_postgres:
condition: service_healthy
required: false
db_mysql:
condition: service_healthy
required: false
oceanbase:
condition: service_healthy
required: false
seekdb:
condition: service_healthy
required: false
redis:
condition: service_started
volumes:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
healthcheck:
test: ["CMD-SHELL", "celery -A celery_healthcheck.celery inspect ping"]
interval: ${COMPOSE_WORKER_HEALTHCHECK_INTERVAL:-30s}
timeout: ${COMPOSE_WORKER_HEALTHCHECK_TIMEOUT:-30s}
retries: 3
start_period: 60s
disable: ${COMPOSE_WORKER_HEALTHCHECK_DISABLED:-true}
networks:
- ssrf_proxy_network
- default
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
# Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks.
MODE: beat
depends_on:
init_permissions:
condition: service_completed_successfully
db_postgres:
condition: service_healthy
required: false
db_mysql:
condition: service_healthy
required: false
oceanbase:
condition: service_healthy
required: false
seekdb:
condition: service_healthy
required: false
redis:
condition: service_started
healthcheck:
test: ["CMD-SHELL", "celery -A celery_healthcheck.celery inspect ping"]
interval: ${COMPOSE_WORKER_HEALTHCHECK_INTERVAL:-30s}
timeout: ${COMPOSE_WORKER_HEALTHCHECK_TIMEOUT:-30s}
retries: 3
start_period: 60s
disable: ${COMPOSE_WORKER_HEALTHCHECK_DISABLED:-true}
networks:
- ssrf_proxy_network
- default
# Frontend web application.
web:
image: langgenius/dify-web:1.14.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
APP_API_URL: ${APP_API_URL:-}
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
CSP_WHITELIST: ${CSP_WHITELIST:-}
ALLOW_EMBED: ${ALLOW_EMBED:-false}
ALLOW_INLINE_STYLES: ${ALLOW_INLINE_STYLES:-false}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-10}
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000}
LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
MAX_TREE_DEPTH: ${MAX_TREE_DEPTH:-50}
ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}
# The PostgreSQL database.
db_postgres:
image: postgres:15-alpine
profiles:
- postgresql
restart: always
environment:
POSTGRES_USER: ${DB_USERNAME:-postgres}
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
POSTGRES_DB: ${DB_DATABASE:-dify}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
command: >
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
-c 'shared_buffers=${POSTGRES_SHARED_BUFFERS:-128MB}'
-c 'work_mem=${POSTGRES_WORK_MEM:-4MB}'
-c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}'
-c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}'
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
healthcheck:
test:
[
"CMD",
"pg_isready",
"-h",
"db_postgres",
"-U",
"${DB_USERNAME:-postgres}",
"-d",
"${DB_DATABASE:-dify}",
]
interval: 1s
timeout: 3s
retries: 60
# The mysql database.
db_mysql:
image: mysql:8.0
profiles:
- mysql
restart: always
environment:
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
MYSQL_DATABASE: ${DB_DATABASE:-dify}
command: >
--max_connections=1000
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
--innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M}
--innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2}
volumes:
- ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql
healthcheck:
test:
[
"CMD",
"mysqladmin",
"ping",
"-u",
"root",
"-p${DB_PASSWORD:-difyai123456}",
]
interval: 1s
timeout: 3s
retries: 30
# The redis cache.
redis:
image: redis:6-alpine
restart: always
environment:
REDISCLI_AUTH: ${REDIS_PASSWORD:-difyai123456}
volumes:
# Mount the redis data directory to the container.
- ./volumes/redis/data:/data
# Set the redis password when starting the redis server.
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
healthcheck:
test:
[
"CMD-SHELL",
"redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG",
]
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.15
restart: always
environment:
# The DifySandbox configurations
# Make sure you are changing this key for your deployment with a strong key.
# You can generate a strong key using `openssl rand -base64 42`.
API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
GIN_MODE: ${SANDBOX_GIN_MODE:-release}
WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15}
ENABLE_NETWORK: ${SANDBOX_ENABLE_NETWORK:-true}
HTTP_PROXY: ${SANDBOX_HTTP_PROXY:-http://ssrf_proxy:3128}
HTTPS_PROXY: ${SANDBOX_HTTPS_PROXY:-http://ssrf_proxy:3128}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
volumes:
- ./volumes/sandbox/dependencies:/dependencies
- ./volumes/sandbox/conf:/conf
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8194/health"]
networks:
- ssrf_proxy_network
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.6.0-local
restart: always
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
DB_SSL_MODE: ${DB_SSL_MODE:-disable}
SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
SERVER_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi}
MAX_PLUGIN_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false}
DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://api:5001}
DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0}
PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
PLUGIN_STDIO_BUFFER_SIZE: ${PLUGIN_STDIO_BUFFER_SIZE:-1024}
PLUGIN_STDIO_MAX_BUFFER_SIZE: ${PLUGIN_STDIO_MAX_BUFFER_SIZE:-5242880}
PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local}
PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage}
PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin}
PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-}
AWS_REGION: ${PLUGIN_AWS_REGION:-}
AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-}
AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-}
TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
VOLCENGINE_TOS_ENDPOINT: ${PLUGIN_VOLCENGINE_TOS_ENDPOINT:-}
VOLCENGINE_TOS_ACCESS_KEY: ${PLUGIN_VOLCENGINE_TOS_ACCESS_KEY:-}
VOLCENGINE_TOS_SECRET_KEY: ${PLUGIN_VOLCENGINE_TOS_SECRET_KEY:-}
VOLCENGINE_TOS_REGION: ${PLUGIN_VOLCENGINE_TOS_REGION:-}
SENTRY_ENABLED: ${PLUGIN_SENTRY_ENABLED:-false}
SENTRY_DSN: ${PLUGIN_SENTRY_DSN:-}
ports:
- "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
volumes:
- ./volumes/plugin_daemon:/app/storage
depends_on:
db_postgres:
condition: service_healthy
required: false
db_mysql:
condition: service_healthy
required: false
oceanbase:
condition: service_healthy
required: false
seekdb:
condition: service_healthy
required: false
# ssrf_proxy server
# for more information, please refer to
# https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
ssrf_proxy:
image: ubuntu/squid:latest
restart: always
volumes:
- ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
entrypoint:
[
"sh",
"-c",
"cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
]
environment:
# Please modify the squid env vars below to fit your network environment.
HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
networks:
- ssrf_proxy_network
- default
# Certbot service
# use `docker-compose --profile certbot up` to start the certbot service.
certbot:
image: certbot/certbot
profiles:
- certbot
volumes:
- ./volumes/certbot/conf:/etc/letsencrypt
- ./volumes/certbot/www:/var/www/html
- ./volumes/certbot/logs:/var/log/letsencrypt
- ./volumes/certbot/conf/live:/etc/letsencrypt/live
- ./certbot/update-cert.template.txt:/update-cert.template.txt
- ./certbot/docker-entrypoint.sh:/docker-entrypoint.sh
environment:
- CERTBOT_EMAIL=${CERTBOT_EMAIL:-}
- CERTBOT_DOMAIN=${CERTBOT_DOMAIN:-}
- CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-}
entrypoint: ["/docker-entrypoint.sh"]
command: ["tail", "-f", "/dev/null"]
# The nginx reverse proxy.
# Used for reverse proxying the API and Web services.
nginx:
image: nginx:latest
restart: always
volumes:
- ./nginx/nginx.conf.template:/etc/nginx/nginx.conf.template
- ./nginx/proxy.conf.template:/etc/nginx/proxy.conf.template
- ./nginx/https.conf.template:/etc/nginx/https.conf.template
- ./nginx/conf.d:/etc/nginx/conf.d
- ./nginx/docker-entrypoint.sh:/docker-entrypoint-mount.sh
- ./nginx/ssl:/etc/ssl # cert dir (legacy)
- ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container)
- ./volumes/certbot/conf:/etc/letsencrypt
- ./volumes/certbot/www:/var/www/html
entrypoint:
[
"sh",
"-c",
"cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
]
environment:
NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
NGINX_SSL_PORT: ${NGINX_SSL_PORT:-443}
NGINX_PORT: ${NGINX_PORT:-80}
# You must add your own SSL certificates/keys to the `./nginx/ssl` directory
# and adjust the env vars below in your .env file if NGINX_HTTPS_ENABLED is true, for example:
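# A minimal sketch of .env overrides (assuming your certificate and key are named dify.crt and dify.key and placed in ./nginx/ssl):
#   NGINX_HTTPS_ENABLED=true
#   NGINX_SSL_CERT_FILENAME=dify.crt
#   NGINX_SSL_CERT_KEY_FILENAME=dify.key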
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3}
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
NGINX_PROXY_READ_TIMEOUT: ${NGINX_PROXY_READ_TIMEOUT:-3600s}
NGINX_PROXY_SEND_TIMEOUT: ${NGINX_PROXY_SEND_TIMEOUT:-3600s}
NGINX_ENABLE_CERTBOT_CHALLENGE: ${NGINX_ENABLE_CERTBOT_CHALLENGE:-false}
CERTBOT_DOMAIN: ${CERTBOT_DOMAIN:-}
depends_on:
- api
- web
ports:
- "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}"
- "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}"
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.27.0
profiles:
- weaviate
restart: always
volumes:
# Mount the Weaviate data directory to the container.
- ./volumes/weaviate:/var/lib/weaviate
environment:
# The Weaviate configurations
# You can refer to the [Weaviate](https://weaviate.io/developers/weaviate/config-refs/env-vars) documentation for more information.
PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-false}
DEFAULT_VECTORIZER_MODULE: ${WEAVIATE_DEFAULT_VECTORIZER_MODULE:-none}
CLUSTER_HOSTNAME: ${WEAVIATE_CLUSTER_HOSTNAME:-node1}
AUTHENTICATION_APIKEY_ENABLED: ${WEAVIATE_AUTHENTICATION_APIKEY_ENABLED:-true}
AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
AUTHENTICATION_APIKEY_USERS: ${WEAVIATE_AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true}
AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
DISABLE_TELEMETRY: ${WEAVIATE_DISABLE_TELEMETRY:-false}
ENABLE_TOKENIZER_GSE: ${WEAVIATE_ENABLE_TOKENIZER_GSE:-false}
ENABLE_TOKENIZER_KAGOME_JA: ${WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA:-false}
ENABLE_TOKENIZER_KAGOME_KR: ${WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR:-false}
# OceanBase vector database
oceanbase:
image: oceanbase/oceanbase-ce:4.3.5-lts
container_name: oceanbase
profiles:
- oceanbase
restart: always
volumes:
- ./volumes/oceanbase/data:/root/ob
- ./volumes/oceanbase/conf:/root/.obd/cluster
- ./volumes/oceanbase/init.d:/root/boot/init.d
environment:
OB_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G}
OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
OB_SERVER_IP: 127.0.0.1
MODE: mini
LANG: C.UTF-8
LC_ALL: C.UTF-8
ports:
- "${OCEANBASE_VECTOR_PORT:-2881}:2881"
healthcheck:
test:
[
"CMD-SHELL",
'obclient -h127.0.0.1 -P2881 -uroot@test -p${OCEANBASE_VECTOR_PASSWORD:-difyai123456} -e "SELECT 1;"',
]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
# seekdb vector database
seekdb:
image: oceanbase/seekdb:latest
container_name: seekdb
profiles:
- seekdb
restart: always
volumes:
- ./volumes/seekdb:/var/lib/oceanbase
environment:
ROOT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
MEMORY_LIMIT: ${SEEKDB_MEMORY_LIMIT:-2G}
REPORTER: dify-ai-seekdb
ports:
- "${OCEANBASE_VECTOR_PORT:-2881}:2881"
healthcheck:
test:
[
"CMD-SHELL",
'mysql -h127.0.0.1 -P2881 -uroot -p${OCEANBASE_VECTOR_PASSWORD:-difyai123456} -e "SELECT 1;"',
]
interval: 5s
retries: 60
timeout: 5s
# Qdrant vector store.
# (if used, you need to set VECTOR_STORE to qdrant in the api & worker service.)
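# A minimal sketch of .env overrides for switching to Qdrant (values mirror the defaults used elsewhere in this file; adjust to your setup):
#   VECTOR_STORE=qdrant
#   QDRANT_URL=http://qdrant:6333
#   QDRANT_API_KEY=difyai123456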
qdrant:
image: langgenius/qdrant:v1.8.3
profiles:
- qdrant
restart: always
volumes:
- ./volumes/qdrant:/qdrant/storage
environment:
QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456}
# The Couchbase vector store.
couchbase-server:
build: ./couchbase-server
profiles:
- couchbase
restart: always
environment:
- CLUSTER_NAME=dify_search
- COUCHBASE_ADMINISTRATOR_USERNAME=${COUCHBASE_USER:-Administrator}
- COUCHBASE_ADMINISTRATOR_PASSWORD=${COUCHBASE_PASSWORD:-password}
- COUCHBASE_BUCKET=${COUCHBASE_BUCKET_NAME:-Embeddings}
- COUCHBASE_BUCKET_RAMSIZE=512
- COUCHBASE_RAM_SIZE=2048
- COUCHBASE_EVENTING_RAM_SIZE=512
- COUCHBASE_INDEX_RAM_SIZE=512
- COUCHBASE_FTS_RAM_SIZE=1024
hostname: couchbase-server
container_name: couchbase-server
working_dir: /opt/couchbase
stdin_open: true
tty: true
entrypoint: [""]
command: sh -c "/opt/couchbase/init/init-cbserver.sh"
volumes:
- ./volumes/couchbase/data:/opt/couchbase/var/lib/couchbase/data
healthcheck:
# Ensure the bucket was created before proceeding.
test:
[
"CMD-SHELL",
"curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1",
]
interval: 10s
retries: 10
start_period: 30s
timeout: 10s
# The pgvector vector database.
pgvector:
image: pgvector/pgvector:pg16
profiles:
- pgvector
restart: always
environment:
PGUSER: ${PGVECTOR_PGUSER:-postgres}
# The password for the default postgres user.
POSTGRES_PASSWORD: ${PGVECTOR_POSTGRES_PASSWORD:-difyai123456}
# The name of the default postgres database.
POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
# postgres data directory
PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
# pg_bigm module for full text search
PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
volumes:
- ./volumes/pgvector/data:/var/lib/postgresql/data
- ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh
entrypoint: ["/docker-entrypoint.sh"]
healthcheck:
test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
# Get the image from https://www.vastdata.com.cn/
vastbase:
image: vastdata/vastbase-vector
profiles:
- vastbase
restart: always
environment:
- VB_DBCOMPATIBILITY=PG
- VB_DB=dify
- VB_USERNAME=dify
- VB_PASSWORD=Difyai123456
ports:
- "5434:5432"
volumes:
- ./vastbase/lic:/home/vastbase/vastbase/lic
- ./vastbase/data:/home/vastbase/data
- ./vastbase/backup:/home/vastbase/backup
- ./vastbase/backup_log:/home/vastbase/backup_log
healthcheck:
test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
# pgvecto-rs vector store
pgvecto-rs:
image: tensorchord/pgvecto-rs:pg16-v0.3.0
profiles:
- pgvecto-rs
restart: always
environment:
PGUSER: ${PGVECTOR_PGUSER:-postgres}
# The password for the default postgres user.
POSTGRES_PASSWORD: ${PGVECTOR_POSTGRES_PASSWORD:-difyai123456}
# The name of the default postgres database.
POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
# postgres data directory
PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
volumes:
- ./volumes/pgvecto_rs/data:/var/lib/postgresql/data
healthcheck:
test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
# Chroma vector database
chroma:
image: ghcr.io/chroma-core/chroma:0.5.20
profiles:
- chroma
restart: always
volumes:
- ./volumes/chroma:/chroma/chroma
environment:
CHROMA_SERVER_AUTHN_CREDENTIALS: ${CHROMA_SERVER_AUTHN_CREDENTIALS:-difyai123456}
CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider}
IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE}
# InterSystems IRIS vector database
iris:
image: containers.intersystems.com/intersystems/iris-community:2025.3
profiles:
- iris
container_name: iris
restart: always
init: true
ports:
- "${IRIS_SUPER_SERVER_PORT:-1972}:1972"
- "${IRIS_WEB_SERVER_PORT:-52773}:52773"
volumes:
- ./volumes/iris:/durable
- ./iris/iris-init.script:/iris-init.script
- ./iris/docker-entrypoint.sh:/custom-entrypoint.sh
entrypoint: ["/custom-entrypoint.sh"]
tty: true
environment:
TZ: ${IRIS_TIMEZONE:-UTC}
ISC_DATA_DIRECTORY: /durable/iris
# Oracle vector database
oracle:
image: container-registry.oracle.com/database/free:latest
profiles:
- oracle
restart: always
volumes:
- source: oradata
type: volume
target: /opt/oracle/oradata
- ./startupscripts:/opt/oracle/scripts/startup
environment:
ORACLE_PWD: ${ORACLE_PWD:-Dify123456}
ORACLE_CHARACTERSET: ${ORACLE_CHARACTERSET:-AL32UTF8}
# Milvus vector database services
etcd:
container_name: milvus-etcd
image: quay.io/coreos/etcd:v3.5.5
profiles:
- milvus
environment:
ETCD_AUTO_COMPACTION_MODE: ${ETCD_AUTO_COMPACTION_MODE:-revision}
ETCD_AUTO_COMPACTION_RETENTION: ${ETCD_AUTO_COMPACTION_RETENTION:-1000}
ETCD_QUOTA_BACKEND_BYTES: ${ETCD_QUOTA_BACKEND_BYTES:-4294967296}
ETCD_SNAPSHOT_COUNT: ${ETCD_SNAPSHOT_COUNT:-50000}
volumes:
- ./volumes/milvus/etcd:/etcd
command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd
healthcheck:
test: ["CMD", "etcdctl", "endpoint", "health"]
interval: 30s
timeout: 20s
retries: 3
networks:
- milvus
minio:
container_name: milvus-minio
image: minio/minio:RELEASE.2023-03-20T20-16-18Z
profiles:
- milvus
environment:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-minioadmin}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:-minioadmin}
volumes:
- ./volumes/milvus/minio:/minio_data
command: minio server /minio_data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
networks:
- milvus
milvus-standalone:
container_name: milvus-standalone
image: milvusdb/milvus:v2.6.3
profiles:
- milvus
command: ["milvus", "run", "standalone"]
environment:
ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379}
MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000}
common.security.authorizationEnabled: ${MILVUS_AUTHORIZATION_ENABLED:-true}
volumes:
- ./volumes/milvus/milvus:/var/lib/milvus
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"]
interval: 30s
start_period: 90s
timeout: 20s
retries: 3
depends_on:
- etcd
- minio
ports:
- 19530:19530
- 9091:9091
networks:
- milvus
# Opensearch vector database
opensearch:
container_name: opensearch
image: opensearchproject/opensearch:latest
profiles:
- opensearch
environment:
discovery.type: ${OPENSEARCH_DISCOVERY_TYPE:-single-node}
bootstrap.memory_lock: ${OPENSEARCH_BOOTSTRAP_MEMORY_LOCK:-true}
OPENSEARCH_JAVA_OPTS: -Xms${OPENSEARCH_JAVA_OPTS_MIN:-512m} -Xmx${OPENSEARCH_JAVA_OPTS_MAX:-1024m}
OPENSEARCH_INITIAL_ADMIN_PASSWORD: ${OPENSEARCH_INITIAL_ADMIN_PASSWORD:-Qazwsxedc!@#123}
ulimits:
memlock:
soft: ${OPENSEARCH_MEMLOCK_SOFT:--1}
hard: ${OPENSEARCH_MEMLOCK_HARD:--1}
nofile:
soft: ${OPENSEARCH_NOFILE_SOFT:-65536}
hard: ${OPENSEARCH_NOFILE_HARD:-65536}
volumes:
- ./volumes/opensearch/data:/usr/share/opensearch/data
networks:
- opensearch-net
opensearch-dashboards:
container_name: opensearch-dashboards
image: opensearchproject/opensearch-dashboards:latest
profiles:
- opensearch
environment:
OPENSEARCH_HOSTS: '["https://opensearch:9200"]'
volumes:
- ./volumes/opensearch/opensearch_dashboards.yml:/usr/share/opensearch-dashboards/config/opensearch_dashboards.yml
networks:
- opensearch-net
depends_on:
- opensearch
# opengauss vector database.
opengauss:
image: opengauss/opengauss:7.0.0-RC1
profiles:
- opengauss
privileged: true
restart: always
environment:
GS_USERNAME: ${OPENGAUSS_USER:-postgres}
GS_PASSWORD: ${OPENGAUSS_PASSWORD:-Dify@123}
GS_PORT: ${OPENGAUSS_PORT:-6600}
GS_DB: ${OPENGAUSS_DATABASE:-dify}
volumes:
- ./volumes/opengauss/data:/var/lib/opengauss/data
healthcheck:
test: ["CMD-SHELL", "netstat -lntp | grep tcp6 > /dev/null 2>&1"]
interval: 10s
timeout: 10s
retries: 10
ports:
- ${OPENGAUSS_PORT:-6600}:${OPENGAUSS_PORT:-6600}
# MyScale vector database
myscale:
container_name: myscale
image: myscale/myscaledb:1.6.4
profiles:
- myscale
restart: always
tty: true
volumes:
- ./volumes/myscale/data:/var/lib/clickhouse
- ./volumes/myscale/log:/var/log/clickhouse-server
- ./volumes/myscale/config/users.d/custom_users_config.xml:/etc/clickhouse-server/users.d/custom_users_config.xml
ports:
- ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}
# Matrixone vector store.
matrixone:
hostname: matrixone
image: matrixorigin/matrixone:2.1.1
profiles:
- matrixone
restart: always
volumes:
- ./volumes/matrixone/data:/mo-data
ports:
- ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}
# https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
# https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3
container_name: elasticsearch
profiles:
- elasticsearch
- elasticsearch-ja
restart: always
volumes:
- ./elasticsearch/docker-entrypoint.sh:/docker-entrypoint-mount.sh
- dify_es01_data:/usr/share/elasticsearch/data
environment:
ELASTIC_PASSWORD: ${ELASTICSEARCH_PASSWORD:-elastic}
VECTOR_STORE: ${VECTOR_STORE:-}
cluster.name: dify-es-cluster
node.name: dify-es0
discovery.type: single-node
xpack.license.self_generated.type: basic
xpack.security.enabled: "true"
xpack.security.enrollment.enabled: "false"
xpack.security.http.ssl.enabled: "false"
ports:
- ${ELASTICSEARCH_PORT:-9200}:9200
deploy:
resources:
limits:
memory: 2g
entrypoint: ["sh", "-c", "sh /docker-entrypoint-mount.sh"]
healthcheck:
test:
["CMD", "curl", "-s", "http://localhost:9200/_cluster/health?pretty"]
interval: 30s
timeout: 10s
retries: 50
# https://www.elastic.co/guide/en/kibana/current/docker.html
# https://www.elastic.co/guide/en/kibana/current/settings.html
kibana:
image: docker.elastic.co/kibana/kibana:8.14.3
container_name: kibana
profiles:
- elasticsearch
depends_on:
- elasticsearch
restart: always
environment:
XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: d1a66dfd-c4d3-4a0a-8290-2abcb83ab3aa
NO_PROXY: localhost,127.0.0.1,elasticsearch,kibana
XPACK_SECURITY_ENABLED: "true"
XPACK_SECURITY_ENROLLMENT_ENABLED: "false"
XPACK_SECURITY_HTTP_SSL_ENABLED: "false"
XPACK_FLEET_ISAIRGAPPED: "true"
I18N_LOCALE: zh-CN
SERVER_PORT: "5601"
ELASTICSEARCH_HOSTS: http://elasticsearch:9200
ports:
- ${KIBANA_PORT:-5601}:5601
healthcheck:
test: ["CMD-SHELL", "curl -s http://localhost:5601 >/dev/null || exit 1"]
interval: 30s
timeout: 10s
retries: 3
# Unstructured ETL service.
# (if used, you need to set ETL_TYPE to Unstructured in the api & worker service.)
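# A minimal sketch of .env overrides (the API URL is an assumption; point it at your Unstructured API endpoint):
#   ETL_TYPE=Unstructured
#   UNSTRUCTURED_API_URL=http://unstructured:8000/general/v0/general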
unstructured:
image: downloads.unstructured.io/unstructured-io/unstructured-api:latest
profiles:
- unstructured
restart: always
volumes:
- ./volumes/unstructured:/app/data
networks:
# Create a network shared by sandbox, api, and ssrf_proxy that cannot access the outside.
ssrf_proxy_network:
driver: bridge
internal: true
milvus:
driver: bridge
opensearch-net:
driver: bridge
internal: true
volumes:
oradata:
dify_es01_data:

View File

@ -1,3 +1,728 @@
# ==================================================================
# WARNING: This file is auto-generated by generate_docker_compose
# Do not modify this file directly. Instead, update the .env.example
# or docker-compose-template.yaml and regenerate this file.
# ==================================================================
x-shared-env: &shared-api-worker-env
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-}
SERVICE_API_URL: ${SERVICE_API_URL:-}
TRIGGER_URL: ${TRIGGER_URL:-http://localhost}
APP_API_URL: ${APP_API_URL:-}
APP_WEB_URL: ${APP_WEB_URL:-}
FILES_URL: ${FILES_URL:-}
INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-}
LANG: ${LANG:-C.UTF-8}
LC_ALL: ${LC_ALL:-C.UTF-8}
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache}
LOG_LEVEL: ${LOG_LEVEL:-INFO}
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20}
LOG_FILE_BACKUP_COUNT: ${LOG_FILE_BACKUP_COUNT:-5}
LOG_DATEFORMAT: ${LOG_DATEFORMAT:-%Y-%m-%d %H:%M:%S}
LOG_TZ: ${LOG_TZ:-UTC}
DEBUG: ${DEBUG:-false}
FLASK_DEBUG: ${FLASK_DEBUG:-false}
ENABLE_REQUEST_LOGGING: ${ENABLE_REQUEST_LOGGING:-False}
SECRET_KEY: ${SECRET_KEY:-sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U}
INIT_PASSWORD: ${INIT_PASSWORD:-}
DEPLOY_ENV: ${DEPLOY_ENV:-PRODUCTION}
CHECK_UPDATE_URL: ${CHECK_UPDATE_URL:-https://updates.dify.ai}
OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1}
MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
ENABLE_COLLABORATION_MODE: ${ENABLE_COLLABORATION_MODE:-false}
ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30}
APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0}
APP_MAX_ACTIVE_REQUESTS: ${APP_MAX_ACTIVE_REQUESTS:-0}
APP_MAX_EXECUTION_TIME: ${APP_MAX_EXECUTION_TIME:-1200}
DIFY_BIND_ADDRESS: ${DIFY_BIND_ADDRESS:-0.0.0.0}
DIFY_PORT: ${DIFY_PORT:-5001}
SERVER_WORKER_AMOUNT: ${SERVER_WORKER_AMOUNT:-1}
SERVER_WORKER_CLASS: ${SERVER_WORKER_CLASS:-gevent}
SERVER_WORKER_CONNECTIONS: ${SERVER_WORKER_CONNECTIONS:-10}
CELERY_WORKER_CLASS: ${CELERY_WORKER_CLASS:-}
GUNICORN_TIMEOUT: ${GUNICORN_TIMEOUT:-360}
CELERY_WORKER_AMOUNT: ${CELERY_WORKER_AMOUNT:-4}
CELERY_AUTO_SCALE: ${CELERY_AUTO_SCALE:-false}
CELERY_MAX_WORKERS: ${CELERY_MAX_WORKERS:-}
CELERY_MIN_WORKERS: ${CELERY_MIN_WORKERS:-}
API_TOOL_DEFAULT_CONNECT_TIMEOUT: ${API_TOOL_DEFAULT_CONNECT_TIMEOUT:-10}
API_TOOL_DEFAULT_READ_TIMEOUT: ${API_TOOL_DEFAULT_READ_TIMEOUT:-60}
ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false}
DB_TYPE: ${DB_TYPE:-postgresql}
DB_USERNAME: ${DB_USERNAME:-postgres}
DB_PASSWORD: ${DB_PASSWORD:-difyai123456}
DB_HOST: ${DB_HOST:-db_postgres}
DB_PORT: ${DB_PORT:-5432}
DB_DATABASE: ${DB_DATABASE:-dify}
SQLALCHEMY_POOL_SIZE: ${SQLALCHEMY_POOL_SIZE:-30}
SQLALCHEMY_MAX_OVERFLOW: ${SQLALCHEMY_MAX_OVERFLOW:-10}
SQLALCHEMY_POOL_RECYCLE: ${SQLALCHEMY_POOL_RECYCLE:-3600}
SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO:-false}
SQLALCHEMY_POOL_PRE_PING: ${SQLALCHEMY_POOL_PRE_PING:-false}
SQLALCHEMY_POOL_USE_LIFO: ${SQLALCHEMY_POOL_USE_LIFO:-false}
SQLALCHEMY_POOL_TIMEOUT: ${SQLALCHEMY_POOL_TIMEOUT:-30}
POSTGRES_MAX_CONNECTIONS: ${POSTGRES_MAX_CONNECTIONS:-200}
POSTGRES_SHARED_BUFFERS: ${POSTGRES_SHARED_BUFFERS:-128MB}
POSTGRES_WORK_MEM: ${POSTGRES_WORK_MEM:-4MB}
POSTGRES_MAINTENANCE_WORK_MEM: ${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}
POSTGRES_EFFECTIVE_CACHE_SIZE: ${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}
POSTGRES_STATEMENT_TIMEOUT: ${POSTGRES_STATEMENT_TIMEOUT:-0}
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: ${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}
MYSQL_MAX_CONNECTIONS: ${MYSQL_MAX_CONNECTIONS:-1000}
MYSQL_INNODB_BUFFER_POOL_SIZE: ${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
MYSQL_INNODB_LOG_FILE_SIZE: ${MYSQL_INNODB_LOG_FILE_SIZE:-128M}
MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT: ${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2}
REDIS_HOST: ${REDIS_HOST:-redis}
REDIS_PORT: ${REDIS_PORT:-6379}
REDIS_USERNAME: ${REDIS_USERNAME:-}
REDIS_PASSWORD: ${REDIS_PASSWORD:-difyai123456}
REDIS_USE_SSL: ${REDIS_USE_SSL:-false}
REDIS_SSL_CERT_REQS: ${REDIS_SSL_CERT_REQS:-CERT_NONE}
REDIS_SSL_CA_CERTS: ${REDIS_SSL_CA_CERTS:-}
REDIS_SSL_CERTFILE: ${REDIS_SSL_CERTFILE:-}
REDIS_SSL_KEYFILE: ${REDIS_SSL_KEYFILE:-}
REDIS_DB: ${REDIS_DB:-0}
REDIS_KEY_PREFIX: ${REDIS_KEY_PREFIX:-}
REDIS_MAX_CONNECTIONS: ${REDIS_MAX_CONNECTIONS:-}
REDIS_USE_SENTINEL: ${REDIS_USE_SENTINEL:-false}
REDIS_SENTINELS: ${REDIS_SENTINELS:-}
REDIS_SENTINEL_SERVICE_NAME: ${REDIS_SENTINEL_SERVICE_NAME:-}
REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-}
REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-}
REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1}
REDIS_USE_CLUSTERS: ${REDIS_USE_CLUSTERS:-false}
REDIS_CLUSTERS: ${REDIS_CLUSTERS:-}
REDIS_CLUSTERS_PASSWORD: ${REDIS_CLUSTERS_PASSWORD:-}
REDIS_RETRY_RETRIES: ${REDIS_RETRY_RETRIES:-3}
REDIS_RETRY_BACKOFF_BASE: ${REDIS_RETRY_BACKOFF_BASE:-1.0}
REDIS_RETRY_BACKOFF_CAP: ${REDIS_RETRY_BACKOFF_CAP:-10.0}
REDIS_SOCKET_TIMEOUT: ${REDIS_SOCKET_TIMEOUT:-5.0}
REDIS_SOCKET_CONNECT_TIMEOUT: ${REDIS_SOCKET_CONNECT_TIMEOUT:-5.0}
REDIS_HEALTH_CHECK_INTERVAL: ${REDIS_HEALTH_CHECK_INTERVAL:-30}
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1}
CELERY_BACKEND: ${CELERY_BACKEND:-redis}
BROKER_USE_SSL: ${BROKER_USE_SSL:-false}
CELERY_USE_SENTINEL: ${CELERY_USE_SENTINEL:-false}
CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-}
CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-}
CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1}
CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null}
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5}
STORAGE_TYPE: ${STORAGE_TYPE:-opendal}
OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs}
OPENDAL_FS_ROOT: ${OPENDAL_FS_ROOT:-storage}
CLICKZETTA_VOLUME_TYPE: ${CLICKZETTA_VOLUME_TYPE:-user}
CLICKZETTA_VOLUME_NAME: ${CLICKZETTA_VOLUME_NAME:-}
CLICKZETTA_VOLUME_TABLE_PREFIX: ${CLICKZETTA_VOLUME_TABLE_PREFIX:-dataset_}
CLICKZETTA_VOLUME_DIFY_PREFIX: ${CLICKZETTA_VOLUME_DIFY_PREFIX:-dify_km}
S3_ENDPOINT: ${S3_ENDPOINT:-}
S3_REGION: ${S3_REGION:-us-east-1}
S3_BUCKET_NAME: ${S3_BUCKET_NAME:-difyai}
S3_ACCESS_KEY: ${S3_ACCESS_KEY:-}
S3_SECRET_KEY: ${S3_SECRET_KEY:-}
S3_ADDRESS_STYLE: ${S3_ADDRESS_STYLE:-auto}
S3_USE_AWS_MANAGED_IAM: ${S3_USE_AWS_MANAGED_IAM:-false}
ARCHIVE_STORAGE_ENABLED: ${ARCHIVE_STORAGE_ENABLED:-false}
ARCHIVE_STORAGE_ENDPOINT: ${ARCHIVE_STORAGE_ENDPOINT:-}
ARCHIVE_STORAGE_ARCHIVE_BUCKET: ${ARCHIVE_STORAGE_ARCHIVE_BUCKET:-}
ARCHIVE_STORAGE_EXPORT_BUCKET: ${ARCHIVE_STORAGE_EXPORT_BUCKET:-}
ARCHIVE_STORAGE_ACCESS_KEY: ${ARCHIVE_STORAGE_ACCESS_KEY:-}
ARCHIVE_STORAGE_SECRET_KEY: ${ARCHIVE_STORAGE_SECRET_KEY:-}
ARCHIVE_STORAGE_REGION: ${ARCHIVE_STORAGE_REGION:-auto}
AZURE_BLOB_ACCOUNT_NAME: ${AZURE_BLOB_ACCOUNT_NAME:-difyai}
AZURE_BLOB_ACCOUNT_KEY: ${AZURE_BLOB_ACCOUNT_KEY:-difyai}
AZURE_BLOB_CONTAINER_NAME: ${AZURE_BLOB_CONTAINER_NAME:-difyai-container}
AZURE_BLOB_ACCOUNT_URL: ${AZURE_BLOB_ACCOUNT_URL:-https://<your_account_name>.blob.core.windows.net}
GOOGLE_STORAGE_BUCKET_NAME: ${GOOGLE_STORAGE_BUCKET_NAME:-your-bucket-name}
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64:-}
ALIYUN_OSS_BUCKET_NAME: ${ALIYUN_OSS_BUCKET_NAME:-your-bucket-name}
ALIYUN_OSS_ACCESS_KEY: ${ALIYUN_OSS_ACCESS_KEY:-your-access-key}
ALIYUN_OSS_SECRET_KEY: ${ALIYUN_OSS_SECRET_KEY:-your-secret-key}
ALIYUN_OSS_ENDPOINT: ${ALIYUN_OSS_ENDPOINT:-https://oss-ap-southeast-1-internal.aliyuncs.com}
ALIYUN_OSS_REGION: ${ALIYUN_OSS_REGION:-ap-southeast-1}
ALIYUN_OSS_AUTH_VERSION: ${ALIYUN_OSS_AUTH_VERSION:-v4}
ALIYUN_OSS_PATH: ${ALIYUN_OSS_PATH:-your-path}
TENCENT_COS_BUCKET_NAME: ${TENCENT_COS_BUCKET_NAME:-your-bucket-name}
TENCENT_COS_SECRET_KEY: ${TENCENT_COS_SECRET_KEY:-your-secret-key}
TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID:-your-secret-id}
TENCENT_COS_REGION: ${TENCENT_COS_REGION:-your-region}
TENCENT_COS_SCHEME: ${TENCENT_COS_SCHEME:-your-scheme}
TENCENT_COS_CUSTOM_DOMAIN: ${TENCENT_COS_CUSTOM_DOMAIN:-your-custom-domain}
OCI_ENDPOINT: ${OCI_ENDPOINT:-https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com}
OCI_BUCKET_NAME: ${OCI_BUCKET_NAME:-your-bucket-name}
OCI_ACCESS_KEY: ${OCI_ACCESS_KEY:-your-access-key}
OCI_SECRET_KEY: ${OCI_SECRET_KEY:-your-secret-key}
OCI_REGION: ${OCI_REGION:-us-ashburn-1}
HUAWEI_OBS_BUCKET_NAME: ${HUAWEI_OBS_BUCKET_NAME:-your-bucket-name}
HUAWEI_OBS_SECRET_KEY: ${HUAWEI_OBS_SECRET_KEY:-your-secret-key}
HUAWEI_OBS_ACCESS_KEY: ${HUAWEI_OBS_ACCESS_KEY:-your-access-key}
HUAWEI_OBS_SERVER: ${HUAWEI_OBS_SERVER:-your-server-url}
HUAWEI_OBS_PATH_STYLE: ${HUAWEI_OBS_PATH_STYLE:-false}
VOLCENGINE_TOS_BUCKET_NAME: ${VOLCENGINE_TOS_BUCKET_NAME:-your-bucket-name}
VOLCENGINE_TOS_SECRET_KEY: ${VOLCENGINE_TOS_SECRET_KEY:-your-secret-key}
VOLCENGINE_TOS_ACCESS_KEY: ${VOLCENGINE_TOS_ACCESS_KEY:-your-access-key}
VOLCENGINE_TOS_ENDPOINT: ${VOLCENGINE_TOS_ENDPOINT:-your-server-url}
VOLCENGINE_TOS_REGION: ${VOLCENGINE_TOS_REGION:-your-region}
BAIDU_OBS_BUCKET_NAME: ${BAIDU_OBS_BUCKET_NAME:-your-bucket-name}
BAIDU_OBS_SECRET_KEY: ${BAIDU_OBS_SECRET_KEY:-your-secret-key}
BAIDU_OBS_ACCESS_KEY: ${BAIDU_OBS_ACCESS_KEY:-your-access-key}
BAIDU_OBS_ENDPOINT: ${BAIDU_OBS_ENDPOINT:-your-server-url}
SUPABASE_BUCKET_NAME: ${SUPABASE_BUCKET_NAME:-your-bucket-name}
SUPABASE_API_KEY: ${SUPABASE_API_KEY:-your-access-key}
SUPABASE_URL: ${SUPABASE_URL:-your-server-url}
VECTOR_STORE: ${VECTOR_STORE:-weaviate}
VECTOR_INDEX_NAME_PREFIX: ${VECTOR_INDEX_NAME_PREFIX:-Vector_index}
WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT:-http://weaviate:8080}
WEAVIATE_API_KEY: ${WEAVIATE_API_KEY:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
WEAVIATE_GRPC_ENDPOINT: ${WEAVIATE_GRPC_ENDPOINT:-grpc://weaviate:50051}
WEAVIATE_TOKENIZATION: ${WEAVIATE_TOKENIZATION:-word}
OCEANBASE_VECTOR_HOST: ${OCEANBASE_VECTOR_HOST:-oceanbase}
OCEANBASE_VECTOR_PORT: ${OCEANBASE_VECTOR_PORT:-2881}
OCEANBASE_VECTOR_USER: ${OCEANBASE_VECTOR_USER:-root@test}
OCEANBASE_VECTOR_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
OCEANBASE_VECTOR_DATABASE: ${OCEANBASE_VECTOR_DATABASE:-test}
OCEANBASE_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
OCEANBASE_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G}
OCEANBASE_ENABLE_HYBRID_SEARCH: ${OCEANBASE_ENABLE_HYBRID_SEARCH:-false}
OCEANBASE_FULLTEXT_PARSER: ${OCEANBASE_FULLTEXT_PARSER:-ik}
SEEKDB_MEMORY_LIMIT: ${SEEKDB_MEMORY_LIMIT:-2G}
QDRANT_URL: ${QDRANT_URL:-http://qdrant:6333}
QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456}
QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
QDRANT_REPLICATION_FACTOR: ${QDRANT_REPLICATION_FACTOR:-1}
MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530}
MILVUS_DATABASE: ${MILVUS_DATABASE:-}
MILVUS_TOKEN: ${MILVUS_TOKEN:-}
MILVUS_USER: ${MILVUS_USER:-}
MILVUS_PASSWORD: ${MILVUS_PASSWORD:-}
MILVUS_ENABLE_HYBRID_SEARCH: ${MILVUS_ENABLE_HYBRID_SEARCH:-False}
MILVUS_ANALYZER_PARAMS: ${MILVUS_ANALYZER_PARAMS:-}
MYSCALE_HOST: ${MYSCALE_HOST:-myscale}
MYSCALE_PORT: ${MYSCALE_PORT:-8123}
MYSCALE_USER: ${MYSCALE_USER:-default}
MYSCALE_PASSWORD: ${MYSCALE_PASSWORD:-}
MYSCALE_DATABASE: ${MYSCALE_DATABASE:-dify}
MYSCALE_FTS_PARAMS: ${MYSCALE_FTS_PARAMS:-}
COUCHBASE_CONNECTION_STRING: ${COUCHBASE_CONNECTION_STRING:-couchbase://couchbase-server}
COUCHBASE_USER: ${COUCHBASE_USER:-Administrator}
COUCHBASE_PASSWORD: ${COUCHBASE_PASSWORD:-password}
COUCHBASE_BUCKET_NAME: ${COUCHBASE_BUCKET_NAME:-Embeddings}
COUCHBASE_SCOPE_NAME: ${COUCHBASE_SCOPE_NAME:-_default}
HOLOGRES_HOST: ${HOLOGRES_HOST:-}
HOLOGRES_PORT: ${HOLOGRES_PORT:-80}
HOLOGRES_DATABASE: ${HOLOGRES_DATABASE:-}
HOLOGRES_ACCESS_KEY_ID: ${HOLOGRES_ACCESS_KEY_ID:-}
HOLOGRES_ACCESS_KEY_SECRET: ${HOLOGRES_ACCESS_KEY_SECRET:-}
HOLOGRES_SCHEMA: ${HOLOGRES_SCHEMA:-public}
HOLOGRES_TOKENIZER: ${HOLOGRES_TOKENIZER:-jieba}
HOLOGRES_DISTANCE_METHOD: ${HOLOGRES_DISTANCE_METHOD:-Cosine}
HOLOGRES_BASE_QUANTIZATION_TYPE: ${HOLOGRES_BASE_QUANTIZATION_TYPE:-rabitq}
HOLOGRES_MAX_DEGREE: ${HOLOGRES_MAX_DEGREE:-64}
HOLOGRES_EF_CONSTRUCTION: ${HOLOGRES_EF_CONSTRUCTION:-400}
PGVECTOR_HOST: ${PGVECTOR_HOST:-pgvector}
PGVECTOR_PORT: ${PGVECTOR_PORT:-5432}
PGVECTOR_USER: ${PGVECTOR_USER:-postgres}
PGVECTOR_PASSWORD: ${PGVECTOR_PASSWORD:-difyai123456}
PGVECTOR_DATABASE: ${PGVECTOR_DATABASE:-dify}
PGVECTOR_MIN_CONNECTION: ${PGVECTOR_MIN_CONNECTION:-1}
PGVECTOR_MAX_CONNECTION: ${PGVECTOR_MAX_CONNECTION:-5}
PGVECTOR_PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
PGVECTOR_PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
VASTBASE_HOST: ${VASTBASE_HOST:-vastbase}
VASTBASE_PORT: ${VASTBASE_PORT:-5432}
VASTBASE_USER: ${VASTBASE_USER:-dify}
VASTBASE_PASSWORD: ${VASTBASE_PASSWORD:-Difyai123456}
VASTBASE_DATABASE: ${VASTBASE_DATABASE:-dify}
VASTBASE_MIN_CONNECTION: ${VASTBASE_MIN_CONNECTION:-1}
VASTBASE_MAX_CONNECTION: ${VASTBASE_MAX_CONNECTION:-5}
PGVECTO_RS_HOST: ${PGVECTO_RS_HOST:-pgvecto-rs}
PGVECTO_RS_PORT: ${PGVECTO_RS_PORT:-5432}
PGVECTO_RS_USER: ${PGVECTO_RS_USER:-postgres}
PGVECTO_RS_PASSWORD: ${PGVECTO_RS_PASSWORD:-difyai123456}
PGVECTO_RS_DATABASE: ${PGVECTO_RS_DATABASE:-dify}
ANALYTICDB_KEY_ID: ${ANALYTICDB_KEY_ID:-your-ak}
ANALYTICDB_KEY_SECRET: ${ANALYTICDB_KEY_SECRET:-your-sk}
ANALYTICDB_REGION_ID: ${ANALYTICDB_REGION_ID:-cn-hangzhou}
ANALYTICDB_INSTANCE_ID: ${ANALYTICDB_INSTANCE_ID:-gp-ab123456}
ANALYTICDB_ACCOUNT: ${ANALYTICDB_ACCOUNT:-testaccount}
ANALYTICDB_PASSWORD: ${ANALYTICDB_PASSWORD:-testpassword}
ANALYTICDB_NAMESPACE: ${ANALYTICDB_NAMESPACE:-dify}
ANALYTICDB_NAMESPACE_PASSWORD: ${ANALYTICDB_NAMESPACE_PASSWORD:-difypassword}
ANALYTICDB_HOST: ${ANALYTICDB_HOST:-gp-test.aliyuncs.com}
ANALYTICDB_PORT: ${ANALYTICDB_PORT:-5432}
ANALYTICDB_MIN_CONNECTION: ${ANALYTICDB_MIN_CONNECTION:-1}
ANALYTICDB_MAX_CONNECTION: ${ANALYTICDB_MAX_CONNECTION:-5}
TIDB_VECTOR_HOST: ${TIDB_VECTOR_HOST:-tidb}
TIDB_VECTOR_PORT: ${TIDB_VECTOR_PORT:-4000}
TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-}
TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-}
TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify}
MATRIXONE_HOST: ${MATRIXONE_HOST:-matrixone}
MATRIXONE_PORT: ${MATRIXONE_PORT:-6001}
MATRIXONE_USER: ${MATRIXONE_USER:-dump}
MATRIXONE_PASSWORD: ${MATRIXONE_PASSWORD:-111}
MATRIXONE_DATABASE: ${MATRIXONE_DATABASE:-dify}
TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1}
TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify}
TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20}
TIDB_ON_QDRANT_GRPC_ENABLED: ${TIDB_ON_QDRANT_GRPC_ENABLED:-false}
TIDB_ON_QDRANT_GRPC_PORT: ${TIDB_ON_QDRANT_GRPC_PORT:-6334}
TIDB_PUBLIC_KEY: ${TIDB_PUBLIC_KEY:-dify}
TIDB_PRIVATE_KEY: ${TIDB_PRIVATE_KEY:-dify}
TIDB_API_URL: ${TIDB_API_URL:-http://127.0.0.1}
TIDB_IAM_API_URL: ${TIDB_IAM_API_URL:-http://127.0.0.1}
TIDB_REGION: ${TIDB_REGION:-regions/aws-us-east-1}
TIDB_PROJECT_ID: ${TIDB_PROJECT_ID:-dify}
TIDB_SPEND_LIMIT: ${TIDB_SPEND_LIMIT:-100}
CHROMA_HOST: ${CHROMA_HOST:-127.0.0.1}
CHROMA_PORT: ${CHROMA_PORT:-8000}
CHROMA_TENANT: ${CHROMA_TENANT:-default_tenant}
CHROMA_DATABASE: ${CHROMA_DATABASE:-default_database}
CHROMA_AUTH_PROVIDER: ${CHROMA_AUTH_PROVIDER:-chromadb.auth.token_authn.TokenAuthClientProvider}
CHROMA_AUTH_CREDENTIALS: ${CHROMA_AUTH_CREDENTIALS:-}
ORACLE_USER: ${ORACLE_USER:-dify}
ORACLE_PASSWORD: ${ORACLE_PASSWORD:-dify}
ORACLE_DSN: ${ORACLE_DSN:-oracle:1521/FREEPDB1}
ORACLE_CONFIG_DIR: ${ORACLE_CONFIG_DIR:-/app/api/storage/wallet}
ORACLE_WALLET_LOCATION: ${ORACLE_WALLET_LOCATION:-/app/api/storage/wallet}
ORACLE_WALLET_PASSWORD: ${ORACLE_WALLET_PASSWORD:-dify}
ORACLE_IS_AUTONOMOUS: ${ORACLE_IS_AUTONOMOUS:-false}
ALIBABACLOUD_MYSQL_HOST: ${ALIBABACLOUD_MYSQL_HOST:-127.0.0.1}
ALIBABACLOUD_MYSQL_PORT: ${ALIBABACLOUD_MYSQL_PORT:-3306}
ALIBABACLOUD_MYSQL_USER: ${ALIBABACLOUD_MYSQL_USER:-root}
ALIBABACLOUD_MYSQL_PASSWORD: ${ALIBABACLOUD_MYSQL_PASSWORD:-difyai123456}
ALIBABACLOUD_MYSQL_DATABASE: ${ALIBABACLOUD_MYSQL_DATABASE:-dify}
ALIBABACLOUD_MYSQL_MAX_CONNECTION: ${ALIBABACLOUD_MYSQL_MAX_CONNECTION:-5}
ALIBABACLOUD_MYSQL_HNSW_M: ${ALIBABACLOUD_MYSQL_HNSW_M:-6}
RELYT_HOST: ${RELYT_HOST:-db}
RELYT_PORT: ${RELYT_PORT:-5432}
RELYT_USER: ${RELYT_USER:-postgres}
RELYT_PASSWORD: ${RELYT_PASSWORD:-difyai123456}
RELYT_DATABASE: ${RELYT_DATABASE:-postgres}
OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch}
OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200}
OPENSEARCH_SECURE: ${OPENSEARCH_SECURE:-true}
OPENSEARCH_VERIFY_CERTS: ${OPENSEARCH_VERIFY_CERTS:-true}
OPENSEARCH_AUTH_METHOD: ${OPENSEARCH_AUTH_METHOD:-basic}
OPENSEARCH_USER: ${OPENSEARCH_USER:-admin}
OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD:-admin}
OPENSEARCH_AWS_REGION: ${OPENSEARCH_AWS_REGION:-ap-southeast-1}
OPENSEARCH_AWS_SERVICE: ${OPENSEARCH_AWS_SERVICE:-aoss}
TENCENT_VECTOR_DB_URL: ${TENCENT_VECTOR_DB_URL:-http://127.0.0.1}
TENCENT_VECTOR_DB_API_KEY: ${TENCENT_VECTOR_DB_API_KEY:-dify}
TENCENT_VECTOR_DB_TIMEOUT: ${TENCENT_VECTOR_DB_TIMEOUT:-30}
TENCENT_VECTOR_DB_USERNAME: ${TENCENT_VECTOR_DB_USERNAME:-dify}
TENCENT_VECTOR_DB_DATABASE: ${TENCENT_VECTOR_DB_DATABASE:-dify}
TENCENT_VECTOR_DB_SHARD: ${TENCENT_VECTOR_DB_SHARD:-1}
TENCENT_VECTOR_DB_REPLICAS: ${TENCENT_VECTOR_DB_REPLICAS:-2}
TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH: ${TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH:-false}
ELASTICSEARCH_HOST: ${ELASTICSEARCH_HOST:-0.0.0.0}
ELASTICSEARCH_PORT: ${ELASTICSEARCH_PORT:-9200}
ELASTICSEARCH_USERNAME: ${ELASTICSEARCH_USERNAME:-elastic}
ELASTICSEARCH_PASSWORD: ${ELASTICSEARCH_PASSWORD:-elastic}
KIBANA_PORT: ${KIBANA_PORT:-5601}
ELASTICSEARCH_USE_CLOUD: ${ELASTICSEARCH_USE_CLOUD:-false}
ELASTICSEARCH_CLOUD_URL: ${ELASTICSEARCH_CLOUD_URL:-YOUR-ELASTICSEARCH_CLOUD_URL}
ELASTICSEARCH_API_KEY: ${ELASTICSEARCH_API_KEY:-YOUR-ELASTICSEARCH_API_KEY}
ELASTICSEARCH_VERIFY_CERTS: ${ELASTICSEARCH_VERIFY_CERTS:-False}
ELASTICSEARCH_CA_CERTS: ${ELASTICSEARCH_CA_CERTS:-}
ELASTICSEARCH_REQUEST_TIMEOUT: ${ELASTICSEARCH_REQUEST_TIMEOUT:-100000}
ELASTICSEARCH_RETRY_ON_TIMEOUT: ${ELASTICSEARCH_RETRY_ON_TIMEOUT:-True}
ELASTICSEARCH_MAX_RETRIES: ${ELASTICSEARCH_MAX_RETRIES:-10}
BAIDU_VECTOR_DB_ENDPOINT: ${BAIDU_VECTOR_DB_ENDPOINT:-http://127.0.0.1:5287}
BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: ${BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS:-30000}
BAIDU_VECTOR_DB_ACCOUNT: ${BAIDU_VECTOR_DB_ACCOUNT:-root}
BAIDU_VECTOR_DB_API_KEY: ${BAIDU_VECTOR_DB_API_KEY:-dify}
BAIDU_VECTOR_DB_DATABASE: ${BAIDU_VECTOR_DB_DATABASE:-dify}
BAIDU_VECTOR_DB_SHARD: ${BAIDU_VECTOR_DB_SHARD:-1}
BAIDU_VECTOR_DB_REPLICAS: ${BAIDU_VECTOR_DB_REPLICAS:-3}
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER: ${BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER:-DEFAULT_ANALYZER}
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE: ${BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE:-COARSE_MODE}
BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT: ${BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT:-500}
BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO: ${BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO:-0.05}
BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS: ${BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS:-300}
VIKINGDB_ACCESS_KEY: ${VIKINGDB_ACCESS_KEY:-your-ak}
VIKINGDB_SECRET_KEY: ${VIKINGDB_SECRET_KEY:-your-sk}
VIKINGDB_REGION: ${VIKINGDB_REGION:-cn-shanghai}
VIKINGDB_HOST: ${VIKINGDB_HOST:-api-vikingdb.xxx.volces.com}
VIKINGDB_SCHEMA: ${VIKINGDB_SCHEMA:-http}
VIKINGDB_CONNECTION_TIMEOUT: ${VIKINGDB_CONNECTION_TIMEOUT:-30}
VIKINGDB_SOCKET_TIMEOUT: ${VIKINGDB_SOCKET_TIMEOUT:-30}
LINDORM_URL: ${LINDORM_URL:-http://localhost:30070}
LINDORM_USERNAME: ${LINDORM_USERNAME:-admin}
LINDORM_PASSWORD: ${LINDORM_PASSWORD:-admin}
LINDORM_USING_UGC: ${LINDORM_USING_UGC:-True}
LINDORM_QUERY_TIMEOUT: ${LINDORM_QUERY_TIMEOUT:-1}
OPENGAUSS_HOST: ${OPENGAUSS_HOST:-opengauss}
OPENGAUSS_PORT: ${OPENGAUSS_PORT:-6600}
OPENGAUSS_USER: ${OPENGAUSS_USER:-postgres}
OPENGAUSS_PASSWORD: ${OPENGAUSS_PASSWORD:-Dify@123}
OPENGAUSS_DATABASE: ${OPENGAUSS_DATABASE:-dify}
OPENGAUSS_MIN_CONNECTION: ${OPENGAUSS_MIN_CONNECTION:-1}
OPENGAUSS_MAX_CONNECTION: ${OPENGAUSS_MAX_CONNECTION:-5}
OPENGAUSS_ENABLE_PQ: ${OPENGAUSS_ENABLE_PQ:-false}
HUAWEI_CLOUD_HOSTS: ${HUAWEI_CLOUD_HOSTS:-https://127.0.0.1:9200}
HUAWEI_CLOUD_USER: ${HUAWEI_CLOUD_USER:-admin}
HUAWEI_CLOUD_PASSWORD: ${HUAWEI_CLOUD_PASSWORD:-admin}
UPSTASH_VECTOR_URL: ${UPSTASH_VECTOR_URL:-https://xxx-vector.upstash.io}
UPSTASH_VECTOR_TOKEN: ${UPSTASH_VECTOR_TOKEN:-dify}
TABLESTORE_ENDPOINT: ${TABLESTORE_ENDPOINT:-https://instance-name.cn-hangzhou.ots.aliyuncs.com}
TABLESTORE_INSTANCE_NAME: ${TABLESTORE_INSTANCE_NAME:-instance-name}
TABLESTORE_ACCESS_KEY_ID: ${TABLESTORE_ACCESS_KEY_ID:-xxx}
TABLESTORE_ACCESS_KEY_SECRET: ${TABLESTORE_ACCESS_KEY_SECRET:-xxx}
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE: ${TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE:-false}
CLICKZETTA_USERNAME: ${CLICKZETTA_USERNAME:-}
CLICKZETTA_PASSWORD: ${CLICKZETTA_PASSWORD:-}
CLICKZETTA_INSTANCE: ${CLICKZETTA_INSTANCE:-}
CLICKZETTA_SERVICE: ${CLICKZETTA_SERVICE:-api.clickzetta.com}
CLICKZETTA_WORKSPACE: ${CLICKZETTA_WORKSPACE:-quick_start}
CLICKZETTA_VCLUSTER: ${CLICKZETTA_VCLUSTER:-default_ap}
CLICKZETTA_SCHEMA: ${CLICKZETTA_SCHEMA:-dify}
CLICKZETTA_BATCH_SIZE: ${CLICKZETTA_BATCH_SIZE:-100}
CLICKZETTA_ENABLE_INVERTED_INDEX: ${CLICKZETTA_ENABLE_INVERTED_INDEX:-true}
CLICKZETTA_ANALYZER_TYPE: ${CLICKZETTA_ANALYZER_TYPE:-chinese}
CLICKZETTA_ANALYZER_MODE: ${CLICKZETTA_ANALYZER_MODE:-smart}
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: ${CLICKZETTA_VECTOR_DISTANCE_FUNCTION:-cosine_distance}
IRIS_HOST: ${IRIS_HOST:-iris}
IRIS_SUPER_SERVER_PORT: ${IRIS_SUPER_SERVER_PORT:-1972}
IRIS_WEB_SERVER_PORT: ${IRIS_WEB_SERVER_PORT:-52773}
IRIS_USER: ${IRIS_USER:-_SYSTEM}
IRIS_PASSWORD: ${IRIS_PASSWORD:-Dify@1234}
IRIS_DATABASE: ${IRIS_DATABASE:-USER}
IRIS_SCHEMA: ${IRIS_SCHEMA:-dify}
IRIS_CONNECTION_URL: ${IRIS_CONNECTION_URL:-}
IRIS_MIN_CONNECTION: ${IRIS_MIN_CONNECTION:-1}
IRIS_MAX_CONNECTION: ${IRIS_MAX_CONNECTION:-3}
IRIS_TEXT_INDEX: ${IRIS_TEXT_INDEX:-true}
IRIS_TEXT_INDEX_LANGUAGE: ${IRIS_TEXT_INDEX_LANGUAGE:-en}
IRIS_TIMEZONE: ${IRIS_TIMEZONE:-UTC}
UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15}
UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT:-5}
UPLOAD_FILE_EXTENSION_BLACKLIST: ${UPLOAD_FILE_EXTENSION_BLACKLIST:-}
SINGLE_CHUNK_ATTACHMENT_LIMIT: ${SINGLE_CHUNK_ATTACHMENT_LIMIT:-10}
IMAGE_FILE_BATCH_LIMIT: ${IMAGE_FILE_BATCH_LIMIT:-10}
ATTACHMENT_IMAGE_FILE_SIZE_LIMIT: ${ATTACHMENT_IMAGE_FILE_SIZE_LIMIT:-2}
ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT: ${ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT:-60}
ETL_TYPE: ${ETL_TYPE:-dify}
UNSTRUCTURED_API_URL: ${UNSTRUCTURED_API_URL:-}
UNSTRUCTURED_API_KEY: ${UNSTRUCTURED_API_KEY:-}
SCARF_NO_ANALYTICS: ${SCARF_NO_ANALYTICS:-true}
PROMPT_GENERATION_MAX_TOKENS: ${PROMPT_GENERATION_MAX_TOKENS:-512}
CODE_GENERATION_MAX_TOKENS: ${CODE_GENERATION_MAX_TOKENS:-1024}
PLUGIN_BASED_TOKEN_COUNTING_ENABLED: ${PLUGIN_BASED_TOKEN_COUNTING_ENABLED:-false}
MULTIMODAL_SEND_FORMAT: ${MULTIMODAL_SEND_FORMAT:-base64}
UPLOAD_IMAGE_FILE_SIZE_LIMIT: ${UPLOAD_IMAGE_FILE_SIZE_LIMIT:-10}
UPLOAD_VIDEO_FILE_SIZE_LIMIT: ${UPLOAD_VIDEO_FILE_SIZE_LIMIT:-100}
UPLOAD_AUDIO_FILE_SIZE_LIMIT: ${UPLOAD_AUDIO_FILE_SIZE_LIMIT:-50}
SENTRY_DSN: ${SENTRY_DSN:-}
API_SENTRY_DSN: ${API_SENTRY_DSN:-}
API_SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
API_SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
WEB_SENTRY_DSN: ${WEB_SENTRY_DSN:-}
PLUGIN_SENTRY_ENABLED: ${PLUGIN_SENTRY_ENABLED:-false}
PLUGIN_SENTRY_DSN: ${PLUGIN_SENTRY_DSN:-}
NOTION_INTEGRATION_TYPE: ${NOTION_INTEGRATION_TYPE:-public}
NOTION_CLIENT_SECRET: ${NOTION_CLIENT_SECRET:-}
NOTION_CLIENT_ID: ${NOTION_CLIENT_ID:-}
NOTION_INTERNAL_SECRET: ${NOTION_INTERNAL_SECRET:-}
MAIL_TYPE: ${MAIL_TYPE:-}
MAIL_DEFAULT_SEND_FROM: ${MAIL_DEFAULT_SEND_FROM:-}
RESEND_API_URL: ${RESEND_API_URL:-https://api.resend.com}
RESEND_API_KEY: ${RESEND_API_KEY:-}
SMTP_SERVER: ${SMTP_SERVER:-}
SMTP_PORT: ${SMTP_PORT:-465}
SMTP_USERNAME: ${SMTP_USERNAME:-}
SMTP_PASSWORD: ${SMTP_PASSWORD:-}
SMTP_USE_TLS: ${SMTP_USE_TLS:-true}
SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false}
SMTP_LOCAL_HOSTNAME: ${SMTP_LOCAL_HOSTNAME:-}
SENDGRID_API_KEY: ${SENDGRID_API_KEY:-}
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000}
INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72}
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5}
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES: ${EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES:-5}
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: ${CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES:-5}
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: ${OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES:-5}
CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
CODE_EXECUTION_API_KEY: ${CODE_EXECUTION_API_KEY:-dify-sandbox}
CODE_EXECUTION_SSL_VERIFY: ${CODE_EXECUTION_SSL_VERIFY:-True}
CODE_EXECUTION_POOL_MAX_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_CONNECTIONS:-100}
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: ${CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY:-5.0}
CODE_MAX_NUMBER: ${CODE_MAX_NUMBER:-9223372036854775807}
CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808}
CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5}
CODE_MAX_PRECISION: ${CODE_MAX_PRECISION:-20}
CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-400000}
CODE_MAX_STRING_ARRAY_LENGTH: ${CODE_MAX_STRING_ARRAY_LENGTH:-30}
CODE_MAX_OBJECT_ARRAY_LENGTH: ${CODE_MAX_OBJECT_ARRAY_LENGTH:-30}
CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000}
CODE_EXECUTION_CONNECT_TIMEOUT: ${CODE_EXECUTION_CONNECT_TIMEOUT:-10}
CODE_EXECUTION_READ_TIMEOUT: ${CODE_EXECUTION_READ_TIMEOUT:-60}
CODE_EXECUTION_WRITE_TIMEOUT: ${CODE_EXECUTION_WRITE_TIMEOUT:-10}
TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-400000}
WORKFLOW_MAX_EXECUTION_STEPS: ${WORKFLOW_MAX_EXECUTION_STEPS:-500}
WORKFLOW_MAX_EXECUTION_TIME: ${WORKFLOW_MAX_EXECUTION_TIME:-1200}
WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5}
MAX_VARIABLE_SIZE: ${MAX_VARIABLE_SIZE:-204800}
WORKFLOW_FILE_UPLOAD_LIMIT: ${WORKFLOW_FILE_UPLOAD_LIMIT:-10}
GRAPH_ENGINE_MIN_WORKERS: ${GRAPH_ENGINE_MIN_WORKERS:-1}
GRAPH_ENGINE_MAX_WORKERS: ${GRAPH_ENGINE_MAX_WORKERS:-10}
GRAPH_ENGINE_SCALE_UP_THRESHOLD: ${GRAPH_ENGINE_SCALE_UP_THRESHOLD:-3}
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME: ${GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME:-5.0}
WORKFLOW_NODE_EXECUTION_STORAGE: ${WORKFLOW_NODE_EXECUTION_STORAGE:-rdbms}
CORE_WORKFLOW_EXECUTION_REPOSITORY: ${CORE_WORKFLOW_EXECUTION_REPOSITORY:-core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository}
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY:-core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository}
API_WORKFLOW_RUN_REPOSITORY: ${API_WORKFLOW_RUN_REPOSITORY:-repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository}
API_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${API_WORKFLOW_NODE_EXECUTION_REPOSITORY:-repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository}
WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false}
WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30}
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100}
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS: ${WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS:-}
ALIYUN_SLS_ACCESS_KEY_ID: ${ALIYUN_SLS_ACCESS_KEY_ID:-}
ALIYUN_SLS_ACCESS_KEY_SECRET: ${ALIYUN_SLS_ACCESS_KEY_SECRET:-}
ALIYUN_SLS_ENDPOINT: ${ALIYUN_SLS_ENDPOINT:-}
ALIYUN_SLS_REGION: ${ALIYUN_SLS_REGION:-}
ALIYUN_SLS_PROJECT_NAME: ${ALIYUN_SLS_PROJECT_NAME:-}
ALIYUN_SLS_LOGSTORE_TTL: ${ALIYUN_SLS_LOGSTORE_TTL:-365}
LOGSTORE_DUAL_WRITE_ENABLED: ${LOGSTORE_DUAL_WRITE_ENABLED:-false}
LOGSTORE_DUAL_READ_ENABLED: ${LOGSTORE_DUAL_READ_ENABLED:-true}
LOGSTORE_ENABLE_PUT_GRAPH_FIELD: ${LOGSTORE_ENABLE_PUT_GRAPH_FIELD:-true}
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760}
HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576}
HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True}
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: ${HTTP_REQUEST_MAX_CONNECT_TIMEOUT:-10}
HTTP_REQUEST_MAX_READ_TIMEOUT: ${HTTP_REQUEST_MAX_READ_TIMEOUT:-600}
HTTP_REQUEST_MAX_WRITE_TIMEOUT: ${HTTP_REQUEST_MAX_WRITE_TIMEOUT:-600}
WEBHOOK_REQUEST_BODY_MAX_SIZE: ${WEBHOOK_REQUEST_BODY_MAX_SIZE:-10485760}
RESPECT_XFORWARD_HEADERS_ENABLED: ${RESPECT_XFORWARD_HEADERS_ENABLED:-false}
SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128}
SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128}
LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
ALLOW_INLINE_STYLES: ${ALLOW_INLINE_STYLES:-false}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MAX_TREE_DEPTH: ${MAX_TREE_DEPTH:-50}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
MYSQL_HOST_VOLUME: ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}
SANDBOX_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
SANDBOX_GIN_MODE: ${SANDBOX_GIN_MODE:-release}
SANDBOX_WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15}
SANDBOX_ENABLE_NETWORK: ${SANDBOX_ENABLE_NETWORK:-true}
SANDBOX_HTTP_PROXY: ${SANDBOX_HTTP_PROXY:-http://ssrf_proxy:3128}
SANDBOX_HTTPS_PROXY: ${SANDBOX_HTTPS_PROXY:-http://ssrf_proxy:3128}
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
WEAVIATE_PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
WEAVIATE_QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-true}
WEAVIATE_DEFAULT_VECTORIZER_MODULE: ${WEAVIATE_DEFAULT_VECTORIZER_MODULE:-none}
WEAVIATE_CLUSTER_HOSTNAME: ${WEAVIATE_CLUSTER_HOSTNAME:-node1}
WEAVIATE_AUTHENTICATION_APIKEY_ENABLED: ${WEAVIATE_AUTHENTICATION_APIKEY_ENABLED:-true}
WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
WEAVIATE_AUTHENTICATION_APIKEY_USERS: ${WEAVIATE_AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true}
WEAVIATE_AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
WEAVIATE_DISABLE_TELEMETRY: ${WEAVIATE_DISABLE_TELEMETRY:-false}
WEAVIATE_ENABLE_TOKENIZER_GSE: ${WEAVIATE_ENABLE_TOKENIZER_GSE:-false}
WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA: ${WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA:-false}
WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR: ${WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR:-false}
CHROMA_SERVER_AUTHN_CREDENTIALS: ${CHROMA_SERVER_AUTHN_CREDENTIALS:-difyai123456}
CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider}
CHROMA_IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE}
ORACLE_PWD: ${ORACLE_PWD:-Dify123456}
ORACLE_CHARACTERSET: ${ORACLE_CHARACTERSET:-AL32UTF8}
ETCD_AUTO_COMPACTION_MODE: ${ETCD_AUTO_COMPACTION_MODE:-revision}
ETCD_AUTO_COMPACTION_RETENTION: ${ETCD_AUTO_COMPACTION_RETENTION:-1000}
ETCD_QUOTA_BACKEND_BYTES: ${ETCD_QUOTA_BACKEND_BYTES:-4294967296}
ETCD_SNAPSHOT_COUNT: ${ETCD_SNAPSHOT_COUNT:-50000}
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-minioadmin}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:-minioadmin}
ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379}
MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000}
MILVUS_AUTHORIZATION_ENABLED: ${MILVUS_AUTHORIZATION_ENABLED:-true}
PGVECTOR_PGUSER: ${PGVECTOR_PGUSER:-postgres}
PGVECTOR_POSTGRES_PASSWORD: ${PGVECTOR_POSTGRES_PASSWORD:-difyai123456}
PGVECTOR_POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
PGVECTOR_PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
OPENSEARCH_DISCOVERY_TYPE: ${OPENSEARCH_DISCOVERY_TYPE:-single-node}
OPENSEARCH_BOOTSTRAP_MEMORY_LOCK: ${OPENSEARCH_BOOTSTRAP_MEMORY_LOCK:-true}
OPENSEARCH_JAVA_OPTS_MIN: ${OPENSEARCH_JAVA_OPTS_MIN:-512m}
OPENSEARCH_JAVA_OPTS_MAX: ${OPENSEARCH_JAVA_OPTS_MAX:-1024m}
OPENSEARCH_INITIAL_ADMIN_PASSWORD: ${OPENSEARCH_INITIAL_ADMIN_PASSWORD:-Qazwsxedc!@#123}
OPENSEARCH_MEMLOCK_SOFT: ${OPENSEARCH_MEMLOCK_SOFT:--1}
OPENSEARCH_MEMLOCK_HARD: ${OPENSEARCH_MEMLOCK_HARD:--1}
OPENSEARCH_NOFILE_SOFT: ${OPENSEARCH_NOFILE_SOFT:-65536}
OPENSEARCH_NOFILE_HARD: ${OPENSEARCH_NOFILE_HARD:-65536}
NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
NGINX_PORT: ${NGINX_PORT:-80}
NGINX_SSL_PORT: ${NGINX_SSL_PORT:-443}
NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3}
NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
NGINX_PROXY_READ_TIMEOUT: ${NGINX_PROXY_READ_TIMEOUT:-3600s}
NGINX_PROXY_SEND_TIMEOUT: ${NGINX_PROXY_SEND_TIMEOUT:-3600s}
NGINX_ENABLE_CERTBOT_CHALLENGE: ${NGINX_ENABLE_CERTBOT_CHALLENGE:-false}
CERTBOT_EMAIL: ${CERTBOT_EMAIL:-}
CERTBOT_DOMAIN: ${CERTBOT_DOMAIN:-}
CERTBOT_OPTIONS: ${CERTBOT_OPTIONS:-}
SSRF_HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
SSRF_COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
SSRF_REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
SSRF_SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
SSRF_DEFAULT_TIME_OUT: ${SSRF_DEFAULT_TIME_OUT:-5}
SSRF_DEFAULT_CONNECT_TIME_OUT: ${SSRF_DEFAULT_CONNECT_TIME_OUT:-5}
SSRF_DEFAULT_READ_TIME_OUT: ${SSRF_DEFAULT_READ_TIME_OUT:-5}
SSRF_DEFAULT_WRITE_TIME_OUT: ${SSRF_DEFAULT_WRITE_TIME_OUT:-5}
SSRF_POOL_MAX_CONNECTIONS: ${SSRF_POOL_MAX_CONNECTIONS:-100}
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: ${SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
SSRF_POOL_KEEPALIVE_EXPIRY: ${SSRF_POOL_KEEPALIVE_EXPIRY:-5.0}
EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}
POSITION_TOOL_INCLUDES: ${POSITION_TOOL_INCLUDES:-}
POSITION_TOOL_EXCLUDES: ${POSITION_TOOL_EXCLUDES:-}
POSITION_PROVIDER_PINS: ${POSITION_PROVIDER_PINS:-}
POSITION_PROVIDER_INCLUDES: ${POSITION_PROVIDER_INCLUDES:-}
POSITION_PROVIDER_EXCLUDES: ${POSITION_PROVIDER_EXCLUDES:-}
CSP_WHITELIST: ${CSP_WHITELIST:-}
CREATE_TIDB_SERVICE_JOB_ENABLED: ${CREATE_TIDB_SERVICE_JOB_ENABLED:-false}
MAX_SUBMIT_COUNT: ${MAX_SUBMIT_COUNT:-100}
TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-10}
DB_PLUGIN_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
EXPOSE_PLUGIN_DAEMON_PORT: ${EXPOSE_PLUGIN_DAEMON_PORT:-5002}
PLUGIN_DAEMON_PORT: ${PLUGIN_DAEMON_PORT:-5002}
PLUGIN_DAEMON_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi}
PLUGIN_DAEMON_URL: ${PLUGIN_DAEMON_URL:-http://plugin_daemon:5002}
PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
PLUGIN_MODEL_SCHEMA_CACHE_TTL: ${PLUGIN_MODEL_SCHEMA_CACHE_TTL:-3600}
PLUGIN_PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false}
PLUGIN_DEBUGGING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0}
PLUGIN_DEBUGGING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
EXPOSE_PLUGIN_DEBUGGING_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost}
EXPOSE_PLUGIN_DEBUGGING_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}
PLUGIN_DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
PLUGIN_DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://api:5001}
ENDPOINT_URL_TEMPLATE: ${ENDPOINT_URL_TEMPLATE:-http://localhost/e/{hook_id}}
MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
CREATORS_PLATFORM_FEATURES_ENABLED: ${CREATORS_PLATFORM_FEATURES_ENABLED:-true}
CREATORS_PLATFORM_API_URL: ${CREATORS_PLATFORM_API_URL:-https://creators.dify.ai}
CREATORS_PLATFORM_OAUTH_CLIENT_ID: ${CREATORS_PLATFORM_OAUTH_CLIENT_ID:-}
FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
ENFORCE_LANGGENIUS_PLUGIN_SIGNATURES: ${ENFORCE_LANGGENIUS_PLUGIN_SIGNATURES:-true}
PLUGIN_STDIO_BUFFER_SIZE: ${PLUGIN_STDIO_BUFFER_SIZE:-1024}
PLUGIN_STDIO_MAX_BUFFER_SIZE: ${PLUGIN_STDIO_MAX_BUFFER_SIZE:-5242880}
PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
PLUGIN_DAEMON_TIMEOUT: ${PLUGIN_DAEMON_TIMEOUT:-600.0}
PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local}
PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage}
PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin}
PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
PLUGIN_S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
PLUGIN_S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
PLUGIN_S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
PLUGIN_S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
PLUGIN_AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
PLUGIN_AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-}
PLUGIN_AWS_REGION: ${PLUGIN_AWS_REGION:-}
PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-}
PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-}
PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
PLUGIN_ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
PLUGIN_ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
PLUGIN_ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
PLUGIN_ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
PLUGIN_VOLCENGINE_TOS_ENDPOINT: ${PLUGIN_VOLCENGINE_TOS_ENDPOINT:-}
PLUGIN_VOLCENGINE_TOS_ACCESS_KEY: ${PLUGIN_VOLCENGINE_TOS_ACCESS_KEY:-}
PLUGIN_VOLCENGINE_TOS_SECRET_KEY: ${PLUGIN_VOLCENGINE_TOS_SECRET_KEY:-}
PLUGIN_VOLCENGINE_TOS_REGION: ${PLUGIN_VOLCENGINE_TOS_REGION:-}
ENABLE_OTEL: ${ENABLE_OTEL:-false}
OTLP_TRACE_ENDPOINT: ${OTLP_TRACE_ENDPOINT:-}
OTLP_METRIC_ENDPOINT: ${OTLP_METRIC_ENDPOINT:-}
OTLP_BASE_ENDPOINT: ${OTLP_BASE_ENDPOINT:-http://localhost:4318}
OTLP_API_KEY: ${OTLP_API_KEY:-}
OTEL_EXPORTER_OTLP_PROTOCOL: ${OTEL_EXPORTER_OTLP_PROTOCOL:-}
OTEL_EXPORTER_TYPE: ${OTEL_EXPORTER_TYPE:-otlp}
OTEL_SAMPLING_RATE: ${OTEL_SAMPLING_RATE:-0.1}
OTEL_BATCH_EXPORT_SCHEDULE_DELAY: ${OTEL_BATCH_EXPORT_SCHEDULE_DELAY:-5000}
OTEL_MAX_QUEUE_SIZE: ${OTEL_MAX_QUEUE_SIZE:-2048}
OTEL_MAX_EXPORT_BATCH_SIZE: ${OTEL_MAX_EXPORT_BATCH_SIZE:-512}
OTEL_METRIC_EXPORT_INTERVAL: ${OTEL_METRIC_EXPORT_INTERVAL:-60000}
OTEL_BATCH_EXPORT_TIMEOUT: ${OTEL_BATCH_EXPORT_TIMEOUT:-10000}
OTEL_METRIC_EXPORT_TIMEOUT: ${OTEL_METRIC_EXPORT_TIMEOUT:-30000}
ALLOW_EMBED: ${ALLOW_EMBED:-false}
QUEUE_MONITOR_THRESHOLD: ${QUEUE_MONITOR_THRESHOLD:-200}
QUEUE_MONITOR_ALERT_EMAILS: ${QUEUE_MONITOR_ALERT_EMAILS:-}
QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30}
SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-false}
SWAGGER_UI_PATH: ${SWAGGER_UI_PATH:-/swagger-ui.html}
DSL_EXPORT_ENCRYPT_DATASET_ID: ${DSL_EXPORT_ENCRYPT_DATASET_ID:-true}
DATASET_MAX_SEGMENTS_PER_REQUEST: ${DATASET_MAX_SEGMENTS_PER_REQUEST:-0}
ENABLE_CLEAN_EMBEDDING_CACHE_TASK: ${ENABLE_CLEAN_EMBEDDING_CACHE_TASK:-false}
ENABLE_CLEAN_UNUSED_DATASETS_TASK: ${ENABLE_CLEAN_UNUSED_DATASETS_TASK:-false}
ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false}
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: ${ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:-false}
ENABLE_CLEAN_MESSAGES: ${ENABLE_CLEAN_MESSAGES:-false}
ENABLE_WORKFLOW_RUN_CLEANUP_TASK: ${ENABLE_WORKFLOW_RUN_CLEANUP_TASK:-false}
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false}
ENABLE_DATASETS_QUEUE_MONITOR: ${ENABLE_DATASETS_QUEUE_MONITOR:-false}
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true}
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: ${ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK:-true}
WORKFLOW_SCHEDULE_POLLER_INTERVAL: ${WORKFLOW_SCHEDULE_POLLER_INTERVAL:-1}
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE: ${WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE:-100}
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK: ${WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK:-0}
TENANT_ISOLATED_TASK_CONCURRENCY: ${TENANT_ISOLATED_TASK_CONCURRENCY:-1}
ANNOTATION_IMPORT_FILE_SIZE_LIMIT: ${ANNOTATION_IMPORT_FILE_SIZE_LIMIT:-2}
ANNOTATION_IMPORT_MAX_RECORDS: ${ANNOTATION_IMPORT_MAX_RECORDS:-10000}
ANNOTATION_IMPORT_MIN_RECORDS: ${ANNOTATION_IMPORT_MIN_RECORDS:-1}
ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE: ${ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE:-5}
ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR: ${ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR:-20}
ANNOTATION_IMPORT_MAX_CONCURRENT: ${ANNOTATION_IMPORT_MAX_CONCURRENT:-5}
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
EVENT_BUS_REDIS_URL: ${EVENT_BUS_REDIS_URL:-}
EVENT_BUS_REDIS_CHANNEL_TYPE: ${EVENT_BUS_REDIS_CHANNEL_TYPE:-pubsub}
EVENT_BUS_REDIS_USE_CLUSTERS: ${EVENT_BUS_REDIS_USE_CLUSTERS:-false}
ENABLE_HUMAN_INPUT_TIMEOUT_TASK: ${ENABLE_HUMAN_INPUT_TIMEOUT_TASK:-true}
HUMAN_INPUT_TIMEOUT_TASK_INTERVAL: ${HUMAN_INPUT_TIMEOUT_TASK_INTERVAL:-1}
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL:-90000}
services:
# Init container to fix permissions
init_permissions:
@ -22,12 +747,9 @@ services:
api:
image: langgenius/dify-api:1.14.0
restart: always
env_file:
# Defaults checked into git; user overrides go in .env.
# `cp .env.example .env` once before `docker compose up`.
- .env.example
- .env
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
# Startup mode, 'api' starts the API server.
MODE: api
SENTRY_DSN: ${API_SENTRY_DSN:-}
@ -73,10 +795,9 @@ services:
worker:
image: langgenius/dify-api:1.14.0
restart: always
env_file:
- .env.example
- .env
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
# Startup mode, 'worker' starts the Celery worker for processing all queues.
MODE: worker
SENTRY_DSN: ${API_SENTRY_DSN:-}
@ -120,10 +841,9 @@ services:
worker_beat:
image: langgenius/dify-api:1.14.0
restart: always
env_file:
- .env.example
- .env
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
# Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks.
MODE: beat
depends_on:
@ -298,10 +1018,9 @@ services:
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.6.0-local
restart: always
env_file:
- .env.example
- .env
environment:
# Use the shared environment variables.
<<: *shared-api-worker-env
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
DB_SSL_MODE: ${DB_SSL_MODE:-disable}
SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
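
In the hunks above, each service pulls in the shared variables through YAML's merge key (`<<: *shared-api-worker-env`), and Docker Compose then resolves every `${KEY:-default}` reference in that anchor against `.env` and the host environment. The sketch below only illustrates that `:-` interpolation rule (it mimics Compose's behavior, it is not code from this repo); the variable name and default value are examples.

```python
# Minimal sketch of how a `${KEY:-default}` entry from the shared anchor
# resolves; this mimics Docker Compose interpolation and is not repo code.
import os

def resolve(key: str, default: str) -> str:
    # `${KEY:-default}`: use the value from .env / the host environment when it
    # is set and non-empty, otherwise fall back to the default in the anchor.
    value = os.environ.get(key, "")
    return value if value else default

print(resolve("PLUGIN_DAEMON_PORT", "5002"))  # "5002" unless overridden in .env
```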

docker/generate_docker_compose Executable file
View File

@ -0,0 +1,127 @@
#!/usr/bin/env python3
import os
import re
import sys
# Variables that exist only for Docker Compose orchestration and must NOT be
# injected into containers as environment variables.
SHARED_ENV_EXCLUDE = frozenset(
[
# Docker Compose profile selection
"COMPOSE_PROFILES",
# Worker health check orchestration flags (consumed by docker-compose,
# not by the application running inside the container)
"COMPOSE_WORKER_HEALTHCHECK_DISABLED",
"COMPOSE_WORKER_HEALTHCHECK_INTERVAL",
"COMPOSE_WORKER_HEALTHCHECK_TIMEOUT",
]
)
def parse_env_example(file_path):
"""
Parses the .env.example file and returns a dictionary with variable names as keys and default values as values.
"""
env_vars = {}
with open(file_path, "r", encoding="utf-8") as f:
for line_number, line in enumerate(f, 1):
line = line.strip()
# Ignore empty lines and comments
if not line or line.startswith("#"):
continue
# Use regex to parse KEY=VALUE
match = re.match(r"^([^=]+)=(.*)$", line)
if match:
key = match.group(1).strip()
value = match.group(2).strip()
# Remove possible quotes around the value
if (value.startswith('"') and value.endswith('"')) or (
value.startswith("'") and value.endswith("'")
):
value = value[1:-1]
env_vars[key] = value
else:
print(f"Warning: Unable to parse line {line_number}: {line}")
return env_vars
def generate_shared_env_block(env_vars, anchor_name="shared-api-worker-env"):
"""
Generates a shared environment variables block as a YAML string.
"""
lines = [f"x-shared-env: &{anchor_name}"]
for key, default in env_vars.items():
if key in SHARED_ENV_EXCLUDE:
continue
# If default value is empty, use ${KEY:-}
if default == "":
lines.append(f" {key}: ${{{key}:-}}")
else:
# Defaults that contain colons or spaces are emitted as-is; Docker Compose
# interpolation handles them without extra quoting.
lines.append(f" {key}: ${{{key}:-{default}}}")
return "\n".join(lines)
def insert_shared_env(template_path, output_path, shared_env_block, header_comments):
"""
Inserts the shared environment variables block and header comments into the template file,
removing any existing x-shared-env anchors, and generates the final docker-compose.yaml file.
Always writes with LF line endings.
"""
with open(template_path, "r", encoding="utf-8") as f:
template_content = f.read()
# Remove existing x-shared-env: &shared-api-worker-env lines
template_content = re.sub(
r"^x-shared-env: &shared-api-worker-env\s*\n?",
"",
template_content,
flags=re.MULTILINE,
)
# Prepare the final content with header comments and shared env block
final_content = f"{header_comments}\n{shared_env_block}\n\n{template_content}"
with open(output_path, "w", encoding="utf-8", newline="\n") as f:
f.write(final_content)
print(f"Generated {output_path}")
def main():
env_example_path = ".env.example"
template_path = "docker-compose-template.yaml"
output_path = "docker-compose.yaml"
anchor_name = "shared-api-worker-env" # Can be modified as needed
# Define header comments to be added at the top of docker-compose.yaml
header_comments = (
"# ==================================================================\n"
"# WARNING: This file is auto-generated by generate_docker_compose\n"
"# Do not modify this file directly. Instead, update the .env.example\n"
"# or docker-compose-template.yaml and regenerate this file.\n"
"# ==================================================================\n"
)
# Check if required files exist
for path in [env_example_path, template_path]:
if not os.path.isfile(path):
print(f"Error: File {path} does not exist.")
sys.exit(1)
# Parse .env.example file
env_vars = parse_env_example(env_example_path)
if not env_vars:
print("Warning: No environment variables found in .env.example.")
# Generate shared environment variables block
shared_env_block = generate_shared_env_block(env_vars, anchor_name)
# Insert shared environment variables block and header comments into the template
insert_shared_env(template_path, output_path, shared_env_block, header_comments)
if __name__ == "__main__":
main()
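
Taken together, the script turns each `KEY=value` pair from `.env.example` into a `KEY: ${KEY:-value}` line under the `x-shared-env` anchor (skipping the Compose-only keys listed in `SHARED_ENV_EXCLUDE`) and splices that block, plus the warning header, into `docker-compose-template.yaml` to produce `docker-compose.yaml`. A minimal sketch of the transformation, using two illustrative entries rather than the real `.env.example` (the values here are examples, not necessarily the shipped defaults):

```python
# Sketch only: reproduces the core of generate_shared_env_block() for two
# illustrative entries; the real script reads the full .env.example.
def shared_env_lines(env_vars: dict[str, str]) -> str:
    lines = ["x-shared-env: &shared-api-worker-env"]
    for key, default in env_vars.items():
        lines.append(f"  {key}: ${{{key}:-{default}}}")
    return "\n".join(lines)

print(shared_env_lines({"EXPOSE_NGINX_PORT": "80", "API_SENTRY_DSN": ""}))
# x-shared-env: &shared-api-worker-env
#   EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
#   API_SENTRY_DSN: ${API_SENTRY_DSN:-}
```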

View File

@ -127,7 +127,7 @@ const createApp = (overrides: Partial<App> = {}): App => ({
copyright: overrides.copyright ?? '',
privacy_policy: overrides.privacy_policy ?? null,
custom_disclaimer: overrides.custom_disclaimer ?? null,
categories: overrides.categories ?? ['Writing'],
category: overrides.category ?? 'Writing',
position: overrides.position ?? 1,
is_listed: overrides.is_listed ?? true,
install_count: overrides.install_count ?? 0,
@ -165,9 +165,9 @@ describe('Explore App List Flow', () => {
mockExploreData = {
categories: ['Writing', 'Translate', 'Programming'],
allList: [
createApp({ app_id: 'app-1', app: { ...createApp().app, name: 'Writer Bot' }, categories: ['Writing'] }),
createApp({ app_id: 'app-2', app: { ...createApp().app, id: 'app-id-2', name: 'Translator' }, categories: ['Translate'] }),
createApp({ app_id: 'app-3', app: { ...createApp().app, id: 'app-id-3', name: 'Code Helper' }, categories: ['Programming'] }),
createApp({ app_id: 'app-1', app: { ...createApp().app, name: 'Writer Bot' }, category: 'Writing' }),
createApp({ app_id: 'app-2', app: { ...createApp().app, id: 'app-id-2', name: 'Translator' }, category: 'Translate' }),
createApp({ app_id: 'app-3', app: { ...createApp().app, id: 'app-id-3', name: 'Code Helper' }, category: 'Programming' }),
],
}
})
@ -190,30 +190,6 @@ describe('Explore App List Flow', () => {
expect(screen.queryByText('Code Helper')).not.toBeInTheDocument()
})
it('should only use categories when filtering by selected category', () => {
mockTabValue = 'Writing'
mockExploreData = {
categories: ['Writing', 'Translate'],
allList: [
createApp({
app_id: 'app-1',
app: { ...createApp().app, name: 'Active Writer' },
categories: ['Writing'],
}),
createApp({
app_id: 'app-2',
app: { ...createApp().app, id: 'app-id-2', name: 'Legacy Writer' },
categories: [],
}),
],
}
renderAppList()
expect(screen.getByText('Active Writer')).toBeInTheDocument()
expect(screen.queryByText('Legacy Writer')).not.toBeInTheDocument()
})
it('should filter apps by search keyword', async () => {
renderAppList()

View File

@ -35,7 +35,7 @@ const mockApp: App = {
copyright: 'Test Corp',
privacy_policy: null,
custom_disclaimer: null,
categories: ['Assistant'],
category: 'Assistant',
position: 1,
is_listed: true,
install_count: 100,
@ -253,7 +253,7 @@ describe('AppCard', () => {
template_id: mockApp.app_id,
template_name: mockApp.app.name,
template_mode: mockApp.app.mode,
template_categories: mockApp.categories,
template_category: mockApp.category,
page: 'studio',
})
expect(mockSetShowTryAppPanel).toHaveBeenCalledWith(true, {

View File

@ -35,7 +35,7 @@ const AppCard = ({
template_id: app.app_id,
template_name: appBasicInfo.name,
template_mode: appBasicInfo.mode,
template_categories: app.categories,
template_category: app.category,
page: 'studio',
})
setShowTryAppPanel?.(true, { appId: app.app_id, app })

View File

@ -115,7 +115,7 @@ vi.mock('@/next/navigation', () => ({
const createAppEntry = (name: string, category: string) => ({
app_id: name,
categories: [category],
category,
app: {
id: name,
name,

View File

@ -74,7 +74,7 @@ const Apps = ({
const filteredByCategory = allList.filter((item) => {
if (currCategory === allCategoriesEn)
return true
return item.categories?.includes(currCategory) ?? false
return item.category === currCategory
})
if (currentType.length === 0)
return filteredByCategory

View File

@ -31,7 +31,7 @@ const mockFetchAppDetail = vi.mocked(fetchAppDetail)
const mockTemplateApp: App = {
app_id: 'template-1',
categories: ['Assistant'],
category: 'Assistant',
app: {
id: 'template-1',
mode: AppModeEnum.CHAT,

View File

@ -151,7 +151,7 @@ const Apps = () => {
<TryApp
appId={currentTryAppParams?.appId || ''}
app={currentTryAppParams?.app}
categories={currentTryAppParams?.app?.categories}
category={currentTryAppParams?.app?.category}
onClose={hideTryAppPanel}
onCreate={handleShowFromTryApp}
/>

View File

@ -22,7 +22,7 @@ const createApp = (overrides?: Partial<App>): App => ({
copyright: '2024',
privacy_policy: null,
custom_disclaimer: null,
categories: ['Assistant'],
category: 'Assistant',
position: 1,
is_listed: true,
install_count: 0,
@ -167,7 +167,7 @@ describe('AppCard', () => {
template_id: app.app_id,
template_name: app.app.name,
template_mode: app.app.mode,
template_categories: app.categories,
template_category: app.category,
page: 'explore',
})
})

View File

@ -37,7 +37,7 @@ const AppCard = ({
template_id: app.app_id,
template_name: appBasicInfo.name,
template_mode: appBasicInfo.mode,
template_categories: app.categories,
template_category: app.category,
page: 'explore',
})
onTry({ appId: app.app_id, app })

View File

@ -115,7 +115,7 @@ const createApp = (overrides: Partial<App> = {}): App => ({
copyright: overrides.copyright ?? '',
privacy_policy: overrides.privacy_policy ?? null,
custom_disclaimer: overrides.custom_disclaimer ?? null,
categories: overrides.categories ?? ['Writing'],
category: overrides.category ?? 'Writing',
position: overrides.position ?? 1,
is_listed: overrides.is_listed ?? true,
install_count: overrides.install_count ?? 0,
@ -185,7 +185,7 @@ describe('AppList', () => {
it('should render app cards when data is available', () => {
mockExploreData = {
categories: ['Writing', 'Translate'],
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Beta' }, categories: ['Translate'] })],
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Beta' }, category: 'Translate' })],
}
renderAppList()
@ -199,7 +199,7 @@ describe('AppList', () => {
it('should filter apps by selected category', () => {
mockExploreData = {
categories: ['Writing', 'Translate'],
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Beta' }, categories: ['Translate'] })],
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Beta' }, category: 'Translate' })],
}
renderAppList(false, undefined, { category: 'Writing' })

View File

@ -77,10 +77,7 @@ const Apps = ({
const filteredList = useMemo(() => {
if (!data)
return []
return data.allList.filter(item => (
currCategory === allCategoriesEn
|| item.categories?.includes(currCategory)
))
return data.allList.filter(item => currCategory === allCategoriesEn || item.category === currCategory)
}, [data, currCategory, allCategoriesEn])
const searchFilteredList = useMemo(() => {
@ -280,7 +277,7 @@ const Apps = ({
<TryApp
appId={currentTryApp?.appId || ''}
app={currentTryApp?.app}
categories={currentTryApp?.app?.categories}
category={currentTryApp?.app?.category}
onClose={hideTryAppPanel}
onCreate={handleShowFromTryApp}
/>

View File

@ -33,11 +33,6 @@ const Category: FC<ICategoryProps> = ({
isSelected && 'border-components-main-nav-nav-button-border bg-components-main-nav-nav-button-bg-active text-components-main-nav-nav-button-text-active shadow-xs',
)
const renderCategoryName = (name: AppCategory) => {
const categoryKey = `category.${name}` as keyof typeof exploreI18n
return categoryKey in exploreI18n ? t(categoryKey, { ns: 'explore' }) : name
}
return (
<div className={cn(className, 'flex flex-wrap gap-1 text-[13px]')}>
<div
@ -53,7 +48,7 @@ const Category: FC<ICategoryProps> = ({
className={itemClassName(name === value)}
onClick={() => onChange(name)}
>
{renderCategoryName(name)}
{`category.${name}` in exploreI18n ? t(`category.${name}`, { ns: 'explore' }) : name}
</div>
))}
</div>

View File

@ -39,14 +39,14 @@ vi.mock('../app-info', () => ({
default: ({
appId,
appDetail,
categories,
category,
className,
onCreate,
}: { appId: string, appDetail: TryAppInfo, categories?: string[], className?: string, onCreate: () => void }) => (
}: { appId: string, appDetail: TryAppInfo, category?: string, className?: string, onCreate: () => void }) => (
<div
data-testid="app-info-component"
data-app-id={appId}
data-categories={categories?.join(',')}
data-category={category}
className={className}
>
<button data-testid="create-button" onClick={onCreate}>Create</button>
@ -283,12 +283,12 @@ describe('TryApp (main index.tsx)', () => {
})
})
describe('categories prop', () => {
it('passes categories to AppInfo when provided', async () => {
describe('category prop', () => {
it('passes category to AppInfo when provided', async () => {
render(
<TryApp
appId="test-app-id"
categories={['AI Assistant', 'Workflow']}
category="AI Assistant"
onClose={vi.fn()}
onCreate={vi.fn()}
/>,
@ -296,11 +296,11 @@ describe('TryApp (main index.tsx)', () => {
await waitFor(() => {
const appInfo = document.body.querySelector('[data-testid="app-info-component"]')
expect(appInfo).toHaveAttribute('data-categories', 'AI Assistant,Workflow')
expect(appInfo).toHaveAttribute('data-category', 'AI Assistant')
})
})
it('does not pass categories to AppInfo when not provided', async () => {
it('does not pass category to AppInfo when not provided', async () => {
render(
<TryApp
appId="test-app-id"
@ -311,7 +311,7 @@ describe('TryApp (main index.tsx)', () => {
await waitFor(() => {
const appInfo = document.body.querySelector('[data-testid="app-info-component"]')
expect(appInfo).not.toHaveAttribute('data-categories', expect.any(String))
expect(appInfo).not.toHaveAttribute('data-category', expect.any(String))
})
})
})

View File

@ -235,8 +235,8 @@ describe('AppInfo', () => {
})
})
describe('categories', () => {
it('renders categories when provided', () => {
describe('category', () => {
it('renders category when provided', () => {
const appDetail = createMockAppDetail('chat')
const mockOnCreate = vi.fn()
@ -244,17 +244,16 @@ describe('AppInfo', () => {
<AppInfo
appId="test-app-id"
appDetail={appDetail}
categories={['AI Assistant', 'Workflow']}
category="AI Assistant"
onCreate={mockOnCreate}
/>,
)
expect(screen.getByText('explore.tryApp.category')).toBeInTheDocument()
expect(screen.getByText('AI Assistant')).toBeInTheDocument()
expect(screen.getByText('Workflow')).toBeInTheDocument()
})
it('does not render categories section when not provided', () => {
it('does not render category section when not provided', () => {
const appDetail = createMockAppDetail('chat')
const mockOnCreate = vi.fn()

View File

@ -12,7 +12,7 @@ import useGetRequirements from './use-get-requirements'
type Props = {
appId: string
appDetail: TryAppInfo
categories?: string[]
category?: string
className?: string
onCreate: () => void
}
@ -52,13 +52,12 @@ const RequirementIcon: FC<RequirementIconProps> = ({ iconUrl }) => {
const AppInfo: FC<Props> = ({
appId,
className,
categories,
category,
appDetail,
onCreate,
}) => {
const { t } = useTranslation()
const mode = appDetail?.mode
const visibleCategories = Array.from(new Set(categories?.filter(Boolean) ?? []))
const { requirements } = useGetRequirements({ appDetail, appId })
return (
<div className={cn('flex h-full flex-col px-4 pt-2', className)}>
@ -99,19 +98,10 @@ const AppInfo: FC<Props> = ({
<span className="truncate">{t('tryApp.createFromSampleApp', { ns: 'explore' })}</span>
</Button>
{visibleCategories.length > 0 && (
{category && (
<div className="mt-6 shrink-0">
<div className={headerClassName}>{t('tryApp.category', { ns: 'explore' })}</div>
<div className="flex flex-wrap gap-1.5">
{visibleCategories.map(category => (
<span
key={category}
className="rounded-md border-[0.5px] border-components-panel-border-subtle bg-components-badge-white-to-dark px-2 py-0.5 system-xs-medium text-text-secondary shadow-xs"
>
{category}
</span>
))}
</div>
<div className="system-md-regular text-text-secondary">{category}</div>
</div>
)}
{requirements.length > 0 && (

View File

@ -20,7 +20,7 @@ import Tab, { TypeEnum } from './tab'
type Props = {
appId: string
app?: AppType
categories?: string[]
category?: string
onClose: () => void
onCreate: () => void
}
@ -28,7 +28,7 @@ type Props = {
const TryApp: FC<Props> = ({
appId,
app,
categories,
category,
onClose,
onCreate,
}) => {
@ -81,7 +81,7 @@ const TryApp: FC<Props> = ({
className="w-[360px] shrink-0"
appDetail={appDetail}
appId={appId}
categories={categories}
category={category}
onCreate={onCreate}
/>
</div>

View File

@ -12,7 +12,7 @@ type AppBasicInfo = {
use_icon_as_answer_icon: boolean
}
export type AppCategory = string
export type AppCategory = 'Writing' | 'Translate' | 'HR' | 'Programming' | 'Assistant' | 'Agent' | 'Recommended' | 'Workflow'
export type App = {
app: AppBasicInfo
@ -21,7 +21,7 @@ export type App = {
copyright: string
privacy_policy: string | null
custom_disclaimer: string | null
categories: AppCategory[]
category: AppCategory
position: number
is_listed: boolean
install_count: number