mirror of
https://github.com/langgenius/dify.git
synced 2026-03-25 00:07:56 +08:00
Merge branch 'main' into refactor/web-service-hooks
This commit is contained in:
@ -502,6 +502,8 @@ LOG_FILE_BACKUP_COUNT=5
|
||||
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
|
||||
# Log Timezone
|
||||
LOG_TZ=UTC
|
||||
# Log output format: text or json
|
||||
LOG_OUTPUT_FORMAT=text
|
||||
# Log format
|
||||
LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
|
||||
|
||||
|
||||
@ -2,9 +2,11 @@ import logging
|
||||
import time
|
||||
|
||||
from opentelemetry.trace import get_current_span
|
||||
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
|
||||
|
||||
from configs import dify_config
|
||||
from contexts.wrapper import RecyclableContextVar
|
||||
from core.logging.context import init_request_context
|
||||
from dify_app import DifyApp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -25,28 +27,35 @@ def create_flask_app_with_configs() -> DifyApp:
|
||||
# add before request hook
|
||||
@dify_app.before_request
|
||||
def before_request():
|
||||
# add an unique identifier to each request
|
||||
# Initialize logging context for this request
|
||||
init_request_context()
|
||||
RecyclableContextVar.increment_thread_recycles()
|
||||
|
||||
# add after request hook for injecting X-Trace-Id header from OpenTelemetry span context
|
||||
# add after request hook for injecting trace headers from OpenTelemetry span context
|
||||
# Only adds headers when OTEL is enabled and has valid context
|
||||
@dify_app.after_request
|
||||
def add_trace_id_header(response):
|
||||
def add_trace_headers(response):
|
||||
try:
|
||||
span = get_current_span()
|
||||
ctx = span.get_span_context() if span else None
|
||||
if ctx and ctx.is_valid:
|
||||
trace_id_hex = format(ctx.trace_id, "032x")
|
||||
# Avoid duplicates if some middleware added it
|
||||
if "X-Trace-Id" not in response.headers:
|
||||
response.headers["X-Trace-Id"] = trace_id_hex
|
||||
|
||||
if not ctx or not ctx.is_valid:
|
||||
return response
|
||||
|
||||
# Inject trace headers from OTEL context
|
||||
if ctx.trace_id != INVALID_TRACE_ID and "X-Trace-Id" not in response.headers:
|
||||
response.headers["X-Trace-Id"] = format(ctx.trace_id, "032x")
|
||||
if ctx.span_id != INVALID_SPAN_ID and "X-Span-Id" not in response.headers:
|
||||
response.headers["X-Span-Id"] = format(ctx.span_id, "016x")
|
||||
|
||||
except Exception:
|
||||
# Never break the response due to tracing header injection
|
||||
logger.warning("Failed to add trace ID to response header", exc_info=True)
|
||||
logger.warning("Failed to add trace headers to response", exc_info=True)
|
||||
return response
|
||||
|
||||
# Capture the decorator's return value to avoid pyright reportUnusedFunction
|
||||
_ = before_request
|
||||
_ = add_trace_id_header
|
||||
_ = add_trace_headers
|
||||
|
||||
return dify_app
|
||||
|
||||
|
||||
@ -587,6 +587,11 @@ class LoggingConfig(BaseSettings):
|
||||
default="INFO",
|
||||
)
|
||||
|
||||
LOG_OUTPUT_FORMAT: Literal["text", "json"] = Field(
|
||||
description="Log output format: 'text' for human-readable, 'json' for structured JSON logs.",
|
||||
default="text",
|
||||
)
|
||||
|
||||
LOG_FILE: str | None = Field(
|
||||
description="File path for log output.",
|
||||
default=None,
|
||||
|
||||
@ -30,7 +30,6 @@ class SimpleModelProviderEntity(BaseModel):
|
||||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
supported_model_types: list[ModelType]
|
||||
|
||||
def __init__(self, provider_entity: ProviderEntity):
|
||||
@ -44,7 +43,6 @@ class SimpleModelProviderEntity(BaseModel):
|
||||
label=provider_entity.label,
|
||||
icon_small=provider_entity.icon_small,
|
||||
icon_small_dark=provider_entity.icon_small_dark,
|
||||
icon_large=provider_entity.icon_large,
|
||||
supported_model_types=provider_entity.supported_model_types,
|
||||
)
|
||||
|
||||
@ -94,7 +92,6 @@ class DefaultModelProviderEntity(BaseModel):
|
||||
provider: str
|
||||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
supported_model_types: Sequence[ModelType] = []
|
||||
|
||||
|
||||
|
||||
@ -88,7 +88,41 @@ def _get_user_provided_host_header(headers: dict | None) -> str | None:
|
||||
return None
|
||||
|
||||
|
||||
def _inject_trace_headers(headers: dict | None) -> dict:
    """Ensure outgoing request headers carry a W3C ``traceparent`` header.

    With OTEL enabled, HTTPXClientInstrumentor propagates trace context on
    its own, so nothing is added here. Without OTEL, a traceparent value is
    generated and injected manually. A header that is already present (any
    casing) is never overwritten.
    """
    result = {} if headers is None else headers

    # Respect an existing traceparent header regardless of its casing.
    already_present = any(name.lower() == "traceparent" for name in result)
    if already_present or dify_config.ENABLE_OTEL:
        # Either the caller set it, or OTEL instrumentation will handle it.
        return result

    # Best-effort injection for non-OTEL deployments.
    try:
        from core.helper.trace_id_helper import generate_traceparent_header

        value = generate_traceparent_header()
        if value:
            result["traceparent"] = value
    except Exception:
        # Tracing must never break the actual request.
        logger.debug("Failed to generate traceparent header", exc_info=True)

    return result
|
||||
|
||||
|
||||
def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
|
||||
# Convert requests-style allow_redirects to httpx-style follow_redirects
|
||||
if "allow_redirects" in kwargs:
|
||||
allow_redirects = kwargs.pop("allow_redirects")
|
||||
if "follow_redirects" not in kwargs:
|
||||
@ -106,18 +140,21 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
|
||||
verify_option = kwargs.pop("ssl_verify", dify_config.HTTP_REQUEST_NODE_SSL_VERIFY)
|
||||
client = _get_ssrf_client(verify_option)
|
||||
|
||||
# Inject traceparent header for distributed tracing (when OTEL is not enabled)
|
||||
headers = kwargs.get("headers") or {}
|
||||
headers = _inject_trace_headers(headers)
|
||||
kwargs["headers"] = headers
|
||||
|
||||
# Preserve user-provided Host header
|
||||
# When using a forward proxy, httpx may override the Host header based on the URL.
|
||||
# We extract and preserve any explicitly set Host header to support virtual hosting.
|
||||
headers = kwargs.get("headers", {})
|
||||
user_provided_host = _get_user_provided_host_header(headers)
|
||||
|
||||
retries = 0
|
||||
while retries <= max_retries:
|
||||
try:
|
||||
# Build the request manually to preserve the Host header
|
||||
# httpx may override the Host header when using a proxy, so we use
|
||||
# the request API to explicitly set headers before sending
|
||||
# Preserve the user-provided Host header
|
||||
# httpx may override the Host header when using a proxy
|
||||
headers = {k: v for k, v in headers.items() if k.lower() != "host"}
|
||||
if user_provided_host is not None:
|
||||
headers["host"] = user_provided_host
|
||||
|
||||
@ -103,3 +103,60 @@ def parse_traceparent_header(traceparent: str) -> str | None:
|
||||
if len(parts) == 4 and len(parts[1]) == 32:
|
||||
return parts[1]
|
||||
return None
|
||||
|
||||
|
||||
def get_span_id_from_otel_context() -> str | None:
|
||||
"""
|
||||
Retrieve the current span ID from the active OpenTelemetry trace context.
|
||||
|
||||
Returns:
|
||||
A 16-character hex string representing the span ID, or None if not available.
|
||||
"""
|
||||
try:
|
||||
from opentelemetry.trace import get_current_span
|
||||
from opentelemetry.trace.span import INVALID_SPAN_ID
|
||||
|
||||
span = get_current_span()
|
||||
if not span:
|
||||
return None
|
||||
|
||||
span_context = span.get_span_context()
|
||||
if not span_context or span_context.span_id == INVALID_SPAN_ID:
|
||||
return None
|
||||
|
||||
return f"{span_context.span_id:016x}"
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def generate_traceparent_header() -> str | None:
    """Build a W3C ``traceparent`` header value for the current context.

    Prefers the active OpenTelemetry trace/span pair; otherwise falls back
    to the ContextVar-based trace ID from the logging context (or a freshly
    generated one) combined with a new random span ID.

    Format: ``{version}-{trace_id}-{span_id}-{flags}``
    Example: ``00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01``

    Returns:
        A traceparent string (version 00, sampled flag 01), or None on failure.
    """
    import uuid

    otel_trace = get_trace_id_from_otel_context()
    otel_span = get_span_id_from_otel_context()
    if otel_trace and otel_span:
        # Full OTEL context available - reuse it verbatim.
        return f"00-{otel_trace}-{otel_span}-01"

    # No OTEL context: fall back to the logging-context trace ID.
    from core.logging.context import get_trace_id as get_logging_trace_id

    fallback_trace = get_logging_trace_id() or uuid.uuid4().hex
    fallback_span = uuid.uuid4().hex[:16]  # fresh random 16-hex-char span ID
    return f"00-{fallback_trace}-{fallback_span}-01"
|
||||
|
||||
20
api/core/logging/__init__.py
Normal file
20
api/core/logging/__init__.py
Normal file
@ -0,0 +1,20 @@
|
||||
"""Structured logging components for Dify."""
|
||||
|
||||
from core.logging.context import (
|
||||
clear_request_context,
|
||||
get_request_id,
|
||||
get_trace_id,
|
||||
init_request_context,
|
||||
)
|
||||
from core.logging.filters import IdentityContextFilter, TraceContextFilter
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
__all__ = [
|
||||
"IdentityContextFilter",
|
||||
"StructuredJSONFormatter",
|
||||
"TraceContextFilter",
|
||||
"clear_request_context",
|
||||
"get_request_id",
|
||||
"get_trace_id",
|
||||
"init_request_context",
|
||||
]
|
||||
35
api/core/logging/context.py
Normal file
35
api/core/logging/context.py
Normal file
@ -0,0 +1,35 @@
|
||||
"""Request context for logging - framework agnostic.
|
||||
|
||||
This module provides request-scoped context variables for logging,
|
||||
using Python's contextvars for thread-safe and async-safe storage.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from contextvars import ContextVar
|
||||
|
||||
_request_id: ContextVar[str] = ContextVar("log_request_id", default="")
|
||||
_trace_id: ContextVar[str] = ContextVar("log_trace_id", default="")
|
||||
|
||||
|
||||
def get_request_id() -> str:
|
||||
"""Get current request ID (10 hex chars)."""
|
||||
return _request_id.get()
|
||||
|
||||
|
||||
def get_trace_id() -> str:
|
||||
"""Get fallback trace ID when OTEL is unavailable (32 hex chars)."""
|
||||
return _trace_id.get()
|
||||
|
||||
|
||||
def init_request_context() -> None:
|
||||
"""Initialize request context. Call at start of each request."""
|
||||
req_id = uuid.uuid4().hex[:10]
|
||||
trace_id = uuid.uuid5(uuid.NAMESPACE_DNS, req_id).hex
|
||||
_request_id.set(req_id)
|
||||
_trace_id.set(trace_id)
|
||||
|
||||
|
||||
def clear_request_context() -> None:
|
||||
"""Clear request context. Call at end of request (optional)."""
|
||||
_request_id.set("")
|
||||
_trace_id.set("")
|
||||
94
api/core/logging/filters.py
Normal file
94
api/core/logging/filters.py
Normal file
@ -0,0 +1,94 @@
|
||||
"""Logging filters for structured logging."""
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
|
||||
import flask
|
||||
|
||||
from core.logging.context import get_request_id, get_trace_id
|
||||
|
||||
|
||||
class TraceContextFilter(logging.Filter):
    """Attach ``trace_id``, ``span_id`` and ``req_id`` to every log record.

    Prefers the active OpenTelemetry span context; when none is available,
    falls back to the ContextVar-based trace ID from the logging context.
    """

    def filter(self, record: logging.LogRecord) -> bool:
        otel_trace, otel_span = self._get_otel_context()

        # OTEL wins; otherwise use the request-scoped fallback trace ID.
        record.trace_id = otel_trace if otel_trace else get_trace_id()
        record.span_id = otel_span or ""
        # Kept for backward compatibility with %(req_id)s format strings.
        record.req_id = get_request_id()
        return True

    def _get_otel_context(self) -> tuple[str, str]:
        """Return ``(trace_id, span_id)`` from OTEL, or ``("", "")`` when absent."""
        with contextlib.suppress(Exception):
            from opentelemetry.trace import get_current_span
            from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID

            current = get_current_span()
            ctx = current.get_span_context() if current else None
            if ctx and ctx.is_valid and ctx.trace_id != INVALID_TRACE_ID:
                span_hex = format(ctx.span_id, "016x") if ctx.span_id != INVALID_SPAN_ID else ""
                return format(ctx.trace_id, "032x"), span_hex
        return "", ""
|
||||
|
||||
|
||||
class IdentityContextFilter(logging.Filter):
    """
    Filter that adds user identity context to log records.

    Reads the Flask-Login ``current_user`` proxy (when a request context is
    active) and copies tenant_id, user_id and user_type onto each record so
    formatters can emit them. Outside a request, or for anonymous users, all
    three fields are set to the empty string.
    """

    def filter(self, record: logging.LogRecord) -> bool:
        # Always set all three attributes so format strings never hit an
        # AttributeError, even when no identity is available.
        identity = self._extract_identity()
        record.tenant_id = identity.get("tenant_id", "")
        record.user_id = identity.get("user_id", "")
        record.user_type = identity.get("user_type", "")
        return True

    def _extract_identity(self) -> dict[str, str]:
        """Extract identity from current_user if in request context.

        Returns a possibly-empty dict; any failure is swallowed so that
        logging can never crash the request being logged.
        """
        try:
            if not flask.has_request_context():
                return {}
            # NOTE(review): imports are kept local, presumably to avoid import
            # cycles / touching Flask and the models at module load - confirm.
            from flask_login import current_user

            # Check if user is authenticated using the proxy
            if not current_user.is_authenticated:
                return {}

            # Access the underlying user object
            user = current_user

            from models import Account
            from models.model import EndUser

            identity: dict[str, str] = {}

            if isinstance(user, Account):
                # current_tenant_id may be unset (no workspace selected);
                # tenant_id is then simply omitted.
                if user.current_tenant_id:
                    identity["tenant_id"] = user.current_tenant_id
                identity["user_id"] = user.id
                identity["user_type"] = "account"
            elif isinstance(user, EndUser):
                identity["tenant_id"] = user.tenant_id
                identity["user_id"] = user.id
                identity["user_type"] = user.type or "end_user"

            return identity
        except Exception:
            # Deliberate best-effort: identity enrichment must never raise.
            return {}
|
||||
107
api/core/logging/structured_formatter.py
Normal file
107
api/core/logging/structured_formatter.py
Normal file
@ -0,0 +1,107 @@
|
||||
"""Structured JSON log formatter for Dify."""
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
import orjson
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
|
||||
class StructuredJSONFormatter(logging.Formatter):
    """
    JSON log formatter following the specified schema:
    {
        "ts": "ISO 8601 UTC",
        "severity": "INFO|ERROR|WARN|DEBUG",
        "service": "service name",
        "caller": "file:line",
        "trace_id": "hex 32",
        "span_id": "hex 16",
        "identity": { "tenant_id", "user_id", "user_type" },
        "message": "log message",
        "attributes": { ... },
        "stack_trace": "..."
    }

    Optional fields (trace_id, span_id, identity, attributes, stack_trace)
    are omitted entirely when empty rather than emitted as null/empty values.
    """

    # Maps stdlib levels to schema severities. CRITICAL is deliberately
    # collapsed to "ERROR" because the schema defines no CRITICAL value.
    SEVERITY_MAP: dict[int, str] = {
        logging.DEBUG: "DEBUG",
        logging.INFO: "INFO",
        logging.WARNING: "WARN",
        logging.ERROR: "ERROR",
        logging.CRITICAL: "ERROR",
    }

    def __init__(self, service_name: str | None = None):
        """Create a formatter; the service name defaults to APPLICATION_NAME."""
        super().__init__()
        self._service_name = service_name or dify_config.APPLICATION_NAME

    def format(self, record: logging.LogRecord) -> str:
        """Serialize the record as a single-line JSON string."""
        log_dict = self._build_log_dict(record)
        try:
            return orjson.dumps(log_dict).decode("utf-8")
        except TypeError:
            # Fallback: convert non-serializable objects to string.
            # (orjson raises JSONEncodeError, a TypeError subclass - TODO
            # confirm this holds for the pinned orjson version.)
            import json

            return json.dumps(log_dict, default=str, ensure_ascii=False)

    def _build_log_dict(self, record: logging.LogRecord) -> dict[str, Any]:
        """Assemble the schema dict; optional fields are added only when set."""
        # Core fields
        log_dict: dict[str, Any] = {
            "ts": datetime.now(UTC).isoformat(timespec="milliseconds").replace("+00:00", "Z"),
            "severity": self.SEVERITY_MAP.get(record.levelno, "INFO"),
            "service": self._service_name,
            "caller": f"{record.filename}:{record.lineno}",
            "message": record.getMessage(),
        }

        # Trace context (from TraceContextFilter)
        trace_id = getattr(record, "trace_id", "")
        span_id = getattr(record, "span_id", "")

        if trace_id:
            log_dict["trace_id"] = trace_id
        if span_id:
            log_dict["span_id"] = span_id

        # Identity context (from IdentityContextFilter)
        identity = self._extract_identity(record)
        if identity:
            log_dict["identity"] = identity

        # Dynamic attributes (callers supply them via logging's extra={...})
        attributes = getattr(record, "attributes", None)
        if attributes:
            log_dict["attributes"] = attributes

        # Stack trace for errors with exceptions; exc_info on records below
        # ERROR intentionally produces no stack_trace field.
        if record.exc_info and record.levelno >= logging.ERROR:
            log_dict["stack_trace"] = self._format_exception(record.exc_info)

        return log_dict

    def _extract_identity(self, record: logging.LogRecord) -> dict[str, str] | None:
        """Collect non-empty identity fields set by IdentityContextFilter."""
        tenant_id = getattr(record, "tenant_id", None)
        user_id = getattr(record, "user_id", None)
        user_type = getattr(record, "user_type", None)

        if not any([tenant_id, user_id, user_type]):
            return None

        identity: dict[str, str] = {}
        if tenant_id:
            identity["tenant_id"] = tenant_id
        if user_id:
            identity["user_id"] = user_id
        if user_type:
            identity["user_type"] = user_type
        return identity

    def _format_exception(self, exc_info: tuple[Any, ...]) -> str:
        """Render exc_info as a full traceback string ("" when absent)."""
        if exc_info and exc_info[0] is not None:
            return "".join(traceback.format_exception(*exc_info))
        return ""
|
||||
@ -100,7 +100,6 @@ class SimpleProviderEntity(BaseModel):
|
||||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
supported_model_types: Sequence[ModelType]
|
||||
models: list[AIModelEntity] = []
|
||||
|
||||
@ -123,7 +122,6 @@ class ProviderEntity(BaseModel):
|
||||
label: I18nObject
|
||||
description: I18nObject | None = None
|
||||
icon_small: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
background: str | None = None
|
||||
help: ProviderHelpEntity | None = None
|
||||
@ -157,7 +155,6 @@ class ProviderEntity(BaseModel):
|
||||
provider=self.provider,
|
||||
label=self.label,
|
||||
icon_small=self.icon_small,
|
||||
icon_large=self.icon_large,
|
||||
supported_model_types=self.supported_model_types,
|
||||
models=self.models,
|
||||
)
|
||||
|
||||
@ -285,7 +285,7 @@ class ModelProviderFactory:
|
||||
"""
|
||||
Get provider icon
|
||||
:param provider: provider name
|
||||
:param icon_type: icon type (icon_small or icon_large)
|
||||
:param icon_type: icon type (icon_small or icon_small_dark)
|
||||
:param lang: language (zh_Hans or en_US)
|
||||
:return: provider icon
|
||||
"""
|
||||
@ -309,13 +309,7 @@ class ModelProviderFactory:
|
||||
else:
|
||||
file_name = provider_schema.icon_small_dark.en_US
|
||||
else:
|
||||
if not provider_schema.icon_large:
|
||||
raise ValueError(f"Provider {provider} does not have large icon.")
|
||||
|
||||
if lang.lower() == "zh_hans":
|
||||
file_name = provider_schema.icon_large.zh_Hans
|
||||
else:
|
||||
file_name = provider_schema.icon_large.en_US
|
||||
raise ValueError(f"Unsupported icon type: {icon_type}.")
|
||||
|
||||
if not file_name:
|
||||
raise ValueError(f"Provider {provider} does not have icon.")
|
||||
|
||||
@ -103,6 +103,9 @@ class BasePluginClient:
|
||||
prepared_headers["X-Api-Key"] = dify_config.PLUGIN_DAEMON_KEY
|
||||
prepared_headers.setdefault("Accept-Encoding", "gzip, deflate, br")
|
||||
|
||||
# Inject traceparent header for distributed tracing
|
||||
self._inject_trace_headers(prepared_headers)
|
||||
|
||||
prepared_data: bytes | dict[str, Any] | str | None = (
|
||||
data if isinstance(data, (bytes, str, dict)) or data is None else None
|
||||
)
|
||||
@ -114,6 +117,31 @@ class BasePluginClient:
|
||||
|
||||
return str(url), prepared_headers, prepared_data, params, files
|
||||
|
||||
def _inject_trace_headers(self, headers: dict[str, str]) -> None:
    """Add a W3C ``traceparent`` header in place for plugin-daemon calls.

    Acts as a fallback so trace context still reaches the plugin daemon when
    HTTPXClientInstrumentor does not cover module-level httpx functions.
    Does nothing when OTEL is disabled, or when a traceparent header (any
    casing) is already set by the caller.
    """
    if not dify_config.ENABLE_OTEL:
        return

    import contextlib

    if any(name.lower() == "traceparent" for name in headers):
        # Respect a traceparent the caller already provided.
        return

    # Best-effort injection: tracing must never break the request itself.
    with contextlib.suppress(Exception):
        from core.helper.trace_id_helper import generate_traceparent_header

        value = generate_traceparent_header()
        if value:
            headers["traceparent"] = value
|
||||
|
||||
def _stream_request(
|
||||
self,
|
||||
method: str,
|
||||
|
||||
@ -331,7 +331,6 @@ class ProviderManager:
|
||||
provider=provider_schema.provider,
|
||||
label=provider_schema.label,
|
||||
icon_small=provider_schema.icon_small,
|
||||
icon_large=provider_schema.icon_large,
|
||||
supported_model_types=provider_schema.supported_model_types,
|
||||
),
|
||||
)
|
||||
|
||||
@ -46,7 +46,11 @@ def _get_celery_ssl_options() -> dict[str, Any] | None:
|
||||
def init_app(app: DifyApp) -> Celery:
|
||||
class FlaskTask(Task):
|
||||
def __call__(self, *args: object, **kwargs: object) -> object:
|
||||
from core.logging.context import init_request_context
|
||||
|
||||
with app.app_context():
|
||||
# Initialize logging context for this task (similar to before_request in Flask)
|
||||
init_request_context()
|
||||
return self.run(*args, **kwargs)
|
||||
|
||||
broker_transport_options = {}
|
||||
|
||||
@ -1,18 +1,19 @@
|
||||
"""Logging extension for Dify Flask application."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import uuid
|
||||
from logging.handlers import RotatingFileHandler
|
||||
|
||||
import flask
|
||||
|
||||
from configs import dify_config
|
||||
from core.helper.trace_id_helper import get_trace_id_from_otel_context
|
||||
from dify_app import DifyApp
|
||||
|
||||
|
||||
def init_app(app: DifyApp):
|
||||
"""Initialize logging with support for text or JSON format."""
|
||||
log_handlers: list[logging.Handler] = []
|
||||
|
||||
# File handler
|
||||
log_file = dify_config.LOG_FILE
|
||||
if log_file:
|
||||
log_dir = os.path.dirname(log_file)
|
||||
@ -25,27 +26,53 @@ def init_app(app: DifyApp):
|
||||
)
|
||||
)
|
||||
|
||||
# Always add StreamHandler to log to console
|
||||
# Console handler
|
||||
sh = logging.StreamHandler(sys.stdout)
|
||||
log_handlers.append(sh)
|
||||
|
||||
# Apply RequestIdFilter to all handlers
|
||||
for handler in log_handlers:
|
||||
handler.addFilter(RequestIdFilter())
|
||||
# Apply filters to all handlers
|
||||
from core.logging.filters import IdentityContextFilter, TraceContextFilter
|
||||
|
||||
for handler in log_handlers:
|
||||
handler.addFilter(TraceContextFilter())
|
||||
handler.addFilter(IdentityContextFilter())
|
||||
|
||||
# Configure formatter based on format type
|
||||
formatter = _create_formatter()
|
||||
for handler in log_handlers:
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
# Configure root logger
|
||||
logging.basicConfig(
|
||||
level=dify_config.LOG_LEVEL,
|
||||
format=dify_config.LOG_FORMAT,
|
||||
datefmt=dify_config.LOG_DATEFORMAT,
|
||||
handlers=log_handlers,
|
||||
force=True,
|
||||
)
|
||||
|
||||
# Apply RequestIdFormatter to all handlers
|
||||
apply_request_id_formatter()
|
||||
|
||||
# Disable propagation for noisy loggers to avoid duplicate logs
|
||||
logging.getLogger("sqlalchemy.engine").propagate = False
|
||||
|
||||
# Apply timezone if specified (only for text format)
|
||||
if dify_config.LOG_OUTPUT_FORMAT == "text":
|
||||
_apply_timezone(log_handlers)
|
||||
|
||||
|
||||
def _create_formatter() -> logging.Formatter:
    """Build the log formatter selected by ``LOG_OUTPUT_FORMAT`` ('json' or text)."""
    if dify_config.LOG_OUTPUT_FORMAT != "json":
        # Plain-text output keeps the legacy, backward-compatible format string.
        return _TextFormatter(fmt=dify_config.LOG_FORMAT, datefmt=dify_config.LOG_DATEFORMAT)

    from core.logging.structured_formatter import StructuredJSONFormatter

    return StructuredJSONFormatter()
|
||||
|
||||
|
||||
def _apply_timezone(handlers: list[logging.Handler]):
|
||||
"""Apply timezone conversion to text formatters."""
|
||||
log_tz = dify_config.LOG_TZ
|
||||
if log_tz:
|
||||
from datetime import datetime
|
||||
@ -57,34 +84,51 @@ def init_app(app: DifyApp):
|
||||
def time_converter(seconds):
|
||||
return datetime.fromtimestamp(seconds, tz=timezone).timetuple()
|
||||
|
||||
for handler in logging.root.handlers:
|
||||
for handler in handlers:
|
||||
if handler.formatter:
|
||||
handler.formatter.converter = time_converter
|
||||
handler.formatter.converter = time_converter # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def get_request_id():
|
||||
if getattr(flask.g, "request_id", None):
|
||||
return flask.g.request_id
|
||||
class _TextFormatter(logging.Formatter):
|
||||
"""Text formatter that ensures trace_id and req_id are always present."""
|
||||
|
||||
new_uuid = uuid.uuid4().hex[:10]
|
||||
flask.g.request_id = new_uuid
|
||||
|
||||
return new_uuid
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
if not hasattr(record, "req_id"):
|
||||
record.req_id = ""
|
||||
if not hasattr(record, "trace_id"):
|
||||
record.trace_id = ""
|
||||
if not hasattr(record, "span_id"):
|
||||
record.span_id = ""
|
||||
return super().format(record)
|
||||
|
||||
|
||||
def get_request_id() -> str:
    """Return the request ID of the current request context.

    Deprecated: call ``core.logging.context.get_request_id()`` directly.
    """
    from core.logging.context import get_request_id as _context_get_request_id

    request_id = _context_get_request_id()
    return request_id
|
||||
|
||||
|
||||
# Backward compatibility aliases
|
||||
class RequestIdFilter(logging.Filter):
|
||||
# This is a logging filter that makes the request ID available for use in
|
||||
# the logging format. Note that we're checking if we're in a request
|
||||
# context, as we may want to log things before Flask is fully loaded.
|
||||
def filter(self, record):
|
||||
trace_id = get_trace_id_from_otel_context() or ""
|
||||
record.req_id = get_request_id() if flask.has_request_context() else ""
|
||||
record.trace_id = trace_id
|
||||
"""Deprecated: Use TraceContextFilter from core.logging.filters instead."""
|
||||
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
from core.logging.context import get_request_id as _get_request_id
|
||||
from core.logging.context import get_trace_id as _get_trace_id
|
||||
|
||||
record.req_id = _get_request_id()
|
||||
record.trace_id = _get_trace_id()
|
||||
return True
|
||||
|
||||
|
||||
class RequestIdFormatter(logging.Formatter):
|
||||
def format(self, record):
|
||||
"""Deprecated: Use _TextFormatter instead."""
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
if not hasattr(record, "req_id"):
|
||||
record.req_id = ""
|
||||
if not hasattr(record, "trace_id"):
|
||||
@ -93,6 +137,7 @@ class RequestIdFormatter(logging.Formatter):
|
||||
|
||||
|
||||
def apply_request_id_formatter():
|
||||
"""Deprecated: Formatter is now applied in init_app."""
|
||||
for handler in logging.root.handlers:
|
||||
if handler.formatter:
|
||||
handler.formatter = RequestIdFormatter(dify_config.LOG_FORMAT, dify_config.LOG_DATEFORMAT)
|
||||
|
||||
@ -19,26 +19,43 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ExceptionLoggingHandler(logging.Handler):
|
||||
"""
|
||||
Handler that records exceptions to the current OpenTelemetry span.
|
||||
|
||||
Unlike creating a new span, this records exceptions on the existing span
|
||||
to maintain trace context consistency throughout the request lifecycle.
|
||||
"""
|
||||
|
||||
def emit(self, record: logging.LogRecord):
|
||||
with contextlib.suppress(Exception):
|
||||
if record.exc_info:
|
||||
tracer = get_tracer_provider().get_tracer("dify.exception.logging")
|
||||
with tracer.start_as_current_span(
|
||||
"log.exception",
|
||||
attributes={
|
||||
"log.level": record.levelname,
|
||||
"log.message": record.getMessage(),
|
||||
"log.logger": record.name,
|
||||
"log.file.path": record.pathname,
|
||||
"log.file.line": record.lineno,
|
||||
},
|
||||
) as span:
|
||||
span.set_status(StatusCode.ERROR)
|
||||
if record.exc_info[1]:
|
||||
span.record_exception(record.exc_info[1])
|
||||
span.set_attribute("exception.message", str(record.exc_info[1]))
|
||||
if record.exc_info[0]:
|
||||
span.set_attribute("exception.type", record.exc_info[0].__name__)
|
||||
if not record.exc_info:
|
||||
return
|
||||
|
||||
from opentelemetry.trace import get_current_span
|
||||
|
||||
span = get_current_span()
|
||||
if not span or not span.is_recording():
|
||||
return
|
||||
|
||||
# Record exception on the current span instead of creating a new one
|
||||
span.set_status(StatusCode.ERROR, record.getMessage())
|
||||
|
||||
# Add log context as span events/attributes
|
||||
span.add_event(
|
||||
"log.exception",
|
||||
attributes={
|
||||
"log.level": record.levelname,
|
||||
"log.message": record.getMessage(),
|
||||
"log.logger": record.name,
|
||||
"log.file.path": record.pathname,
|
||||
"log.file.line": record.lineno,
|
||||
},
|
||||
)
|
||||
|
||||
if record.exc_info[1]:
|
||||
span.record_exception(record.exc_info[1])
|
||||
if record.exc_info[0]:
|
||||
span.set_attribute("exception.type", record.exc_info[0].__name__)
|
||||
|
||||
|
||||
def instrument_exception_logging() -> None:
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
@ -109,11 +108,8 @@ def register_external_error_handlers(api: Api):
|
||||
data.setdefault("code", "unknown")
|
||||
data.setdefault("status", status_code)
|
||||
|
||||
# Log stack
|
||||
exc_info: Any = sys.exc_info()
|
||||
if exc_info[1] is None:
|
||||
exc_info = (None, None, None)
|
||||
current_app.log_exception(exc_info)
|
||||
# Note: Exception logging is handled by Flask/Flask-RESTX framework automatically
|
||||
# Explicit log_exception call removed to avoid duplicate log entries
|
||||
|
||||
return data, status_code
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '00bacef91f18'
|
||||
down_revision = '8ec536f3c800'
|
||||
@ -23,31 +20,17 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
|
||||
batch_op.drop_column('description_str')
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
|
||||
batch_op.drop_column('description_str')
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
|
||||
batch_op.drop_column('description_str')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
|
||||
batch_op.drop_column('description')
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
|
||||
batch_op.drop_column('description')
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
|
||||
batch_op.drop_column('description')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -7,14 +7,10 @@ Create Date: 2024-01-10 04:40:57.257824
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '114eed84c228'
|
||||
down_revision = 'c71211c8f604'
|
||||
@ -32,13 +28,7 @@ def upgrade():
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))
|
||||
else:
|
||||
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
|
||||
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '161cadc1af8d'
|
||||
down_revision = '7e6a8693e07a'
|
||||
@ -23,16 +20,9 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
|
||||
# Step 1: Add column without NOT NULL constraint
|
||||
op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))
|
||||
else:
|
||||
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
|
||||
# Step 1: Add column without NOT NULL constraint
|
||||
op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
|
||||
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
|
||||
# Step 1: Add column without NOT NULL constraint
|
||||
op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -9,11 +9,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6af6a521a53e'
|
||||
down_revision = 'd57ba9ebb251'
|
||||
@ -23,58 +18,30 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
|
||||
batch_op.alter_column('segment_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('data_source_type',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('document_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -8,7 +8,6 @@ Create Date: 2024-11-01 04:34:23.816198
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd3f6769a94a3'
|
||||
|
||||
@ -28,85 +28,45 @@ def upgrade():
|
||||
op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
|
||||
op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.TEXT(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.TEXT(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.TEXT(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.TEXT(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.TEXT(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=sa.TEXT(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
|
||||
batch_op.alter_column('custom_disclaimer',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -49,57 +49,33 @@ def upgrade():
|
||||
op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL")
|
||||
op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL")
|
||||
op.execute("UPDATE workflows SET features = '' WHERE features IS NULL")
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.alter_column('graph',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('features',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('updated_at',
|
||||
existing_type=postgresql.TIMESTAMP(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.alter_column('graph',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('features',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('updated_at',
|
||||
existing_type=sa.TIMESTAMP(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.alter_column('graph',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('features',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('updated_at',
|
||||
existing_type=sa.TIMESTAMP(),
|
||||
nullable=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.alter_column('updated_at',
|
||||
existing_type=postgresql.TIMESTAMP(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('features',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('graph',
|
||||
existing_type=sa.TEXT(),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.alter_column('updated_at',
|
||||
existing_type=sa.TIMESTAMP(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('features',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('graph',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.alter_column('updated_at',
|
||||
existing_type=sa.TIMESTAMP(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('features',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('graph',
|
||||
existing_type=models.types.LongText(),
|
||||
nullable=True)
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
|
||||
@ -86,57 +86,30 @@ def upgrade():
|
||||
|
||||
def migrate_existing_provider_models_data():
|
||||
"""migrate provider_models table data to provider_model_credentials"""
|
||||
conn = op.get_bind()
|
||||
# Define table structure for data manipulation
|
||||
if _is_pg(conn):
|
||||
provider_models_table = table('provider_models',
|
||||
column('id', models.types.StringUUID()),
|
||||
column('tenant_id', models.types.StringUUID()),
|
||||
column('provider_name', sa.String()),
|
||||
column('model_name', sa.String()),
|
||||
column('model_type', sa.String()),
|
||||
column('encrypted_config', sa.Text()),
|
||||
column('created_at', sa.DateTime()),
|
||||
column('updated_at', sa.DateTime()),
|
||||
column('credential_id', models.types.StringUUID()),
|
||||
)
|
||||
else:
|
||||
provider_models_table = table('provider_models',
|
||||
column('id', models.types.StringUUID()),
|
||||
column('tenant_id', models.types.StringUUID()),
|
||||
column('provider_name', sa.String()),
|
||||
column('model_name', sa.String()),
|
||||
column('model_type', sa.String()),
|
||||
column('encrypted_config', models.types.LongText()),
|
||||
column('created_at', sa.DateTime()),
|
||||
column('updated_at', sa.DateTime()),
|
||||
column('credential_id', models.types.StringUUID()),
|
||||
)
|
||||
# Define table structure for data manipulatio
|
||||
provider_models_table = table('provider_models',
|
||||
column('id', models.types.StringUUID()),
|
||||
column('tenant_id', models.types.StringUUID()),
|
||||
column('provider_name', sa.String()),
|
||||
column('model_name', sa.String()),
|
||||
column('model_type', sa.String()),
|
||||
column('encrypted_config', models.types.LongText()),
|
||||
column('created_at', sa.DateTime()),
|
||||
column('updated_at', sa.DateTime()),
|
||||
column('credential_id', models.types.StringUUID()),
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
provider_model_credentials_table = table('provider_model_credentials',
|
||||
column('id', models.types.StringUUID()),
|
||||
column('tenant_id', models.types.StringUUID()),
|
||||
column('provider_name', sa.String()),
|
||||
column('model_name', sa.String()),
|
||||
column('model_type', sa.String()),
|
||||
column('credential_name', sa.String()),
|
||||
column('encrypted_config', sa.Text()),
|
||||
column('created_at', sa.DateTime()),
|
||||
column('updated_at', sa.DateTime())
|
||||
)
|
||||
else:
|
||||
provider_model_credentials_table = table('provider_model_credentials',
|
||||
column('id', models.types.StringUUID()),
|
||||
column('tenant_id', models.types.StringUUID()),
|
||||
column('provider_name', sa.String()),
|
||||
column('model_name', sa.String()),
|
||||
column('model_type', sa.String()),
|
||||
column('credential_name', sa.String()),
|
||||
column('encrypted_config', models.types.LongText()),
|
||||
column('created_at', sa.DateTime()),
|
||||
column('updated_at', sa.DateTime())
|
||||
)
|
||||
provider_model_credentials_table = table('provider_model_credentials',
|
||||
column('id', models.types.StringUUID()),
|
||||
column('tenant_id', models.types.StringUUID()),
|
||||
column('provider_name', sa.String()),
|
||||
column('model_name', sa.String()),
|
||||
column('model_type', sa.String()),
|
||||
column('credential_name', sa.String()),
|
||||
column('encrypted_config', models.types.LongText()),
|
||||
column('created_at', sa.DateTime()),
|
||||
column('updated_at', sa.DateTime())
|
||||
)
|
||||
|
||||
|
||||
# Get database connection
|
||||
@ -183,14 +156,8 @@ def migrate_existing_provider_models_data():
|
||||
|
||||
def downgrade():
|
||||
# Re-add encrypted_config column to provider_models table
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('provider_models', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('provider_models', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('provider_models', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
|
||||
|
||||
if not context.is_offline_mode():
|
||||
# Migrate data back from provider_model_credentials to provider_models
|
||||
|
||||
@ -8,7 +8,6 @@ Create Date: 2025-08-20 17:47:17.015695
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
from libs.uuid_utils import uuidv7
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
|
||||
@ -9,8 +9,6 @@ from alembic import op
|
||||
import models as models
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
@ -23,12 +21,7 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# Add encrypted_headers column to tool_mcp_providers table
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
|
||||
else:
|
||||
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))
|
||||
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
|
||||
@ -44,6 +44,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
|
||||
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('datasource_oauth_tenant_params',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
@ -70,6 +71,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
|
||||
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('datasource_providers',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
@ -104,6 +106,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
|
||||
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
|
||||
batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)
|
||||
|
||||
@ -133,6 +136,7 @@ def upgrade():
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
|
||||
batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)
|
||||
|
||||
@ -174,6 +178,7 @@ def upgrade():
|
||||
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('pipeline_customized_templates',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
@ -193,7 +198,6 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
|
||||
)
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
op.create_table('pipeline_customized_templates',
|
||||
sa.Column('id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
@ -211,6 +215,7 @@ def upgrade():
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
|
||||
batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)
|
||||
|
||||
@ -236,6 +241,7 @@ def upgrade():
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('pipelines',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
@ -266,6 +272,7 @@ def upgrade():
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('workflow_draft_variable_files',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
@ -292,6 +299,7 @@ def upgrade():
|
||||
sa.Column('value_type', sa.String(20), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('workflow_node_execution_offload',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
|
||||
@ -316,6 +324,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
|
||||
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('datasets', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
|
||||
@ -342,6 +351,7 @@ def upgrade():
|
||||
comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',)
|
||||
)
|
||||
batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False)
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('workflows', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
|
||||
|
||||
@ -9,8 +9,6 @@ from alembic import op
|
||||
import models as models
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
@ -33,15 +31,9 @@ def upgrade():
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
|
||||
batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
|
||||
batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))
|
||||
|
||||
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
|
||||
batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -9,7 +9,6 @@ Create Date: 2025-10-22 16:11:31.805407
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
from libs.uuid_utils import uuidv7
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
@ -105,6 +105,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
|
||||
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
|
||||
)
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('trigger_subscriptions',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
|
||||
@ -143,6 +144,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
|
||||
sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op:
|
||||
batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True)
|
||||
batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False)
|
||||
@ -176,6 +178,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
|
||||
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op:
|
||||
batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'event_name'], unique=False)
|
||||
|
||||
@ -207,6 +210,7 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
|
||||
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op:
|
||||
batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False)
|
||||
|
||||
@ -264,6 +268,7 @@ def upgrade():
|
||||
sa.Column('finished_at', sa.DateTime(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op:
|
||||
batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False)
|
||||
batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False)
|
||||
@ -299,6 +304,7 @@ def upgrade():
|
||||
sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
|
||||
sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
|
||||
)
|
||||
|
||||
with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op:
|
||||
batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False)
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '23db93619b9d'
|
||||
down_revision = '8ae9bc661daa'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -62,14 +62,8 @@ def upgrade():
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))
|
||||
|
||||
with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
|
||||
batch_op.drop_index('app_annotation_settings_app_idx')
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models as models
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2a3aebbbf4bb'
|
||||
down_revision = 'c031d46af369'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('apps', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('apps', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('apps', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -7,14 +7,10 @@ Create Date: 2023-09-22 15:41:01.243183
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2e9819ca5b28'
|
||||
down_revision = 'ab23c11305d4'
|
||||
@ -24,35 +20,19 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
|
||||
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
|
||||
batch_op.drop_column('dataset_id')
|
||||
else:
|
||||
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
|
||||
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
|
||||
batch_op.drop_column('dataset_id')
|
||||
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
|
||||
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
|
||||
batch_op.drop_column('dataset_id')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
|
||||
batch_op.drop_index('api_token_tenant_idx')
|
||||
batch_op.drop_column('tenant_id')
|
||||
else:
|
||||
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
|
||||
batch_op.drop_index('api_token_tenant_idx')
|
||||
batch_op.drop_column('tenant_id')
|
||||
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
|
||||
batch_op.drop_index('api_token_tenant_idx')
|
||||
batch_op.drop_column('tenant_id')
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -7,14 +7,10 @@ Create Date: 2024-03-07 08:30:29.133614
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '42e85ed5564d'
|
||||
down_revision = 'f9107f83abab'
|
||||
@ -24,59 +20,31 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('conversations', schema=None) as batch_op:
|
||||
batch_op.alter_column('app_model_config_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('model_provider',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
batch_op.alter_column('model_id',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('conversations', schema=None) as batch_op:
|
||||
batch_op.alter_column('app_model_config_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('model_provider',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
batch_op.alter_column('model_id',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('conversations', schema=None) as batch_op:
|
||||
batch_op.alter_column('app_model_config_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('model_provider',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
batch_op.alter_column('model_id',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('conversations', schema=None) as batch_op:
|
||||
batch_op.alter_column('model_id',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=False)
|
||||
batch_op.alter_column('model_provider',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=False)
|
||||
batch_op.alter_column('app_model_config_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('conversations', schema=None) as batch_op:
|
||||
batch_op.alter_column('model_id',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=False)
|
||||
batch_op.alter_column('model_provider',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=False)
|
||||
batch_op.alter_column('app_model_config_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('conversations', schema=None) as batch_op:
|
||||
batch_op.alter_column('model_id',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=False)
|
||||
batch_op.alter_column('model_provider',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
nullable=False)
|
||||
batch_op.alter_column('app_model_config_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -6,14 +6,10 @@ Create Date: 2024-01-12 03:42:27.362415
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '4829e54d2fee'
|
||||
down_revision = '114eed84c228'
|
||||
@ -23,39 +19,21 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Keep original syntax
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_chain_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=True)
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_chain_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_chain_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Keep original syntax
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_chain_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=False)
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_chain_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
|
||||
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_chain_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -6,14 +6,10 @@ Create Date: 2024-03-14 04:54:56.679506
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '563cf8bf777b'
|
||||
down_revision = 'b5429b71023c'
|
||||
@ -23,35 +19,19 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_files', schema=None) as batch_op:
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('tool_files', schema=None) as batch_op:
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('tool_files', schema=None) as batch_op:
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_files', schema=None) as batch_op:
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=False)
|
||||
else:
|
||||
with op.batch_alter_table('tool_files', schema=None) as batch_op:
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
with op.batch_alter_table('tool_files', schema=None) as batch_op:
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -48,12 +48,9 @@ def upgrade():
|
||||
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
|
||||
batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False)
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('datasets', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('datasets', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))
|
||||
|
||||
with op.batch_alter_table('datasets', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '714aafe25d39'
|
||||
down_revision = 'f2a6fc85e260'
|
||||
@ -23,16 +20,9 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
|
||||
batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
|
||||
else:
|
||||
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
|
||||
batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))
|
||||
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
|
||||
batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '77e83833755c'
|
||||
down_revision = '6dcb43972bdc'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -27,7 +27,6 @@ def upgrade():
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Keep original syntax
|
||||
op.create_table('tool_providers',
|
||||
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
|
||||
@ -40,7 +39,6 @@ def upgrade():
|
||||
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
|
||||
)
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
op.create_table('tool_providers',
|
||||
sa.Column('id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
@ -52,12 +50,9 @@ def upgrade():
|
||||
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
|
||||
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
|
||||
)
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))
|
||||
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '88072f0caa04'
|
||||
down_revision = '246ba09cbbdb'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tenants', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('tenants', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('tenants', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '89c7899ca936'
|
||||
down_revision = '187385f442fc'
|
||||
@ -23,39 +20,21 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('description',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=sa.Text(),
|
||||
existing_nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('description',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
existing_nullable=True)
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('description',
|
||||
existing_type=sa.VARCHAR(length=255),
|
||||
type_=models.types.LongText(),
|
||||
existing_nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('description',
|
||||
existing_type=sa.Text(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
existing_nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('description',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
existing_nullable=True)
|
||||
with op.batch_alter_table('sites', schema=None) as batch_op:
|
||||
batch_op.alter_column('description',
|
||||
existing_type=models.types.LongText(),
|
||||
type_=sa.VARCHAR(length=255),
|
||||
existing_nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '8ec536f3c800'
|
||||
down_revision = 'ad472b61a054'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False))
|
||||
else:
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False))
|
||||
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -57,12 +57,9 @@ def upgrade():
|
||||
batch_op.create_index('message_file_created_by_idx', ['created_by'], unique=False)
|
||||
batch_op.create_index('message_file_message_idx', ['message_id'], unique=False)
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True))
|
||||
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True))
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('upload_files', schema=None) as batch_op:
|
||||
|
||||
@ -24,7 +24,6 @@ def upgrade():
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Keep original syntax
|
||||
with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False))
|
||||
batch_op.drop_index('pinned_conversation_conversation_idx')
|
||||
@ -35,7 +34,6 @@ def upgrade():
|
||||
batch_op.drop_index('saved_message_message_idx')
|
||||
batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False)
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False))
|
||||
batch_op.drop_index('pinned_conversation_conversation_idx')
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'a5b56fb053ef'
|
||||
down_revision = 'd3d503a3471c'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'a9836e3baeee'
|
||||
down_revision = '968fff4c0ab9'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b24be59fbb04'
|
||||
down_revision = 'de95f5c77138'
|
||||
@ -23,14 +20,8 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -11,9 +11,6 @@ from alembic import op
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b3a09c049e8e'
|
||||
down_revision = '2e9819ca5b28'
|
||||
@ -23,20 +20,11 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
|
||||
batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True))
|
||||
batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True))
|
||||
batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
|
||||
batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True))
|
||||
batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True))
|
||||
batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True))
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
|
||||
batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True))
|
||||
batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True))
|
||||
batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True))
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -7,7 +7,6 @@ Create Date: 2024-06-17 10:01:00.255189
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
import models.types
|
||||
|
||||
|
||||
@ -54,12 +54,9 @@ def upgrade():
|
||||
batch_op.create_index('app_annotation_hit_histories_annotation_idx', ['annotation_id'], unique=False)
|
||||
batch_op.create_index('app_annotation_hit_histories_app_idx', ['app_id'], unique=False)
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_reply', sa.Text(), nullable=True))
|
||||
else:
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True))
|
||||
|
||||
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True))
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
|
||||
@ -68,54 +65,31 @@ def upgrade():
|
||||
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('type', sa.String(length=40), server_default=sa.text("'dataset'"), nullable=False))
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('message_annotations', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('question', sa.Text(), nullable=True))
|
||||
batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False))
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('message_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=True)
|
||||
else:
|
||||
with op.batch_alter_table('message_annotations', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True))
|
||||
batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False))
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('message_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
with op.batch_alter_table('message_annotations', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True))
|
||||
batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False))
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
batch_op.alter_column('message_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=True)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('message_annotations', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=postgresql.UUID(),
|
||||
nullable=False)
|
||||
batch_op.drop_column('hit_count')
|
||||
batch_op.drop_column('question')
|
||||
else:
|
||||
with op.batch_alter_table('message_annotations', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.drop_column('hit_count')
|
||||
batch_op.drop_column('question')
|
||||
with op.batch_alter_table('message_annotations', schema=None) as batch_op:
|
||||
batch_op.alter_column('message_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.alter_column('conversation_id',
|
||||
existing_type=models.types.StringUUID(),
|
||||
nullable=False)
|
||||
batch_op.drop_column('hit_count')
|
||||
batch_op.drop_column('question')
|
||||
|
||||
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
|
||||
batch_op.drop_column('type')
|
||||
|
||||
@ -12,9 +12,6 @@ from sqlalchemy.dialects import postgresql
|
||||
import models.types
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f2a6fc85e260'
|
||||
down_revision = '46976cc39132'
|
||||
@ -24,16 +21,9 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('message_id', postgresql.UUID(), nullable=False))
|
||||
batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False)
|
||||
else:
|
||||
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False))
|
||||
batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False)
|
||||
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False))
|
||||
batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
@ -70,7 +70,6 @@ class ProviderResponse(BaseModel):
|
||||
description: I18nObject | None = None
|
||||
icon_small: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
background: str | None = None
|
||||
help: ProviderHelpEntity | None = None
|
||||
supported_model_types: Sequence[ModelType]
|
||||
@ -98,11 +97,6 @@ class ProviderResponse(BaseModel):
|
||||
en_US=f"{url_prefix}/icon_small_dark/en_US",
|
||||
zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans",
|
||||
)
|
||||
|
||||
if self.icon_large is not None:
|
||||
self.icon_large = I18nObject(
|
||||
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
|
||||
)
|
||||
return self
|
||||
|
||||
|
||||
@ -116,7 +110,6 @@ class ProviderWithModelsResponse(BaseModel):
|
||||
label: I18nObject
|
||||
icon_small: I18nObject | None = None
|
||||
icon_small_dark: I18nObject | None = None
|
||||
icon_large: I18nObject | None = None
|
||||
status: CustomConfigurationStatus
|
||||
models: list[ProviderModelWithStatusEntity]
|
||||
|
||||
@ -134,11 +127,6 @@ class ProviderWithModelsResponse(BaseModel):
|
||||
self.icon_small_dark = I18nObject(
|
||||
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
|
||||
)
|
||||
|
||||
if self.icon_large is not None:
|
||||
self.icon_large = I18nObject(
|
||||
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
|
||||
)
|
||||
return self
|
||||
|
||||
|
||||
@ -163,11 +151,6 @@ class SimpleProviderEntityResponse(SimpleProviderEntity):
|
||||
self.icon_small_dark = I18nObject(
|
||||
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
|
||||
)
|
||||
|
||||
if self.icon_large is not None:
|
||||
self.icon_large = I18nObject(
|
||||
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
|
||||
)
|
||||
return self
|
||||
|
||||
|
||||
|
||||
@ -99,7 +99,6 @@ class ModelProviderService:
|
||||
description=provider_configuration.provider.description,
|
||||
icon_small=provider_configuration.provider.icon_small,
|
||||
icon_small_dark=provider_configuration.provider.icon_small_dark,
|
||||
icon_large=provider_configuration.provider.icon_large,
|
||||
background=provider_configuration.provider.background,
|
||||
help=provider_configuration.provider.help,
|
||||
supported_model_types=provider_configuration.provider.supported_model_types,
|
||||
@ -423,7 +422,6 @@ class ModelProviderService:
|
||||
label=first_model.provider.label,
|
||||
icon_small=first_model.provider.icon_small,
|
||||
icon_small_dark=first_model.provider.icon_small_dark,
|
||||
icon_large=first_model.provider.icon_large,
|
||||
status=CustomConfigurationStatus.ACTIVE,
|
||||
models=[
|
||||
ProviderModelWithStatusEntity(
|
||||
@ -488,7 +486,6 @@ class ModelProviderService:
|
||||
provider=result.provider.provider,
|
||||
label=result.provider.label,
|
||||
icon_small=result.provider.icon_small,
|
||||
icon_large=result.provider.icon_large,
|
||||
supported_model_types=result.provider.supported_model_types,
|
||||
),
|
||||
)
|
||||
@ -522,7 +519,7 @@ class ModelProviderService:
|
||||
|
||||
:param tenant_id: workspace id
|
||||
:param provider: provider name
|
||||
:param icon_type: icon type (icon_small or icon_large)
|
||||
:param icon_type: icon type (icon_small or icon_small_dark)
|
||||
:param lang: language (zh_Hans or en_US)
|
||||
:return:
|
||||
"""
|
||||
|
||||
@ -48,10 +48,6 @@ class MockModelClass(PluginModelClient):
|
||||
en_US="https://example.com/icon_small.png",
|
||||
zh_Hans="https://example.com/icon_small.png",
|
||||
),
|
||||
icon_large=I18nObject(
|
||||
en_US="https://example.com/icon_large.png",
|
||||
zh_Hans="https://example.com/icon_large.png",
|
||||
),
|
||||
supported_model_types=[ModelType.LLM],
|
||||
configurate_methods=[ConfigurateMethod.PREDEFINED_MODEL],
|
||||
models=[
|
||||
|
||||
@ -228,7 +228,6 @@ class TestModelProviderService:
|
||||
mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
|
||||
mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
|
||||
mock_provider_entity.icon_small_dark = None
|
||||
mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
|
||||
mock_provider_entity.background = "#FF6B6B"
|
||||
mock_provider_entity.help = None
|
||||
mock_provider_entity.supported_model_types = [ModelType.LLM, ModelType.TEXT_EMBEDDING]
|
||||
@ -302,7 +301,6 @@ class TestModelProviderService:
|
||||
mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
|
||||
mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
|
||||
mock_provider_entity_llm.icon_small_dark = None
|
||||
mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
|
||||
mock_provider_entity_llm.background = "#FF6B6B"
|
||||
mock_provider_entity_llm.help = None
|
||||
mock_provider_entity_llm.supported_model_types = [ModelType.LLM]
|
||||
@ -316,7 +314,6 @@ class TestModelProviderService:
|
||||
mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"}
|
||||
mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
|
||||
mock_provider_entity_embedding.icon_small_dark = None
|
||||
mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
|
||||
mock_provider_entity_embedding.background = "#4ECDC4"
|
||||
mock_provider_entity_embedding.help = None
|
||||
mock_provider_entity_embedding.supported_model_types = [ModelType.TEXT_EMBEDDING]
|
||||
@ -419,7 +416,6 @@ class TestModelProviderService:
|
||||
provider="openai",
|
||||
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
|
||||
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
|
||||
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
|
||||
supported_model_types=[ModelType.LLM],
|
||||
configurate_methods=[],
|
||||
models=[],
|
||||
@ -431,7 +427,6 @@ class TestModelProviderService:
|
||||
provider="openai",
|
||||
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
|
||||
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
|
||||
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
|
||||
supported_model_types=[ModelType.LLM],
|
||||
configurate_methods=[],
|
||||
models=[],
|
||||
@ -655,7 +650,6 @@ class TestModelProviderService:
|
||||
provider="openai",
|
||||
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
|
||||
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
|
||||
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
|
||||
supported_model_types=[ModelType.LLM],
|
||||
),
|
||||
)
|
||||
@ -1027,7 +1021,6 @@ class TestModelProviderService:
|
||||
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
|
||||
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
|
||||
icon_small_dark=None,
|
||||
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
|
||||
),
|
||||
model="gpt-3.5-turbo",
|
||||
model_type=ModelType.LLM,
|
||||
@ -1045,7 +1038,6 @@ class TestModelProviderService:
|
||||
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
|
||||
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
|
||||
icon_small_dark=None,
|
||||
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
|
||||
),
|
||||
model="gpt-4",
|
||||
model_type=ModelType.LLM,
|
||||
|
||||
@ -14,12 +14,12 @@ def test_successful_request(mock_get_client):
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
response = make_request("GET", "http://example.com")
|
||||
assert response.status_code == 200
|
||||
mock_client.request.assert_called_once()
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
@ -27,7 +27,6 @@ def test_retry_exceed_max_retries(mock_get_client):
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 500
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
@ -72,34 +71,12 @@ class TestGetUserProvidedHostHeader:
|
||||
assert result in ("first.com", "second.com")
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_host_header_preservation_without_user_header(mock_get_client):
|
||||
"""Test that when no Host header is provided, the default behavior is maintained."""
|
||||
mock_client = MagicMock()
|
||||
mock_request = MagicMock()
|
||||
mock_request.headers = {}
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
response = make_request("GET", "http://example.com")
|
||||
|
||||
assert response.status_code == 200
|
||||
# Host should not be set if not provided by user
|
||||
assert "Host" not in mock_request.headers or mock_request.headers.get("Host") is None
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_host_header_preservation_with_user_header(mock_get_client):
|
||||
"""Test that user-provided Host header is preserved in the request."""
|
||||
mock_client = MagicMock()
|
||||
mock_request = MagicMock()
|
||||
mock_request.headers = {}
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.send.return_value = mock_response
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
@ -107,3 +84,93 @@ def test_host_header_preservation_with_user_header(mock_get_client):
|
||||
response = make_request("GET", "http://example.com", headers={"Host": custom_host})
|
||||
|
||||
assert response.status_code == 200
|
||||
# Verify client.request was called with the host header preserved (lowercase)
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs["headers"]["host"] == custom_host
|
||||
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
@pytest.mark.parametrize("host_key", ["host", "HOST", "Host"])
|
||||
def test_host_header_preservation_case_insensitive(mock_get_client, host_key):
|
||||
"""Test that Host header is preserved regardless of case."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
response = make_request("GET", "http://example.com", headers={host_key: "api.example.com"})
|
||||
|
||||
assert response.status_code == 200
|
||||
# Host header should be normalized to lowercase "host"
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs["headers"]["host"] == "api.example.com"
|
||||
|
||||
|
||||
class TestFollowRedirectsParameter:
|
||||
"""Tests for follow_redirects parameter handling.
|
||||
|
||||
These tests verify that follow_redirects is correctly passed to client.request().
|
||||
"""
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_follow_redirects_passed_to_request(self, mock_get_client):
|
||||
"""Verify follow_redirects IS passed to client.request()."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
make_request("GET", "http://example.com", follow_redirects=True)
|
||||
|
||||
# Verify follow_redirects was passed to request
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs.get("follow_redirects") is True
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_allow_redirects_converted_to_follow_redirects(self, mock_get_client):
|
||||
"""Verify allow_redirects (requests-style) is converted to follow_redirects (httpx-style)."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
# Use allow_redirects (requests-style parameter)
|
||||
make_request("GET", "http://example.com", allow_redirects=True)
|
||||
|
||||
# Verify it was converted to follow_redirects
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs.get("follow_redirects") is True
|
||||
assert "allow_redirects" not in call_kwargs
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_follow_redirects_not_set_when_not_specified(self, mock_get_client):
|
||||
"""Verify follow_redirects is not in kwargs when not specified (httpx default behavior)."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
make_request("GET", "http://example.com")
|
||||
|
||||
# follow_redirects should not be in kwargs, letting httpx use its default
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert "follow_redirects" not in call_kwargs
|
||||
|
||||
@patch("core.helper.ssrf_proxy._get_ssrf_client")
|
||||
def test_follow_redirects_takes_precedence_over_allow_redirects(self, mock_get_client):
|
||||
"""Verify follow_redirects takes precedence when both are specified."""
|
||||
mock_client = MagicMock()
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.request.return_value = mock_response
|
||||
mock_get_client.return_value = mock_client
|
||||
|
||||
# Both specified - follow_redirects should take precedence
|
||||
make_request("GET", "http://example.com", allow_redirects=False, follow_redirects=True)
|
||||
|
||||
call_kwargs = mock_client.request.call_args.kwargs
|
||||
assert call_kwargs.get("follow_redirects") is True
|
||||
|
||||
0
api/tests/unit_tests/core/logging/__init__.py
Normal file
0
api/tests/unit_tests/core/logging/__init__.py
Normal file
79
api/tests/unit_tests/core/logging/test_context.py
Normal file
79
api/tests/unit_tests/core/logging/test_context.py
Normal file
@ -0,0 +1,79 @@
|
||||
"""Tests for logging context module."""
|
||||
|
||||
import uuid
|
||||
|
||||
from core.logging.context import (
|
||||
clear_request_context,
|
||||
get_request_id,
|
||||
get_trace_id,
|
||||
init_request_context,
|
||||
)
|
||||
|
||||
|
||||
class TestLoggingContext:
|
||||
"""Tests for the logging context functions."""
|
||||
|
||||
def test_init_creates_request_id(self):
|
||||
"""init_request_context should create a 10-char request ID."""
|
||||
init_request_context()
|
||||
request_id = get_request_id()
|
||||
assert len(request_id) == 10
|
||||
assert all(c in "0123456789abcdef" for c in request_id)
|
||||
|
||||
def test_init_creates_trace_id(self):
|
||||
"""init_request_context should create a 32-char trace ID."""
|
||||
init_request_context()
|
||||
trace_id = get_trace_id()
|
||||
assert len(trace_id) == 32
|
||||
assert all(c in "0123456789abcdef" for c in trace_id)
|
||||
|
||||
def test_trace_id_derived_from_request_id(self):
|
||||
"""trace_id should be deterministically derived from request_id."""
|
||||
init_request_context()
|
||||
request_id = get_request_id()
|
||||
trace_id = get_trace_id()
|
||||
|
||||
# Verify trace_id is derived using uuid5
|
||||
expected_trace = uuid.uuid5(uuid.NAMESPACE_DNS, request_id).hex
|
||||
assert trace_id == expected_trace
|
||||
|
||||
def test_clear_resets_context(self):
|
||||
"""clear_request_context should reset both IDs to empty strings."""
|
||||
init_request_context()
|
||||
assert get_request_id() != ""
|
||||
assert get_trace_id() != ""
|
||||
|
||||
clear_request_context()
|
||||
assert get_request_id() == ""
|
||||
assert get_trace_id() == ""
|
||||
|
||||
def test_default_values_are_empty(self):
|
||||
"""Default values should be empty strings before init."""
|
||||
clear_request_context()
|
||||
assert get_request_id() == ""
|
||||
assert get_trace_id() == ""
|
||||
|
||||
def test_multiple_inits_create_different_ids(self):
|
||||
"""Each init should create new unique IDs."""
|
||||
init_request_context()
|
||||
first_request_id = get_request_id()
|
||||
first_trace_id = get_trace_id()
|
||||
|
||||
init_request_context()
|
||||
second_request_id = get_request_id()
|
||||
second_trace_id = get_trace_id()
|
||||
|
||||
assert first_request_id != second_request_id
|
||||
assert first_trace_id != second_trace_id
|
||||
|
||||
def test_context_isolation(self):
|
||||
"""Context should be isolated per-call (no thread leakage in same thread)."""
|
||||
init_request_context()
|
||||
id1 = get_request_id()
|
||||
|
||||
# Simulate another request
|
||||
init_request_context()
|
||||
id2 = get_request_id()
|
||||
|
||||
# IDs should be different
|
||||
assert id1 != id2
|
||||
114
api/tests/unit_tests/core/logging/test_filters.py
Normal file
114
api/tests/unit_tests/core/logging/test_filters.py
Normal file
@ -0,0 +1,114 @@
|
||||
"""Tests for logging filters."""
|
||||
|
||||
import logging
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def log_record():
|
||||
return logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="",
|
||||
lineno=0,
|
||||
msg="test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
|
||||
class TestTraceContextFilter:
|
||||
def test_sets_empty_trace_id_without_context(self, log_record):
|
||||
from core.logging.context import clear_request_context
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
# Ensure no context is set
|
||||
clear_request_context()
|
||||
|
||||
filter = TraceContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
|
||||
assert result is True
|
||||
assert hasattr(log_record, "trace_id")
|
||||
assert hasattr(log_record, "span_id")
|
||||
assert hasattr(log_record, "req_id")
|
||||
# Without context, IDs should be empty
|
||||
assert log_record.trace_id == ""
|
||||
assert log_record.req_id == ""
|
||||
|
||||
def test_sets_trace_id_from_context(self, log_record):
|
||||
"""Test that trace_id and req_id are set from ContextVar when initialized."""
|
||||
from core.logging.context import init_request_context
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
# Initialize context (no Flask needed!)
|
||||
init_request_context()
|
||||
|
||||
filter = TraceContextFilter()
|
||||
filter.filter(log_record)
|
||||
|
||||
# With context initialized, IDs should be set
|
||||
assert log_record.trace_id != ""
|
||||
assert len(log_record.trace_id) == 32
|
||||
assert log_record.req_id != ""
|
||||
assert len(log_record.req_id) == 10
|
||||
|
||||
def test_filter_always_returns_true(self, log_record):
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
filter = TraceContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
assert result is True
|
||||
|
||||
def test_sets_trace_id_from_otel_when_available(self, log_record):
|
||||
from core.logging.filters import TraceContextFilter
|
||||
|
||||
mock_span = mock.MagicMock()
|
||||
mock_context = mock.MagicMock()
|
||||
mock_context.trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2
|
||||
mock_context.span_id = 0x051581BF3BB55C45
|
||||
mock_span.get_span_context.return_value = mock_context
|
||||
|
||||
with (
|
||||
mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span),
|
||||
mock.patch("opentelemetry.trace.span.INVALID_TRACE_ID", 0),
|
||||
mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0),
|
||||
):
|
||||
filter = TraceContextFilter()
|
||||
filter.filter(log_record)
|
||||
|
||||
assert log_record.trace_id == "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
assert log_record.span_id == "051581bf3bb55c45"
|
||||
|
||||
|
||||
class TestIdentityContextFilter:
|
||||
def test_sets_empty_identity_without_request_context(self, log_record):
|
||||
from core.logging.filters import IdentityContextFilter
|
||||
|
||||
filter = IdentityContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
|
||||
assert result is True
|
||||
assert log_record.tenant_id == ""
|
||||
assert log_record.user_id == ""
|
||||
assert log_record.user_type == ""
|
||||
|
||||
def test_filter_always_returns_true(self, log_record):
|
||||
from core.logging.filters import IdentityContextFilter
|
||||
|
||||
filter = IdentityContextFilter()
|
||||
result = filter.filter(log_record)
|
||||
assert result is True
|
||||
|
||||
def test_handles_exception_gracefully(self, log_record):
|
||||
from core.logging.filters import IdentityContextFilter
|
||||
|
||||
filter = IdentityContextFilter()
|
||||
|
||||
# Should not raise even if something goes wrong
|
||||
with mock.patch("core.logging.filters.flask.has_request_context", side_effect=Exception("Test error")):
|
||||
result = filter.filter(log_record)
|
||||
assert result is True
|
||||
assert log_record.tenant_id == ""
|
||||
267
api/tests/unit_tests/core/logging/test_structured_formatter.py
Normal file
267
api/tests/unit_tests/core/logging/test_structured_formatter.py
Normal file
@ -0,0 +1,267 @@
|
||||
"""Tests for structured JSON formatter."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import orjson
|
||||
|
||||
|
||||
class TestStructuredJSONFormatter:
|
||||
def test_basic_log_format(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter(service_name="test-service")
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=42,
|
||||
msg="Test message",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["severity"] == "INFO"
|
||||
assert log_dict["service"] == "test-service"
|
||||
assert log_dict["caller"] == "test.py:42"
|
||||
assert log_dict["message"] == "Test message"
|
||||
assert "ts" in log_dict
|
||||
assert log_dict["ts"].endswith("Z")
|
||||
|
||||
def test_severity_mapping(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
|
||||
test_cases = [
|
||||
(logging.DEBUG, "DEBUG"),
|
||||
(logging.INFO, "INFO"),
|
||||
(logging.WARNING, "WARN"),
|
||||
(logging.ERROR, "ERROR"),
|
||||
(logging.CRITICAL, "ERROR"),
|
||||
]
|
||||
|
||||
for level, expected_severity in test_cases:
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=level,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
assert log_dict["severity"] == expected_severity, f"Level {level} should map to {expected_severity}"
|
||||
|
||||
def test_error_with_stack_trace(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
|
||||
try:
|
||||
raise ValueError("Test error")
|
||||
except ValueError:
|
||||
exc_info = sys.exc_info()
|
||||
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.ERROR,
|
||||
pathname="test.py",
|
||||
lineno=10,
|
||||
msg="Error occurred",
|
||||
args=(),
|
||||
exc_info=exc_info,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["severity"] == "ERROR"
|
||||
assert "stack_trace" in log_dict
|
||||
assert "ValueError: Test error" in log_dict["stack_trace"]
|
||||
|
||||
def test_no_stack_trace_for_info(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
|
||||
try:
|
||||
raise ValueError("Test error")
|
||||
except ValueError:
|
||||
exc_info = sys.exc_info()
|
||||
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=10,
|
||||
msg="Info message",
|
||||
args=(),
|
||||
exc_info=exc_info,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert "stack_trace" not in log_dict
|
||||
|
||||
def test_trace_context_included(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
record.trace_id = "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
record.span_id = "051581bf3bb55c45"
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["trace_id"] == "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
assert log_dict["span_id"] == "051581bf3bb55c45"
|
||||
|
||||
def test_identity_context_included(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
record.tenant_id = "t-global-corp"
|
||||
record.user_id = "u-admin-007"
|
||||
record.user_type = "admin"
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert "identity" in log_dict
|
||||
assert log_dict["identity"]["tenant_id"] == "t-global-corp"
|
||||
assert log_dict["identity"]["user_id"] == "u-admin-007"
|
||||
assert log_dict["identity"]["user_type"] == "admin"
|
||||
|
||||
def test_no_identity_when_empty(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert "identity" not in log_dict
|
||||
|
||||
def test_attributes_included(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
record.attributes = {"order_id": "ord-123", "amount": 99.99}
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["attributes"]["order_id"] == "ord-123"
|
||||
assert log_dict["attributes"]["amount"] == 99.99
|
||||
|
||||
def test_message_with_args(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="User %s logged in from %s",
|
||||
args=("john", "192.168.1.1"),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
assert log_dict["message"] == "User john logged in from 192.168.1.1"
|
||||
|
||||
def test_timestamp_format(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
|
||||
output = formatter.format(record)
|
||||
log_dict = orjson.loads(output)
|
||||
|
||||
# Verify ISO 8601 format with Z suffix
|
||||
ts = log_dict["ts"]
|
||||
assert ts.endswith("Z")
|
||||
assert "T" in ts
|
||||
# Should have milliseconds
|
||||
assert "." in ts
|
||||
|
||||
def test_fallback_for_non_serializable_attributes(self):
|
||||
from core.logging.structured_formatter import StructuredJSONFormatter
|
||||
|
||||
formatter = StructuredJSONFormatter()
|
||||
record = logging.LogRecord(
|
||||
name="test",
|
||||
level=logging.INFO,
|
||||
pathname="test.py",
|
||||
lineno=1,
|
||||
msg="Test with non-serializable",
|
||||
args=(),
|
||||
exc_info=None,
|
||||
)
|
||||
# Set is not serializable by orjson
|
||||
record.attributes = {"items": {1, 2, 3}, "custom": object()}
|
||||
|
||||
# Should not raise, fallback to json.dumps with default=str
|
||||
output = formatter.format(record)
|
||||
|
||||
# Verify it's valid JSON (parsed by stdlib json since orjson may fail)
|
||||
import json
|
||||
|
||||
log_dict = json.loads(output)
|
||||
assert log_dict["message"] == "Test with non-serializable"
|
||||
assert "attributes" in log_dict
|
||||
102
api/tests/unit_tests/core/logging/test_trace_helpers.py
Normal file
102
api/tests/unit_tests/core/logging/test_trace_helpers.py
Normal file
@ -0,0 +1,102 @@
|
||||
"""Tests for trace helper functions."""
|
||||
|
||||
import re
|
||||
from unittest import mock
|
||||
|
||||
|
||||
class TestGetSpanIdFromOtelContext:
|
||||
def test_returns_none_without_span(self):
|
||||
from core.helper.trace_id_helper import get_span_id_from_otel_context
|
||||
|
||||
with mock.patch("opentelemetry.trace.get_current_span", return_value=None):
|
||||
result = get_span_id_from_otel_context()
|
||||
assert result is None
|
||||
|
||||
def test_returns_span_id_when_available(self):
|
||||
from core.helper.trace_id_helper import get_span_id_from_otel_context
|
||||
|
||||
mock_span = mock.MagicMock()
|
||||
mock_context = mock.MagicMock()
|
||||
mock_context.span_id = 0x051581BF3BB55C45
|
||||
mock_span.get_span_context.return_value = mock_context
|
||||
|
||||
with mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span):
|
||||
with mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0):
|
||||
result = get_span_id_from_otel_context()
|
||||
assert result == "051581bf3bb55c45"
|
||||
|
||||
def test_returns_none_on_exception(self):
|
||||
from core.helper.trace_id_helper import get_span_id_from_otel_context
|
||||
|
||||
with mock.patch("opentelemetry.trace.get_current_span", side_effect=Exception("Test error")):
|
||||
result = get_span_id_from_otel_context()
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestGenerateTraceparentHeader:
|
||||
def test_generates_valid_format(self):
|
||||
from core.helper.trace_id_helper import generate_traceparent_header
|
||||
|
||||
with mock.patch("opentelemetry.trace.get_current_span", return_value=None):
|
||||
result = generate_traceparent_header()
|
||||
|
||||
assert result is not None
|
||||
# Format: 00-{trace_id}-{span_id}-01
|
||||
parts = result.split("-")
|
||||
assert len(parts) == 4
|
||||
assert parts[0] == "00" # version
|
||||
assert len(parts[1]) == 32 # trace_id (32 hex chars)
|
||||
assert len(parts[2]) == 16 # span_id (16 hex chars)
|
||||
assert parts[3] == "01" # flags
|
||||
|
||||
def test_uses_otel_context_when_available(self):
|
||||
from core.helper.trace_id_helper import generate_traceparent_header
|
||||
|
||||
mock_span = mock.MagicMock()
|
||||
mock_context = mock.MagicMock()
|
||||
mock_context.trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2
|
||||
mock_context.span_id = 0x051581BF3BB55C45
|
||||
mock_span.get_span_context.return_value = mock_context
|
||||
|
||||
with mock.patch("opentelemetry.trace.get_current_span", return_value=mock_span):
|
||||
with (
|
||||
mock.patch("opentelemetry.trace.span.INVALID_TRACE_ID", 0),
|
||||
mock.patch("opentelemetry.trace.span.INVALID_SPAN_ID", 0),
|
||||
):
|
||||
result = generate_traceparent_header()
|
||||
|
||||
assert result == "00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01"
|
||||
|
||||
def test_generates_hex_only_values(self):
|
||||
from core.helper.trace_id_helper import generate_traceparent_header
|
||||
|
||||
with mock.patch("opentelemetry.trace.get_current_span", return_value=None):
|
||||
result = generate_traceparent_header()
|
||||
|
||||
parts = result.split("-")
|
||||
# All parts should be valid hex
|
||||
assert re.match(r"^[0-9a-f]+$", parts[1])
|
||||
assert re.match(r"^[0-9a-f]+$", parts[2])
|
||||
|
||||
|
||||
class TestParseTraceparentHeader:
|
||||
def test_parses_valid_traceparent(self):
|
||||
from core.helper.trace_id_helper import parse_traceparent_header
|
||||
|
||||
traceparent = "00-5b8aa5a2d2c872e8321cf37308d69df2-051581bf3bb55c45-01"
|
||||
result = parse_traceparent_header(traceparent)
|
||||
|
||||
assert result == "5b8aa5a2d2c872e8321cf37308d69df2"
|
||||
|
||||
def test_returns_none_for_invalid_format(self):
|
||||
from core.helper.trace_id_helper import parse_traceparent_header
|
||||
|
||||
# Wrong number of parts
|
||||
assert parse_traceparent_header("00-abc-def") is None
|
||||
# Wrong trace_id length
|
||||
assert parse_traceparent_header("00-abc-def-01") is None
|
||||
|
||||
def test_returns_none_for_empty_string(self):
|
||||
from core.helper.trace_id_helper import parse_traceparent_header
|
||||
|
||||
assert parse_traceparent_header("") is None
|
||||
@ -32,7 +32,6 @@ def mock_provider_entity():
|
||||
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
|
||||
description=I18nObject(en_US="OpenAI provider", zh_Hans="OpenAI 提供商"),
|
||||
icon_small=I18nObject(en_US="icon.png", zh_Hans="icon.png"),
|
||||
icon_large=I18nObject(en_US="icon.png", zh_Hans="icon.png"),
|
||||
background="background.png",
|
||||
help=None,
|
||||
supported_model_types=[ModelType.LLM],
|
||||
|
||||
@ -99,29 +99,20 @@ def test_external_api_json_message_and_bad_request_rewrite():
|
||||
assert res.get_json()["message"] == "Invalid JSON payload received or JSON payload is empty."
|
||||
|
||||
|
||||
def test_external_api_param_mapping_and_quota_and_exc_info_none():
|
||||
# Force exc_info() to return (None,None,None) only during request
|
||||
import libs.external_api as ext
|
||||
def test_external_api_param_mapping_and_quota():
|
||||
app = _create_api_app()
|
||||
client = app.test_client()
|
||||
|
||||
orig_exc_info = ext.sys.exc_info
|
||||
try:
|
||||
ext.sys.exc_info = lambda: (None, None, None)
|
||||
# Param errors mapping payload path
|
||||
res = client.get("/api/param-errors")
|
||||
assert res.status_code == 400
|
||||
data = res.get_json()
|
||||
assert data["code"] == "invalid_param"
|
||||
assert data["params"] == "field"
|
||||
|
||||
app = _create_api_app()
|
||||
client = app.test_client()
|
||||
|
||||
# Param errors mapping payload path
|
||||
res = client.get("/api/param-errors")
|
||||
assert res.status_code == 400
|
||||
data = res.get_json()
|
||||
assert data["code"] == "invalid_param"
|
||||
assert data["params"] == "field"
|
||||
|
||||
# Quota path — depending on Flask-RESTX internals it may be handled
|
||||
res = client.get("/api/quota")
|
||||
assert res.status_code in (400, 429)
|
||||
finally:
|
||||
ext.sys.exc_info = orig_exc_info # type: ignore[assignment]
|
||||
# Quota path — depending on Flask-RESTX internals it may be handled
|
||||
res = client.get("/api/quota")
|
||||
assert res.status_code in (400, 429)
|
||||
|
||||
|
||||
def test_unauthorized_and_force_logout_clears_cookies():
|
||||
|
||||
@ -27,7 +27,6 @@ def service_with_fake_configurations():
|
||||
description=None,
|
||||
icon_small=None,
|
||||
icon_small_dark=None,
|
||||
icon_large=None,
|
||||
background=None,
|
||||
help=None,
|
||||
supported_model_types=[ModelType.LLM],
|
||||
|
||||
@ -69,6 +69,8 @@ PYTHONIOENCODING=utf-8
|
||||
# The log level for the application.
|
||||
# Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
|
||||
LOG_LEVEL=INFO
|
||||
# Log output format: text or json
|
||||
LOG_OUTPUT_FORMAT=text
|
||||
# Log file path
|
||||
LOG_FILE=/app/logs/server.log
|
||||
# Log file max size, the unit is MB
|
||||
|
||||
@ -129,6 +129,7 @@ services:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
|
||||
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
|
||||
REDIS_HOST: ${REDIS_HOST:-redis}
|
||||
REDIS_PORT: ${REDIS_PORT:-6379}
|
||||
|
||||
@ -17,6 +17,7 @@ x-shared-env: &shared-api-worker-env
|
||||
LC_ALL: ${LC_ALL:-en_US.UTF-8}
|
||||
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
|
||||
LOG_LEVEL: ${LOG_LEVEL:-INFO}
|
||||
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
|
||||
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
|
||||
LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20}
|
||||
LOG_FILE_BACKUP_COUNT: ${LOG_FILE_BACKUP_COUNT:-5}
|
||||
|
||||
@ -93,7 +93,6 @@ function createMockProviderContext(overrides: Partial<ProviderContextState> = {}
|
||||
provider: 'openai',
|
||||
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
|
||||
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
status: ModelStatusEnum.active,
|
||||
models: [
|
||||
{
|
||||
@ -711,7 +710,6 @@ describe('DebugWithSingleModel', () => {
|
||||
provider: 'openai',
|
||||
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
|
||||
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
status: ModelStatusEnum.active,
|
||||
models: [
|
||||
{
|
||||
@ -742,7 +740,6 @@ describe('DebugWithSingleModel', () => {
|
||||
provider: 'different-provider',
|
||||
label: { en_US: 'Different Provider', zh_Hans: '不同提供商' },
|
||||
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
status: ModelStatusEnum.active,
|
||||
models: [],
|
||||
},
|
||||
@ -925,7 +922,6 @@ describe('DebugWithSingleModel', () => {
|
||||
provider: 'openai',
|
||||
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
|
||||
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
status: ModelStatusEnum.active,
|
||||
models: [
|
||||
{
|
||||
@ -975,7 +971,6 @@ describe('DebugWithSingleModel', () => {
|
||||
provider: 'openai',
|
||||
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
|
||||
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
|
||||
status: ModelStatusEnum.active,
|
||||
models: [
|
||||
{
|
||||
|
||||
File diff suppressed because one or more lines are too long
@ -1,20 +0,0 @@
|
||||
// GENERATE BY script
|
||||
// DON NOT EDIT IT MANUALLY
|
||||
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
import * as React from 'react'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import data from './OpenaiBlue.json'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'OpenaiBlue'
|
||||
|
||||
export default Icon
|
||||
File diff suppressed because one or more lines are too long
@ -1,20 +0,0 @@
|
||||
// GENERATE BY script
|
||||
// DON NOT EDIT IT MANUALLY
|
||||
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
import * as React from 'react'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import data from './OpenaiTeal.json'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'OpenaiTeal'
|
||||
|
||||
export default Icon
|
||||
File diff suppressed because one or more lines are too long
@ -1,20 +0,0 @@
|
||||
// GENERATE BY script
|
||||
// DON NOT EDIT IT MANUALLY
|
||||
|
||||
import type { IconData } from '@/app/components/base/icons/IconBase'
|
||||
import * as React from 'react'
|
||||
import IconBase from '@/app/components/base/icons/IconBase'
|
||||
import data from './OpenaiViolet.json'
|
||||
|
||||
const Icon = (
|
||||
{
|
||||
ref,
|
||||
...props
|
||||
}: React.SVGProps<SVGSVGElement> & {
|
||||
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
|
||||
},
|
||||
) => <IconBase {...props} ref={ref} data={data as IconData} />
|
||||
|
||||
Icon.displayName = 'OpenaiViolet'
|
||||
|
||||
export default Icon
|
||||
@ -26,12 +26,9 @@ export { default as Localai } from './Localai'
|
||||
export { default as LocalaiText } from './LocalaiText'
|
||||
export { default as Microsoft } from './Microsoft'
|
||||
export { default as OpenaiBlack } from './OpenaiBlack'
|
||||
export { default as OpenaiBlue } from './OpenaiBlue'
|
||||
export { default as OpenaiGreen } from './OpenaiGreen'
|
||||
export { default as OpenaiTeal } from './OpenaiTeal'
|
||||
export { default as OpenaiText } from './OpenaiText'
|
||||
export { default as OpenaiTransparent } from './OpenaiTransparent'
|
||||
export { default as OpenaiViolet } from './OpenaiViolet'
|
||||
export { default as OpenaiYellow } from './OpenaiYellow'
|
||||
export { default as Openllm } from './Openllm'
|
||||
export { default as OpenllmText } from './OpenllmText'
|
||||
|
||||
@ -110,7 +110,7 @@ const GotoAnything: FC<Props> = ({
|
||||
isWorkflowPage,
|
||||
isRagPipelinePage,
|
||||
defaultLocale,
|
||||
Object.keys(Actions).sort().join(','),
|
||||
Actions,
|
||||
],
|
||||
queryFn: async () => {
|
||||
const query = searchQueryDebouncedValue.toLowerCase()
|
||||
|
||||
@ -218,7 +218,6 @@ export type ModelProvider = {
|
||||
}
|
||||
icon_small: TypeWithI18N
|
||||
icon_small_dark?: TypeWithI18N
|
||||
icon_large: TypeWithI18N
|
||||
background?: string
|
||||
supported_model_types: ModelTypeEnum[]
|
||||
configurate_methods: ConfigurationMethodEnum[]
|
||||
@ -254,7 +253,6 @@ export type ModelProvider = {
|
||||
|
||||
export type Model = {
|
||||
provider: string
|
||||
icon_large: TypeWithI18N
|
||||
icon_small: TypeWithI18N
|
||||
icon_small_dark?: TypeWithI18N
|
||||
label: TypeWithI18N
|
||||
@ -267,7 +265,6 @@ export type DefaultModelResponse = {
|
||||
model_type: ModelTypeEnum
|
||||
provider: {
|
||||
provider: string
|
||||
icon_large: TypeWithI18N
|
||||
icon_small: TypeWithI18N
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,7 +3,7 @@ import type {
|
||||
Model,
|
||||
ModelProvider,
|
||||
} from '../declarations'
|
||||
import { OpenaiBlue, OpenaiTeal, OpenaiViolet, OpenaiYellow } from '@/app/components/base/icons/src/public/llm'
|
||||
import { OpenaiYellow } from '@/app/components/base/icons/src/public/llm'
|
||||
import { Group } from '@/app/components/base/icons/src/vender/other'
|
||||
import useTheme from '@/hooks/use-theme'
|
||||
import { renderI18nObject } from '@/i18n-config'
|
||||
@ -29,12 +29,6 @@ const ModelIcon: FC<ModelIconProps> = ({
|
||||
const language = useLanguage()
|
||||
if (provider?.provider && ['openai', 'langgenius/openai/openai'].includes(provider.provider) && modelName?.startsWith('o'))
|
||||
return <div className="flex items-center justify-center"><OpenaiYellow className={cn('h-5 w-5', className)} /></div>
|
||||
if (provider?.provider && ['openai', 'langgenius/openai/openai'].includes(provider.provider) && modelName?.includes('gpt-4.1'))
|
||||
return <div className="flex items-center justify-center"><OpenaiTeal className={cn('h-5 w-5', className)} /></div>
|
||||
if (provider?.provider && ['openai', 'langgenius/openai/openai'].includes(provider.provider) && modelName?.includes('gpt-4o'))
|
||||
return <div className="flex items-center justify-center"><OpenaiBlue className={cn('h-5 w-5', className)} /></div>
|
||||
if (provider?.provider && ['openai', 'langgenius/openai/openai'].includes(provider.provider) && modelName?.startsWith('gpt-4'))
|
||||
return <div className="flex items-center justify-center"><OpenaiViolet className={cn('h-5 w-5', className)} /></div>
|
||||
|
||||
if (provider?.icon_small) {
|
||||
return (
|
||||
|
||||
@ -219,7 +219,6 @@ const createModelItem = (overrides: Partial<ModelItem> = {}): ModelItem => ({
|
||||
*/
|
||||
const createModel = (overrides: Partial<Model> = {}): Model => ({
|
||||
provider: 'openai',
|
||||
icon_large: { en_US: 'icon-large.png', zh_Hans: 'icon-large.png' },
|
||||
icon_small: { en_US: 'icon-small.png', zh_Hans: 'icon-small.png' },
|
||||
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
|
||||
models: [createModelItem()],
|
||||
|
||||
@ -1,266 +0,0 @@
|
||||
.main {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: 6rem;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.description {
|
||||
display: inherit;
|
||||
justify-content: inherit;
|
||||
align-items: inherit;
|
||||
font-size: 0.85rem;
|
||||
max-width: var(--max-width);
|
||||
width: 100%;
|
||||
z-index: 2;
|
||||
font-family: var(--font-mono);
|
||||
}
|
||||
|
||||
.description a {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.description p {
|
||||
position: relative;
|
||||
margin: 0;
|
||||
padding: 1rem;
|
||||
background-color: rgba(var(--callout-rgb), 0.5);
|
||||
border: 1px solid rgba(var(--callout-border-rgb), 0.3);
|
||||
border-radius: var(--border-radius);
|
||||
}
|
||||
|
||||
.code {
|
||||
font-weight: 700;
|
||||
font-family: var(--font-mono);
|
||||
}
|
||||
|
||||
.grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(3, minmax(33%, auto));
|
||||
width: var(--max-width);
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: 1rem 1.2rem;
|
||||
border-radius: var(--border-radius);
|
||||
background: rgba(var(--card-rgb), 0);
|
||||
border: 1px solid rgba(var(--card-border-rgb), 0);
|
||||
transition: background 200ms, border 200ms;
|
||||
}
|
||||
|
||||
.card span {
|
||||
display: inline-block;
|
||||
transition: transform 200ms;
|
||||
}
|
||||
|
||||
.card h2 {
|
||||
font-weight: 600;
|
||||
margin-bottom: 0.7rem;
|
||||
}
|
||||
|
||||
.card p {
|
||||
margin: 0;
|
||||
opacity: 0.6;
|
||||
font-size: 0.9rem;
|
||||
line-height: 1.5;
|
||||
max-width: 34ch;
|
||||
}
|
||||
|
||||
.center {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
padding: 4rem 0;
|
||||
}
|
||||
|
||||
.center::before {
|
||||
background: var(--secondary-glow);
|
||||
border-radius: 50%;
|
||||
width: 480px;
|
||||
height: 360px;
|
||||
margin-left: -400px;
|
||||
}
|
||||
|
||||
.center::after {
|
||||
background: var(--primary-glow);
|
||||
width: 240px;
|
||||
height: 180px;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
.center::before,
|
||||
.center::after {
|
||||
content: '';
|
||||
left: 50%;
|
||||
position: absolute;
|
||||
filter: blur(45px);
|
||||
transform: translateZ(0);
|
||||
}
|
||||
|
||||
.logo,
|
||||
.thirteen {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.thirteen {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
width: 75px;
|
||||
height: 75px;
|
||||
padding: 25px 10px;
|
||||
margin-left: 16px;
|
||||
transform: translateZ(0);
|
||||
border-radius: var(--border-radius);
|
||||
overflow: hidden;
|
||||
box-shadow: 0px 2px 8px -1px #0000001a;
|
||||
}
|
||||
|
||||
.thirteen::before,
|
||||
.thirteen::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
z-index: -1;
|
||||
}
|
||||
|
||||
/* Conic Gradient Animation */
|
||||
.thirteen::before {
|
||||
animation: 6s rotate linear infinite;
|
||||
width: 200%;
|
||||
height: 200%;
|
||||
background: var(--tile-border);
|
||||
}
|
||||
|
||||
/* Inner Square */
|
||||
.thirteen::after {
|
||||
inset: 0;
|
||||
padding: 1px;
|
||||
border-radius: var(--border-radius);
|
||||
background: linear-gradient(to bottom right,
|
||||
rgba(var(--tile-start-rgb), 1),
|
||||
rgba(var(--tile-end-rgb), 1));
|
||||
background-clip: content-box;
|
||||
}
|
||||
|
||||
/* Enable hover only on non-touch devices */
|
||||
@media (hover: hover) and (pointer: fine) {
|
||||
.card:hover {
|
||||
background: rgba(var(--card-rgb), 0.1);
|
||||
border: 1px solid rgba(var(--card-border-rgb), 0.15);
|
||||
}
|
||||
|
||||
.card:hover span {
|
||||
transform: translateX(4px);
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion) {
|
||||
.thirteen::before {
|
||||
animation: none;
|
||||
}
|
||||
|
||||
.card:hover span {
|
||||
transform: none;
|
||||
}
|
||||
}
|
||||
|
||||
/* Mobile and Tablet */
|
||||
@media (max-width: 1023px) {
|
||||
.content {
|
||||
padding: 4rem;
|
||||
}
|
||||
|
||||
.grid {
|
||||
grid-template-columns: 1fr;
|
||||
margin-bottom: 120px;
|
||||
max-width: 320px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: 1rem 2.5rem;
|
||||
}
|
||||
|
||||
.card h2 {
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.center {
|
||||
padding: 8rem 0 6rem;
|
||||
}
|
||||
|
||||
.center::before {
|
||||
transform: none;
|
||||
height: 300px;
|
||||
}
|
||||
|
||||
.description {
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.description a {
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.description p,
|
||||
.description div {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
position: fixed;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.description p {
|
||||
align-items: center;
|
||||
inset: 0 0 auto;
|
||||
padding: 2rem 1rem 1.4rem;
|
||||
border-radius: 0;
|
||||
border: none;
|
||||
border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25);
|
||||
background: linear-gradient(to bottom,
|
||||
rgba(var(--background-start-rgb), 1),
|
||||
rgba(var(--callout-rgb), 0.5));
|
||||
background-clip: padding-box;
|
||||
backdrop-filter: blur(24px);
|
||||
}
|
||||
|
||||
.description div {
|
||||
align-items: flex-end;
|
||||
pointer-events: none;
|
||||
inset: auto 0 0;
|
||||
padding: 2rem;
|
||||
height: 200px;
|
||||
background: linear-gradient(to bottom,
|
||||
transparent 0%,
|
||||
rgb(var(--background-end-rgb)) 40%);
|
||||
z-index: 1;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
.vercelLogo {
|
||||
filter: invert(1);
|
||||
}
|
||||
|
||||
.logo,
|
||||
.thirteen img {
|
||||
filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes rotate {
|
||||
from {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
|
||||
to {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
}
|
||||
@ -1,5 +1,6 @@
|
||||
// @ts-check
|
||||
import antfu from '@antfu/eslint-config'
|
||||
import pluginQuery from '@tanstack/eslint-plugin-query'
|
||||
import sonar from 'eslint-plugin-sonarjs'
|
||||
import storybook from 'eslint-plugin-storybook'
|
||||
import tailwind from 'eslint-plugin-tailwindcss'
|
||||
@ -79,6 +80,7 @@ export default antfu(
|
||||
},
|
||||
},
|
||||
storybook.configs['flat/recommended'],
|
||||
...pluginQuery.configs['flat/recommended'],
|
||||
// sonar
|
||||
{
|
||||
rules: {
|
||||
|
||||
@ -165,6 +165,7 @@
|
||||
"@storybook/addon-themes": "9.1.13",
|
||||
"@storybook/nextjs": "9.1.13",
|
||||
"@storybook/react": "9.1.13",
|
||||
"@tanstack/eslint-plugin-query": "^5.91.2",
|
||||
"@tanstack/react-devtools": "^0.9.0",
|
||||
"@tanstack/react-form-devtools": "^0.2.9",
|
||||
"@tanstack/react-query-devtools": "^5.90.2",
|
||||
|
||||
30
web/pnpm-lock.yaml
generated
30
web/pnpm-lock.yaml
generated
@ -406,6 +406,9 @@ importers:
|
||||
'@storybook/react':
|
||||
specifier: 9.1.13
|
||||
version: 9.1.13(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)
|
||||
'@tanstack/eslint-plugin-query':
|
||||
specifier: ^5.91.2
|
||||
version: 5.91.2(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)
|
||||
'@tanstack/react-devtools':
|
||||
specifier: ^0.9.0
|
||||
version: 0.9.0(@types/react-dom@19.2.3(@types/react@19.2.7))(@types/react@19.2.7)(csstype@3.2.3)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(solid-js@1.9.10)
|
||||
@ -3387,6 +3390,11 @@ packages:
|
||||
peerDependencies:
|
||||
solid-js: '>=1.9.7'
|
||||
|
||||
'@tanstack/eslint-plugin-query@5.91.2':
|
||||
resolution: {integrity: sha512-UPeWKl/Acu1IuuHJlsN+eITUHqAaa9/04geHHPedY8siVarSaWprY0SVMKrkpKfk5ehRT7+/MZ5QwWuEtkWrFw==}
|
||||
peerDependencies:
|
||||
eslint: ^8.57.0 || ^9.0.0
|
||||
|
||||
'@tanstack/form-core@1.27.1':
|
||||
resolution: {integrity: sha512-hPM+0tUnZ2C2zb2TE1lar1JJ0S0cbnQHlUwFcCnVBpMV3rjtUzkoM766gUpWrlmTGCzNad0GbJ0aTxVsjT6J8g==}
|
||||
|
||||
@ -10130,7 +10138,7 @@ snapshots:
|
||||
'@es-joy/jsdoccomment@0.76.0':
|
||||
dependencies:
|
||||
'@types/estree': 1.0.8
|
||||
'@typescript-eslint/types': 8.50.0
|
||||
'@typescript-eslint/types': 8.50.1
|
||||
comment-parser: 1.4.1
|
||||
esquery: 1.6.0
|
||||
jsdoc-type-pratt-parser: 6.10.0
|
||||
@ -10138,7 +10146,7 @@ snapshots:
|
||||
'@es-joy/jsdoccomment@0.78.0':
|
||||
dependencies:
|
||||
'@types/estree': 1.0.8
|
||||
'@typescript-eslint/types': 8.50.0
|
||||
'@typescript-eslint/types': 8.50.1
|
||||
comment-parser: 1.4.1
|
||||
esquery: 1.6.0
|
||||
jsdoc-type-pratt-parser: 7.0.0
|
||||
@ -11957,7 +11965,7 @@ snapshots:
|
||||
'@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@1.21.7))':
|
||||
dependencies:
|
||||
'@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@1.21.7))
|
||||
'@typescript-eslint/types': 8.49.0
|
||||
'@typescript-eslint/types': 8.50.1
|
||||
eslint: 9.39.2(jiti@1.21.7)
|
||||
eslint-visitor-keys: 4.2.1
|
||||
espree: 10.4.0
|
||||
@ -12039,6 +12047,14 @@ snapshots:
|
||||
- csstype
|
||||
- utf-8-validate
|
||||
|
||||
'@tanstack/eslint-plugin-query@5.91.2(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)':
|
||||
dependencies:
|
||||
'@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)
|
||||
eslint: 9.39.2(jiti@1.21.7)
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
- typescript
|
||||
|
||||
'@tanstack/form-core@1.27.1':
|
||||
dependencies:
|
||||
'@tanstack/devtools-event-client': 0.3.5
|
||||
@ -12512,8 +12528,8 @@ snapshots:
|
||||
|
||||
'@typescript-eslint/project-service@8.50.0(typescript@5.9.3)':
|
||||
dependencies:
|
||||
'@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3)
|
||||
'@typescript-eslint/types': 8.50.0
|
||||
'@typescript-eslint/tsconfig-utils': 8.50.1(typescript@5.9.3)
|
||||
'@typescript-eslint/types': 8.50.1
|
||||
debug: 4.4.3
|
||||
typescript: 5.9.3
|
||||
transitivePeerDependencies:
|
||||
@ -12742,7 +12758,7 @@ snapshots:
|
||||
|
||||
'@vitest/eslint-plugin@1.6.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)(vitest@4.0.16(@types/node@18.15.0)(happy-dom@20.0.11)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.0))(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))':
|
||||
dependencies:
|
||||
'@typescript-eslint/scope-manager': 8.49.0
|
||||
'@typescript-eslint/scope-manager': 8.50.1
|
||||
'@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)
|
||||
eslint: 9.39.2(jiti@1.21.7)
|
||||
optionalDependencies:
|
||||
@ -14240,7 +14256,7 @@ snapshots:
|
||||
|
||||
eslint-plugin-perfectionist@4.15.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3):
|
||||
dependencies:
|
||||
'@typescript-eslint/types': 8.49.0
|
||||
'@typescript-eslint/types': 8.50.1
|
||||
'@typescript-eslint/utils': 8.50.1(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)
|
||||
eslint: 9.39.2(jiti@1.21.7)
|
||||
natural-orderby: 5.0.0
|
||||
|
||||
@ -73,7 +73,7 @@ export const useUpdateAccessMode = () => {
|
||||
export const useGetUserCanAccessApp = ({ appId, isInstalledApp = true, enabled }: { appId?: string, isInstalledApp?: boolean, enabled?: boolean }) => {
|
||||
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
|
||||
return useQuery({
|
||||
queryKey: [NAME_SPACE, 'user-can-access-app', appId],
|
||||
queryKey: [NAME_SPACE, 'user-can-access-app', appId, systemFeatures.webapp_auth.enabled, isInstalledApp],
|
||||
queryFn: () => {
|
||||
if (systemFeatures.webapp_auth.enabled)
|
||||
return getUserCanAccess(appId!, isInstalledApp)
|
||||
|
||||
@ -48,7 +48,7 @@ export const useDocumentList = (payload: {
|
||||
if (normalizedStatus && normalizedStatus !== 'all')
|
||||
params.status = normalizedStatus
|
||||
return useQuery<DocumentListResponse>({
|
||||
queryKey: [...useDocumentListKey, datasetId, keyword, page, limit, sort, normalizedStatus],
|
||||
queryKey: [...useDocumentListKey, datasetId, params],
|
||||
queryFn: () => fetchDocumentList(datasetId, params),
|
||||
refetchInterval,
|
||||
})
|
||||
@ -126,7 +126,7 @@ export const useDocumentDetail = (payload: {
|
||||
}) => {
|
||||
const { datasetId, documentId, params } = payload
|
||||
return useQuery<DocumentDetailResponse>({
|
||||
queryKey: [...useDocumentDetailKey, 'withoutMetaData', datasetId, documentId],
|
||||
queryKey: [...useDocumentDetailKey, 'withoutMetaData', datasetId, documentId, params],
|
||||
queryFn: () => fetchDocumentDetail(datasetId, documentId, params),
|
||||
})
|
||||
}
|
||||
@ -138,7 +138,7 @@ export const useDocumentMetadata = (payload: {
|
||||
}) => {
|
||||
const { datasetId, documentId, params } = payload
|
||||
return useQuery<DocumentDetailResponse>({
|
||||
queryKey: [...useDocumentDetailKey, 'onlyMetaData', datasetId, documentId],
|
||||
queryKey: [...useDocumentDetailKey, 'onlyMetaData', datasetId, documentId, params],
|
||||
queryFn: () => fetchDocumentDetail(datasetId, documentId, params),
|
||||
})
|
||||
}
|
||||
|
||||
@ -39,9 +39,9 @@ export const useSegmentList = (
|
||||
disable?: boolean,
|
||||
) => {
|
||||
const { datasetId, documentId, params } = payload
|
||||
const { page, limit, keyword, enabled } = params
|
||||
|
||||
return useQuery<SegmentsResponse>({
|
||||
queryKey: [...useSegmentListKey, { datasetId, documentId, page, limit, keyword, enabled }],
|
||||
queryKey: [...useSegmentListKey, datasetId, documentId, params],
|
||||
queryFn: () => {
|
||||
return fetchSegmentList(datasetId, documentId, params)
|
||||
},
|
||||
@ -115,9 +115,9 @@ export const useChildSegmentList = (
|
||||
disable?: boolean,
|
||||
) => {
|
||||
const { datasetId, documentId, segmentId, params } = payload
|
||||
const { page, limit, keyword } = params
|
||||
|
||||
return useQuery({
|
||||
queryKey: [...useChildSegmentListKey, { datasetId, documentId, segmentId, page, limit, keyword }],
|
||||
queryKey: [...useChildSegmentListKey, datasetId, documentId, segmentId, params],
|
||||
queryFn: () => {
|
||||
return fetchChildSegments(datasetId, documentId, segmentId, params)
|
||||
},
|
||||
|
||||
@ -59,7 +59,7 @@ export const useUpdateAppPinStatus = () => {
|
||||
export const useGetInstalledAppAccessModeByAppId = (appId: string | null) => {
|
||||
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
|
||||
return useQuery({
|
||||
queryKey: [NAME_SPACE, 'appAccessMode', appId],
|
||||
queryKey: [NAME_SPACE, 'appAccessMode', appId, systemFeatures.webapp_auth.enabled],
|
||||
queryFn: () => {
|
||||
if (systemFeatures.webapp_auth.enabled === false) {
|
||||
return {
|
||||
|
||||
@ -82,7 +82,7 @@ export const useGetModelCredential = (
|
||||
) => {
|
||||
return useQuery({
|
||||
enabled,
|
||||
queryKey: [NAME_SPACE, 'model-list', provider, model, modelType, credentialId],
|
||||
queryKey: [NAME_SPACE, 'model-list', provider, model, modelType, credentialId, configFrom],
|
||||
queryFn: () => fetchModelCredential(provider, model, modelType, configFrom, credentialId),
|
||||
staleTime: 0,
|
||||
gcTime: 0,
|
||||
|
||||
@ -63,9 +63,8 @@ const NAME_SPACE = 'pipeline'
|
||||
|
||||
export const PipelineTemplateListQueryKeyPrefix = [NAME_SPACE, 'template-list']
|
||||
export const usePipelineTemplateList = (params: PipelineTemplateListParams, enabled = true) => {
|
||||
const { type, language } = params
|
||||
return useQuery<PipelineTemplateListResponse>({
|
||||
queryKey: [...PipelineTemplateListQueryKeyPrefix, type, language],
|
||||
queryKey: [...PipelineTemplateListQueryKeyPrefix, params],
|
||||
queryFn: () => {
|
||||
return fetchPipelineTemplateList(params)
|
||||
},
|
||||
|
||||
@ -596,20 +596,21 @@ export const useMutationCheckDependencies = () => {
|
||||
}
|
||||
|
||||
export const useModelInList = (currentProvider?: ModelProvider, modelId?: string) => {
|
||||
const provider = currentProvider?.provider
|
||||
return useQuery({
|
||||
queryKey: ['modelInList', currentProvider?.provider, modelId],
|
||||
queryKey: ['modelInList', provider, modelId],
|
||||
queryFn: async () => {
|
||||
if (!modelId || !currentProvider)
|
||||
if (!modelId || !provider)
|
||||
return false
|
||||
try {
|
||||
const modelsData = await fetchModelProviderModelList(`/workspaces/current/model-providers/${currentProvider?.provider}/models`)
|
||||
const modelsData = await fetchModelProviderModelList(`/workspaces/current/model-providers/${provider}/models`)
|
||||
return !!modelId && !!modelsData.data.find(item => item.model === modelId)
|
||||
}
|
||||
catch {
|
||||
return false
|
||||
}
|
||||
},
|
||||
enabled: !!modelId && !!currentProvider,
|
||||
enabled: !!modelId && !!provider,
|
||||
})
|
||||
}
|
||||
|
||||
@ -652,7 +653,7 @@ export const usePluginReadme = ({ plugin_unique_identifier, language }: { plugin
|
||||
export const usePluginReadmeAsset = ({ file_name, plugin_unique_identifier }: { file_name?: string, plugin_unique_identifier?: string }) => {
|
||||
const normalizedFileName = file_name?.replace(/(^\.\/_assets\/|^_assets\/)/, '')
|
||||
return useQuery({
|
||||
queryKey: ['pluginReadmeAsset', plugin_unique_identifier, file_name],
|
||||
queryKey: ['pluginReadmeAsset', plugin_unique_identifier, normalizedFileName],
|
||||
queryFn: () => fetchPluginAsset({ plugin_unique_identifier: plugin_unique_identifier || '', file_name: normalizedFileName || '' }),
|
||||
enabled: !!plugin_unique_identifier && !!file_name && /(^\.\/_assets|^_assets)/.test(file_name),
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user