Mirror of https://github.com/langgenius/dify.git (synced 2026-01-23 13:35:35 +08:00)

Compare commits: mysql-adap...test/build (15 commits)
| SHA1 |
|---|
| bc691464a2 |
| d444fa1c70 |
| b3a4721815 |
| 4637435e42 |
| 7a2e951474 |
| 1e127df4ab |
| ca7794305b |
| fd255e81e1 |
| 09d31d1263 |
| 47dc26f011 |
| 123bb3ec08 |
| 90f77282e3 |
| 5208867ccc |
| edc7ccc795 |
| c9798f6425 |
.github/workflows/api-tests.yml (2 changes, vendored)

@@ -62,7 +62,7 @@ jobs:
           compose-file: |
             docker/docker-compose.middleware.yaml
           services: |
-            db_postgres
+            db
             redis
             sandbox
             ssrf_proxy
.github/workflows/db-migration-test.yml (55 changes, vendored)

@@ -8,7 +8,7 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  db-migration-test-postgres:
+  db-migration-test:
     runs-on: ubuntu-latest

     steps:
@@ -45,7 +45,7 @@ jobs:
           compose-file: |
             docker/docker-compose.middleware.yaml
           services: |
-            db_postgres
+            db
             redis

       - name: Prepare configs
@@ -57,54 +57,3 @@ jobs:
         env:
           DEBUG: true
         run: uv run --directory api flask upgrade-db
-
-  db-migration-test-mysql:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          persist-credentials: false
-
-      - name: Setup UV and Python
-        uses: astral-sh/setup-uv@v6
-        with:
-          enable-cache: true
-          python-version: "3.12"
-          cache-dependency-glob: api/uv.lock
-
-      - name: Install dependencies
-        run: uv sync --project api
-
-      - name: Ensure Offline migration are supported
-        run: |
-          # upgrade
-          uv run --directory api flask db upgrade 'base:head' --sql
-          # downgrade
-          uv run --directory api flask db downgrade 'head:base' --sql
-
-      - name: Prepare middleware env
-        run: |
-          cd docker
-          cp middleware.env.example middleware.env
-
-      - name: Set up Middlewares
-        uses: hoverkraft-tech/compose-action@v2.0.2
-        with:
-          compose-file: |
-            docker/docker-compose.middleware.yaml
-          services: |
-            db_mysql
-            redis
-
-      - name: Prepare configs for MySQL
-        run: |
-          cd api
-          cp .env.example .env
-          sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' .env
-
-      - name: Run DB Migration
-        env:
-          DEBUG: true
-        run: uv run --directory api flask upgrade-db
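The removed job's "Ensure Offline migration are supported" step exercised Alembic's offline mode, which renders each migration to SQL text instead of executing it against a live database. A minimal sketch of the same check driven from Python rather than the Flask CLI (hypothetical `alembic.ini` path; assumes a standard Alembic setup rather than Dify's Flask-Migrate wiring):

```python
from alembic import command
from alembic.config import Config

# Hypothetical config location; Dify wires Alembic through Flask-Migrate instead.
cfg = Config("api/alembic.ini")

# Offline ("--sql") mode: render the migration scripts between two revisions
# to SQL without touching a database -- exactly what the CI step verified.
command.upgrade(cfg, "base:head", sql=True)
command.downgrade(cfg, "head:base", sql=True)
```

The `start:end` revision-range syntax is required in offline mode so Alembic knows which span of migrations to emit.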
.gitignore (1 change, vendored)

@@ -183,7 +183,6 @@ docker/volumes/couchbase/*
 docker/volumes/oceanbase/*
 docker/volumes/plugin_daemon/*
 docker/volumes/matrixone/*
-docker/volumes/mysql/data/*
 !docker/volumes/oceanbase/init.d

 docker/nginx/conf.d/default.conf
@@ -69,31 +69,12 @@ REDIS_CLUSTERS_PASSWORD=
 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
 CELERY_BACKEND=redis

-# Database configuration
-DB_TYPE=postgresql
-
-# PostgreSQL configuration
-POSTGRES_USER=postgres
-POSTGRES_PASSWORD=difyai123456
-POSTGRES_HOST=localhost
-POSTGRES_PORT=5432
-POSTGRES_DATABASE=dify
-
-# MySQL configuration
-MYSQL_USER=mysql
-MYSQL_PASSWORD=difyai123456
-MYSQL_HOST=localhost
-MYSQL_PORT=3306
-MYSQL_DATABASE=dify
-
-# OceanBase configuration
-OCEANBASE_USER=root@test
-OCEANBASE_PASSWORD=difyai123456
-OCEANBASE_HOST=localhost
-OCEANBASE_PORT=2881
-OCEANBASE_DATABASE=test
+# PostgreSQL database configuration
+DB_USERNAME=postgres
+DB_PASSWORD=difyai123456
+DB_HOST=localhost
+DB_PORT=5432
+DB_DATABASE=dify
 SQLALCHEMY_POOL_PRE_PING=true
 SQLALCHEMY_POOL_TIMEOUT=30

@@ -191,15 +172,6 @@ WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
 WEAVIATE_GRPC_ENABLED=false
 WEAVIATE_BATCH_SIZE=100

-# OceanBase Vector configuration
-OCEANBASE_HOST=127.0.0.1
-OCEANBASE_PORT=2881
-OCEANBASE_USER=root@test
-OCEANBASE_PASSWORD=difyai123456
-OCEANBASE_DATABASE=test
-OCEANBASE_MEMORY_LIMIT=6G
-OCEANBASE_ENABLE_HYBRID_SEARCH=false
-
 # Qdrant configuration, use `http://localhost:6333` for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode
 QDRANT_URL=http://localhost:6333
 QDRANT_API_KEY=difyai123456

@@ -365,6 +337,15 @@ LINDORM_PASSWORD=admin
 LINDORM_USING_UGC=True
 LINDORM_QUERY_TIMEOUT=1

+# OceanBase Vector configuration
+OCEANBASE_VECTOR_HOST=127.0.0.1
+OCEANBASE_VECTOR_PORT=2881
+OCEANBASE_VECTOR_USER=root@test
+OCEANBASE_VECTOR_PASSWORD=difyai123456
+OCEANBASE_VECTOR_DATABASE=test
+OCEANBASE_MEMORY_LIMIT=6G
+OCEANBASE_ENABLE_HYBRID_SEARCH=false
+
 # AlibabaCloud MySQL Vector configuration
 ALIBABACLOUD_MYSQL_HOST=127.0.0.1
 ALIBABACLOUD_MYSQL_PORT=3306
@@ -15,8 +15,8 @@
 ```bash
 cd ../docker
 cp middleware.env.example middleware.env
-# change the profile to mysql if you are not using postgres,change the profile to other vector database if you are not using weaviate
-docker compose -f docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d
+# change the profile to other vector database if you are not using weaviate
+docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
 cd ../api
 ```
@@ -1422,7 +1422,10 @@ def setup_datasource_oauth_client(provider, client_params):


 @click.command("transform-datasource-credentials", help="Transform datasource credentials.")
-def transform_datasource_credentials():
+@click.option(
+    "--environment", prompt=True, help="the environment to transform datasource credentials", default="online"
+)
+def transform_datasource_credentials(environment: str):
     """
     Transform datasource credentials
     """
@@ -1433,9 +1436,14 @@ def transform_datasource_credentials():
     notion_plugin_id = "langgenius/notion_datasource"
     firecrawl_plugin_id = "langgenius/firecrawl_datasource"
     jina_plugin_id = "langgenius/jina_datasource"
-    notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)  # pyright: ignore[reportPrivateUsage]
-    firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)  # pyright: ignore[reportPrivateUsage]
-    jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)  # pyright: ignore[reportPrivateUsage]
+    if environment == "online":
+        notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)  # pyright: ignore[reportPrivateUsage]
+        firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)  # pyright: ignore[reportPrivateUsage]
+        jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)  # pyright: ignore[reportPrivateUsage]
+    else:
+        notion_plugin_unique_identifier = None
+        firecrawl_plugin_unique_identifier = None
+        jina_plugin_unique_identifier = None
     oauth_credential_type = CredentialType.OAUTH2
     api_key_credential_type = CredentialType.API_KEY
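The new `--environment` option uses click's prompt-with-default pattern: when the flag is omitted, the user is prompted interactively, and pressing Enter accepts the declared default. A self-contained sketch of the same pattern (hypothetical command, not Dify's CLI):

```python
import click


@click.command("transform-example")
@click.option(
    "--environment",
    prompt=True,          # ask interactively when the flag is not given
    default="online",     # pressing Enter at the prompt accepts this value
    help="the environment to transform credentials",
)
def transform_example(environment: str):
    # Branch exactly like the amended command: only "online" does real lookups.
    if environment == "online":
        click.echo("fetching plugin identifiers...")
    else:
        click.echo("skipping identifier fetch")


if __name__ == "__main__":
    transform_example()
```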
@@ -105,141 +105,31 @@ class KeywordStoreConfig(BaseSettings):


 class DatabaseConfig(BaseSettings):
-    # Database type selector
-    DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field(
-        description="Database type to use. OceanBase is MySQL-compatible.",
-        default="postgresql",
-    )
-
-    # PostgreSQL configuration
-    POSTGRES_HOST: str = Field(
-        description="PostgreSQL hostname or IP address.",
+    DB_HOST: str = Field(
+        description="Hostname or IP address of the database server.",
         default="localhost",
     )

-    POSTGRES_PORT: PositiveInt = Field(
-        description="PostgreSQL port number.",
+    DB_PORT: PositiveInt = Field(
+        description="Port number for database connection.",
         default=5432,
     )

-    POSTGRES_USER: str = Field(
-        description="PostgreSQL username.",
+    DB_USERNAME: str = Field(
+        description="Username for database authentication.",
         default="postgres",
     )

-    POSTGRES_PASSWORD: str = Field(
-        description="PostgreSQL password.",
-        default="difyai123456",
+    DB_PASSWORD: str = Field(
+        description="Password for database authentication.",
+        default="",
     )

-    POSTGRES_DATABASE: str = Field(
-        description="PostgreSQL database name.",
+    DB_DATABASE: str = Field(
+        description="Name of the database to connect to.",
         default="dify",
     )

-    # MySQL configuration
-    MYSQL_HOST: str = Field(
-        description="MySQL hostname or IP address.",
-        default="localhost",
-    )
-
-    MYSQL_PORT: PositiveInt = Field(
-        description="MySQL port number.",
-        default=3306,
-    )
-
-    MYSQL_USER: str = Field(
-        description="MySQL username.",
-        default="root",
-    )
-
-    MYSQL_PASSWORD: str = Field(
-        description="MySQL password.",
-        default="difyai123456",
-    )
-
-    MYSQL_DATABASE: str = Field(
-        description="MySQL database name.",
-        default="dify",
-    )
-
-    # OceanBase configuration(MySQL-compatible)
-    OCEANBASE_HOST: str = Field(
-        description="OceanBase hostname or IP address.",
-        default="localhost",
-    )
-
-    OCEANBASE_PORT: PositiveInt = Field(
-        description="OceanBase port number.",
-        default=2881,
-    )
-
-    OCEANBASE_USER: str = Field(
-        description="OceanBase username.",
-        default="root@test",
-    )
-
-    OCEANBASE_PASSWORD: str = Field(
-        description="OceanBase password.",
-        default="difyai123456",
-    )
-
-    OCEANBASE_DATABASE: str = Field(
-        description="OceanBase database name.",
-        default="test",
-    )
-
-    # Dynamic properties based on DB_TYPE
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def DB_HOST(self) -> str:
-        if self.DB_TYPE == "postgresql":
-            return self.POSTGRES_HOST
-        elif self.DB_TYPE == "mysql":
-            return self.MYSQL_HOST
-        elif self.DB_TYPE == "oceanbase":
-            return self.OCEANBASE_HOST
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def DB_PORT(self) -> int:
-        if self.DB_TYPE == "postgresql":
-            return self.POSTGRES_PORT
-        elif self.DB_TYPE == "mysql":
-            return self.MYSQL_PORT
-        elif self.DB_TYPE == "oceanbase":
-            return self.OCEANBASE_PORT
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def DB_USERNAME(self) -> str:
-        if self.DB_TYPE == "postgresql":
-            return self.POSTGRES_USER
-        elif self.DB_TYPE == "mysql":
-            return self.MYSQL_USER
-        elif self.DB_TYPE == "oceanbase":
-            return self.OCEANBASE_USER
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def DB_PASSWORD(self) -> str:
-        if self.DB_TYPE == "postgresql":
-            return self.POSTGRES_PASSWORD
-        elif self.DB_TYPE == "mysql":
-            return self.MYSQL_PASSWORD
-        elif self.DB_TYPE == "oceanbase":
-            return self.OCEANBASE_PASSWORD
-
-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def DB_DATABASE(self) -> str:
-        if self.DB_TYPE == "postgresql":
-            return self.POSTGRES_DATABASE
-        elif self.DB_TYPE == "mysql":
-            return self.MYSQL_DATABASE
-        elif self.DB_TYPE == "oceanbase":
-            return self.OCEANBASE_DATABASE

     DB_CHARSET: str = Field(
         description="Character set for database connection.",
         default="",
@@ -250,10 +140,10 @@ class DatabaseConfig(BaseSettings):
         default="",
     )

-    @computed_field  # type: ignore[prop-decorator]
-    @property
-    def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str:
-        return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql"
+    SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
+        description="Database URI scheme for SQLAlchemy connection.",
+        default="postgresql",
+    )

     @computed_field  # type: ignore[prop-decorator]
     @property
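The removed block used pydantic's `computed_field` to dispatch one logical setting (`DB_HOST`, and so on) across per-backend fields keyed by a `Literal` selector. A minimal self-contained sketch of that dispatch pattern, with hypothetical field names:

```python
from typing import Literal

from pydantic import Field, computed_field
from pydantic_settings import BaseSettings


class DBSettings(BaseSettings):
    DB_TYPE: Literal["postgresql", "mysql"] = Field(default="postgresql")
    POSTGRES_HOST: str = Field(default="localhost")
    MYSQL_HOST: str = Field(default="localhost")

    @computed_field  # exposed like a regular field when the model is dumped
    @property
    def DB_HOST(self) -> str:
        # Select the backend-specific value based on the Literal selector.
        return self.POSTGRES_HOST if self.DB_TYPE == "postgresql" else self.MYSQL_HOST


print(DBSettings(DB_TYPE="mysql", MYSQL_HOST="db.internal").DB_HOST)  # db.internal
```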
@@ -314,15 +204,15 @@ class DatabaseConfig(BaseSettings):
         # Parse DB_EXTRAS for 'options'
         db_extras_dict = dict(parse_qsl(self.DB_EXTRAS))
         options = db_extras_dict.get("options", "")
-        connect_args = {}
-        # Use the dynamic SQLALCHEMY_DATABASE_URI_SCHEME property
-        if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
-            timezone_opt = "-c timezone=UTC"
-            if options:
-                merged_options = f"{options} {timezone_opt}"
-            else:
-                merged_options = timezone_opt
-            connect_args = {"options": merged_options}
+        # Always include timezone
+        timezone_opt = "-c timezone=UTC"
+        if options:
+            # Merge user options and timezone
+            merged_options = f"{options} {timezone_opt}"
+        else:
+            merged_options = timezone_opt
+
+        connect_args = {"options": merged_options}

         return {
             "pool_size": self.SQLALCHEMY_POOL_SIZE,
@@ -7,29 +7,29 @@ class OceanBaseVectorConfig(BaseSettings):
     Configuration settings for OceanBase Vector database
     """

-    OCEANBASE_HOST: str = Field(
-        description="OceanBase hostname or IP address.",
-        default="localhost",
+    OCEANBASE_VECTOR_HOST: str | None = Field(
+        description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
+        default=None,
     )

-    OCEANBASE_PORT: PositiveInt = Field(
-        description="OceanBase port number.",
+    OCEANBASE_VECTOR_PORT: PositiveInt | None = Field(
+        description="Port number on which the OceanBase Vector server is listening (default is 2881)",
         default=2881,
     )

-    OCEANBASE_USER: str = Field(
-        description="OceanBase username.",
-        default="root@test",
+    OCEANBASE_VECTOR_USER: str | None = Field(
+        description="Username for authenticating with the OceanBase Vector database",
+        default=None,
     )

-    OCEANBASE_PASSWORD: str = Field(
-        description="OceanBase password.",
-        default="difyai123456",
+    OCEANBASE_VECTOR_PASSWORD: str | None = Field(
+        description="Password for authenticating with the OceanBase Vector database",
+        default=None,
     )

-    OCEANBASE_DATABASE: str = Field(
-        description="OceanBase database name.",
-        default="test",
+    OCEANBASE_VECTOR_DATABASE: str | None = Field(
+        description="Name of the OceanBase Vector database to connect to",
+        default=None,
     )

     OCEANBASE_ENABLE_HYBRID_SEARCH: bool = Field(
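Since `OceanBaseVectorConfig` is a pydantic `BaseSettings` subclass, the renamed fields bind directly to environment variables of the same name, which is why the `.env.example` keys above had to be renamed in lockstep. A small sketch of how such settings resolve (hypothetical demo class and values):

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class OceanBaseVectorDemo(BaseSettings):
    # Mirrors the renamed fields: None until the matching env var is set.
    OCEANBASE_VECTOR_HOST: str | None = Field(default=None)
    OCEANBASE_VECTOR_PORT: int | None = Field(default=2881)


os.environ["OCEANBASE_VECTOR_HOST"] = "127.0.0.1"
cfg = OceanBaseVectorDemo()
print(cfg.OCEANBASE_VECTOR_HOST, cfg.OCEANBASE_VECTOR_PORT)  # 127.0.0.1 2881
```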
@@ -10,9 +10,9 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.app.entities.app_invoke_entities import InvokeFrom
 from extensions.ext_database import db
 from libs.datetime_utils import parse_time_range
-from libs.helper import DatetimeString, convert_datetime_to_date
+from libs.helper import DatetimeString
 from libs.login import current_account_with_tenant, login_required
-from models import AppMode
+from models import AppMode, Message


 @console_ns.route("/apps/<uuid:app_id>/statistics/daily-messages")
@@ -44,9 +44,8 @@ class DailyMessageStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     COUNT(*) AS message_count
 FROM
     messages
@@ -109,17 +108,6 @@ class DailyConversationStatistic(Resource):
             .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
-    COUNT(DISTINCT conversation_id) AS conversation_count
-FROM
-    messages
-WHERE
-    app_id = :app_id
-    AND invoke_from != :invoke_from"""
-        arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
         assert account.timezone is not None

         try:
@@ -127,21 +115,30 @@ WHERE
         except ValueError as e:
             abort(400, description=str(e))

+        stmt = (
+            sa.select(
+                sa.func.date(
+                    sa.func.date_trunc("day", sa.text("created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz"))
+                ).label("date"),
+                sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"),
+            )
+            .select_from(Message)
+            .where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER)
+        )
+
         if start_datetime_utc:
-            sql_query += " AND created_at >= :start"
-            arg_dict["start"] = start_datetime_utc
+            stmt = stmt.where(Message.created_at >= start_datetime_utc)

         if end_datetime_utc:
-            sql_query += " AND created_at < :end"
-            arg_dict["end"] = end_datetime_utc
+            stmt = stmt.where(Message.created_at < end_datetime_utc)

-        sql_query += " GROUP BY date ORDER BY date"
+        stmt = stmt.group_by("date").order_by("date")

         response_data = []
         with db.engine.begin() as conn:
-            rs = conn.execute(sa.text(sql_query), arg_dict)
-            for i in rs:
-                response_data.append({"date": str(i.date), "conversation_count": i.conversation_count})
+            rs = conn.execute(stmt, {"tz": account.timezone})
+            for row in rs:
+                response_data.append({"date": str(row.date), "conversation_count": row.conversation_count})

         return jsonify({"data": response_data})
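The rewrite above replaces string-assembled SQL with a composable SQLAlchemy Core statement: conditions accumulate by reassigning `stmt`, and the only remaining textual fragment is the timezone cast. A runnable sketch of the same construction against a stand-in table (hypothetical schema, compiled rather than executed since `DATE_TRUNC` is PostgreSQL-specific):

```python
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

metadata = sa.MetaData()
# Stand-in for the Message model used in the real statement.
messages = sa.Table(
    "messages",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("app_id", sa.String),
    sa.Column("conversation_id", sa.String),
    sa.Column("created_at", sa.DateTime),
)

stmt = (
    sa.select(
        sa.func.date(
            sa.func.date_trunc("day", sa.text("created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz"))
        ).label("date"),
        sa.func.count(sa.distinct(messages.c.conversation_id)).label("conversation_count"),
    )
    .select_from(messages)
    .where(messages.c.app_id == "app-1")
)

# Conditions and grouping compose by reassignment, mirroring the controller code.
stmt = stmt.group_by("date").order_by("date")

print(stmt.compile(dialect=postgresql.dialect()))
```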
@@ -175,9 +172,8 @@ class DailyTerminalsStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     COUNT(DISTINCT messages.from_end_user_id) AS terminal_count
 FROM
     messages
@@ -241,9 +237,8 @@ class DailyTokenCostStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     (SUM(messages.message_tokens) + SUM(messages.answer_tokens)) AS token_count,
     SUM(total_price) AS total_price
 FROM
@@ -310,9 +305,8 @@ class AverageSessionInteractionStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("c.created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     AVG(subquery.message_count) AS interactions
 FROM
     (
@@ -395,9 +389,8 @@ class UserSatisfactionRateStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("m.created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     COUNT(m.id) AS message_count,
     COUNT(mf.id) AS feedback_count
 FROM
@@ -470,9 +463,8 @@ class AverageResponseTimeStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     AVG(provider_response_latency) AS latency
 FROM
     messages
@@ -536,9 +528,8 @@ class TokensPerSecondStatistic(Resource):
         )
         args = parser.parse_args()

-        converted_created_at = convert_datetime_to_date("created_at")
-        sql_query = f"""SELECT
-    {converted_created_at} AS date,
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
     CASE
         WHEN SUM(provider_response_latency) = 0 THEN 0
         ELSE (SUM(answer_tokens) / SUM(provider_response_latency))
@@ -104,6 +104,11 @@ class AppGenerateEntity(BaseModel):

     inputs: Mapping[str, Any]
     files: Sequence[File]
+
+    # Unique identifier of the user initiating the execution.
+    # This corresponds to `Account.id` for platform users or `EndUser.id` for end users.
+    #
+    # Note: The `user_id` field does not indicate whether the user is a platform user or an end user.
     user_id: str

     # extras
@@ -1,15 +1,64 @@
+from typing import Annotated, Literal, Self, TypeAlias
+
+from pydantic import BaseModel, Field
 from sqlalchemy import Engine
 from sqlalchemy.orm import sessionmaker

+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
 from core.workflow.graph_engine.layers.base import GraphEngineLayer
 from core.workflow.graph_events.base import GraphEngineEvent
 from core.workflow.graph_events.graph import GraphRunPausedEvent
+from models.model import AppMode
 from repositories.api_workflow_run_repository import APIWorkflowRunRepository
 from repositories.factory import DifyAPIRepositoryFactory


+# Wrapper types for `WorkflowAppGenerateEntity` and
+# `AdvancedChatAppGenerateEntity`. These wrappers enable type discrimination
+# and correct reconstruction of the entity field during (de)serialization.
+class _WorkflowGenerateEntityWrapper(BaseModel):
+    type: Literal[AppMode.WORKFLOW] = AppMode.WORKFLOW
+    entity: WorkflowAppGenerateEntity
+
+
+class _AdvancedChatAppGenerateEntityWrapper(BaseModel):
+    type: Literal[AppMode.ADVANCED_CHAT] = AppMode.ADVANCED_CHAT
+    entity: AdvancedChatAppGenerateEntity
+
+
+_GenerateEntityUnion: TypeAlias = Annotated[
+    _WorkflowGenerateEntityWrapper | _AdvancedChatAppGenerateEntityWrapper,
+    Field(discriminator="type"),
+]
+
+
+class WorkflowResumptionContext(BaseModel):
+    """WorkflowResumptionContext captures all state necessary for resumption."""
+
+    version: Literal["1"] = "1"
+
+    # Only workflow / chatflow could be paused.
+    generate_entity: _GenerateEntityUnion
+    serialized_graph_runtime_state: str
+
+    def dumps(self) -> str:
+        return self.model_dump_json()
+
+    @classmethod
+    def loads(cls, value: str) -> Self:
+        return cls.model_validate_json(value)
+
+    def get_generate_entity(self) -> WorkflowAppGenerateEntity | AdvancedChatAppGenerateEntity:
+        return self.generate_entity.entity
+
+
 class PauseStatePersistenceLayer(GraphEngineLayer):
-    def __init__(self, session_factory: Engine | sessionmaker, state_owner_user_id: str):
+    def __init__(
+        self,
+        session_factory: Engine | sessionmaker,
+        generate_entity: WorkflowAppGenerateEntity | AdvancedChatAppGenerateEntity,
+        state_owner_user_id: str,
+    ):
         """Create a PauseStatePersistenceLayer.

         The `state_owner_user_id` is used when creating state file for pause.
@@ -19,6 +68,7 @@ class PauseStatePersistenceLayer(GraphEngineLayer):
             session_factory = sessionmaker(session_factory)
         self._session_maker = session_factory
         self._state_owner_user_id = state_owner_user_id
+        self._generate_entity = generate_entity

     def _get_repo(self) -> APIWorkflowRunRepository:
         return DifyAPIRepositoryFactory.create_api_workflow_run_repository(self._session_maker)
@@ -49,13 +99,27 @@ class PauseStatePersistenceLayer(GraphEngineLayer):
             return

         assert self.graph_runtime_state is not None

+        entity_wrapper: _GenerateEntityUnion
+        if isinstance(self._generate_entity, WorkflowAppGenerateEntity):
+            entity_wrapper = _WorkflowGenerateEntityWrapper(entity=self._generate_entity)
+        elif isinstance(self._generate_entity, AdvancedChatAppGenerateEntity):
+            entity_wrapper = _AdvancedChatAppGenerateEntityWrapper(entity=self._generate_entity)
+        else:
+            raise AssertionError(f"unknown entity type: type={type(self._generate_entity)}")
+
+        state = WorkflowResumptionContext(
+            serialized_graph_runtime_state=self.graph_runtime_state.dumps(),
+            generate_entity=entity_wrapper,
+        )
+
         workflow_run_id: str | None = self.graph_runtime_state.system_variable.workflow_execution_id
         assert workflow_run_id is not None
         repo = self._get_repo()
         repo.create_workflow_pause(
             workflow_run_id=workflow_run_id,
             state_owner_user_id=self._state_owner_user_id,
-            state=self.graph_runtime_state.dumps(),
+            state=state.dumps(),
         )

     def on_graph_end(self, error: Exception | None) -> None:
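The wrapper classes above implement pydantic's tagged-union pattern: a `Literal` discriminator on each wrapper lets `model_validate_json` pick the correct concrete entity class on load. A self-contained sketch with hypothetical payload types standing in for the generate entities:

```python
from typing import Annotated, Literal, TypeAlias

from pydantic import BaseModel, Field


class _WorkflowPayload(BaseModel):
    type: Literal["workflow"] = "workflow"
    workflow_id: str


class _ChatPayload(BaseModel):
    type: Literal["advanced-chat"] = "advanced-chat"
    conversation_id: str


PayloadUnion: TypeAlias = Annotated[_WorkflowPayload | _ChatPayload, Field(discriminator="type")]


class ResumptionContext(BaseModel):
    version: Literal["1"] = "1"
    payload: PayloadUnion


ctx = ResumptionContext(payload=_ChatPayload(conversation_id="c-1"))
raw = ctx.model_dump_json()
# The "type" tag tells pydantic which wrapper class to reconstruct.
restored = ResumptionContext.model_validate_json(raw)
assert isinstance(restored.payload, _ChatPayload)
```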
@@ -1,21 +1,22 @@
-import hashlib
 import json
 import logging
 import os
+import traceback
 from datetime import datetime, timedelta
 from typing import Any, Union, cast
 from urllib.parse import urlparse

-from openinference.semconv.trace import OpenInferenceSpanKindValues, SpanAttributes
-from opentelemetry import trace
+from openinference.semconv.trace import OpenInferenceMimeTypeValues, OpenInferenceSpanKindValues, SpanAttributes
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GrpcOTLPSpanExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HttpOTLPSpanExporter
 from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
-from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
-from opentelemetry.trace import SpanContext, TraceFlags, TraceState
-from sqlalchemy import select
+from opentelemetry.semconv.trace import SpanAttributes as OTELSpanAttributes
+from opentelemetry.trace import Span, Status, StatusCode, set_span_in_context, use_span
+from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+from opentelemetry.util.types import AttributeValue
 from sqlalchemy.orm import sessionmaker

 from core.ops.base_trace_instance import BaseTraceInstance
 from core.ops.entities.config_entity import ArizeConfig, PhoenixConfig
@@ -30,9 +31,10 @@ from core.ops.entities.trace_entity import (
     TraceTaskName,
     WorkflowTraceInfo,
 )
+from core.repositories import DifyCoreRepositoryFactory
 from extensions.ext_database import db
 from models.model import EndUser, MessageFile
-from models.workflow import WorkflowNodeExecutionModel
+from models.workflow import WorkflowNodeExecutionTriggeredFrom

 logger = logging.getLogger(__name__)

@@ -99,22 +101,45 @@ def datetime_to_nanos(dt: datetime | None) -> int:
     return int(dt.timestamp() * 1_000_000_000)


-def string_to_trace_id128(string: str | None) -> int:
-    """
-    Convert any input string into a stable 128-bit integer trace ID.
-
-    This uses SHA-256 hashing and takes the first 16 bytes (128 bits) of the digest.
-    It's suitable for generating consistent, unique identifiers from strings.
-    """
-    if string is None:
-        string = ""
-    hash_object = hashlib.sha256(string.encode())
-
-    # Take the first 16 bytes (128 bits) of the hash digest
-    digest = hash_object.digest()[:16]
-
-    # Convert to a 128-bit integer
-    return int.from_bytes(digest, byteorder="big")
+def error_to_string(error: Exception | str | None) -> str:
+    """Convert an error to a string with traceback information."""
+    error_message = "Empty Stack Trace"
+    if error:
+        if isinstance(error, Exception):
+            string_stacktrace = "".join(traceback.format_exception(error))
+            error_message = f"{error.__class__.__name__}: {error}\n\n{string_stacktrace}"
+        else:
+            error_message = str(error)
+    return error_message
+
+
+def set_span_status(current_span: Span, error: Exception | str | None = None):
+    """Set the status of the current span based on the presence of an error."""
+    if error:
+        error_string = error_to_string(error)
+        current_span.set_status(Status(StatusCode.ERROR, error_string))
+
+        if isinstance(error, Exception):
+            current_span.record_exception(error)
+        else:
+            exception_type = error.__class__.__name__
+            exception_message = str(error)
+            if not exception_message:
+                exception_message = repr(error)
+            attributes: dict[str, AttributeValue] = {
+                OTELSpanAttributes.EXCEPTION_TYPE: exception_type,
+                OTELSpanAttributes.EXCEPTION_MESSAGE: exception_message,
+                OTELSpanAttributes.EXCEPTION_ESCAPED: False,
+                OTELSpanAttributes.EXCEPTION_STACKTRACE: error_string,
+            }
+            current_span.add_event(name="exception", attributes=attributes)
+    else:
+        current_span.set_status(Status(StatusCode.OK))
+
+
+def safe_json_dumps(obj: Any) -> str:
+    """A convenience wrapper around `json.dumps` that ensures that any object can be safely encoded."""
+    return json.dumps(obj, default=str, ensure_ascii=False)


 class ArizePhoenixDataTrace(BaseTraceInstance):
@@ -131,9 +156,12 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         self.tracer, self.processor = setup_tracer(arize_phoenix_config)
         self.project = arize_phoenix_config.project
         self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001")
+        self.propagator = TraceContextTextMapPropagator()
+        self.dify_trace_ids: set[str] = set()

     def trace(self, trace_info: BaseTraceInfo):
-        logger.info("[Arize/Phoenix] Trace: %s", trace_info)
+        logger.info("[Arize/Phoenix] Trace Entity Info: %s", trace_info)
+        logger.info("[Arize/Phoenix] Trace Entity Type: %s", type(trace_info))
         try:
             if isinstance(trace_info, WorkflowTraceInfo):
                 self.workflow_trace(trace_info)
@@ -151,7 +179,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             self.generate_name_trace(trace_info)

         except Exception as e:
-            logger.error("[Arize/Phoenix] Error in the trace: %s", str(e), exc_info=True)
+            logger.error("[Arize/Phoenix] Trace Entity Error: %s", str(e), exc_info=True)
             raise

     def workflow_trace(self, trace_info: WorkflowTraceInfo):
@@ -166,15 +194,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         workflow_metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.trace_id or trace_info.workflow_run_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id or trace_info.workflow_run_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         workflow_span = self.tracer.start_span(
             name=TraceTaskName.WORKFLOW_TRACE.value,
@@ -186,31 +208,58 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

+        # Through workflow_run_id, get all_nodes_execution using repository
+        session_factory = sessionmaker(bind=db.engine)
+
+        # Find the app's creator account
+        app_id = trace_info.metadata.get("app_id")
+        if not app_id:
+            raise ValueError("No app_id found in trace_info metadata")
+
+        service_account = self.get_service_account_with_tenant(app_id)
+
+        workflow_node_execution_repository = DifyCoreRepositoryFactory.create_workflow_node_execution_repository(
+            session_factory=session_factory,
+            user=service_account,
+            app_id=app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
+        )
+
+        # Get all executions for this workflow run
+        workflow_node_executions = workflow_node_execution_repository.get_by_workflow_run(
+            workflow_run_id=trace_info.workflow_run_id
+        )
+
         try:
             # Process workflow nodes
-            for node_execution in self._get_workflow_nodes(trace_info.workflow_run_id):
+            for node_execution in workflow_node_executions:
+                tenant_id = trace_info.tenant_id  # Use from trace_info instead
+                app_id = trace_info.metadata.get("app_id")  # Use from trace_info instead
+                inputs_value = node_execution.inputs or {}
+                outputs_value = node_execution.outputs or {}
+
                 created_at = node_execution.created_at or datetime.now()
                 elapsed_time = node_execution.elapsed_time
                 finished_at = created_at + timedelta(seconds=elapsed_time)

-                process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+                process_data = node_execution.process_data or {}
+                execution_metadata = node_execution.metadata or {}
+                node_metadata = {str(k): v for k, v in execution_metadata.items()}

-                node_metadata = {
-                    "node_id": node_execution.id,
-                    "node_type": node_execution.node_type,
-                    "node_status": node_execution.status,
-                    "tenant_id": node_execution.tenant_id,
-                    "app_id": node_execution.app_id,
-                    "app_name": node_execution.title,
-                    "status": node_execution.status,
-                    "level": "ERROR" if node_execution.status != "succeeded" else "DEFAULT",
-                }
-
-                if node_execution.execution_metadata:
-                    node_metadata.update(json.loads(node_execution.execution_metadata))
+                node_metadata.update(
+                    {
+                        "node_id": node_execution.id,
+                        "node_type": node_execution.node_type,
+                        "node_status": node_execution.status,
+                        "tenant_id": tenant_id,
+                        "app_id": app_id,
+                        "app_name": node_execution.title,
+                        "status": node_execution.status,
+                        "level": "ERROR" if node_execution.status == "failed" else "DEFAULT",
+                    }
+                )

                 # Determine the correct span kind based on node type
                 span_kind = OpenInferenceSpanKindValues.CHAIN
@@ -223,8 +272,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                     if model:
                         node_metadata["ls_model_name"] = model

-                    outputs = json.loads(node_execution.outputs).get("usage", {}) if "outputs" in node_execution else {}
-                    usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
+                    usage_data = (
+                        process_data.get("usage", {}) if "usage" in process_data else outputs_value.get("usage", {})
+                    )
                     if usage_data:
                         node_metadata["total_tokens"] = usage_data.get("total_tokens", 0)
                         node_metadata["prompt_tokens"] = usage_data.get("prompt_tokens", 0)
@@ -236,17 +286,20 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 else:
                     span_kind = OpenInferenceSpanKindValues.CHAIN

+                workflow_span_context = set_span_in_context(workflow_span)
                 node_span = self.tracer.start_span(
                     name=node_execution.node_type,
                     attributes={
-                        SpanAttributes.INPUT_VALUE: node_execution.inputs or "{}",
-                        SpanAttributes.OUTPUT_VALUE: node_execution.outputs or "{}",
+                        SpanAttributes.INPUT_VALUE: safe_json_dumps(inputs_value),
+                        SpanAttributes.INPUT_MIME_TYPE: OpenInferenceMimeTypeValues.JSON.value,
+                        SpanAttributes.OUTPUT_VALUE: safe_json_dumps(outputs_value),
+                        SpanAttributes.OUTPUT_MIME_TYPE: OpenInferenceMimeTypeValues.JSON.value,
                         SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind.value,
-                        SpanAttributes.METADATA: json.dumps(node_metadata, ensure_ascii=False),
+                        SpanAttributes.METADATA: safe_json_dumps(node_metadata),
                         SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
                     },
                     start_time=datetime_to_nanos(created_at),
-                    context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+                    context=workflow_span_context,
                 )

                 try:
@@ -260,11 +313,8 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                         llm_attributes[SpanAttributes.LLM_PROVIDER] = provider
                     if model:
                         llm_attributes[SpanAttributes.LLM_MODEL_NAME] = model
-                    outputs = (
-                        json.loads(node_execution.outputs).get("usage", {}) if "outputs" in node_execution else {}
-                    )
                     usage_data = (
-                        process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
+                        process_data.get("usage", {}) if "usage" in process_data else outputs_value.get("usage", {})
                     )
                     if usage_data:
                         llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_TOTAL] = usage_data.get("total_tokens", 0)
@@ -275,8 +325,16 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                         llm_attributes.update(self._construct_llm_attributes(process_data.get("prompts", [])))
                     node_span.set_attributes(llm_attributes)
                 finally:
+                    if node_execution.status == "failed":
+                        set_span_status(node_span, node_execution.error)
+                    else:
+                        set_span_status(node_span)
                     node_span.end(end_time=datetime_to_nanos(finished_at))
         finally:
+            if trace_info.error:
+                set_span_status(workflow_span, trace_info.error)
+            else:
+                set_span_status(workflow_span)
             workflow_span.end(end_time=datetime_to_nanos(trace_info.end_time))

     def message_trace(self, trace_info: MessageTraceInfo):
@@ -322,34 +380,18 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
         }

-        trace_id = string_to_trace_id128(trace_info.trace_id or trace_info.message_id)
-        message_span_id = RandomIdGenerator().generate_span_id()
-        span_context = SpanContext(
-            trace_id=trace_id,
-            span_id=message_span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         message_span = self.tracer.start_span(
             name=TraceTaskName.MESSAGE_TRACE.value,
             attributes=attributes,
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
+            context=root_span_context,
         )

         try:
-            if trace_info.error:
-                message_span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.error,
-                    },
-                )
-
             # Convert outputs to string based on type
             if isinstance(trace_info.outputs, dict | list):
                 outputs_str = json.dumps(trace_info.outputs, ensure_ascii=False)
@@ -383,26 +425,26 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             if model_params := metadata_dict.get("model_parameters"):
                 llm_attributes[SpanAttributes.LLM_INVOCATION_PARAMETERS] = json.dumps(model_params)

+            message_span_context = set_span_in_context(message_span)
             llm_span = self.tracer.start_span(
                 name="llm",
                 attributes=llm_attributes,
                 start_time=datetime_to_nanos(trace_info.start_time),
-                context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
+                context=message_span_context,
             )

             try:
-                if trace_info.error:
-                    llm_span.add_event(
-                        "exception",
-                        attributes={
-                            "exception.message": trace_info.error,
-                            "exception.type": "Error",
-                            "exception.stacktrace": trace_info.error,
-                        },
-                    )
+                if trace_info.message_data.error:
+                    set_span_status(llm_span, trace_info.message_data.error)
+                else:
+                    set_span_status(llm_span)
             finally:
                 llm_span.end(end_time=datetime_to_nanos(trace_info.end_time))
         finally:
+            if trace_info.error:
+                set_span_status(message_span, trace_info.error)
+            else:
+                set_span_status(message_span)
             message_span.end(end_time=datetime_to_nanos(trace_info.end_time))

     def moderation_trace(self, trace_info: ModerationTraceInfo):
@@ -418,15 +460,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.MODERATION_TRACE.value,
@@ -445,19 +481,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.message_data.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.message_data.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.message_data.error,
-                    },
-                )
+                set_span_status(span, trace_info.message_data.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(trace_info.end_time))
@@ -480,15 +511,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value,
@@ -499,19 +524,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
             },
             start_time=datetime_to_nanos(start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.error,
-                    },
-                )
+                set_span_status(span, trace_info.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(end_time))
@@ -533,15 +553,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value,
@@ -554,19 +568,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 "end_time": end_time.isoformat() if end_time else "",
             },
             start_time=datetime_to_nanos(start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.message_data.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.message_data.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.message_data.error,
-                    },
-                )
+                set_span_status(span, trace_info.message_data.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(end_time))
@@ -580,20 +589,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             "tool_config": json.dumps(trace_info.tool_config, ensure_ascii=False),
         }

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        tool_span_id = RandomIdGenerator().generate_span_id()
-        logger.info("[Arize/Phoenix] Creating tool trace with trace_id: %s, span_id: %s", trace_id, tool_span_id)
-
-        # Create span context with the same trace_id as the parent
-        # todo: Create with the appropriate parent span context, so that the tool span is
-        # a child of the appropriate span (e.g. message span)
-        span_context = SpanContext(
-            trace_id=trace_id,
-            span_id=tool_span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         tool_params_str = (
             json.dumps(trace_info.tool_parameters, ensure_ascii=False)
@@ -612,19 +610,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.TOOL_PARAMETERS: tool_params_str,
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
+            context=root_span_context,
         )

         try:
             if trace_info.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.error,
-                    },
-                )
+                set_span_status(span, trace_info.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(trace_info.end_time))
@@ -641,15 +634,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id or trace_info.conversation_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.GENERATE_NAME_TRACE.value,
@@ -663,22 +650,34 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 "end_time": trace_info.end_time.isoformat() if trace_info.end_time else "",
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.message_data.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.message_data.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.message_data.error,
-                    },
-                )
+                set_span_status(span, trace_info.message_data.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(trace_info.end_time))

+    def ensure_root_span(self, dify_trace_id: str | None):
+        """Ensure a unique root span exists for the given Dify trace ID."""
+        if str(dify_trace_id) not in self.dify_trace_ids:
+            self.carrier: dict[str, str] = {}
+
+            root_span = self.tracer.start_span(name="Dify")
+            root_span.set_attribute(SpanAttributes.OPENINFERENCE_SPAN_KIND, OpenInferenceSpanKindValues.CHAIN.value)
+            root_span.set_attribute("dify_project_name", str(self.project))
+            root_span.set_attribute("dify_trace_id", str(dify_trace_id))
+
+            with use_span(root_span, end_on_exit=False):
+                self.propagator.inject(carrier=self.carrier)
+
+            set_span_status(root_span)
+            root_span.end()
+            self.dify_trace_ids.add(str(dify_trace_id))
+
     def api_check(self):
         try:
             with self.tracer.start_span("api_check") as span:
@@ -698,26 +697,6 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             logger.info("[Arize/Phoenix] Get run url failed: %s", str(e), exc_info=True)
             raise ValueError(f"[Arize/Phoenix] Get run url failed: {str(e)}")

-    def _get_workflow_nodes(self, workflow_run_id: str):
-        """Helper method to get workflow nodes"""
-        workflow_nodes = db.session.scalars(
-            select(
-                WorkflowNodeExecutionModel.id,
-                WorkflowNodeExecutionModel.tenant_id,
-                WorkflowNodeExecutionModel.app_id,
-                WorkflowNodeExecutionModel.title,
-                WorkflowNodeExecutionModel.node_type,
-                WorkflowNodeExecutionModel.status,
-                WorkflowNodeExecutionModel.inputs,
-                WorkflowNodeExecutionModel.outputs,
-                WorkflowNodeExecutionModel.created_at,
-                WorkflowNodeExecutionModel.elapsed_time,
-                WorkflowNodeExecutionModel.process_data,
-                WorkflowNodeExecutionModel.execution_metadata,
-            ).where(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id)
-        ).all()
-        return workflow_nodes
-
     def _construct_llm_attributes(self, prompts: dict | list | str | None) -> dict[str, str]:
         """Helper method to construct LLM attributes with passed prompts."""
         attributes = {}
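The tracer refactor above swaps hand-built `SpanContext`s for W3C context propagation: `ensure_root_span` injects the root span's context into a carrier dict once, and every later span extracts it to become that root's child. A minimal sketch of the inject/extract round trip (hypothetical span names, no exporter configured):

```python
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer(__name__)
propagator = TraceContextTextMapPropagator()

carrier: dict[str, str] = {}
root = tracer.start_span("Dify")
with trace.use_span(root, end_on_exit=False):
    # Serialize the active span's context into the carrier (W3C traceparent).
    propagator.inject(carrier=carrier)
root.end()

# Any span started with the extracted context joins the same trace as a child.
child_ctx = propagator.extract(carrier=carrier)
child = tracer.start_span("workflow", context=child_ctx)
child.end()
```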
@@ -37,13 +37,13 @@ class OceanBaseVectorConfig(BaseModel):
     @classmethod
     def validate_config(cls, values: dict):
         if not values["host"]:
-            raise ValueError("config OCEANBASE_HOST is required")
+            raise ValueError("config OCEANBASE_VECTOR_HOST is required")
         if not values["port"]:
-            raise ValueError("config OCEANBASE_PORT is required")
+            raise ValueError("config OCEANBASE_VECTOR_PORT is required")
         if not values["user"]:
-            raise ValueError("config OCEANBASE_USER is required")
+            raise ValueError("config OCEANBASE_VECTOR_USER is required")
         if not values["database"]:
-            raise ValueError("config OCEANBASE_DATABASE is required")
+            raise ValueError("config OCEANBASE_VECTOR_DATABASE is required")
         return values
@@ -316,11 +316,11 @@ class OceanBaseVectorFactory(AbstractVectorFactory):
         return OceanBaseVector(
             collection_name,
             OceanBaseVectorConfig(
-                host=dify_config.OCEANBASE_HOST or "",
-                port=dify_config.OCEANBASE_PORT or 0,
-                user=dify_config.OCEANBASE_USER or "",
-                password=(dify_config.OCEANBASE_PASSWORD or ""),
-                database=dify_config.OCEANBASE_DATABASE or "",
+                host=dify_config.OCEANBASE_VECTOR_HOST or "",
+                port=dify_config.OCEANBASE_VECTOR_PORT or 0,
+                user=dify_config.OCEANBASE_VECTOR_USER or "",
+                password=(dify_config.OCEANBASE_VECTOR_PASSWORD or ""),
+                database=dify_config.OCEANBASE_VECTOR_DATABASE or "",
                 enable_hybrid_search=dify_config.OCEANBASE_ENABLE_HYBRID_SEARCH or False,
             ),
         )
@@ -14,7 +14,6 @@ from sqlalchemy.orm import Session
 from yarl import URL

 import contexts
-from configs import dify_config
 from core.helper.provider_cache import ToolProviderCredentialsCache
 from core.plugin.impl.tool import PluginToolManager
 from core.tools.__base.tool_provider import ToolProviderController
@@ -34,6 +33,7 @@ from services.tools.mcp_tools_manage_service import MCPToolManageService
 if TYPE_CHECKING:
     from core.workflow.nodes.tool.entities import ToolEntity

+from configs import dify_config
 from core.agent.entities import AgentToolEntity
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.helper.module_import_helper import load_single_subclass_from_source
@@ -621,28 +621,12 @@ class ToolManager:
         """
         # according to multi credentials, select the one with is_default=True first, then created_at oldest
        # for compatibility with old version
-        if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql":
-            # PostgreSQL: Use DISTINCT ON
-            sql = """
-                SELECT DISTINCT ON (tenant_id, provider) id
-                FROM tool_builtin_providers
-                WHERE tenant_id = :tenant_id
-                ORDER BY tenant_id, provider, is_default DESC, created_at DESC
-                """
-        else:
-            # MySQL: Use window function to achieve same result
-            sql = """
-                SELECT id FROM (
-                    SELECT id,
-                           ROW_NUMBER() OVER (
-                               PARTITION BY tenant_id, provider
-                               ORDER BY is_default DESC, created_at DESC
-                           ) as rn
-                    FROM tool_builtin_providers
-                    WHERE tenant_id = :tenant_id
-                ) ranked WHERE rn = 1
-                """
+        sql = """
+            SELECT DISTINCT ON (tenant_id, provider) id
+            FROM tool_builtin_providers
+            WHERE tenant_id = :tenant_id
+            ORDER BY tenant_id, provider, is_default DESC, created_at DESC
+            """

         with Session(db.engine, autoflush=False) as session:
             ids = [row.id for row in session.execute(sa.text(sql), {"tenant_id": tenant_id}).all()]
             return session.query(BuiltinToolProvider).where(BuiltinToolProvider.id.in_(ids)).all()
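The retained query depends on PostgreSQL's `DISTINCT ON`; the deleted branch showed the portable equivalent via a `ROW_NUMBER()` window function. For reference, a hedged SQLAlchemy Core sketch of that window-function form (stand-in table definition, not Dify's models):

```python
import sqlalchemy as sa

metadata = sa.MetaData()
providers = sa.Table(
    "tool_builtin_providers",
    metadata,
    sa.Column("id", sa.String, primary_key=True),
    sa.Column("tenant_id", sa.String),
    sa.Column("provider", sa.String),
    sa.Column("is_default", sa.Boolean),
    sa.Column("created_at", sa.DateTime),
)

# Rank rows within each (tenant_id, provider) group; rank 1 matches the row
# that DISTINCT ON would pick under the same ORDER BY.
rn = (
    sa.func.row_number()
    .over(
        partition_by=[providers.c.tenant_id, providers.c.provider],
        order_by=[providers.c.is_default.desc(), providers.c.created_at.desc()],
    )
    .label("rn")
)
ranked = (
    sa.select(providers.c.id, rn)
    .where(providers.c.tenant_id == sa.bindparam("tenant_id"))
    .subquery()
)
stmt = sa.select(ranked.c.id).where(ranked.c.rn == 1)

print(stmt)  # compiles on PostgreSQL and MySQL 8+ alike
```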
@@ -16,7 +16,6 @@ from uuid import uuid4
 from flask import Flask
 from typing_extensions import override

-from core.workflow.enums import NodeType
 from core.workflow.graph import Graph
 from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent
 from core.workflow.nodes.base.node import Node
@@ -108,8 +107,8 @@ class Worker(threading.Thread):
             except Exception as e:
                 error_event = NodeRunFailedEvent(
                     id=str(uuid4()),
-                    node_id="unknown",
-                    node_type=NodeType.CODE,
+                    node_id=node.id,
+                    node_type=node.node_type,
                     in_iteration_id=None,
                     error=str(e),
                     start_at=datetime.now(),
@ -177,15 +177,6 @@ def timezone(timezone_string):
        raise ValueError(error)


def convert_datetime_to_date(field, target_timezone: str = ":tz"):
    if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql":
        return f"DATE(DATE_TRUNC('day', {field} AT TIME ZONE 'UTC' AT TIME ZONE {target_timezone}))"
    elif "mysql" in dify_config.SQLALCHEMY_DATABASE_URI_SCHEME:
        return f"DATE(CONVERT_TZ({field}, 'UTC', {target_timezone}))"
    else:
        raise NotImplementedError(f"Unsupported database URI scheme: {dify_config.SQLALCHEMY_DATABASE_URI_SCHEME}")


def generate_string(n):
    letters_digits = string.ascii_letters + string.digits
    result = ""
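For reference, the convert_datetime_to_date helper deleted above only built a dialect-specific SQL fragment; a standalone sketch of the same logic, with a scheme parameter standing in for dify_config.SQLALCHEMY_DATABASE_URI_SCHEME, is:

# Standalone sketch of the removed helper: returns a SQL fragment that
# converts a UTC timestamp column to a local calendar date. The `scheme`
# parameter replaces the dify_config lookup so the sketch is runnable as-is.
def convert_datetime_to_date(field: str, scheme: str, target_timezone: str = ":tz") -> str:
    if scheme == "postgresql":
        # Postgres: shift UTC into the target zone, truncate to day, cast to DATE
        return f"DATE(DATE_TRUNC('day', {field} AT TIME ZONE 'UTC' AT TIME ZONE {target_timezone}))"
    if "mysql" in scheme:
        # MySQL: CONVERT_TZ does the zone shift; DATE() truncates
        return f"DATE(CONVERT_TZ({field}, 'UTC', {target_timezone}))"
    raise NotImplementedError(f"Unsupported database URI scheme: {scheme}")

print(convert_datetime_to_date("created_at", "postgresql"))
# DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz))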
@ -8,12 +8,6 @@ Create Date: 2024-01-07 04:07:34.482983
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '00bacef91f18'
down_revision = '8ec536f3c800'
@ -23,31 +17,17 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
            batch_op.drop_column('description_str')
    else:
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
            batch_op.drop_column('description_str')
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
        batch_op.drop_column('description_str')

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
            batch_op.drop_column('description')
    else:
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
            batch_op.drop_column('description')
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
        batch_op.drop_column('description')

    # ### end Alembic commands ###
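The _is_pg helper removed here, and in every migration below, implements the same runtime dialect branch. A minimal self-contained sketch of how such a branch works inside an Alembic migration (table and column names are illustrative, not Dify's):

# Alembic exposes the live connection via op.get_bind(); its dialect name
# says which database is being migrated. Names below are made up.
import sqlalchemy as sa
from alembic import op


def _is_pg(conn) -> bool:
    return conn.dialect.name == "postgresql"


def upgrade():
    conn = op.get_bind()
    if _is_pg(conn):
        # PostgreSQL accepts an unbounded VARCHAR
        col = sa.Column("note", sa.String(), nullable=True)
    else:
        # MySQL requires an explicit length
        col = sa.Column("note", sa.String(length=255), nullable=True)
    op.add_column("example_table", col)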
@ -7,14 +7,9 @@ Create Date: 2024-06-12 07:49:07.666510
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '04c602f5dc9b'
down_revision = '4ff534e1eb11'
@ -24,28 +19,15 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tracing_app_configs',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('tracing_provider', sa.String(length=255), nullable=True),
            sa.Column('tracing_config', sa.JSON(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
        )
    else:
        op.create_table('tracing_app_configs',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('tracing_provider', sa.String(length=255), nullable=True),
            sa.Column('tracing_config', sa.JSON(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
        )
    op.create_table('tracing_app_configs',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('tracing_provider', sa.String(length=255), nullable=True),
        sa.Column('tracing_config', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
    )

    # ### end Alembic commands ###
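The two branches above differ mainly in where the UUID primary key is generated. A hedged sketch of that distinction (illustrative column types, not Dify's models.types):

# Server-side vs client-side UUID defaults, the distinction the removed
# branches encoded. Column names and types here are illustrative only.
import uuid
import sqlalchemy as sa

# Postgres path: the database fills the id via the uuid-ossp function,
# so rows inserted by raw SQL also get an id.
pg_id = sa.Column("id", sa.String(36), server_default=sa.text("uuid_generate_v4()"))

# MySQL path: no equivalent function default was used, so SQLAlchemy
# generates the id in Python at INSERT time; raw SQL inserts must supply
# their own value.
mysql_id = sa.Column("id", sa.String(36), default=lambda: str(uuid.uuid4()))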
@ -7,15 +7,8 @@ Create Date: 2024-01-12 06:47:21.656262
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '053da0c1d756'
down_revision = '4829e54d2fee'
@ -25,31 +18,16 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tool_conversation_variables',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('user_id', postgresql.UUID(), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
            sa.Column('conversation_id', postgresql.UUID(), nullable=False),
            sa.Column('variables_str', sa.Text(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
        )
    else:
        op.create_table('tool_conversation_variables',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('user_id', models.types.StringUUID(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
            sa.Column('variables_str', models.types.LongText(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
        )

    op.create_table('tool_conversation_variables',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('user_id', postgresql.UUID(), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('conversation_id', postgresql.UUID(), nullable=False),
        sa.Column('variables_str', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
    )
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), nullable=True))
        batch_op.alter_column('icon',
@ -9,12 +9,6 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '114eed84c228'
down_revision = 'c71211c8f604'
@ -32,13 +26,7 @@ def upgrade():

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))
    else:
        with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
    with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))

    # ### end Alembic commands ###
@ -8,11 +8,7 @@ Create Date: 2024-07-05 14:30:59.472593
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import models as models

# revision identifiers, used by Alembic.
revision = '161cadc1af8d'
@ -23,16 +19,9 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
            # Step 1: Add column without NOT NULL constraint
            op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))
    else:
        with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
            # Step 1: Add column without NOT NULL constraint
            op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
    with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
        # Step 1: Add column without NOT NULL constraint
        op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))

    # ### end Alembic commands ###
@ -7,15 +7,8 @@ Create Date: 2023-08-06 16:57:51.248337
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '16fa53d9faec'
down_revision = '8d2d099ceb74'
@ -25,87 +18,44 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('provider_models',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=40), nullable=False),
            sa.Column('model_name', sa.String(length=40), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('encrypted_config', sa.Text(), nullable=True),
            sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
            sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
        )
    else:
        op.create_table('provider_models',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=40), nullable=False),
            sa.Column('model_name', sa.String(length=40), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('encrypted_config', models.types.LongText(), nullable=True),
            sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
            sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
        )

    op.create_table('provider_models',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('model_name', sa.String(length=40), nullable=False),
        sa.Column('model_type', sa.String(length=40), nullable=False),
        sa.Column('encrypted_config', sa.Text(), nullable=True),
        sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
        sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
    )
    with op.batch_alter_table('provider_models', schema=None) as batch_op:
        batch_op.create_index('provider_model_tenant_id_provider_idx', ['tenant_id', 'provider_name'], unique=False)

    if _is_pg(conn):
        op.create_table('tenant_default_models',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=40), nullable=False),
            sa.Column('model_name', sa.String(length=40), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
        )
    else:
        op.create_table('tenant_default_models',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=40), nullable=False),
            sa.Column('model_name', sa.String(length=40), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
        )

    op.create_table('tenant_default_models',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('model_name', sa.String(length=40), nullable=False),
        sa.Column('model_type', sa.String(length=40), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
    )
    with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
        batch_op.create_index('tenant_default_model_tenant_id_provider_type_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)

    if _is_pg(conn):
        op.create_table('tenant_preferred_model_providers',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=40), nullable=False),
            sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
        )
    else:
        op.create_table('tenant_preferred_model_providers',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=40), nullable=False),
            sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
        )

    op.create_table('tenant_preferred_model_providers',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
    )
    with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op:
        batch_op.create_index('tenant_preferred_model_provider_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False)
@ -8,10 +8,6 @@ Create Date: 2024-04-01 09:48:54.232201
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '17b5ab037c40'
down_revision = 'a8f9b3c45e4a'
@ -21,14 +17,9 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
            batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False))
    else:
        with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
            batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'"), nullable=False))

    with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
        batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False))

    # ### end Alembic commands ###
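The only difference between the two branches deleted above is the spelling of the string default: Postgres autogeneration emits an explicit cast that MySQL rejects. A short illustration (column definition is a placeholder, not Dify code):

# The Postgres autogenerated default carries an explicit ::character varying
# cast; the deleted MySQL branch used the bare literal instead.
import sqlalchemy as sa

pg_default = sa.text("'database'::character varying")  # PostgreSQL-only spelling
portable_default = sa.text("'database'")               # accepted by both dialects

col = sa.Column(
    "data_source_type",
    sa.String(length=255),
    server_default=portable_default,
    nullable=False,
)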
@ -10,10 +10,6 @@ from alembic import op

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '63a83fcf12ba'
down_revision = '1787fbae959a'
@ -23,39 +19,21 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('workflow__conversation_variables',
            sa.Column('id', models.types.StringUUID(), nullable=False),
            sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('data', sa.Text(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
        )
    else:
        op.create_table('workflow__conversation_variables',
            sa.Column('id', models.types.StringUUID(), nullable=False),
            sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('data', models.types.LongText(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
        )

    op.create_table('workflow__conversation_variables',
        sa.Column('id', models.types.StringUUID(), nullable=False),
        sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('data', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
    )
    with op.batch_alter_table('workflow__conversation_variables', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('workflow__conversation_variables_app_id_idx'), ['app_id'], unique=False)
        batch_op.create_index(batch_op.f('workflow__conversation_variables_created_at_idx'), ['created_at'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False))
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('conversation_variables', models.types.LongText(), default='{}', nullable=False))
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False))

    # ### end Alembic commands ###
@ -7,14 +7,9 @@ Create Date: 2024-08-15 09:56:59.012490
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '0251a1c768cc'
down_revision = 'bbadea11becb'
@ -24,35 +19,18 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tidb_auth_bindings',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('cluster_id', sa.String(length=255), nullable=False),
            sa.Column('cluster_name', sa.String(length=255), nullable=False),
            sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False),
            sa.Column('account', sa.String(length=255), nullable=False),
            sa.Column('password', sa.String(length=255), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
        )
    else:
        op.create_table('tidb_auth_bindings',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('cluster_id', sa.String(length=255), nullable=False),
            sa.Column('cluster_name', sa.String(length=255), nullable=False),
            sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'"), nullable=False),
            sa.Column('account', sa.String(length=255), nullable=False),
            sa.Column('password', sa.String(length=255), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
        )

    op.create_table('tidb_auth_bindings',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
        sa.Column('cluster_id', sa.String(length=255), nullable=False),
        sa.Column('cluster_name', sa.String(length=255), nullable=False),
        sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False),
        sa.Column('account', sa.String(length=255), nullable=False),
        sa.Column('password', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
    )
    with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op:
        batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False)
        batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False)
@ -10,10 +10,6 @@ from alembic import op

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'd57ba9ebb251'
down_revision = '675b5321501b'
@ -26,14 +22,8 @@ def upgrade():
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.add_column(sa.Column('parent_message_id', models.types.StringUUID(), nullable=True))

    # Set parent_message_id for existing messages to distinguish them from new messages with actual parent IDs or NULLs
    conn = op.get_bind()
    if _is_pg(conn):
        # PostgreSQL: Use uuid_nil() function
        op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')
    else:
        # MySQL: Use a specific UUID value to represent nil
        op.execute("UPDATE messages SET parent_message_id = '00000000-0000-0000-0000-000000000000' WHERE parent_message_id IS NULL")
    # Set parent_message_id for existing messages to uuid_nil() to distinguish them from new messages with actual parent IDs or NULLs
    op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')

    # ### end Alembic commands ###
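Both removed branches wrote the same logical value, the all-zero "nil" UUID; Postgres's uuid_nil() and the hard-coded MySQL literal agree, as a one-liner confirms:

# The nil UUID returned by Postgres's uuid_nil() is sixteen zero bytes;
# the literal used on the deleted MySQL path spells out the same value.
import uuid

assert str(uuid.UUID(int=0)) == "00000000-0000-0000-0000-000000000000"
print(uuid.UUID(int=0))  # 00000000-0000-0000-0000-000000000000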
@ -6,11 +6,7 @@ Create Date: 2024-09-24 09:22:43.570120

"""
from alembic import op
import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

@ -23,58 +19,30 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
            batch_op.alter_column('document_id',
                   existing_type=sa.UUID(),
                   nullable=True)
            batch_op.alter_column('data_source_type',
                   existing_type=sa.TEXT(),
                   nullable=True)
            batch_op.alter_column('segment_id',
                   existing_type=sa.UUID(),
                   nullable=True)
    else:
        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
            batch_op.alter_column('document_id',
                   existing_type=models.types.StringUUID(),
                   nullable=True)
            batch_op.alter_column('data_source_type',
                   existing_type=models.types.LongText(),
                   nullable=True)
            batch_op.alter_column('segment_id',
                   existing_type=models.types.StringUUID(),
                   nullable=True)
    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
        batch_op.alter_column('document_id',
               existing_type=sa.UUID(),
               nullable=True)
        batch_op.alter_column('data_source_type',
               existing_type=sa.TEXT(),
               nullable=True)
        batch_op.alter_column('segment_id',
               existing_type=sa.UUID(),
               nullable=True)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
            batch_op.alter_column('segment_id',
                   existing_type=sa.UUID(),
                   nullable=False)
            batch_op.alter_column('data_source_type',
                   existing_type=sa.TEXT(),
                   nullable=False)
            batch_op.alter_column('document_id',
                   existing_type=sa.UUID(),
                   nullable=False)
    else:
        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
            batch_op.alter_column('segment_id',
                   existing_type=models.types.StringUUID(),
                   nullable=False)
            batch_op.alter_column('data_source_type',
                   existing_type=models.types.LongText(),
                   nullable=False)
            batch_op.alter_column('document_id',
                   existing_type=models.types.StringUUID(),
                   nullable=False)
    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
        batch_op.alter_column('segment_id',
               existing_type=sa.UUID(),
               nullable=False)
        batch_op.alter_column('data_source_type',
               existing_type=sa.TEXT(),
               nullable=False)
        batch_op.alter_column('document_id',
               existing_type=sa.UUID(),
               nullable=False)

    # ### end Alembic commands ###
@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '33f5fac87f29'
@ -24,66 +19,34 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('external_knowledge_apis',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', sa.String(length=255), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('settings', sa.Text(), nullable=True),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
        )
    else:
        op.create_table('external_knowledge_apis',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', sa.String(length=255), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('settings', models.types.LongText(), nullable=True),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
        )

    op.create_table('external_knowledge_apis',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('settings', sa.Text(), nullable=True),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
    )
    with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op:
        batch_op.create_index('external_knowledge_apis_name_idx', ['name'], unique=False)
        batch_op.create_index('external_knowledge_apis_tenant_idx', ['tenant_id'], unique=False)

    if _is_pg(conn):
        op.create_table('external_knowledge_bindings',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
            sa.Column('external_knowledge_id', sa.Text(), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
        )
    else:
        op.create_table('external_knowledge_bindings',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
            sa.Column('external_knowledge_id', sa.String(length=512), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
        )

    op.create_table('external_knowledge_bindings',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('external_knowledge_id', sa.Text(), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
    )
    with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
        batch_op.create_index('external_knowledge_bindings_dataset_idx', ['dataset_id'], unique=False)
        batch_op.create_index('external_knowledge_bindings_external_knowledge_api_idx', ['external_knowledge_api_id'], unique=False)
@ -16,10 +16,6 @@ branch_labels = None
depends_on = None


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


def upgrade():
    def _has_name_or_size_column() -> bool:
        # We cannot access the database in offline mode, so assume
@ -50,26 +46,14 @@ def upgrade():
    if _has_name_or_size_column():
        return

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table("tool_files", schema=None) as batch_op:
            batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
            batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
        op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
        op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
        with op.batch_alter_table("tool_files", schema=None) as batch_op:
            batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
            batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table("tool_files", schema=None) as batch_op:
            batch_op.add_column(sa.Column("name", sa.String(length=255), nullable=True))
            batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
        op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
        op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
        with op.batch_alter_table("tool_files", schema=None) as batch_op:
            batch_op.alter_column("name", existing_type=sa.String(length=255), nullable=False)
            batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
    with op.batch_alter_table("tool_files", schema=None) as batch_op:
        batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
        batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
    op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
    op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
    with op.batch_alter_table("tool_files", schema=None) as batch_op:
        batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
        batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
    # ### end Alembic commands ###
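The _has_name_or_size_column guard above makes this migration idempotent; its body is not shown in the hunk, so the following is only a plausible standalone version of such a check, using SQLAlchemy's inspector (the real helper also special-cases offline mode, per its comment):

# Hedged sketch of an idempotency guard like _has_name_or_size_column:
# ask the live connection whether either column already exists before
# adding it. Not the real helper; its full body is outside this hunk.
import sqlalchemy as sa
from alembic import op


def _has_name_or_size_column() -> bool:
    conn = op.get_bind()
    columns = {c["name"] for c in sa.inspect(conn).get_columns("tool_files")}
    return "name" in columns or "size" in columns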
@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '43fa78bc3b7d'
@ -24,25 +19,13 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('whitelists',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('category', sa.String(length=255), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
        )
    else:
        op.create_table('whitelists',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('category', sa.String(length=255), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
        )

    op.create_table('whitelists',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
        sa.Column('category', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
    )
    with op.batch_alter_table('whitelists', schema=None) as batch_op:
        batch_op.create_index('whitelists_tenant_idx', ['tenant_id'], unique=False)
@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '08ec4f75af5e'
@ -24,26 +19,14 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('account_plugin_permissions',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
            sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
            sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
            sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
        )
    else:
        op.create_table('account_plugin_permissions',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
            sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
            sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
            sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
        )
    op.create_table('account_plugin_permissions',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
        sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
        sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
        sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
    )

    # ### end Alembic commands ###
@ -10,10 +10,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'f4d7ce70a7ca'
down_revision = '93ad8c19c40b'
@ -23,43 +19,23 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('upload_files', schema=None) as batch_op:
            batch_op.alter_column('source_url',
                   existing_type=sa.VARCHAR(length=255),
                   type_=sa.TEXT(),
                   existing_nullable=False,
                   existing_server_default=sa.text("''::character varying"))
    else:
        with op.batch_alter_table('upload_files', schema=None) as batch_op:
            batch_op.alter_column('source_url',
                   existing_type=sa.VARCHAR(length=255),
                   type_=models.types.LongText(),
                   existing_nullable=False,
                   existing_default=sa.text("''"))
    with op.batch_alter_table('upload_files', schema=None) as batch_op:
        batch_op.alter_column('source_url',
               existing_type=sa.VARCHAR(length=255),
               type_=sa.TEXT(),
               existing_nullable=False,
               existing_server_default=sa.text("''::character varying"))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('upload_files', schema=None) as batch_op:
            batch_op.alter_column('source_url',
                   existing_type=sa.TEXT(),
                   type_=sa.VARCHAR(length=255),
                   existing_nullable=False,
                   existing_server_default=sa.text("''::character varying"))
    else:
        with op.batch_alter_table('upload_files', schema=None) as batch_op:
            batch_op.alter_column('source_url',
                   existing_type=models.types.LongText(),
                   type_=sa.VARCHAR(length=255),
                   existing_nullable=False,
                   existing_default=sa.text("''"))
    with op.batch_alter_table('upload_files', schema=None) as batch_op:
        batch_op.alter_column('source_url',
               existing_type=sa.TEXT(),
               type_=sa.VARCHAR(length=255),
               existing_nullable=False,
               existing_server_default=sa.text("''::character varying"))

    # ### end Alembic commands ###
@ -7,9 +7,6 @@ Create Date: 2024-11-01 06:22:27.981398
"""
from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

@ -22,91 +19,49 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    op.execute("UPDATE recommended_apps SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
    op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
    op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")

    if _is_pg(conn):
        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.VARCHAR(length=255),
                   type_=sa.TEXT(),
                   nullable=False)
    with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
        batch_op.alter_column('custom_disclaimer',
               existing_type=sa.VARCHAR(length=255),
               type_=sa.TEXT(),
               nullable=False)

        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.VARCHAR(length=255),
                   type_=sa.TEXT(),
                   nullable=False)
    with op.batch_alter_table('sites', schema=None) as batch_op:
        batch_op.alter_column('custom_disclaimer',
               existing_type=sa.VARCHAR(length=255),
               type_=sa.TEXT(),
               nullable=False)

        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.VARCHAR(length=255),
                   type_=sa.TEXT(),
                   nullable=False)
    else:
        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.VARCHAR(length=255),
                   type_=models.types.LongText(),
                   nullable=False)

        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.VARCHAR(length=255),
                   type_=models.types.LongText(),
                   nullable=False)

        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.VARCHAR(length=255),
                   type_=models.types.LongText(),
                   nullable=False)
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.alter_column('custom_disclaimer',
               existing_type=sa.VARCHAR(length=255),
               type_=sa.TEXT(),
               nullable=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.TEXT(),
                   type_=sa.VARCHAR(length=255),
                   nullable=True)
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.alter_column('custom_disclaimer',
               existing_type=sa.TEXT(),
               type_=sa.VARCHAR(length=255),
               nullable=True)

        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.TEXT(),
                   type_=sa.VARCHAR(length=255),
                   nullable=True)
    with op.batch_alter_table('sites', schema=None) as batch_op:
        batch_op.alter_column('custom_disclaimer',
               existing_type=sa.TEXT(),
               type_=sa.VARCHAR(length=255),
               nullable=True)

        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=sa.TEXT(),
                   type_=sa.VARCHAR(length=255),
                   nullable=True)
    else:
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=models.types.LongText(),
                   type_=sa.VARCHAR(length=255),
                   nullable=True)

        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=models.types.LongText(),
                   type_=sa.VARCHAR(length=255),
                   nullable=True)

        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
            batch_op.alter_column('custom_disclaimer',
                   existing_type=models.types.LongText(),
                   type_=sa.VARCHAR(length=255),
                   nullable=True)
    with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
        batch_op.alter_column('custom_disclaimer',
               existing_type=sa.TEXT(),
               type_=sa.VARCHAR(length=255),
               nullable=True)

    # ### end Alembic commands ###
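The upgrade above follows the usual backfill-then-tighten sequence: normalize NULLs first so the constraint cannot fail, then widen the type and make the column NOT NULL. A compact generic sketch of that step (table and column names are placeholders, not Dify code):

# Generic backfill-then-tighten step, the pattern this migration applies
# to custom_disclaimer on three tables.
import sqlalchemy as sa
from alembic import op


def tighten_text_column(table: str, column: str) -> None:
    # 1) Backfill NULLs so adding NOT NULL cannot fail on existing rows.
    op.execute(f"UPDATE {table} SET {column} = '' WHERE {column} IS NULL")
    # 2) Widen the type and forbid NULLs in a single ALTER.
    with op.batch_alter_table(table, schema=None) as batch_op:
        batch_op.alter_column(
            column,
            existing_type=sa.VARCHAR(length=255),
            type_=sa.Text(),
            nullable=False,
        )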
@ -10,10 +10,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '09a8d1878d9b'
down_revision = 'd07474999927'
@ -23,103 +19,55 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=postgresql.JSON(astext_type=sa.Text()),
                   nullable=False)
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.alter_column('inputs',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               nullable=False)

        with op.batch_alter_table('messages', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=postgresql.JSON(astext_type=sa.Text()),
                   nullable=False)
    else:
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=sa.JSON(),
                   nullable=False)

        with op.batch_alter_table('messages', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=sa.JSON(),
                   nullable=False)
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.alter_column('inputs',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               nullable=False)

    op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL")
    op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL")
    op.execute("UPDATE workflows SET features = '' WHERE features IS NULL")
    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('graph',
                   existing_type=sa.TEXT(),
                   nullable=False)
            batch_op.alter_column('features',
                   existing_type=sa.TEXT(),
                   nullable=False)
            batch_op.alter_column('updated_at',
                   existing_type=postgresql.TIMESTAMP(),
                   nullable=False)
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('graph',
                   existing_type=models.types.LongText(),
                   nullable=False)
            batch_op.alter_column('features',
                   existing_type=models.types.LongText(),
                   nullable=False)
            batch_op.alter_column('updated_at',
                   existing_type=sa.TIMESTAMP(),
                   nullable=False)

    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.alter_column('graph',
               existing_type=sa.TEXT(),
               nullable=False)
        batch_op.alter_column('features',
               existing_type=sa.TEXT(),
               nullable=False)
        batch_op.alter_column('updated_at',
               existing_type=postgresql.TIMESTAMP(),
               nullable=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                   existing_type=postgresql.TIMESTAMP(),
                   nullable=True)
            batch_op.alter_column('features',
                   existing_type=sa.TEXT(),
                   nullable=True)
            batch_op.alter_column('graph',
                   existing_type=sa.TEXT(),
                   nullable=True)
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                   existing_type=sa.TIMESTAMP(),
                   nullable=True)
            batch_op.alter_column('features',
                   existing_type=models.types.LongText(),
                   nullable=True)
            batch_op.alter_column('graph',
                   existing_type=models.types.LongText(),
                   nullable=True)
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.alter_column('updated_at',
               existing_type=postgresql.TIMESTAMP(),
               nullable=True)
        batch_op.alter_column('features',
               existing_type=sa.TEXT(),
               nullable=True)
        batch_op.alter_column('graph',
               existing_type=sa.TEXT(),
               nullable=True)

    if _is_pg(conn):
        with op.batch_alter_table('messages', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=postgresql.JSON(astext_type=sa.Text()),
                   nullable=True)
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.alter_column('inputs',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               nullable=True)

        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=postgresql.JSON(astext_type=sa.Text()),
                   nullable=True)
    else:
        with op.batch_alter_table('messages', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=sa.JSON(),
                   nullable=True)

        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('inputs',
                   existing_type=sa.JSON(),
                   nullable=True)
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.alter_column('inputs',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               nullable=True)

    # ### end Alembic commands ###
@ -8,11 +8,6 @@ Create Date: 2024-11-22 07:01:17.550037
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@ -24,53 +19,27 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
op.create_table('child_chunks',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('document_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('position', sa.Integer(), nullable=False),
|
||||
sa.Column('content', sa.Text(), nullable=False),
|
||||
sa.Column('word_count', sa.Integer(), nullable=False),
|
||||
sa.Column('index_node_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
|
||||
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
|
||||
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
|
||||
sa.Column('indexing_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('completed_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('error', sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
|
||||
)
|
||||
else:
|
||||
op.create_table('child_chunks',
|
||||
sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('document_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('position', sa.Integer(), nullable=False),
|
||||
sa.Column('content', models.types.LongText(), nullable=False),
|
||||
sa.Column('word_count', sa.Integer(), nullable=False),
|
||||
sa.Column('index_node_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
|
||||
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'"), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.Column('indexing_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('completed_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('error', models.types.LongText(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
|
||||
)
|
||||
|
||||
op.create_table('child_chunks',
|
||||
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('document_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('position', sa.Integer(), nullable=False),
|
||||
sa.Column('content', sa.Text(), nullable=False),
|
||||
sa.Column('word_count', sa.Integer(), nullable=False),
|
||||
sa.Column('index_node_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
|
||||
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
|
||||
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
|
||||
sa.Column('indexing_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('completed_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('error', sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
|
||||
)
|
||||
with op.batch_alter_table('child_chunks', schema=None) as batch_op:
|
||||
batch_op.create_index('child_chunk_dataset_id_idx', ['tenant_id', 'dataset_id', 'document_id', 'segment_id', 'index_node_id'], unique=False)
|
||||
|
||||
|
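The paired create_table blocks above differ mostly in defaults: the PostgreSQL branch leans on server-side functions (uuid_generate_v4() from uuid-ossp, CURRENT_TIMESTAMP(0)), while the removed MySQL branch generated UUIDs client-side via SQLAlchemy's Python default. A sketch of the two approaches with a hypothetical table name; note a Python-side default only fires for inserts issued through SQLAlchemy, not for raw SQL:

from uuid import uuid4

import sqlalchemy as sa
from alembic import op

import models  # models.types.StringUUID, as used throughout these migrations


def create_demo_table(is_pg: bool) -> None:
    if is_pg:
        # Server-side default: the database fills the id on INSERT.
        id_col = sa.Column('id', models.types.StringUUID(),
                           server_default=sa.text('uuid_generate_v4()'),
                           nullable=False)
    else:
        # Client-side default: SQLAlchemy computes the value before INSERT.
        id_col = sa.Column('id', models.types.StringUUID(),
                           default=lambda: str(uuid4()),
                           nullable=False)
    op.create_table('demo_table',  # hypothetical table
        id_col,
        sa.Column('created_at', sa.DateTime(),
                  server_default=sa.func.current_timestamp(),
                  nullable=False),
    )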
@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '11b07f66c737'
@ -30,30 +25,15 @@ def upgrade():

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tool_providers',
        sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
        sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
        sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
        sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
        sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
        sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
        )
    else:
        op.create_table('tool_providers',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), autoincrement=False, nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), autoincrement=False, nullable=False),
        sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
        sa.Column('encrypted_credentials', models.types.LongText(), autoincrement=False, nullable=True),
        sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
        sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
        sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
        )
    op.create_table('tool_providers',
    sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
    sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
    sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
    sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
    sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
    sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
    sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
    )
    # ### end Alembic commands ###

@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '923752d42eb6'
@ -24,29 +19,15 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('dataset_auto_disable_logs',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
        )
    else:
        op.create_table('dataset_auto_disable_logs',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
        )

    op.create_table('dataset_auto_disable_logs',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
    sa.Column('document_id', models.types.StringUUID(), nullable=False),
    sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
    )
    with op.batch_alter_table('dataset_auto_disable_logs', schema=None) as batch_op:
        batch_op.create_index('dataset_auto_disable_log_created_atx', ['created_at'], unique=False)
        batch_op.create_index('dataset_auto_disable_log_dataset_idx', ['dataset_id'], unique=False)

@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'f051706725cc'
@ -24,27 +19,14 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('rate_limit_logs',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('subscription_plan', sa.String(length=255), nullable=False),
        sa.Column('operation', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
        )
    else:
        op.create_table('rate_limit_logs',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('subscription_plan', sa.String(length=255), nullable=False),
        sa.Column('operation', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
        )

    op.create_table('rate_limit_logs',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('subscription_plan', sa.String(length=255), nullable=False),
    sa.Column('operation', sa.String(length=255), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
    )
    with op.batch_alter_table('rate_limit_logs', schema=None) as batch_op:
        batch_op.create_index('rate_limit_log_operation_idx', ['operation'], unique=False)
        batch_op.create_index('rate_limit_log_tenant_idx', ['tenant_id'], unique=False)

@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'd20049ed0af6'
@ -24,68 +19,34 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('dataset_metadata_bindings',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('dataset_metadata_bindings',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
        )

    op.create_table('dataset_metadata_bindings',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
    sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
    sa.Column('document_id', models.types.StringUUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
    )
    with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op:
        batch_op.create_index('dataset_metadata_binding_dataset_idx', ['dataset_id'], unique=False)
        batch_op.create_index('dataset_metadata_binding_document_idx', ['document_id'], unique=False)
        batch_op.create_index('dataset_metadata_binding_metadata_idx', ['metadata_id'], unique=False)
        batch_op.create_index('dataset_metadata_binding_tenant_idx', ['tenant_id'], unique=False)

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('dataset_metadatas',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('dataset_metadatas',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
        )

    op.create_table('dataset_metadatas',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
    sa.Column('type', sa.String(length=255), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
    )
    with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op:
        batch_op.create_index('dataset_metadata_dataset_idx', ['dataset_id'], unique=False)
        batch_op.create_index('dataset_metadata_tenant_idx', ['tenant_id'], unique=False)
@ -93,31 +54,23 @@ def upgrade():
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('built_in_field_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False))

    if _is_pg(conn):
        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.alter_column('doc_metadata',
                    existing_type=postgresql.JSON(astext_type=sa.Text()),
                    type_=postgresql.JSONB(astext_type=sa.Text()),
                    existing_nullable=True)
            batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
    else:
        pass
    with op.batch_alter_table('documents', schema=None) as batch_op:
        batch_op.alter_column('doc_metadata',
                existing_type=postgresql.JSON(astext_type=sa.Text()),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
        batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.drop_index('document_metadata_idx', postgresql_using='gin')
            batch_op.alter_column('doc_metadata',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    type_=postgresql.JSON(astext_type=sa.Text()),
                    existing_nullable=True)
    else:
        pass
    with op.batch_alter_table('documents', schema=None) as batch_op:
        batch_op.drop_index('document_metadata_idx', postgresql_using='gin')
        batch_op.alter_column('doc_metadata',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=postgresql.JSON(astext_type=sa.Text()),
                existing_nullable=True)

    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.drop_column('built_in_field_enabled')

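The documents hunk above is PostgreSQL-specific on two counts: JSONB is a PostgreSQL column type and GIN a PostgreSQL index access method, which is presumably why the removed MySQL branch was a bare pass. A sketch of the pattern in isolation:

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql


def upgrade_pg_only():
    with op.batch_alter_table('documents', schema=None) as batch_op:
        # JSON -> JSONB: JSONB stores a decomposed binary form and is indexable.
        batch_op.alter_column('doc_metadata',
                              existing_type=postgresql.JSON(astext_type=sa.Text()),
                              type_=postgresql.JSONB(astext_type=sa.Text()),
                              existing_nullable=True)
        # A GIN index makes containment queries (doc_metadata @> '{"k": "v"}') fast.
        batch_op.create_index('document_metadata_idx', ['doc_metadata'],
                              unique=False, postgresql_using='gin')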
@ -17,23 +17,10 @@ branch_labels = None
depends_on = None


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


def upgrade():
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default=''))
            batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default=''))
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('marked_name', sa.String(length=255), nullable=False, server_default=''))
            batch_op.add_column(sa.Column('marked_comment', sa.String(length=255), nullable=False, server_default=''))
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default=''))
        batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default=''))


def downgrade():

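The only real difference between the two removed branches above is the length argument: PostgreSQL accepts sa.String() with no length and renders it as an unbounded VARCHAR, while MySQL's DDL compiler requires an explicit length for VARCHAR columns, hence sa.String(length=255). For example:

import sqlalchemy as sa

# Fine on PostgreSQL; rejected by MySQL ("VARCHAR requires a length"):
unbounded = sa.Column('marked_name', sa.String(), nullable=False, server_default='')

# Portable across both backends:
bounded = sa.Column('marked_name', sa.String(length=255), nullable=False, server_default='')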
@ -8,14 +8,9 @@ Create Date: 2025-05-15 15:31:03.128680

import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = "2adcbe1f5dfb"
down_revision = "d28f2004b072"
@ -25,46 +20,24 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table(
            "workflow_draft_variables",
            sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False),
            sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
            sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
            sa.Column("app_id", models.types.StringUUID(), nullable=False),
            sa.Column("last_edited_at", sa.DateTime(), nullable=True),
            sa.Column("node_id", sa.String(length=255), nullable=False),
            sa.Column("name", sa.String(length=255), nullable=False),
            sa.Column("description", sa.String(length=255), nullable=False),
            sa.Column("selector", sa.String(length=255), nullable=False),
            sa.Column("value_type", sa.String(length=20), nullable=False),
            sa.Column("value", sa.Text(), nullable=False),
            sa.Column("visible", sa.Boolean(), nullable=False),
            sa.Column("editable", sa.Boolean(), nullable=False),
            sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
            sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
        )
    else:
        op.create_table(
            "workflow_draft_variables",
            sa.Column("id", models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column("app_id", models.types.StringUUID(), nullable=False),
            sa.Column("last_edited_at", sa.DateTime(), nullable=True),
            sa.Column("node_id", sa.String(length=255), nullable=False),
            sa.Column("name", sa.String(length=255), nullable=False),
            sa.Column("description", sa.String(length=255), nullable=False),
            sa.Column("selector", sa.String(length=255), nullable=False),
            sa.Column("value_type", sa.String(length=20), nullable=False),
            sa.Column("value", models.types.LongText(), nullable=False),
            sa.Column("visible", sa.Boolean(), nullable=False),
            sa.Column("editable", sa.Boolean(), nullable=False),
            sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
            sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
        )
    op.create_table(
        "workflow_draft_variables",
        sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False),
        sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
        sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
        sa.Column("app_id", models.types.StringUUID(), nullable=False),
        sa.Column("last_edited_at", sa.DateTime(), nullable=True),
        sa.Column("node_id", sa.String(length=255), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.String(length=255), nullable=False),
        sa.Column("selector", sa.String(length=255), nullable=False),
        sa.Column("value_type", sa.String(length=20), nullable=False),
        sa.Column("value", sa.Text(), nullable=False),
        sa.Column("visible", sa.Boolean(), nullable=False),
        sa.Column("editable", sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
        sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
    )

    # ### end Alembic commands ###


@ -7,10 +7,6 @@ Create Date: 2025-06-06 14:24:44.213018
"""
from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa


@ -22,30 +18,19 @@ depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block`
        # context manager to wrap the index creation statement.
        # Reference:
        #
        # - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
        # - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block
        with op.get_context().autocommit_block():
            op.create_index(
                op.f('workflow_node_executions_tenant_id_idx'),
                "workflow_node_executions",
                ['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')],
                unique=False,
                postgresql_concurrently=True,
            )
    else:
    # `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block`
    # context manager to wrap the index creation statement.
    # Reference:
    #
    # - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
    # - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block
    with op.get_context().autocommit_block():
        op.create_index(
            op.f('workflow_node_executions_tenant_id_idx'),
            "workflow_node_executions",
            ['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')],
            unique=False,
            postgresql_concurrently=True,
        )

    with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
@ -66,13 +51,8 @@ def downgrade():
    # Reference:
    #
    # https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
    conn = op.get_bind()

    if _is_pg(conn):
        with op.get_context().autocommit_block():
            op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True)
    else:
        op.drop_index(op.f('workflow_node_executions_tenant_id_idx'))
    with op.get_context().autocommit_block():
        op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True)

    with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
        batch_op.drop_column('node_execution_id')

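As the inline comments in this hunk explain, PostgreSQL's CREATE INDEX CONCURRENTLY cannot run inside a transaction, while Alembic normally wraps each migration in one; autocommit_block() commits the in-progress transaction, runs the enclosed DDL in autocommit mode, and begins a new transaction afterwards. A minimal sketch using the index from the hunk above:

import sqlalchemy as sa
from alembic import op


def upgrade():
    with op.get_context().autocommit_block():
        # Built without taking a write-blocking lock on the table.
        op.create_index(
            op.f('workflow_node_executions_tenant_id_idx'),
            'workflow_node_executions',
            ['tenant_id', 'workflow_id', 'node_id',
             sa.literal_column('created_at DESC')],
            unique=False,
            postgresql_concurrently=True,
        )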
@ -8,11 +8,6 @@ Create Date: 2025-06-25 09:36:07.510570
from alembic import op
import models as models
import sqlalchemy as sa
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


# revision identifiers, used by Alembic.
@ -24,80 +19,40 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('app_mcp_servers',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=False),
        sa.Column('server_code', sa.String(length=255), nullable=False),
        sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
        sa.Column('parameters', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
        sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
        sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
        )
    else:
        op.create_table('app_mcp_servers',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=False),
        sa.Column('server_code', sa.String(length=255), nullable=False),
        sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False),
        sa.Column('parameters', models.types.LongText(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
        sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
        sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
        )
    if _is_pg(conn):
        op.create_table('tool_mcp_providers',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('name', sa.String(length=40), nullable=False),
        sa.Column('server_identifier', sa.String(length=24), nullable=False),
        sa.Column('server_url', sa.Text(), nullable=False),
        sa.Column('server_url_hash', sa.String(length=64), nullable=False),
        sa.Column('icon', sa.String(length=255), nullable=True),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('user_id', models.types.StringUUID(), nullable=False),
        sa.Column('encrypted_credentials', sa.Text(), nullable=True),
        sa.Column('authed', sa.Boolean(), nullable=False),
        sa.Column('tools', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
        sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
        sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
        )
    else:
        op.create_table('tool_mcp_providers',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('name', sa.String(length=40), nullable=False),
        sa.Column('server_identifier', sa.String(length=24), nullable=False),
        sa.Column('server_url', models.types.LongText(), nullable=False),
        sa.Column('server_url_hash', sa.String(length=64), nullable=False),
        sa.Column('icon', sa.String(length=255), nullable=True),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('user_id', models.types.StringUUID(), nullable=False),
        sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
        sa.Column('authed', sa.Boolean(), nullable=False),
        sa.Column('tools', models.types.LongText(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
        sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
        sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
        )
    op.create_table('app_mcp_servers',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('server_code', sa.String(length=255), nullable=False),
    sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
    sa.Column('parameters', sa.Text(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
    sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
    sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
    )
    op.create_table('tool_mcp_providers',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('name', sa.String(length=40), nullable=False),
    sa.Column('server_identifier', sa.String(length=24), nullable=False),
    sa.Column('server_url', sa.Text(), nullable=False),
    sa.Column('server_url_hash', sa.String(length=64), nullable=False),
    sa.Column('icon', sa.String(length=255), nullable=True),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('user_id', models.types.StringUUID(), nullable=False),
    sa.Column('encrypted_credentials', sa.Text(), nullable=True),
    sa.Column('authed', sa.Boolean(), nullable=False),
    sa.Column('tools', sa.Text(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
    sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
    sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
    sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
    )

    # ### end Alembic commands ###


@ -27,10 +27,6 @@ import models as models
import sqlalchemy as sa


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


# revision identifiers, used by Alembic.
revision = '1c9ba48be8e4'
down_revision = '58eb7bdb93fe'
@ -44,11 +40,7 @@ def upgrade():
    # The ability to specify source timestamp has been removed because its type signature is incompatible with
    # PostgreSQL 18's `uuidv7` function. This capability is rarely needed in practice, as IDs can be
    # generated and controlled within the application layer.
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Create uuidv7 functions
        op.execute(sa.text(r"""
    op.execute(sa.text(r"""
/* Main function to generate a uuidv7 value with millisecond precision */
CREATE FUNCTION uuidv7() RETURNS uuid
AS
@ -71,7 +63,7 @@ COMMENT ON FUNCTION uuidv7 IS
'Generate a uuid-v7 value with a 48-bit timestamp (millisecond precision) and 74 bits of randomness';
"""))

        op.execute(sa.text(r"""
    op.execute(sa.text(r"""
CREATE FUNCTION uuidv7_boundary(timestamptz) RETURNS uuid
AS
$$
@ -87,15 +79,8 @@ COMMENT ON FUNCTION uuidv7_boundary(timestamptz) IS
'Generate a non-random uuidv7 with the given timestamp (first 48 bits) and all random bits to 0. As the smallest possible uuidv7 for that timestamp, it may be used as a boundary for partitions.';
"""
))
    else:
        pass


def downgrade():
    conn = op.get_bind()

    if _is_pg(conn):
        op.execute(sa.text("DROP FUNCTION uuidv7"))
        op.execute(sa.text("DROP FUNCTION uuidv7_boundary"))
    else:
        pass
    op.execute(sa.text("DROP FUNCTION uuidv7"))
    op.execute(sa.text("DROP FUNCTION uuidv7_boundary"))

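The SQL in this migration defines a uuidv7() function so PostgreSQL can mint time-ordered UUIDs as server defaults (v7 ids sort roughly by creation time, which keeps b-tree indexes compact); per the comment above, its signature was kept compatible with the built-in uuidv7 that ships in PostgreSQL 18. A usage sketch, assuming the function exists and using a hypothetical table:

import sqlalchemy as sa
from alembic import op

import models  # models.types.StringUUID as elsewhere in these migrations


def create_events_table():
    op.create_table('events',  # hypothetical table
        sa.Column('id', models.types.StringUUID(),
                  server_default=sa.text('uuidv7()'),  # requires the uuidv7() function above
                  nullable=False),
        sa.Column('payload', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='events_pkey'),
    )

The provider_credentials table later in this compare uses exactly this server_default=sa.text('uuidv7()') form.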
@ -8,11 +8,6 @@ Create Date: 2025-06-24 17:05:43.118647
from alembic import op
import models as models
import sqlalchemy as sa
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


# revision identifiers, used by Alembic.
@ -24,63 +19,31 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tool_oauth_system_clients',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('plugin_id', sa.String(length=512), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
        sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
        )
    else:
        op.create_table('tool_oauth_system_clients',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('plugin_id', sa.String(length=512), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
        sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
        )
    if _is_pg(conn):
        op.create_table('tool_oauth_tenant_clients',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('plugin_id', sa.String(length=512), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
        )
    else:
        op.create_table('tool_oauth_tenant_clients',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('plugin_id', sa.String(length=255), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
        )
    op.create_table('tool_oauth_system_clients',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('plugin_id', sa.String(length=512), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
    sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
    )
    op.create_table('tool_oauth_tenant_clients',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('plugin_id', sa.String(length=512), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
    sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
    sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
    )

    if _is_pg(conn):
        with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False))
            batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
            batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False))
            batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
            batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
    else:
        with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'"), nullable=False))
            batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
            batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'"), nullable=False))
            batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
            batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
    with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False))
        batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
        batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False))
        batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
        batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])

    # ### end Alembic commands ###


@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '8bcc02c9bd07'
@ -24,36 +19,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tenant_plugin_auto_upgrade_strategies',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
        sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
        sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
        sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
        sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
        sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
        )
    else:
        op.create_table('tenant_plugin_auto_upgrade_strategies',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
        sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
        sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
        sa.Column('exclude_plugins', sa.JSON(), nullable=False),
        sa.Column('include_plugins', sa.JSON(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
        sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
        )
    op.create_table('tenant_plugin_auto_upgrade_strategies',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
    sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
    sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
    sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
    sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
    sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
    )
    # ### end Alembic commands ###



@ -7,10 +7,6 @@ Create Date: 2025-07-24 14:50:48.779833
"""
from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa


@ -22,18 +18,8 @@ depends_on = None


def upgrade():
    conn = op.get_bind()

    if _is_pg(conn):
        op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
    else:
        op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")
    op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")


def downgrade():
    conn = op.get_bind()

    if _is_pg(conn):
        op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
    else:
        op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")
    op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")

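The ::character varying suffix in the PostgreSQL variant is a PG-only cast that MySQL's parser rejects, which is the entire difference between the two raw-SQL statements above. A hedged sketch of a portable alternative that lets Alembic render the default per dialect (the existing_type is an assumption about the column; MySQL generally needs it to recreate the column definition):

import sqlalchemy as sa
from alembic import op


def upgrade():
    op.alter_column(
        'tidb_auth_bindings', 'status',
        existing_type=sa.String(length=255),  # assumed column type
        server_default='CREATING',
    )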
||||
@ -11,10 +11,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.sql import table, column


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'e8446f481c1e'
down_revision = 'fa8b0fa6f407'
@ -24,30 +20,16 @@ depends_on = None

def upgrade():
    # Create provider_credentials table
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('provider_credentials',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('credential_name', sa.String(length=255), nullable=False),
            sa.Column('encrypted_config', sa.Text(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
        )
    else:
        op.create_table('provider_credentials',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('credential_name', sa.String(length=255), nullable=False),
            sa.Column('encrypted_config', models.types.LongText(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
        )
    op.create_table('provider_credentials',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=255), nullable=False),
        sa.Column('credential_name', sa.String(length=255), nullable=False),
        sa.Column('encrypted_config', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
    )

    # Create index for provider_credentials
    with op.batch_alter_table('provider_credentials', schema=None) as batch_op:
@ -78,49 +60,27 @@ def upgrade():

def migrate_existing_providers_data():
    """migrate providers table data to provider_credentials"""
    conn = op.get_bind()
    # Define table structure for data manipulation
    if _is_pg(conn):
        providers_table = table('providers',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('encrypted_config', sa.Text()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime()),
            column('credential_id', models.types.StringUUID()),
        )
    else:
        providers_table = table('providers',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('encrypted_config', models.types.LongText()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime()),
            column('credential_id', models.types.StringUUID()),
        )

    if _is_pg(conn):
        provider_credential_table = table('provider_credentials',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('credential_name', sa.String()),
            column('encrypted_config', sa.Text()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime())
        )
    else:
        provider_credential_table = table('provider_credentials',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('credential_name', sa.String()),
            column('encrypted_config', models.types.LongText()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime())
        )
    # Define table structure for data manipulation
    providers_table = table('providers',
        column('id', models.types.StringUUID()),
        column('tenant_id', models.types.StringUUID()),
        column('provider_name', sa.String()),
        column('encrypted_config', sa.Text()),
        column('created_at', sa.DateTime()),
        column('updated_at', sa.DateTime()),
        column('credential_id', models.types.StringUUID()),
    )

    provider_credential_table = table('provider_credentials',
        column('id', models.types.StringUUID()),
        column('tenant_id', models.types.StringUUID()),
        column('provider_name', sa.String()),
        column('credential_name', sa.String()),
        column('encrypted_config', sa.Text()),
        column('created_at', sa.DateTime()),
        column('updated_at', sa.DateTime())
    )

    # Get database connection
    conn = op.get_bind()
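The elided body of this function reads rows through these lightweight table() constructs and re-inserts the credential fields. A minimal sketch of that pattern, not the verbatim project code, assuming a placeholder credential name is assigned to migrated rows:

import sqlalchemy as sa

def copy_rows(conn, providers_table, provider_credential_table):
    # Read every provider row, then re-insert its credential fields.
    rows = conn.execute(sa.select(
        providers_table.c.id,
        providers_table.c.tenant_id,
        providers_table.c.provider_name,
        providers_table.c.encrypted_config,
        providers_table.c.created_at,
        providers_table.c.updated_at,
    )).fetchall()
    for row in rows:
        conn.execute(provider_credential_table.insert().values(
            id=row.id,
            tenant_id=row.tenant_id,
            provider_name=row.provider_name,
            credential_name='Default',  # hypothetical placeholder name
            encrypted_config=row.encrypted_config,
            created_at=row.created_at,
            updated_at=row.updated_at,
        ))
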
@ -163,14 +123,8 @@ def migrate_existing_providers_data():

def downgrade():
    # Re-add encrypted_config column to providers table
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
    with op.batch_alter_table('providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))

    # Migrate data back from provider_credentials to providers

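Throughout this diff the removed MySQL branches used Python-side default= callables for primary keys while the surviving PostgreSQL code keeps server_default=. The distinction matters whenever migrations are rendered to SQL scripts; a sketch, assuming the database exposes a uuidv7() SQL function:

import sqlalchemy as sa
import models
from libs.uuid_utils import uuidv7

# default= runs in the client at INSERT time; it never appears in DDL, so
# offline SQL generation ('flask db upgrade ... --sql') carries no default.
client_side = sa.Column('id', models.types.StringUUID(),
                        default=lambda: str(uuidv7()), nullable=False)

# server_default= is emitted inside CREATE TABLE itself and therefore
# survives offline SQL generation unchanged.
server_side = sa.Column('id', models.types.StringUUID(),
                        server_default=sa.text('uuidv7()'), nullable=False)
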
@ -13,10 +13,6 @@ import sqlalchemy as sa
from sqlalchemy.sql import table, column


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


# revision identifiers, used by Alembic.
revision = '0e154742a5fa'
down_revision = 'e8446f481c1e'
@ -26,34 +22,18 @@ depends_on = None

def upgrade():
    # Create provider_model_credentials table
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('provider_model_credentials',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('model_name', sa.String(length=255), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('credential_name', sa.String(length=255), nullable=False),
            sa.Column('encrypted_config', sa.Text(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
        )
    else:
        op.create_table('provider_model_credentials',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('model_name', sa.String(length=255), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('credential_name', sa.String(length=255), nullable=False),
            sa.Column('encrypted_config', models.types.LongText(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
        )
    op.create_table('provider_model_credentials',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=255), nullable=False),
        sa.Column('model_name', sa.String(length=255), nullable=False),
        sa.Column('model_type', sa.String(length=40), nullable=False),
        sa.Column('credential_name', sa.String(length=255), nullable=False),
        sa.Column('encrypted_config', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
    )

    # Create index for provider_model_credentials
    with op.batch_alter_table('provider_model_credentials', schema=None) as batch_op:
@ -86,57 +66,31 @@ def upgrade():

def migrate_existing_provider_models_data():
    """migrate provider_models table data to provider_model_credentials"""
    conn = op.get_bind()
    # Define table structure for data manipulation
    if _is_pg(conn):
        provider_models_table = table('provider_models',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('model_name', sa.String()),
            column('model_type', sa.String()),
            column('encrypted_config', sa.Text()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime()),
            column('credential_id', models.types.StringUUID()),
        )
    else:
        provider_models_table = table('provider_models',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('model_name', sa.String()),
            column('model_type', sa.String()),
            column('encrypted_config', models.types.LongText()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime()),
            column('credential_id', models.types.StringUUID()),
        )

    if _is_pg(conn):
        provider_model_credentials_table = table('provider_model_credentials',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('model_name', sa.String()),
            column('model_type', sa.String()),
            column('credential_name', sa.String()),
            column('encrypted_config', sa.Text()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime())
        )
    else:
        provider_model_credentials_table = table('provider_model_credentials',
            column('id', models.types.StringUUID()),
            column('tenant_id', models.types.StringUUID()),
            column('provider_name', sa.String()),
            column('model_name', sa.String()),
            column('model_type', sa.String()),
            column('credential_name', sa.String()),
            column('encrypted_config', models.types.LongText()),
            column('created_at', sa.DateTime()),
            column('updated_at', sa.DateTime())
        )
    # Define table structure for data manipulation
    provider_models_table = table('provider_models',
        column('id', models.types.StringUUID()),
        column('tenant_id', models.types.StringUUID()),
        column('provider_name', sa.String()),
        column('model_name', sa.String()),
        column('model_type', sa.String()),
        column('encrypted_config', sa.Text()),
        column('created_at', sa.DateTime()),
        column('updated_at', sa.DateTime()),
        column('credential_id', models.types.StringUUID()),
    )

    provider_model_credentials_table = table('provider_model_credentials',
        column('id', models.types.StringUUID()),
        column('tenant_id', models.types.StringUUID()),
        column('provider_name', sa.String()),
        column('model_name', sa.String()),
        column('model_type', sa.String()),
        column('credential_name', sa.String()),
        column('encrypted_config', sa.Text()),
        column('created_at', sa.DateTime()),
        column('updated_at', sa.DateTime())
    )


    # Get database connection
@ -183,14 +137,8 @@ def migrate_existing_provider_models_data():

def downgrade():
    # Re-add encrypted_config column to provider_models table
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('provider_models', schema=None) as batch_op:
            batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('provider_models', schema=None) as batch_op:
            batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
    with op.batch_alter_table('provider_models', schema=None) as batch_op:
        batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))

    if not context.is_offline_mode():
        # Migrate data back from provider_model_credentials to provider_models

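context.is_offline_mode() is the standard Alembic guard here: DDL can always be rendered to a script, but row copying needs a live connection. A sketch of the shape this downgrade takes, with the data step elided:

from alembic import context, op

def downgrade():
    # The DDL portion renders in both online and offline mode.
    # ... add_column as above ...
    if not context.is_offline_mode():
        conn = op.get_bind()
        # Data can only be moved over a real connection; under
        # 'flask db downgrade --sql' this branch is skipped.
        # ... conn.execute(...) back-migration ...
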
@ -8,11 +8,6 @@ Create Date: 2025-08-20 17:47:17.015695
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


# revision identifiers, used by Alembic.
@ -24,33 +19,17 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('oauth_provider_apps',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('app_icon', sa.String(length=255), nullable=False),
            sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False),
            sa.Column('client_id', sa.String(length=255), nullable=False),
            sa.Column('client_secret', sa.String(length=255), nullable=False),
            sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False),
            sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
        )
    else:
        op.create_table('oauth_provider_apps',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('app_icon', sa.String(length=255), nullable=False),
            sa.Column('app_label', sa.JSON(), default='{}', nullable=False),
            sa.Column('client_id', sa.String(length=255), nullable=False),
            sa.Column('client_secret', sa.String(length=255), nullable=False),
            sa.Column('redirect_uris', sa.JSON(), default='[]', nullable=False),
            sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
        )

    op.create_table('oauth_provider_apps',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('app_icon', sa.String(length=255), nullable=False),
        sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False),
        sa.Column('client_id', sa.String(length=255), nullable=False),
        sa.Column('client_secret', sa.String(length=255), nullable=False),
        sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False),
        sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
    )
    with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op:
        batch_op.create_index('oauth_provider_app_client_id_idx', ['client_id'], unique=False)

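Note the JSON defaults above: a plain string such as '{}' passed to server_default is rendered by SQLAlchemy as a quoted SQL literal, while sa.text() is emitted verbatim and must carry its own quotes. Both spellings in this sketch produce DEFAULT '{}':

import sqlalchemy as sa

a = sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False)
b = sa.Column('app_label', sa.JSON(), server_default=sa.text("'{}'"), nullable=False)
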
@ -7,10 +7,6 @@ Create Date: 2025-08-29 10:07:54.163626
"""
from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa


@ -23,12 +19,7 @@ depends_on = None

def upgrade():
    # Add encrypted_headers column to tool_mcp_providers table
    conn = op.get_bind()

    if _is_pg(conn):
        op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
    else:
        op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))
    op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))


def downgrade():

@ -7,9 +7,6 @@ Create Date: 2025-09-11 15:37:17.771298
"""
from alembic import op
import models as models

def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa


@ -22,14 +19,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True))
    else:
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'"), nullable=True))
    with op.batch_alter_table('providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True))

    # ### end Alembic commands ###

@ -9,11 +9,6 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from libs.uuid_utils import uuidv7


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '68519ad5cd18'
@ -24,314 +19,152 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('datasource_oauth_params',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('plugin_id', sa.String(length=255), nullable=False),
            sa.Column('provider', sa.String(length=255), nullable=False),
            sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
            sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
            sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
        )
    else:
        op.create_table('datasource_oauth_params',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('plugin_id', sa.String(length=255), nullable=False),
            sa.Column('provider', sa.String(length=255), nullable=False),
            sa.Column('system_credentials', sa.JSON(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
            sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
        )
    if _is_pg(conn):
        op.create_table('datasource_oauth_tenant_params',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider', sa.String(length=255), nullable=False),
            sa.Column('plugin_id', sa.String(length=255), nullable=False),
            sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
            sa.Column('enabled', sa.Boolean(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
            sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
        )
    else:
        op.create_table('datasource_oauth_tenant_params',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider', sa.String(length=255), nullable=False),
            sa.Column('plugin_id', sa.String(length=255), nullable=False),
            sa.Column('client_params', sa.JSON(), nullable=False),
            sa.Column('enabled', sa.Boolean(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
            sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
        )
    if _is_pg(conn):
        op.create_table('datasource_providers',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('provider', sa.String(length=255), nullable=False),
            sa.Column('plugin_id', sa.String(length=255), nullable=False),
            sa.Column('auth_type', sa.String(length=255), nullable=False),
            sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
            sa.Column('avatar_url', sa.Text(), nullable=True),
            sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
            sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
        )
    else:
        op.create_table('datasource_providers',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('provider', sa.String(length=128), nullable=False),
            sa.Column('plugin_id', sa.String(length=255), nullable=False),
            sa.Column('auth_type', sa.String(length=255), nullable=False),
            sa.Column('encrypted_credentials', sa.JSON(), nullable=False),
            sa.Column('avatar_url', models.types.LongText(), nullable=True),
            sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
            sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
        )
    op.create_table('datasource_oauth_params',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('plugin_id', sa.String(length=255), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
        sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
    )
    op.create_table('datasource_oauth_tenant_params',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('plugin_id', sa.String(length=255), nullable=False),
        sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
    )
    op.create_table('datasource_providers',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('plugin_id', sa.String(length=255), nullable=False),
        sa.Column('auth_type', sa.String(length=255), nullable=False),
        sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column('avatar_url', sa.Text(), nullable=True),
        sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
    )
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)

    if _is_pg(conn):
        op.create_table('document_pipeline_execution_logs',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
            sa.Column('document_id', models.types.StringUUID(), nullable=False),
            sa.Column('datasource_type', sa.String(length=255), nullable=False),
            sa.Column('datasource_info', sa.Text(), nullable=False),
            sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
            sa.Column('input_data', sa.JSON(), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
        )
    else:
        op.create_table('document_pipeline_execution_logs',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
            sa.Column('document_id', models.types.StringUUID(), nullable=False),
            sa.Column('datasource_type', sa.String(length=255), nullable=False),
            sa.Column('datasource_info', models.types.LongText(), nullable=False),
            sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
            sa.Column('input_data', sa.JSON(), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
        )
    op.create_table('document_pipeline_execution_logs',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('datasource_type', sa.String(length=255), nullable=False),
        sa.Column('datasource_info', sa.Text(), nullable=False),
        sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
        sa.Column('input_data', sa.JSON(), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
    )
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)

    if _is_pg(conn):
        op.create_table('pipeline_built_in_templates',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', sa.Text(), nullable=False),
            sa.Column('chunk_structure', sa.String(length=255), nullable=False),
            sa.Column('icon', sa.JSON(), nullable=False),
            sa.Column('yaml_content', sa.Text(), nullable=False),
            sa.Column('copyright', sa.String(length=255), nullable=False),
            sa.Column('privacy_policy', sa.String(length=255), nullable=False),
            sa.Column('position', sa.Integer(), nullable=False),
            sa.Column('install_count', sa.Integer(), nullable=False),
            sa.Column('language', sa.String(length=255), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
        )
    else:
        op.create_table('pipeline_built_in_templates',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', models.types.LongText(), nullable=False),
            sa.Column('chunk_structure', sa.String(length=255), nullable=False),
            sa.Column('icon', sa.JSON(), nullable=False),
            sa.Column('yaml_content', models.types.LongText(), nullable=False),
            sa.Column('copyright', sa.String(length=255), nullable=False),
            sa.Column('privacy_policy', sa.String(length=255), nullable=False),
            sa.Column('position', sa.Integer(), nullable=False),
            sa.Column('install_count', sa.Integer(), nullable=False),
            sa.Column('language', sa.String(length=255), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
        )
    if _is_pg(conn):
        op.create_table('pipeline_customized_templates',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', sa.Text(), nullable=False),
            sa.Column('chunk_structure', sa.String(length=255), nullable=False),
            sa.Column('icon', sa.JSON(), nullable=False),
            sa.Column('position', sa.Integer(), nullable=False),
            sa.Column('yaml_content', sa.Text(), nullable=False),
            sa.Column('install_count', sa.Integer(), nullable=False),
            sa.Column('language', sa.String(length=255), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('pipeline_customized_templates',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', models.types.LongText(), nullable=False),
            sa.Column('chunk_structure', sa.String(length=255), nullable=False),
            sa.Column('icon', sa.JSON(), nullable=False),
            sa.Column('position', sa.Integer(), nullable=False),
            sa.Column('yaml_content', models.types.LongText(), nullable=False),
            sa.Column('install_count', sa.Integer(), nullable=False),
            sa.Column('language', sa.String(length=255), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
        )
    op.create_table('pipeline_built_in_templates',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('chunk_structure', sa.String(length=255), nullable=False),
        sa.Column('icon', sa.JSON(), nullable=False),
        sa.Column('yaml_content', sa.Text(), nullable=False),
        sa.Column('copyright', sa.String(length=255), nullable=False),
        sa.Column('privacy_policy', sa.String(length=255), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('install_count', sa.Integer(), nullable=False),
        sa.Column('language', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
    )
    op.create_table('pipeline_customized_templates',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('chunk_structure', sa.String(length=255), nullable=False),
        sa.Column('icon', sa.JSON(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('yaml_content', sa.Text(), nullable=False),
        sa.Column('install_count', sa.Integer(), nullable=False),
        sa.Column('language', sa.String(length=255), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
    )
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)

    if _is_pg(conn):
        op.create_table('pipeline_recommended_plugins',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('plugin_id', sa.Text(), nullable=False),
            sa.Column('provider_name', sa.Text(), nullable=False),
            sa.Column('position', sa.Integer(), nullable=False),
            sa.Column('active', sa.Boolean(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
        )
    else:
        op.create_table('pipeline_recommended_plugins',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('plugin_id', models.types.LongText(), nullable=False),
            sa.Column('provider_name', models.types.LongText(), nullable=False),
            sa.Column('position', sa.Integer(), nullable=False),
            sa.Column('active', sa.Boolean(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
        )
    if _is_pg(conn):
        op.create_table('pipelines',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
            sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
            sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
        )
    else:
        op.create_table('pipelines',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('description', models.types.LongText(), default=sa.text("''"), nullable=False),
            sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
            sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
        )
    if _is_pg(conn):
        op.create_table('workflow_draft_variable_files',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
            sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
            sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner to of the WorkflowDraftVariableFile, referencing Account.id'),
            sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
            sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
            sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
            sa.Column('value_type', sa.String(20), nullable=False),
            sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
        )
    else:
        op.create_table('workflow_draft_variable_files',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
            sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
            sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner to of the WorkflowDraftVariableFile, referencing Account.id'),
            sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
            sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
            sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
            sa.Column('value_type', sa.String(20), nullable=False),
            sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
        )
    if _is_pg(conn):
        op.create_table('workflow_node_execution_offload',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
            sa.Column('type', sa.String(20), nullable=False),
            sa.Column('file_id', models.types.StringUUID(), nullable=False),
            sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
            sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
        )
    else:
        op.create_table('workflow_node_execution_offload',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
            sa.Column('type', sa.String(20), nullable=False),
            sa.Column('file_id', models.types.StringUUID(), nullable=False),
            sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
            sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
        )
    if _is_pg(conn):
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
            batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
            batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
            batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
            batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
            batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
    else:
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
            batch_op.add_column(sa.Column('icon_info', sa.JSON(), nullable=True))
            batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'"), nullable=True))
            batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
            batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
            batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
    op.create_table('pipeline_recommended_plugins',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('plugin_id', sa.Text(), nullable=False),
        sa.Column('provider_name', sa.Text(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
    )
    op.create_table('pipelines',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
        sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
        sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
    )
    op.create_table('workflow_draft_variable_files',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
        sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
        sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner to of the WorkflowDraftVariableFile, referencing Account.id'),
        sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
        sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
        sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
        sa.Column('value_type', sa.String(20), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
    )
    op.create_table('workflow_node_execution_offload',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
        sa.Column('type', sa.String(20), nullable=False),
        sa.Column('file_id', models.types.StringUUID(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
        sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
    )
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
        batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
        batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
        batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
        batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
        batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))

    with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
        batch_op.add_column(sa.Column('file_id', models.types.StringUUID(), nullable=True, comment='Reference to WorkflowDraftVariableFile if variable is offloaded to external storage'))
@ -342,12 +175,9 @@ def upgrade():
            comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',)
        )
        batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False)
    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('rag_pipeline_variables', models.types.LongText(), default='{}', nullable=False))

    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))

    # ### end Alembic commands ###

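The JSONB columns created in this migration pair naturally with PostgreSQL GIN indexes for containment queries; the deleted migration at the end of this diff drops exactly such an index (source_info_idx). An illustrative call only, with a hypothetical index name:

from alembic import op

op.create_index(
    'datasource_oauth_params_system_credentials_idx',  # hypothetical name
    'datasource_oauth_params',
    ['system_credentials'],
    unique=False,
    postgresql_using='gin',
)
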
@ -7,10 +7,6 @@ Create Date: 2025-10-21 14:30:28.566192
"""
from alembic import op
import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"
import sqlalchemy as sa


@ -33,15 +29,8 @@ def upgrade():

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
            batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))
    else:
        with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
            batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
        batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))

    # ### end Alembic commands ###

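The PostgreSQL branch restores these columns as sa.UUID() while the deleted MySQL branch used models.types.StringUUID(). The repository's actual StringUUID lives in models/types; the sketch below is only a plausible shape for such a type, not the project's implementation:

import uuid

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

class StringUUIDSketch(sa.types.TypeDecorator):
    """Assumed behavior: native UUID on PostgreSQL, CHAR(36) elsewhere."""
    impl = sa.CHAR(36)
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(postgresql.UUID())
        return dialect.type_descriptor(sa.CHAR(36))

    def process_bind_param(self, value, dialect):
        return None if value is None else str(value)

    def process_result_value(self, value, dialect):
        return None if value is None else str(uuid.UUID(str(value)))
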
@ -9,10 +9,7 @@ Create Date: 2025-10-22 16:11:31.805407
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7

def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = "03f8dcbc611e"
@ -22,32 +19,19 @@ depends_on = None

def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
if _is_pg(op.get_context().bind):
|
||||
op.create_table(
|
||||
"workflow_pauses",
|
||||
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
|
||||
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
|
||||
sa.Column("resumed_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("state_object_key", sa.String(length=255), nullable=False),
|
||||
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
|
||||
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
|
||||
)
|
||||
else:
|
||||
op.create_table(
|
||||
"workflow_pauses",
|
||||
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
|
||||
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
|
||||
sa.Column("resumed_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("state_object_key", sa.String(length=255), nullable=False),
|
||||
sa.Column("id", models.types.StringUUID(), default=lambda: str(uuidv7()), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
|
||||
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
|
||||
)
|
||||
op.create_table(
|
||||
"workflow_pauses",
|
||||
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
|
||||
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
|
||||
sa.Column("resumed_at", sa.DateTime(), nullable=True),
|
||||
sa.Column("state_object_key", sa.String(length=255), nullable=False),
|
||||
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
|
||||
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
|
||||
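The two workflow_pauses definitions differ mainly in who generates the primary key. The PostgreSQL branch assumes a uuidv7() SQL function exists server-side (native in newer PostgreSQL, otherwise extension-provided); the MySQL branch generates IDs client-side via the repo's libs.uuid_utils.uuidv7 helper. The practical difference, sketched on hypothetical tables:

import sqlalchemy as sa

from libs.uuid_utils import uuidv7  # Dify helper, as imported above

metadata = sa.MetaData()

# Server-generated: raw SQL inserts may omit the id entirely, since the
# database fills it in from the DDL default.
pauses_pg_example = sa.Table(
    'workflow_pauses_pg_example', metadata,
    sa.Column('id', sa.CHAR(36), server_default=sa.text('uuidv7()'), primary_key=True),
)

# Client-generated: only inserts issued through SQLAlchemy get an id; a plain
# INSERT from a mysql shell would have to supply one by hand.
pauses_mysql_example = sa.Table(
    'workflow_pauses_mysql_example', metadata,
    sa.Column('id', sa.CHAR(36), default=lambda: str(uuidv7()), primary_key=True),
)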
@@ -1,202 +0,0 @@
"""empty message

Revision ID: f26e7cdbb0fe
Revises: 03f8dcbc611e
Create Date: 2025-10-31 15:05:38.637798

"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql, mysql

def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'f26e7cdbb0fe'
down_revision = '03f8dcbc611e'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
            batch_op.drop_index(batch_op.f('source_info_idx'), postgresql_using='gin')
            batch_op.alter_column('source_info',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)


        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.drop_index(batch_op.f('retrieval_model_idx'), postgresql_using='gin')
            batch_op.alter_column('retrieval_model',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True)
            batch_op.alter_column('icon_info',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True)

        with op.batch_alter_table('datasource_oauth_params', schema=None) as batch_op:
            batch_op.alter_column('system_credentials',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)

        with op.batch_alter_table('datasource_oauth_tenant_params', schema=None) as batch_op:
            batch_op.alter_column('client_params',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)

        with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
            batch_op.alter_column('provider',
                existing_type=sa.VARCHAR(length=255),
                type_=sa.String(length=128),
                existing_nullable=False)
            batch_op.alter_column('encrypted_credentials',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=False)

        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.drop_index(batch_op.f('document_metadata_idx'), postgresql_using='gin')
            batch_op.alter_column('doc_metadata',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True)

        with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
            batch_op.alter_column('external_knowledge_id',
                existing_type=sa.TEXT(),
                type_=sa.String(length=512),
                existing_nullable=False)

        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.drop_column('credential_status')

        with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
            op.execute("""
                ALTER TABLE tenant_plugin_auto_upgrade_strategies
                ALTER COLUMN exclude_plugins TYPE JSON
                USING array_to_json(exclude_plugins)
            """)

            op.execute("""
                ALTER TABLE tenant_plugin_auto_upgrade_strategies
                ALTER COLUMN include_plugins TYPE JSON
                USING array_to_json(include_plugins)
            """)

        with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.VARCHAR(length=512),
                type_=sa.String(length=255),
                existing_nullable=False)

    else:
        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.drop_column('credential_status')

        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                existing_type=sa.TIMESTAMP(),
                type_=sa.DateTime(),
                existing_nullable=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    if _is_pg(conn):
        with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
            batch_op.alter_column('plugin_id',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=512),
                existing_nullable=False)

        with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
            op.execute("""
                ALTER TABLE tenant_plugin_auto_upgrade_strategies
                ALTER COLUMN exclude_plugins TYPE JSON
                USING array_to_json(exclude_plugins)
            """)

            op.execute("""
                ALTER TABLE tenant_plugin_auto_upgrade_strategies
                ALTER COLUMN include_plugins TYPE JSON
                USING array_to_json(include_plugins)
            """)

        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True))

        with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
            batch_op.alter_column('external_knowledge_id',
                existing_type=sa.String(length=512),
                type_=sa.TEXT(),
                existing_nullable=False)

        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.alter_column('doc_metadata',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
            batch_op.create_index(batch_op.f('document_metadata_idx'), ['doc_metadata'], unique=False, postgresql_using='gin')

        with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
            batch_op.alter_column('encrypted_credentials',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)
            batch_op.alter_column('provider',
                existing_type=sa.String(length=128),
                type_=sa.VARCHAR(length=255),
                existing_nullable=False)

        with op.batch_alter_table('datasource_oauth_tenant_params', schema=None) as batch_op:
            batch_op.alter_column('client_params',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)

        with op.batch_alter_table('datasource_oauth_params', schema=None) as batch_op:
            batch_op.alter_column('system_credentials',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)

        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.alter_column('icon_info',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
            batch_op.alter_column('retrieval_model',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=True)
            batch_op.create_index(batch_op.f('retrieval_model_idx'), ['retrieval_model'], unique=False, postgresql_using='gin')

        with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
            batch_op.alter_column('source_info',
                existing_type=sa.JSON(),
                type_=postgresql.JSONB(astext_type=sa.Text()),
                existing_nullable=False)
            batch_op.create_index(batch_op.f('source_info_idx'), ['source_info'], unique=False, postgresql_using='gin')
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.alter_column('updated_at',
                existing_type=sa.DateTime(),
                type_=sa.TIMESTAMP(),
                existing_nullable=False)

        with op.batch_alter_table('providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credential_status', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=20), server_default=sa.text("'active'"), nullable=True))
    # ### end Alembic commands ###
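The deleted migration above normalized PostgreSQL-specific JSONB and ARRAY columns to the generic JSON type; the USING clause tells PostgreSQL how to convert existing rows when the cast is not implicit. The same idea on a hypothetical table:

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql


def upgrade():
    conn = op.get_bind()
    if conn.dialect.name == "postgresql":
        with op.batch_alter_table('example_table', schema=None) as batch_op:
            # GIN indexes only apply to JSONB, so drop the index before
            # converting the column to plain JSON.
            batch_op.drop_index('example_payload_idx', postgresql_using='gin')
            batch_op.alter_column(
                'payload',
                existing_type=postgresql.JSONB(astext_type=sa.Text()),
                type_=sa.JSON(),
                existing_nullable=True,
            )
        # An array column needs an explicit conversion expression:
        op.execute("""
            ALTER TABLE example_table
            ALTER COLUMN tags TYPE JSON
            USING array_to_json(tags)
        """)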
@@ -8,12 +8,6 @@ Create Date: 2024-01-18 08:46:37.302657
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '23db93619b9d'
down_revision = '8ae9bc661daa'
@@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))
    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))

    # ### end Alembic commands ###


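models.types.LongText shows up wherever a PostgreSQL TEXT column would exceed MySQL's 64 KB TEXT limit. One plausible shape for such a type, shown for illustration (the real Dify definition may differ):

import sqlalchemy as sa
from sqlalchemy.dialects.mysql import LONGTEXT


class LongText(sa.types.TypeDecorator):
    """Plain TEXT on PostgreSQL, LONGTEXT (up to 4 GB) on MySQL."""

    impl = sa.Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "mysql":
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(sa.Text())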
@@ -7,15 +7,8 @@ Create Date: 2023-12-14 11:26:12.287264
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '246ba09cbbdb'
down_revision = '714aafe25d39'
@@ -25,33 +18,17 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('app_annotation_settings',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('app_id', postgresql.UUID(), nullable=False),
            sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
            sa.Column('collection_binding_id', postgresql.UUID(), nullable=False),
            sa.Column('created_user_id', postgresql.UUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_user_id', postgresql.UUID(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
        )
    else:
        op.create_table('app_annotation_settings',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
            sa.Column('collection_binding_id', models.types.StringUUID(), nullable=False),
            sa.Column('created_user_id', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_user_id', models.types.StringUUID(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
        )

    op.create_table('app_annotation_settings',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('app_id', postgresql.UUID(), nullable=False),
        sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
        sa.Column('collection_binding_id', postgresql.UUID(), nullable=False),
        sa.Column('created_user_id', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_user_id', postgresql.UUID(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
    )
    with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
        batch_op.create_index('app_annotation_settings_app_idx', ['app_id'], unique=False)

@@ -63,14 +40,8 @@ def upgrade():

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))

    with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
        batch_op.drop_index('app_annotation_settings_app_idx')

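uuid_generate_v4() comes from PostgreSQL's uuid-ossp extension, which is why the MySQL branch falls back to a client-side uuid4. If a fresh database might lack the extension, a migration can guard for it first (illustrative):

from alembic import op


def upgrade():
    if op.get_bind().dialect.name == "postgresql":
        # No-op if the extension is already installed.
        op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')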
@@ -10,10 +10,6 @@ from alembic import op

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '2a3aebbbf4bb'
down_revision = 'c031d46af369'
@@ -23,14 +19,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))
    with op.batch_alter_table('apps', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))

    # ### end Alembic commands ###


@@ -9,12 +9,6 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '2e9819ca5b28'
down_revision = 'ab23c11305d4'
@@ -24,35 +18,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
            batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
            batch_op.drop_column('dataset_id')
    else:
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
            batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
            batch_op.drop_column('dataset_id')
    with op.batch_alter_table('api_tokens', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
        batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
        batch_op.drop_column('dataset_id')

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
            batch_op.drop_index('api_token_tenant_idx')
            batch_op.drop_column('tenant_id')
    else:
        with op.batch_alter_table('api_tokens', schema=None) as batch_op:
            batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
            batch_op.drop_index('api_token_tenant_idx')
            batch_op.drop_column('tenant_id')
    with op.batch_alter_table('api_tokens', schema=None) as batch_op:
        batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
        batch_op.drop_index('api_token_tenant_idx')
        batch_op.drop_column('tenant_id')

    # ### end Alembic commands ###

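Migrations like this one rely on upgrade() and downgrade() staying symmetric. One way to eyeball both paths without a live database is Alembic's offline mode, which renders the DDL as SQL (this assumes an alembic.ini in the working directory; adjust to the repo's actual layout):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "base:head", sql=True)    # prints the upgrade DDL
command.downgrade(cfg, "head:base", sql=True)  # prints the downgrade DDL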
@@ -8,12 +8,6 @@ Create Date: 2024-01-24 10:58:15.644445
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '380c6aa5a70d'
down_revision = 'dfb3b7f477da'
@@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))
    else:
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tool_labels_str', models.types.LongText(), default=sa.text("'{}'"), nullable=False))
    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))

    # ### end Alembic commands ###


@@ -7,14 +7,9 @@ Create Date: 2024-05-14 09:27:18.857890
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '3b18fea55204'
down_revision = '7bdef072e63a'
@@ -24,24 +19,13 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tool_label_bindings',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tool_id', sa.String(length=64), nullable=False),
            sa.Column('tool_type', sa.String(length=40), nullable=False),
            sa.Column('label_name', sa.String(length=40), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
        )
    else:
        op.create_table('tool_label_bindings',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tool_id', sa.String(length=64), nullable=False),
            sa.Column('tool_type', sa.String(length=40), nullable=False),
            sa.Column('label_name', sa.String(length=40), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
        )
    op.create_table('tool_label_bindings',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tool_id', sa.String(length=64), nullable=False),
        sa.Column('tool_type', sa.String(length=40), nullable=False),
        sa.Column('label_name', sa.String(length=40), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
    )

    with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), server_default='', nullable=True))

@@ -7,15 +7,8 @@ Create Date: 2024-04-11 06:17:34.278594
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '3c7cac9521c6'
down_revision = 'c3311b089690'
@@ -25,54 +18,28 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tag_bindings',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=True),
            sa.Column('tag_id', postgresql.UUID(), nullable=True),
            sa.Column('target_id', postgresql.UUID(), nullable=True),
            sa.Column('created_by', postgresql.UUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
        )
    else:
        op.create_table('tag_bindings',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('tag_id', models.types.StringUUID(), nullable=True),
            sa.Column('target_id', models.types.StringUUID(), nullable=True),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
        )

    op.create_table('tag_bindings',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=True),
        sa.Column('tag_id', postgresql.UUID(), nullable=True),
        sa.Column('target_id', postgresql.UUID(), nullable=True),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
    )
    with op.batch_alter_table('tag_bindings', schema=None) as batch_op:
        batch_op.create_index('tag_bind_tag_id_idx', ['tag_id'], unique=False)
        batch_op.create_index('tag_bind_target_id_idx', ['target_id'], unique=False)

    if _is_pg(conn):
        op.create_table('tags',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=True),
            sa.Column('type', sa.String(length=16), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('created_by', postgresql.UUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tag_pkey')
        )
    else:
        op.create_table('tags',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('type', sa.String(length=16), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('created_by', models.types.StringUUID(), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tag_pkey')
        )

    op.create_table('tags',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=True),
        sa.Column('type', sa.String(length=16), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tag_pkey')
    )
    with op.batch_alter_table('tags', schema=None) as batch_op:
        batch_op.create_index('tag_name_idx', ['name'], unique=False)
        batch_op.create_index('tag_type_idx', ['type'], unique=False)

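CURRENT_TIMESTAMP(0), as written in the PostgreSQL branches, requests a precision-0 timestamp and does not carry over cleanly as literal SQL to every MySQL column type, which is why the MySQL branches switch to sa.func.current_timestamp(); SQLAlchemy then renders each dialect's own spelling. Minimal form:

import sqlalchemy as sa

created_at = sa.Column(
    'created_at', sa.DateTime(),
    server_default=sa.func.current_timestamp(),  # rendered per-dialect
    nullable=False,
)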
@@ -7,15 +7,8 @@ Create Date: 2024-01-05 15:26:25.117551
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '3ef9b2b6bee6'
down_revision = '89c7899ca936'
@@ -25,96 +18,44 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('tool_api_providers',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('name', sa.String(length=40), nullable=False),
            sa.Column('schema', sa.Text(), nullable=False),
            sa.Column('schema_type_str', sa.String(length=40), nullable=False),
            sa.Column('user_id', postgresql.UUID(), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
            sa.Column('description_str', sa.Text(), nullable=False),
            sa.Column('tools_str', sa.Text(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('tool_api_providers',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('name', sa.String(length=40), nullable=False),
            sa.Column('schema', models.types.LongText(), nullable=False),
            sa.Column('schema_type_str', sa.String(length=40), nullable=False),
            sa.Column('user_id', models.types.StringUUID(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('description_str', models.types.LongText(), nullable=False),
            sa.Column('tools_str', models.types.LongText(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
        )
    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('tool_builtin_providers',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=True),
            sa.Column('user_id', postgresql.UUID(), nullable=False),
            sa.Column('provider', sa.String(length=40), nullable=False),
            sa.Column('encrypted_credentials', sa.Text(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
            sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('tool_builtin_providers',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
            sa.Column('user_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider', sa.String(length=40), nullable=False),
            sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
            sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
        )
    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('tool_published_apps',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('app_id', postgresql.UUID(), nullable=False),
            sa.Column('user_id', postgresql.UUID(), nullable=False),
            sa.Column('description', sa.Text(), nullable=False),
            sa.Column('llm_description', sa.Text(), nullable=False),
            sa.Column('query_description', sa.Text(), nullable=False),
            sa.Column('query_name', sa.String(length=40), nullable=False),
            sa.Column('tool_name', sa.String(length=40), nullable=False),
            sa.Column('author', sa.String(length=40), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
            sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
            sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('tool_published_apps',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('app_id', models.types.StringUUID(), nullable=False),
            sa.Column('user_id', models.types.StringUUID(), nullable=False),
            sa.Column('description', models.types.LongText(), nullable=False),
            sa.Column('llm_description', models.types.LongText(), nullable=False),
            sa.Column('query_description', models.types.LongText(), nullable=False),
            sa.Column('query_name', sa.String(length=40), nullable=False),
            sa.Column('tool_name', sa.String(length=40), nullable=False),
            sa.Column('author', sa.String(length=40), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
            sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
            sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
        )
    op.create_table('tool_api_providers',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('name', sa.String(length=40), nullable=False),
        sa.Column('schema', sa.Text(), nullable=False),
        sa.Column('schema_type_str', sa.String(length=40), nullable=False),
        sa.Column('user_id', postgresql.UUID(), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('description_str', sa.Text(), nullable=False),
        sa.Column('tools_str', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
    )
    op.create_table('tool_builtin_providers',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=True),
        sa.Column('user_id', postgresql.UUID(), nullable=False),
        sa.Column('provider', sa.String(length=40), nullable=False),
        sa.Column('encrypted_credentials', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
    )
    op.create_table('tool_published_apps',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('app_id', postgresql.UUID(), nullable=False),
        sa.Column('user_id', postgresql.UUID(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('llm_description', sa.Text(), nullable=False),
        sa.Column('query_description', sa.Text(), nullable=False),
        sa.Column('query_name', sa.String(length=40), nullable=False),
        sa.Column('tool_name', sa.String(length=40), nullable=False),
        sa.Column('author', sa.String(length=40), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
        sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
        sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
    )
    # ### end Alembic commands ###



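The per-dialect create_table branches above restate every column. A small factory can express the split once; this is a sketch only — the Dify migrations deliberately keep the explicit blocks, and uuid_pk here is hypothetical:

from uuid import uuid4

import sqlalchemy as sa


def uuid_pk(is_pg: bool) -> sa.Column:
    # Server-generated id on PostgreSQL, client-generated elsewhere.
    if is_pg:
        return sa.Column('id', sa.CHAR(36), server_default=sa.text('uuid_generate_v4()'), nullable=False)
    return sa.Column('id', sa.CHAR(36), default=lambda: str(uuid4()), nullable=False)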
@@ -9,12 +9,6 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '42e85ed5564d'
down_revision = 'f9107f83abab'
@@ -24,59 +18,31 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('app_model_config_id',
                existing_type=postgresql.UUID(),
                nullable=True)
            batch_op.alter_column('model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=True)
            batch_op.alter_column('model_id',
                existing_type=sa.VARCHAR(length=255),
                nullable=True)
    else:
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('app_model_config_id',
                existing_type=models.types.StringUUID(),
                nullable=True)
            batch_op.alter_column('model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=True)
            batch_op.alter_column('model_id',
                existing_type=sa.VARCHAR(length=255),
                nullable=True)
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.alter_column('app_model_config_id',
            existing_type=postgresql.UUID(),
            nullable=True)
        batch_op.alter_column('model_provider',
            existing_type=sa.VARCHAR(length=255),
            nullable=True)
        batch_op.alter_column('model_id',
            existing_type=sa.VARCHAR(length=255),
            nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('model_id',
                existing_type=sa.VARCHAR(length=255),
                nullable=False)
            batch_op.alter_column('model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=False)
            batch_op.alter_column('app_model_config_id',
                existing_type=postgresql.UUID(),
                nullable=False)
    else:
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.alter_column('model_id',
                existing_type=sa.VARCHAR(length=255),
                nullable=False)
            batch_op.alter_column('model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=False)
            batch_op.alter_column('app_model_config_id',
                existing_type=models.types.StringUUID(),
                nullable=False)
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.alter_column('model_id',
            existing_type=sa.VARCHAR(length=255),
            nullable=False)
        batch_op.alter_column('model_provider',
            existing_type=sa.VARCHAR(length=255),
            nullable=False)
        batch_op.alter_column('app_model_config_id',
            existing_type=postgresql.UUID(),
            nullable=False)

    # ### end Alembic commands ###

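Why existing_type is restated even when only nullability changes: MySQL's ALTER TABLE ... MODIFY must repeat the full column definition, so Alembic requires the current type to emit correct DDL there. Minimal form, on a hypothetical table:

import sqlalchemy as sa
from alembic import op


def upgrade():
    with op.batch_alter_table('conversations_example', schema=None) as batch_op:
        # Without existing_type, the MySQL MODIFY statement could not be built.
        batch_op.alter_column('model_provider',
            existing_type=sa.VARCHAR(length=255),
            nullable=True)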
@@ -7,15 +7,8 @@ Create Date: 2024-01-15 11:37:16.782718
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '4823da1d26cf'
down_revision = '053da0c1d756'
@@ -25,30 +18,16 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('tool_files',
            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('user_id', postgresql.UUID(), nullable=False),
            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
            sa.Column('conversation_id', postgresql.UUID(), nullable=False),
            sa.Column('file_key', sa.String(length=255), nullable=False),
            sa.Column('mimetype', sa.String(length=255), nullable=False),
            sa.Column('original_url', sa.String(length=255), nullable=True),
            sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
        )
    else:
        op.create_table('tool_files',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('user_id', models.types.StringUUID(), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
            sa.Column('file_key', sa.String(length=255), nullable=False),
            sa.Column('mimetype', sa.String(length=255), nullable=False),
            sa.Column('original_url', sa.String(length=255), nullable=True),
            sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
        )
    op.create_table('tool_files',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('user_id', postgresql.UUID(), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('conversation_id', postgresql.UUID(), nullable=False),
        sa.Column('file_key', sa.String(length=255), nullable=False),
        sa.Column('mimetype', sa.String(length=255), nullable=False),
        sa.Column('original_url', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
    )
    # ### end Alembic commands ###



@@ -8,12 +8,6 @@ Create Date: 2024-01-12 03:42:27.362415
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '4829e54d2fee'
down_revision = '114eed84c228'
@@ -23,39 +17,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id',
                existing_type=postgresql.UUID(),
                nullable=True)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id',
                existing_type=models.types.StringUUID(),
                nullable=True)
    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.alter_column('message_chain_id',
            existing_type=postgresql.UUID(),
            nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id',
                existing_type=postgresql.UUID(),
                nullable=False)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.alter_column('message_chain_id',
                existing_type=models.types.StringUUID(),
                nullable=False)
    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.alter_column('message_chain_id',
            existing_type=postgresql.UUID(),
            nullable=False)

    # ### end Alembic commands ###

@@ -8,10 +8,6 @@ Create Date: 2023-08-28 20:58:50.077056
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '4bcffcd64aa4'
down_revision = '853f9b9cd3b6'
@@ -21,55 +17,29 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.alter_column('embedding_model',
                existing_type=sa.VARCHAR(length=255),
                nullable=True,
                existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
            batch_op.alter_column('embedding_model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=True,
                existing_server_default=sa.text("'openai'::character varying"))
    else:
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.alter_column('embedding_model',
                existing_type=sa.VARCHAR(length=255),
                nullable=True,
                existing_server_default=sa.text("'text-embedding-ada-002'"))
            batch_op.alter_column('embedding_model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=True,
                existing_server_default=sa.text("'openai'"))
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.alter_column('embedding_model',
            existing_type=sa.VARCHAR(length=255),
            nullable=True,
            existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
        batch_op.alter_column('embedding_model_provider',
            existing_type=sa.VARCHAR(length=255),
            nullable=True,
            existing_server_default=sa.text("'openai'::character varying"))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.alter_column('embedding_model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=False,
                existing_server_default=sa.text("'openai'::character varying"))
            batch_op.alter_column('embedding_model',
                existing_type=sa.VARCHAR(length=255),
                nullable=False,
                existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
    else:
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.alter_column('embedding_model_provider',
                existing_type=sa.VARCHAR(length=255),
                nullable=False,
                existing_server_default=sa.text("'openai'"))
            batch_op.alter_column('embedding_model',
                existing_type=sa.VARCHAR(length=255),
                nullable=False,
                existing_server_default=sa.text("'text-embedding-ada-002'"))
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.alter_column('embedding_model_provider',
            existing_type=sa.VARCHAR(length=255),
            nullable=False,
            existing_server_default=sa.text("'openai'::character varying"))
        batch_op.alter_column('embedding_model',
            existing_type=sa.VARCHAR(length=255),
            nullable=False,
            existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))

    # ### end Alembic commands ###

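The ::character varying suffix in the PostgreSQL defaults is a Postgres cast and is not valid MySQL syntax, hence the two spellings above. A portable alternative is to hand SQLAlchemy a plain Python string, which it quotes for whichever dialect is active:

import sqlalchemy as sa

# SQLAlchemy renders DEFAULT 'text-embedding-ada-002' on both dialects.
embedding_model = sa.Column('embedding_model', sa.String(length=255),
                            nullable=True,
                            server_default='text-embedding-ada-002')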
@@ -7,14 +7,9 @@ Create Date: 2024-05-10 12:08:09.812736
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '4e99a8df00ff'
down_revision = '64a70a7aab8b'
@@ -24,67 +19,34 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('load_balancing_model_configs',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('model_name', sa.String(length=255), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('encrypted_config', sa.Text(), nullable=True),
            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
        )
    else:
        op.create_table('load_balancing_model_configs',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('model_name', sa.String(length=255), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('name', sa.String(length=255), nullable=False),
            sa.Column('encrypted_config', models.types.LongText(), nullable=True),
            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
        )

    op.create_table('load_balancing_model_configs',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=255), nullable=False),
        sa.Column('model_name', sa.String(length=255), nullable=False),
        sa.Column('model_type', sa.String(length=40), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('encrypted_config', sa.Text(), nullable=True),
        sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
    )
    with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
        batch_op.create_index('load_balancing_model_config_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)

    if _is_pg(conn):
        op.create_table('provider_model_settings',
            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('model_name', sa.String(length=255), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
            sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
        )
    else:
        op.create_table('provider_model_settings',
            sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
            sa.Column('provider_name', sa.String(length=255), nullable=False),
            sa.Column('model_name', sa.String(length=255), nullable=False),
            sa.Column('model_type', sa.String(length=40), nullable=False),
            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
            sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
        )

    op.create_table('provider_model_settings',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=255), nullable=False),
        sa.Column('model_name', sa.String(length=255), nullable=False),
        sa.Column('model_type', sa.String(length=40), nullable=False),
        sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
    )
    with op.batch_alter_table('provider_model_settings', schema=None) as batch_op:
        batch_op.create_index('provider_model_setting_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)


@@ -8,10 +8,6 @@ Create Date: 2023-08-11 14:38:15.499460
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '5022897aaceb'
down_revision = 'bf0aec5ba2cf'
@ -21,20 +17,10 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
conn = op.get_bind()
|
||||
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Keep original syntax
|
||||
with op.batch_alter_table('embeddings', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False))
|
||||
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
|
||||
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
|
||||
else:
|
||||
# MySQL: Use compatible syntax
|
||||
with op.batch_alter_table('embeddings', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'"), nullable=False))
|
||||
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
|
||||
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
|
||||
with op.batch_alter_table('embeddings', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False))
|
||||
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
|
||||
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
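Note: "'text-embedding-ada-002'::character varying" uses PostgreSQL's cast syntax inside the server default, which is why the removed MySQL branch stripped the "::character varying" suffix. A portable alternative (a sketch, not taken from this diff) is to hand server_default a plain Python string and let SQLAlchemy quote it for the active dialect:

import sqlalchemy as sa

# sa.text() is passed through verbatim, so any cast syntax inside it must be
# valid on the target database.
pg_specific = sa.Column('model_name', sa.String(length=40),
                        server_default=sa.text("'text-embedding-ada-002'::character varying"))

# A plain string is rendered as a correctly quoted literal on any dialect.
portable = sa.Column('model_name', sa.String(length=40),
                     server_default='text-embedding-ada-002')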
@ -10,10 +10,6 @@ from alembic import op

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '53bf8af60645'
down_revision = '8e5588e6412e'
@ -23,43 +19,23 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('embeddings', schema=None) as batch_op:
            batch_op.alter_column('provider_name',
                existing_type=sa.VARCHAR(length=40),
                type_=sa.String(length=255),
                existing_nullable=False,
                existing_server_default=sa.text("''::character varying"))
    else:
        with op.batch_alter_table('embeddings', schema=None) as batch_op:
            batch_op.alter_column('provider_name',
                existing_type=sa.VARCHAR(length=40),
                type_=sa.String(length=255),
                existing_nullable=False,
                existing_server_default=sa.text("''"))
    with op.batch_alter_table('embeddings', schema=None) as batch_op:
        batch_op.alter_column('provider_name',
            existing_type=sa.VARCHAR(length=40),
            type_=sa.String(length=255),
            existing_nullable=False,
            existing_server_default=sa.text("''::character varying"))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('embeddings', schema=None) as batch_op:
            batch_op.alter_column('provider_name',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=40),
                existing_nullable=False,
                existing_server_default=sa.text("''::character varying"))
    else:
        with op.batch_alter_table('embeddings', schema=None) as batch_op:
            batch_op.alter_column('provider_name',
                existing_type=sa.String(length=255),
                type_=sa.VARCHAR(length=40),
                existing_nullable=False,
                existing_server_default=sa.text("''"))
    with op.batch_alter_table('embeddings', schema=None) as batch_op:
        batch_op.alter_column('provider_name',
            existing_type=sa.String(length=255),
            type_=sa.VARCHAR(length=40),
            existing_nullable=False,
            existing_server_default=sa.text("''::character varying"))

    # ### end Alembic commands ###

@ -8,12 +8,6 @@ Create Date: 2024-03-14 04:54:56.679506
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '563cf8bf777b'
down_revision = 'b5429b71023c'
@ -23,35 +17,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id',
                existing_type=postgresql.UUID(),
                nullable=True)
    else:
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id',
                existing_type=models.types.StringUUID(),
                nullable=True)
    with op.batch_alter_table('tool_files', schema=None) as batch_op:
        batch_op.alter_column('conversation_id',
            existing_type=postgresql.UUID(),
            nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id',
                existing_type=postgresql.UUID(),
                nullable=False)
    else:
        with op.batch_alter_table('tool_files', schema=None) as batch_op:
            batch_op.alter_column('conversation_id',
                existing_type=models.types.StringUUID(),
                nullable=False)
    with op.batch_alter_table('tool_files', schema=None) as batch_op:
        batch_op.alter_column('conversation_id',
            existing_type=postgresql.UUID(),
            nullable=False)

    # ### end Alembic commands ###
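Note: the hunk above swaps models.types.StringUUID() back to postgresql.UUID() in existing_type. StringUUID is presumably Dify's cross-dialect UUID column type; a minimal sketch of the shape such a TypeDecorator usually takes (an assumption about its design, not the project's actual code):

from sqlalchemy import CHAR
from sqlalchemy.dialects import postgresql
from sqlalchemy.types import TypeDecorator


class StringUUID(TypeDecorator):
    """Native UUID on PostgreSQL, CHAR(36) everywhere else."""
    impl = CHAR
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(postgresql.UUID())
        return dialect.type_descriptor(CHAR(36))

    def process_result_value(self, value, dialect):
        # Normalize to str so callers see the same type on every backend.
        return str(value) if value is not None else None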
@ -8,10 +8,6 @@ Create Date: 2023-06-15 13:33:00.357467
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '614f77cecc48'
down_revision = 'a45f4dfde53b'
@ -21,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('accounts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))
    else:
        with op.batch_alter_table('accounts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False))
    with op.batch_alter_table('accounts', schema=None) as batch_op:
        batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))

    # ### end Alembic commands ###
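Note: CURRENT_TIMESTAMP(0) pins the fractional-seconds precision, a form PostgreSQL accepts, while sa.func.current_timestamp() lets SQLAlchemy render a default suited to the active dialect. A side-by-side sketch (column names are illustrative):

import sqlalchemy as sa

# Raw SQL fragment: emitted verbatim, so the "(0)" precision suffix must be
# understood by the target database.
pg_style = sa.Column('created_at', sa.DateTime(),
                     server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)

# Generic SQL function: rendered as CURRENT_TIMESTAMP in whatever form the
# dialect expects, which is why the cross-dialect branches preferred it.
generic = sa.Column('created_at', sa.DateTime(),
                    server_default=sa.func.current_timestamp(), nullable=False)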
File diff suppressed because it is too large
@ -7,15 +7,8 @@ Create Date: 2023-09-06 16:51:27.385844
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '6dcb43972bdc'
down_revision = '4bcffcd64aa4'
@ -25,53 +18,27 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('dataset_retriever_resources',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('message_id', postgresql.UUID(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('dataset_id', postgresql.UUID(), nullable=False),
        sa.Column('dataset_name', sa.Text(), nullable=False),
        sa.Column('document_id', postgresql.UUID(), nullable=False),
        sa.Column('document_name', sa.Text(), nullable=False),
        sa.Column('data_source_type', sa.Text(), nullable=False),
        sa.Column('segment_id', postgresql.UUID(), nullable=False),
        sa.Column('score', sa.Float(), nullable=True),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('hit_count', sa.Integer(), nullable=True),
        sa.Column('word_count', sa.Integer(), nullable=True),
        sa.Column('segment_position', sa.Integer(), nullable=True),
        sa.Column('index_node_hash', sa.Text(), nullable=True),
        sa.Column('retriever_from', sa.Text(), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
        )
    else:
        op.create_table('dataset_retriever_resources',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('message_id', models.types.StringUUID(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('dataset_name', models.types.LongText(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_name', models.types.LongText(), nullable=False),
        sa.Column('data_source_type', models.types.LongText(), nullable=False),
        sa.Column('segment_id', models.types.StringUUID(), nullable=False),
        sa.Column('score', sa.Float(), nullable=True),
        sa.Column('content', models.types.LongText(), nullable=False),
        sa.Column('hit_count', sa.Integer(), nullable=True),
        sa.Column('word_count', sa.Integer(), nullable=True),
        sa.Column('segment_position', sa.Integer(), nullable=True),
        sa.Column('index_node_hash', models.types.LongText(), nullable=True),
        sa.Column('retriever_from', models.types.LongText(), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
        )

    op.create_table('dataset_retriever_resources',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('message_id', postgresql.UUID(), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
    sa.Column('dataset_name', sa.Text(), nullable=False),
    sa.Column('document_id', postgresql.UUID(), nullable=False),
    sa.Column('document_name', sa.Text(), nullable=False),
    sa.Column('data_source_type', sa.Text(), nullable=False),
    sa.Column('segment_id', postgresql.UUID(), nullable=False),
    sa.Column('score', sa.Float(), nullable=True),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('hit_count', sa.Integer(), nullable=True),
    sa.Column('word_count', sa.Integer(), nullable=True),
    sa.Column('segment_position', sa.Integer(), nullable=True),
    sa.Column('index_node_hash', sa.Text(), nullable=True),
    sa.Column('retriever_from', sa.Text(), nullable=False),
    sa.Column('created_by', postgresql.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
    )
    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
        batch_op.create_index('dataset_retriever_resource_message_id_idx', ['message_id'], unique=False)
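Note: the two create_table variants above put the id default in different places. server_default=sa.text('uuid_generate_v4()') is baked into the DDL, so the database fills the key even for inserts issued outside SQLAlchemy; default=lambda: str(uuid4()) only runs when the INSERT goes through SQLAlchemy. A sketch of the distinction (table and column names are illustrative):

import uuid

import sqlalchemy as sa

metadata = sa.MetaData()

example = sa.Table(
    'example', metadata,
    # Server-side: part of the CREATE TABLE statement; raw SQL inserts get it too.
    sa.Column('id_server', sa.String(36),
              server_default=sa.text('uuid_generate_v4()')),
    # Client-side: computed in Python at flush time; invisible to raw SQL.
    sa.Column('id_client', sa.String(36), default=lambda: str(uuid.uuid4())),
)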
@ -7,15 +7,8 @@ Create Date: 2023-09-13 22:16:48.027810
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '6e2cfb077b04'
down_revision = '77e83833755c'
@ -25,36 +18,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('dataset_collection_bindings',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('model_name', sa.String(length=40), nullable=False),
        sa.Column('collection_name', sa.String(length=64), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
        )
    else:
        op.create_table('dataset_collection_bindings',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('model_name', sa.String(length=40), nullable=False),
        sa.Column('collection_name', sa.String(length=64), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
        )

    op.create_table('dataset_collection_bindings',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('provider_name', sa.String(length=40), nullable=False),
    sa.Column('model_name', sa.String(length=40), nullable=False),
    sa.Column('collection_name', sa.String(length=64), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
    )
    with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
        batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
    else:
        with op.batch_alter_table('datasets', schema=None) as batch_op:
            batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))

    # ### end Alembic commands ###

@ -8,12 +8,6 @@ Create Date: 2023-12-14 06:38:02.972527
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '714aafe25d39'
down_revision = 'f2a6fc85e260'
@ -23,16 +17,9 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
            batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
    else:
        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
            batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
            batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))
    with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
        batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
        batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))

    # ### end Alembic commands ###

@ -8,12 +8,6 @@ Create Date: 2023-09-06 17:26:40.311927
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '77e83833755c'
down_revision = '6dcb43972bdc'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))

    # ### end Alembic commands ###
@ -7,14 +7,9 @@ Create Date: 2024-05-14 07:31:29.702766
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '7b45942e39bb'
down_revision = '4e99a8df00ff'
@ -24,75 +19,44 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('data_source_api_key_auth_bindings',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('category', sa.String(length=255), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('credentials', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
        sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('data_source_api_key_auth_bindings',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('category', sa.String(length=255), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('credentials', models.types.LongText(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
        sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
        )

    op.create_table('data_source_api_key_auth_bindings',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('category', sa.String(length=255), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('credentials', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
    sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
    )
    with op.batch_alter_table('data_source_api_key_auth_bindings', schema=None) as batch_op:
        batch_op.create_index('data_source_api_key_auth_binding_provider_idx', ['provider'], unique=False)
        batch_op.create_index('data_source_api_key_auth_binding_tenant_id_idx', ['tenant_id'], unique=False)

    with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
        batch_op.drop_index('source_binding_tenant_id_idx')
        if _is_pg(conn):
            batch_op.drop_index('source_info_idx', postgresql_using='gin')
        else:
            pass
        batch_op.drop_index('source_info_idx')

    op.rename_table('data_source_bindings', 'data_source_oauth_bindings')

    with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
        batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)
        if _is_pg(conn):
            batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
        else:
            pass
        batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
        if _is_pg(conn):
            batch_op.drop_index('source_info_idx', postgresql_using='gin')
        else:
            pass
        batch_op.drop_index('source_info_idx', postgresql_using='gin')
        batch_op.drop_index('source_binding_tenant_id_idx')

    op.rename_table('data_source_oauth_bindings', 'data_source_bindings')

    with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
        if _is_pg(conn):
            batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
        else:
            pass
        batch_op.create_index('source_info_idx', ['source_info'], unique=False)
        batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)

    with op.batch_alter_table('data_source_api_key_auth_bindings', schema=None) as batch_op:
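Note: postgresql_using='gin' is a PostgreSQL-specific index option, and the removed cross-dialect code skipped the index entirely off PostgreSQL (the bare "else: pass"). A sketch of that guard in isolation (table and column mirror the hunk; the structure is illustrative):

from alembic import op


def upgrade():
    conn = op.get_bind()
    with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
        if conn.dialect.name == "postgresql":
            # GIN indexes accelerate containment lookups on JSONB columns;
            # the postgresql_using option only means something to PostgreSQL.
            batch_op.create_index('source_info_idx', ['source_info'],
                                  unique=False, postgresql_using='gin')
        # Other dialects have no direct equivalent here, so the index is skipped.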
@ -7,14 +7,9 @@ Create Date: 2024-05-04 09:47:19.366961
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '7bdef072e63a'
down_revision = '5fda94355fce'
@ -24,42 +19,21 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('tool_workflow_providers',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('name', sa.String(length=40), nullable=False),
        sa.Column('icon', sa.String(length=255), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('user_id', models.types.StringUUID(), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
        sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
        sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('tool_workflow_providers',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('name', sa.String(length=40), nullable=False),
        sa.Column('icon', sa.String(length=255), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('user_id', models.types.StringUUID(), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('description', models.types.LongText(), nullable=False),
        sa.Column('parameter_configuration', models.types.LongText(), default='[]', nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
        sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
        sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
        )
    op.create_table('tool_workflow_providers',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('name', sa.String(length=40), nullable=False),
    sa.Column('icon', sa.String(length=255), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('user_id', models.types.StringUUID(), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
    sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
    sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
    )
    # ### end Alembic commands ###

@ -7,15 +7,8 @@ Create Date: 2023-07-10 10:26:50.074515
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '7ce5a52e4eee'
down_revision = '2beac44e5f5f'
@ -25,40 +18,19 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        op.create_table('tool_providers',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('tool_name', sa.String(length=40), nullable=False),
        sa.Column('encrypted_credentials', sa.Text(), nullable=True),
        sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
        )
    else:
        # MySQL: Use compatible syntax
        op.create_table('tool_providers',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('tool_name', sa.String(length=40), nullable=False),
        sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
        sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
        )
    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))
    op.create_table('tool_providers',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('tool_name', sa.String(length=40), nullable=False),
    sa.Column('encrypted_credentials', sa.Text(), nullable=True),
    sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
    sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
    )
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))

    # ### end Alembic commands ###

@ -7,14 +7,9 @@ Create Date: 2024-06-25 03:20:46.012193
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '7e6a8693e07a'
down_revision = 'b2602e131636'
@ -24,27 +19,14 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('dataset_permissions',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('account_id', models.types.StringUUID(), nullable=False),
        sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey')
        )
    else:
        op.create_table('dataset_permissions',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
        sa.Column('account_id', models.types.StringUUID(), nullable=False),
        sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey')
        )

    op.create_table('dataset_permissions',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
    sa.Column('account_id', models.types.StringUUID(), nullable=False),
    sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey')
    )
    with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
        batch_op.create_index('idx_dataset_permissions_account_id', ['account_id'], unique=False)
        batch_op.create_index('idx_dataset_permissions_dataset_id', ['dataset_id'], unique=False)

@ -8,12 +8,6 @@ Create Date: 2023-12-14 07:36:50.705362
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '88072f0caa04'
down_revision = '246ba09cbbdb'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tenants', schema=None) as batch_op:
            batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('tenants', schema=None) as batch_op:
            batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True))
    with op.batch_alter_table('tenants', schema=None) as batch_op:
        batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))

    # ### end Alembic commands ###
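Note: every hunk in this comparison routes column changes through op.batch_alter_table(). On backends with full ALTER TABLE support it simply groups the statements; on SQLite it transparently recreates the table with a copy-and-move, which is what keeps the same migration code portable. Minimal usage, mirroring the hunk above:

import sqlalchemy as sa
from alembic import op


def upgrade():
    # Operations issued on batch_op are collected and emitted together
    # when the context manager exits.
    with op.batch_alter_table('tenants', schema=None) as batch_op:
        batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))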
@ -8,12 +8,6 @@ Create Date: 2024-01-21 04:10:23.192853
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '89c7899ca936'
down_revision = '187385f442fc'
@ -23,39 +17,21 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description',
                existing_type=sa.VARCHAR(length=255),
                type_=sa.Text(),
                existing_nullable=True)
    else:
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description',
                existing_type=sa.VARCHAR(length=255),
                type_=models.types.LongText(),
                existing_nullable=True)
    with op.batch_alter_table('sites', schema=None) as batch_op:
        batch_op.alter_column('description',
            existing_type=sa.VARCHAR(length=255),
            type_=sa.Text(),
            existing_nullable=True)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description',
                existing_type=sa.Text(),
                type_=sa.VARCHAR(length=255),
                existing_nullable=True)
    else:
        with op.batch_alter_table('sites', schema=None) as batch_op:
            batch_op.alter_column('description',
                existing_type=models.types.LongText(),
                type_=sa.VARCHAR(length=255),
                existing_nullable=True)
    with op.batch_alter_table('sites', schema=None) as batch_op:
        batch_op.alter_column('description',
            existing_type=sa.Text(),
            type_=sa.VARCHAR(length=255),
            existing_nullable=True)

    # ### end Alembic commands ###
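Note: the MySQL branches consistently substituted models.types.LongText() for sa.Text(), presumably because MySQL's plain TEXT tops out around 64 KB. A minimal sketch of the shape such a type usually takes (an assumption about Dify's implementation, not its actual code):

from sqlalchemy.dialects import mysql
from sqlalchemy.types import Text, TypeDecorator


class LongText(TypeDecorator):
    """TEXT on most backends, LONGTEXT (up to 4 GB) on MySQL."""
    impl = Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "mysql":
            return dialect.type_descriptor(mysql.LONGTEXT())
        return dialect.type_descriptor(Text())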
@ -9,12 +9,6 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '8d2d099ceb74'
down_revision = '7ce5a52e4eee'
@ -24,24 +18,13 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('document_segments', schema=None) as batch_op:
            batch_op.add_column(sa.Column('answer', sa.Text(), nullable=True))
            batch_op.add_column(sa.Column('updated_by', postgresql.UUID(), nullable=True))
            batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))
    with op.batch_alter_table('document_segments', schema=None) as batch_op:
        batch_op.add_column(sa.Column('answer', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('updated_by', postgresql.UUID(), nullable=True))
        batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))

        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.add_column(sa.Column('doc_form', sa.String(length=255), server_default=sa.text("'text_model'::character varying"), nullable=False))
    else:
        with op.batch_alter_table('document_segments', schema=None) as batch_op:
            batch_op.add_column(sa.Column('answer', models.types.LongText(), nullable=True))
            batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), nullable=True))
            batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False))

        with op.batch_alter_table('documents', schema=None) as batch_op:
            batch_op.add_column(sa.Column('doc_form', sa.String(length=255), server_default=sa.text("'text_model'"), nullable=False))
    with op.batch_alter_table('documents', schema=None) as batch_op:
        batch_op.add_column(sa.Column('doc_form', sa.String(length=255), server_default=sa.text("'text_model'::character varying"), nullable=False))

    # ### end Alembic commands ###

@ -10,10 +10,6 @@ from alembic import op

import models as models


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '8e5588e6412e'
down_revision = '6e957a32015b'
@ -23,14 +19,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('environment_variables', sa.Text(), server_default='{}', nullable=False))
    else:
        with op.batch_alter_table('workflows', schema=None) as batch_op:
            batch_op.add_column(sa.Column('environment_variables', models.types.LongText(), default='{}', nullable=False))
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('environment_variables', sa.Text(), server_default='{}', nullable=False))

    # ### end Alembic commands ###
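Note: in the hunk above, the PostgreSQL branch bakes '{}' into the DDL (server_default='{}'), while the removed MySQL branch used a Python-side default='{}', likely because MySQL TEXT columns cannot carry a literal default. A Python-side default, though, does not populate pre-existing rows when the new column is NOT NULL. One hypothetical backfill pattern for that situation (not taken from this diff):

import sqlalchemy as sa
from alembic import op


def upgrade():
    # 1) Add the column as nullable so existing rows are accepted.
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('environment_variables', sa.Text(), nullable=True))
    # 2) Backfill existing rows explicitly.
    op.execute(sa.text("UPDATE workflows SET environment_variables = '{}'"))
    # 3) Tighten the column to NOT NULL once every row has a value.
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.alter_column('environment_variables', existing_type=sa.Text(), nullable=False)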
@ -8,12 +8,6 @@ Create Date: 2024-01-07 03:57:35.257545
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '8ec536f3c800'
down_revision = 'ad472b61a054'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False))
    else:
        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
            batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False))
    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False))

    # ### end Alembic commands ###

@ -7,15 +7,8 @@ Create Date: 2023-11-09 11:39:00.006432
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '8fe468ba0ca5'
down_revision = 'a9836e3baeee'
@ -25,52 +18,27 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('message_files',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('message_id', postgresql.UUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('transfer_method', sa.String(length=255), nullable=False),
        sa.Column('url', sa.Text(), nullable=True),
        sa.Column('upload_file_id', postgresql.UUID(), nullable=True),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='message_file_pkey')
        )
    else:
        op.create_table('message_files',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('message_id', models.types.StringUUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('transfer_method', sa.String(length=255), nullable=False),
        sa.Column('url', models.types.LongText(), nullable=True),
        sa.Column('upload_file_id', models.types.StringUUID(), nullable=True),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='message_file_pkey')
        )

    op.create_table('message_files',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('message_id', postgresql.UUID(), nullable=False),
    sa.Column('type', sa.String(length=255), nullable=False),
    sa.Column('transfer_method', sa.String(length=255), nullable=False),
    sa.Column('url', sa.Text(), nullable=True),
    sa.Column('upload_file_id', postgresql.UUID(), nullable=True),
    sa.Column('created_by_role', sa.String(length=255), nullable=False),
    sa.Column('created_by', postgresql.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='message_file_pkey')
    )
    with op.batch_alter_table('message_files', schema=None) as batch_op:
        batch_op.create_index('message_file_created_by_idx', ['created_by'], unique=False)
        batch_op.create_index('message_file_message_idx', ['message_id'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True))

    if _is_pg(conn):
        with op.batch_alter_table('upload_files', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'account'::character varying"), nullable=False))
    else:
        with op.batch_alter_table('upload_files', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'account'"), nullable=False))
    with op.batch_alter_table('upload_files', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'account'::character varying"), nullable=False))

    # ### end Alembic commands ###
@ -7,15 +7,8 @@ Create Date: 2023-10-27 13:05:58.901858
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '968fff4c0ab9'
down_revision = 'b3a09c049e8e'
@ -25,28 +18,16 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('api_based_extensions',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('api_endpoint', sa.String(length=255), nullable=False),
        sa.Column('api_key', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='api_based_extension_pkey')
        )
    else:
        op.create_table('api_based_extensions',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('api_endpoint', sa.String(length=255), nullable=False),
        sa.Column('api_key', models.types.LongText(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='api_based_extension_pkey')
        )

    op.create_table('api_based_extensions',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('api_endpoint', sa.String(length=255), nullable=False),
    sa.Column('api_key', sa.Text(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='api_based_extension_pkey')
    )
    with op.batch_alter_table('api_based_extensions', schema=None) as batch_op:
        batch_op.create_index('api_based_extension_tenant_idx', ['tenant_id'], unique=False)
@ -8,10 +8,6 @@ Create Date: 2023-05-17 17:29:01.060435
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = '9f4e3427ea84'
down_revision = '64b051264f32'
@ -21,30 +17,15 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        # PostgreSQL: Keep original syntax
        with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False))
            batch_op.drop_index('pinned_conversation_conversation_idx')
            batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by_role', 'created_by'], unique=False)
    with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False))
        batch_op.drop_index('pinned_conversation_conversation_idx')
        batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by_role', 'created_by'], unique=False)

        with op.batch_alter_table('saved_messages', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False))
            batch_op.drop_index('saved_message_message_idx')
            batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False)
    else:
        # MySQL: Use compatible syntax
        with op.batch_alter_table('pinned_conversations', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False))
            batch_op.drop_index('pinned_conversation_conversation_idx')
            batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by_role', 'created_by'], unique=False)

        with op.batch_alter_table('saved_messages', schema=None) as batch_op:
            batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False))
            batch_op.drop_index('saved_message_message_idx')
            batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False)
    with op.batch_alter_table('saved_messages', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False))
        batch_op.drop_index('saved_message_message_idx')
        batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False)

    # ### end Alembic commands ###

@ -8,10 +8,6 @@ Create Date: 2023-05-25 17:50:32.052335
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'a45f4dfde53b'
down_revision = '9f4e3427ea84'
@ -21,18 +17,10 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'::character varying"), nullable=False))
            batch_op.drop_index('recommended_app_is_listed_idx')
            batch_op.create_index('recommended_app_is_listed_idx', ['is_listed', 'language'], unique=False)
    else:
        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'"), nullable=False))
            batch_op.drop_index('recommended_app_is_listed_idx')
            batch_op.create_index('recommended_app_is_listed_idx', ['is_listed', 'language'], unique=False)
    with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
        batch_op.add_column(sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'::character varying"), nullable=False))
        batch_op.drop_index('recommended_app_is_listed_idx')
        batch_op.create_index('recommended_app_is_listed_idx', ['is_listed', 'language'], unique=False)

    # ### end Alembic commands ###
@ -8,12 +8,6 @@ Create Date: 2023-07-06 17:55:20.894149
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'a5b56fb053ef'
down_revision = 'd3d503a3471c'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True))

    # ### end Alembic commands ###

@ -8,10 +8,6 @@ Create Date: 2024-04-02 12:17:22.641525
import sqlalchemy as sa
from alembic import op


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'a8d7385a7b66'
down_revision = '17b5ab037c40'
@ -21,18 +17,10 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('embeddings', schema=None) as batch_op:
            batch_op.add_column(sa.Column('provider_name', sa.String(length=40), server_default=sa.text("''::character varying"), nullable=False))
            batch_op.drop_constraint('embedding_hash_idx', type_='unique')
            batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash', 'provider_name'])
    else:
        with op.batch_alter_table('embeddings', schema=None) as batch_op:
            batch_op.add_column(sa.Column('provider_name', sa.String(length=40), server_default=sa.text("''"), nullable=False))
            batch_op.drop_constraint('embedding_hash_idx', type_='unique')
            batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash', 'provider_name'])
    with op.batch_alter_table('embeddings', schema=None) as batch_op:
        batch_op.add_column(sa.Column('provider_name', sa.String(length=40), server_default=sa.text("''::character varying"), nullable=False))
        batch_op.drop_constraint('embedding_hash_idx', type_='unique')
        batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash', 'provider_name'])

    # ### end Alembic commands ###
@ -8,12 +8,6 @@ Create Date: 2023-11-02 04:04:57.609485
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'a9836e3baeee'
down_revision = '968fff4c0ab9'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True))

    # ### end Alembic commands ###

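models.types.LongText is referenced throughout these hunks but its definition is not part of this diff. As a purely hypothetical sketch, a cross-dialect "long text" type is usually written as a TypeDecorator that picks a larger MySQL type while falling back to TEXT elsewhere; the real implementation may differ:

    import sqlalchemy as sa
    from sqlalchemy.dialects.mysql import LONGTEXT

    class LongText(sa.types.TypeDecorator):
        # Hypothetical: renders LONGTEXT on MySQL (where plain TEXT caps at
        # 64 KB) and ordinary TEXT on every other dialect.
        impl = sa.Text
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "mysql":
                return dialect.type_descriptor(LONGTEXT())
            return dialect.type_descriptor(sa.Text())
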
@ -8,12 +8,6 @@ Create Date: 2024-01-17 01:31:12.670556
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'b24be59fbb04'
down_revision = 'de95f5c77138'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True))

    # ### end Alembic commands ###

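These single-column additions all go through op.batch_alter_table even though each emits one ALTER TABLE. The context manager groups changes per table and, on SQLite, falls back to a copy-and-rename table rebuild, so using it uniformly keeps migrations portable. A minimal usage sketch:

    import sqlalchemy as sa
    from alembic import op

    def add_nullable_text_column():
        # Every schema change issued inside the block applies to the same table.
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True))
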
@ -7,15 +7,8 @@ Create Date: 2024-02-19 12:47:24.646954
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'b289e2408ee2'
down_revision = 'a8d7385a7b66'
@ -25,190 +18,98 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('workflow_app_logs',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('app_id', postgresql.UUID(), nullable=False),
        sa.Column('workflow_id', postgresql.UUID(), nullable=False),
        sa.Column('workflow_run_id', postgresql.UUID(), nullable=False),
        sa.Column('created_from', sa.String(length=255), nullable=False),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='workflow_app_log_pkey')
        )
    else:
        op.create_table('workflow_app_logs',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
        sa.Column('workflow_run_id', models.types.StringUUID(), nullable=False),
        sa.Column('created_from', sa.String(length=255), nullable=False),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='workflow_app_log_pkey')
        )
    op.create_table('workflow_app_logs',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('app_id', postgresql.UUID(), nullable=False),
    sa.Column('workflow_id', postgresql.UUID(), nullable=False),
    sa.Column('workflow_run_id', postgresql.UUID(), nullable=False),
    sa.Column('created_from', sa.String(length=255), nullable=False),
    sa.Column('created_by_role', sa.String(length=255), nullable=False),
    sa.Column('created_by', postgresql.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='workflow_app_log_pkey')
    )
    with op.batch_alter_table('workflow_app_logs', schema=None) as batch_op:
        batch_op.create_index('workflow_app_log_app_idx', ['tenant_id', 'app_id'], unique=False)

    if _is_pg(conn):
        op.create_table('workflow_node_executions',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('app_id', postgresql.UUID(), nullable=False),
        sa.Column('workflow_id', postgresql.UUID(), nullable=False),
        sa.Column('triggered_from', sa.String(length=255), nullable=False),
        sa.Column('workflow_run_id', postgresql.UUID(), nullable=True),
        sa.Column('index', sa.Integer(), nullable=False),
        sa.Column('predecessor_node_id', sa.String(length=255), nullable=True),
        sa.Column('node_id', sa.String(length=255), nullable=False),
        sa.Column('node_type', sa.String(length=255), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('inputs', sa.Text(), nullable=True),
        sa.Column('process_data', sa.Text(), nullable=True),
        sa.Column('outputs', sa.Text(), nullable=True),
        sa.Column('status', sa.String(length=255), nullable=False),
        sa.Column('error', sa.Text(), nullable=True),
        sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
        sa.Column('execution_metadata', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey')
        )
    else:
        op.create_table('workflow_node_executions',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
        sa.Column('triggered_from', sa.String(length=255), nullable=False),
        sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True),
        sa.Column('index', sa.Integer(), nullable=False),
        sa.Column('predecessor_node_id', sa.String(length=255), nullable=True),
        sa.Column('node_id', sa.String(length=255), nullable=False),
        sa.Column('node_type', sa.String(length=255), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('inputs', models.types.LongText(), nullable=True),
        sa.Column('process_data', models.types.LongText(), nullable=True),
        sa.Column('outputs', models.types.LongText(), nullable=True),
        sa.Column('status', sa.String(length=255), nullable=False),
        sa.Column('error', models.types.LongText(), nullable=True),
        sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
        sa.Column('execution_metadata', models.types.LongText(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey')
        )
    op.create_table('workflow_node_executions',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('app_id', postgresql.UUID(), nullable=False),
    sa.Column('workflow_id', postgresql.UUID(), nullable=False),
    sa.Column('triggered_from', sa.String(length=255), nullable=False),
    sa.Column('workflow_run_id', postgresql.UUID(), nullable=True),
    sa.Column('index', sa.Integer(), nullable=False),
    sa.Column('predecessor_node_id', sa.String(length=255), nullable=True),
    sa.Column('node_id', sa.String(length=255), nullable=False),
    sa.Column('node_type', sa.String(length=255), nullable=False),
    sa.Column('title', sa.String(length=255), nullable=False),
    sa.Column('inputs', sa.Text(), nullable=True),
    sa.Column('process_data', sa.Text(), nullable=True),
    sa.Column('outputs', sa.Text(), nullable=True),
    sa.Column('status', sa.String(length=255), nullable=False),
    sa.Column('error', sa.Text(), nullable=True),
    sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
    sa.Column('execution_metadata', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('created_by_role', sa.String(length=255), nullable=False),
    sa.Column('created_by', postgresql.UUID(), nullable=False),
    sa.Column('finished_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey')
    )
    with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
        batch_op.create_index('workflow_node_execution_node_run_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'node_id'], unique=False)
        batch_op.create_index('workflow_node_execution_workflow_run_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'workflow_run_id'], unique=False)

    if _is_pg(conn):
        op.create_table('workflow_runs',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('app_id', postgresql.UUID(), nullable=False),
        sa.Column('sequence_number', sa.Integer(), nullable=False),
        sa.Column('workflow_id', postgresql.UUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('triggered_from', sa.String(length=255), nullable=False),
        sa.Column('version', sa.String(length=255), nullable=False),
        sa.Column('graph', sa.Text(), nullable=True),
        sa.Column('inputs', sa.Text(), nullable=True),
        sa.Column('status', sa.String(length=255), nullable=False),
        sa.Column('outputs', sa.Text(), nullable=True),
        sa.Column('error', sa.Text(), nullable=True),
        sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
        sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
        sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_run_pkey')
        )
    else:
        op.create_table('workflow_runs',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('sequence_number', sa.Integer(), nullable=False),
        sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('triggered_from', sa.String(length=255), nullable=False),
        sa.Column('version', sa.String(length=255), nullable=False),
        sa.Column('graph', models.types.LongText(), nullable=True),
        sa.Column('inputs', models.types.LongText(), nullable=True),
        sa.Column('status', sa.String(length=255), nullable=False),
        sa.Column('outputs', models.types.LongText(), nullable=True),
        sa.Column('error', models.types.LongText(), nullable=True),
        sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
        sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
        sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
        sa.Column('created_by_role', sa.String(length=255), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_run_pkey')
        )
    op.create_table('workflow_runs',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('app_id', postgresql.UUID(), nullable=False),
    sa.Column('sequence_number', sa.Integer(), nullable=False),
    sa.Column('workflow_id', postgresql.UUID(), nullable=False),
    sa.Column('type', sa.String(length=255), nullable=False),
    sa.Column('triggered_from', sa.String(length=255), nullable=False),
    sa.Column('version', sa.String(length=255), nullable=False),
    sa.Column('graph', sa.Text(), nullable=True),
    sa.Column('inputs', sa.Text(), nullable=True),
    sa.Column('status', sa.String(length=255), nullable=False),
    sa.Column('outputs', sa.Text(), nullable=True),
    sa.Column('error', sa.Text(), nullable=True),
    sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
    sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
    sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
    sa.Column('created_by_role', sa.String(length=255), nullable=False),
    sa.Column('created_by', postgresql.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('finished_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='workflow_run_pkey')
    )
    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
        batch_op.create_index('workflow_run_triggerd_from_idx', ['tenant_id', 'app_id', 'triggered_from'], unique=False)

    if _is_pg(conn):
        op.create_table('workflows',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('app_id', postgresql.UUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('version', sa.String(length=255), nullable=False),
        sa.Column('graph', sa.Text(), nullable=True),
        sa.Column('features', sa.Text(), nullable=True),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_by', postgresql.UUID(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_pkey')
        )
    else:
        op.create_table('workflows',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('version', sa.String(length=255), nullable=False),
        sa.Column('graph', models.types.LongText(), nullable=True),
        sa.Column('features', models.types.LongText(), nullable=True),
        sa.Column('created_by', models.types.StringUUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name='workflow_pkey')
        )

    op.create_table('workflows',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('app_id', postgresql.UUID(), nullable=False),
    sa.Column('type', sa.String(length=255), nullable=False),
    sa.Column('version', sa.String(length=255), nullable=False),
    sa.Column('graph', sa.Text(), nullable=True),
    sa.Column('features', sa.Text(), nullable=True),
    sa.Column('created_by', postgresql.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_by', postgresql.UUID(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='workflow_pkey')
    )
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.create_index('workflow_version_idx', ['tenant_id', 'app_id', 'version'], unique=False)

    if _is_pg(conn):
        with op.batch_alter_table('apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('workflow_id', postgresql.UUID(), nullable=True))
    with op.batch_alter_table('apps', schema=None) as batch_op:
        batch_op.add_column(sa.Column('workflow_id', postgresql.UUID(), nullable=True))

        with op.batch_alter_table('messages', schema=None) as batch_op:
            batch_op.add_column(sa.Column('workflow_run_id', postgresql.UUID(), nullable=True))
    else:
        with op.batch_alter_table('apps', schema=None) as batch_op:
            batch_op.add_column(sa.Column('workflow_id', models.types.StringUUID(), nullable=True))

        with op.batch_alter_table('messages', schema=None) as batch_op:
            batch_op.add_column(sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True))
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.add_column(sa.Column('workflow_run_id', postgresql.UUID(), nullable=True))

    # ### end Alembic commands ###

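The deleted else branches above build id columns as models.types.StringUUID with a client-side default of str(uuid4()), whereas the PostgreSQL path keeps native UUID columns filled by uuid_generate_v4() on the server. StringUUID's definition is not in this diff; a hypothetical sketch of such a cross-dialect type (the real one may differ):

    import sqlalchemy as sa
    from sqlalchemy.dialects.postgresql import UUID as PGUUID

    class StringUUID(sa.types.TypeDecorator):
        # Hypothetical: native UUID on PostgreSQL, CHAR(36) elsewhere;
        # values cross the boundary as strings in both directions.
        impl = sa.CHAR
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "postgresql":
                return dialect.type_descriptor(PGUUID())
            return dialect.type_descriptor(sa.CHAR(36))

        def process_bind_param(self, value, dialect):
            return None if value is None else str(value)

        def process_result_value(self, value, dialect):
            return None if value is None else str(value)
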
@ -8,12 +8,6 @@ Create Date: 2023-10-10 15:23:23.395420
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'b3a09c049e8e'
down_revision = '2e9819ca5b28'
@ -23,20 +17,11 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
            batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True))
            batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True))
            batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True))
    else:
        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
            batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
            batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True))
            batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True))
            batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True))
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple'))
        batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True))

    # ### end Alembic commands ###

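Note the two default styles in this hunk: server_default='simple' hands SQLAlchemy a plain Python string to quote for whatever dialect is active, while sa.text(...) elsewhere embeds raw SQL verbatim. A small illustration (not taken from the diff):

    import sqlalchemy as sa

    # Both render as DEFAULT 'simple' on PostgreSQL, but only the first form
    # is quoted by SQLAlchemy itself and therefore safe on every backend.
    portable = sa.Column('prompt_type', sa.String(length=255), server_default='simple')
    raw_sql = sa.Column('prompt_type', sa.String(length=255), server_default=sa.text("'simple'"))
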
@ -7,15 +7,8 @@ Create Date: 2023-08-10 00:03:44.273430
"""
import sqlalchemy as sa
from alembic import op
from uuid import uuid4
from sqlalchemy.dialects import postgresql

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'bf0aec5ba2cf'
down_revision = 'e35ed59becda'
@ -25,48 +18,25 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('provider_orders',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('account_id', postgresql.UUID(), nullable=False),
        sa.Column('payment_product_id', sa.String(length=191), nullable=False),
        sa.Column('payment_id', sa.String(length=191), nullable=True),
        sa.Column('transaction_id', sa.String(length=191), nullable=True),
        sa.Column('quantity', sa.Integer(), server_default=sa.text('1'), nullable=False),
        sa.Column('currency', sa.String(length=40), nullable=True),
        sa.Column('total_amount', sa.Integer(), nullable=True),
        sa.Column('payment_status', sa.String(length=40), server_default=sa.text("'wait_pay'::character varying"), nullable=False),
        sa.Column('paid_at', sa.DateTime(), nullable=True),
        sa.Column('pay_failed_at', sa.DateTime(), nullable=True),
        sa.Column('refunded_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='provider_order_pkey')
        )
    else:
        op.create_table('provider_orders',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider_name', sa.String(length=40), nullable=False),
        sa.Column('account_id', models.types.StringUUID(), nullable=False),
        sa.Column('payment_product_id', sa.String(length=191), nullable=False),
        sa.Column('payment_id', sa.String(length=191), nullable=True),
        sa.Column('transaction_id', sa.String(length=191), nullable=True),
        sa.Column('quantity', sa.Integer(), server_default=sa.text('1'), nullable=False),
        sa.Column('currency', sa.String(length=40), nullable=True),
        sa.Column('total_amount', sa.Integer(), nullable=True),
        sa.Column('payment_status', sa.String(length=40), server_default=sa.text("'wait_pay'"), nullable=False),
        sa.Column('paid_at', sa.DateTime(), nullable=True),
        sa.Column('pay_failed_at', sa.DateTime(), nullable=True),
        sa.Column('refunded_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='provider_order_pkey')
        )
    op.create_table('provider_orders',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
    sa.Column('provider_name', sa.String(length=40), nullable=False),
    sa.Column('account_id', postgresql.UUID(), nullable=False),
    sa.Column('payment_product_id', sa.String(length=191), nullable=False),
    sa.Column('payment_id', sa.String(length=191), nullable=True),
    sa.Column('transaction_id', sa.String(length=191), nullable=True),
    sa.Column('quantity', sa.Integer(), server_default=sa.text('1'), nullable=False),
    sa.Column('currency', sa.String(length=40), nullable=True),
    sa.Column('total_amount', sa.Integer(), nullable=True),
    sa.Column('payment_status', sa.String(length=40), server_default=sa.text("'wait_pay'::character varying"), nullable=False),
    sa.Column('paid_at', sa.DateTime(), nullable=True),
    sa.Column('pay_failed_at', sa.DateTime(), nullable=True),
    sa.Column('refunded_at', sa.DateTime(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='provider_order_pkey')
    )
    with op.batch_alter_table('provider_orders', schema=None) as batch_op:
        batch_op.create_index('provider_order_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False)

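The PostgreSQL branch pins timestamp defaults with the raw expression CURRENT_TIMESTAMP(0), while the removed fallback uses sa.func.current_timestamp(), which SQLAlchemy compiles per dialect. A minimal sketch of the portable form:

    import sqlalchemy as sa

    # sa.func.current_timestamp() compiles to CURRENT_TIMESTAMP on each
    # backend, avoiding the PostgreSQL-specific "(0)" precision suffix.
    created_at = sa.Column('created_at', sa.DateTime(),
                           server_default=sa.func.current_timestamp(),
                           nullable=False)
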
@ -8,14 +8,9 @@ Create Date: 2024-06-17 10:01:00.255189
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from uuid import uuid4

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'c031d46af369'
down_revision = '04c602f5dc9b'
@ -25,30 +20,16 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        op.create_table('trace_app_config',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('tracing_provider', sa.String(length=255), nullable=True),
        sa.Column('tracing_config', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey')
        )
    else:
        op.create_table('trace_app_config',
        sa.Column('id', models.types.StringUUID(), default=lambda: str(uuid4()), nullable=False),
        sa.Column('app_id', models.types.StringUUID(), nullable=False),
        sa.Column('tracing_provider', sa.String(length=255), nullable=True),
        sa.Column('tracing_config', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
        sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey')
        )
    op.create_table('trace_app_config',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('tracing_provider', sa.String(length=255), nullable=True),
    sa.Column('tracing_config', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
    sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey')
    )

    with op.batch_alter_table('trace_app_config', schema=None) as batch_op:
        batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False)

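The two trace_app_config variants differ in where the id is produced: server_default=sa.text('uuid_generate_v4()') asks PostgreSQL to generate it on the server (and requires the uuid-ossp extension), while default=lambda: str(uuid4()) generates it in Python before each INSERT and so works on any backend. A minimal sketch of the client-side form:

    import sqlalchemy as sa
    from uuid import uuid4

    # "default" runs in the application at INSERT time; "server_default"
    # becomes part of the table's DDL instead.
    id_column = sa.Column('id', sa.CHAR(36), primary_key=True,
                          default=lambda: str(uuid4()))
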
@ -8,12 +8,6 @@ Create Date: 2024-03-28 11:50:45.364875
import sqlalchemy as sa
from alembic import op

import models.types


def _is_pg(conn):
    return conn.dialect.name == "postgresql"

# revision identifiers, used by Alembic.
revision = 'c3311b089690'
down_revision = 'e2eacc9a1b63'
@ -23,14 +17,8 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()

    if _is_pg(conn):
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tool_meta_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))
    else:
        with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
            batch_op.add_column(sa.Column('tool_meta_str', models.types.LongText(), default=sa.text("'{}'"), nullable=False))
    with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
        batch_op.add_column(sa.Column('tool_meta_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))

    # ### end Alembic commands ###

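A practical difference between the two forms in this hunk: the removed MySQL branch sets tool_meta_str with default=..., which is evaluated at INSERT time and never appears in generated DDL, whereas server_default=... is baked into the CREATE/ALTER statement itself, which matters when migrations are rendered offline with Alembic's --sql mode. A small demonstration:

    import sqlalchemy as sa
    from sqlalchemy.schema import CreateTable

    metadata = sa.MetaData()
    demo = sa.Table(
        'demo', metadata,
        sa.Column('a', sa.Text(), server_default=sa.text("'{}'")),  # appears in DDL
        sa.Column('b', sa.Text(), default='{}'),                    # insert-time only
    )

    # Only column "a" carries a DEFAULT clause in the generated CREATE TABLE.
    print(CreateTable(demo))
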
Some files were not shown because too many files have changed in this diff.