orm filter -> where (#22801)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Claude <noreply@anthropic.com>
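This is a mechanical rename across the scheduled tasks: every legacy-style Query.filter() call becomes Query.where(). Since SQLAlchemy 1.4, Query.where() is documented as a synonym for Query.filter(), mirroring the 2.0-style Select.where(), so the generated SQL is unchanged. A minimal sketch of the equivalence, assuming SQLAlchemy 1.4+; the User model and in-memory engine here are illustrative, not part of this commit:

    # Minimal sketch: Query.where() and Query.filter() are synonyms (SQLAlchemy >= 1.4).
    # The User model and in-memory SQLite engine are illustrative assumptions.
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(User(name="alice"))
        session.commit()

        # Both spellings compile to identical SQL and return the same rows.
        q_filter = session.query(User).filter(User.name == "alice")
        q_where = session.query(User).where(User.name == "alice")
        assert str(q_filter) == str(q_where)
        assert q_filter.all() == q_where.all()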
@@ -21,7 +21,7 @@ def clean_embedding_cache_task():
         try:
             embedding_ids = (
                 db.session.query(Embedding.id)
-                .filter(Embedding.created_at < thirty_days_ago)
+                .where(Embedding.created_at < thirty_days_ago)
                 .order_by(Embedding.created_at.desc())
                 .limit(100)
                 .all()
@@ -36,7 +36,7 @@ def clean_messages():
             # Main query with join and filter
             messages = (
                 db.session.query(Message)
-                .filter(Message.created_at < plan_sandbox_clean_message_day)
+                .where(Message.created_at < plan_sandbox_clean_message_day)
                 .order_by(Message.created_at.desc())
                 .limit(100)
                 .all()
@@ -66,25 +66,25 @@ def clean_messages():
                 plan = plan_cache.decode()
             if plan == "sandbox":
                 # clean related message
-                db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message.id).delete(
+                db.session.query(MessageFeedback).where(MessageFeedback.message_id == message.id).delete(
                     synchronize_session=False
                 )
-                db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message.id).delete(
+                db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message.id).delete(
                     synchronize_session=False
                 )
-                db.session.query(MessageChain).filter(MessageChain.message_id == message.id).delete(
+                db.session.query(MessageChain).where(MessageChain.message_id == message.id).delete(
                     synchronize_session=False
                 )
-                db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == message.id).delete(
+                db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message.id).delete(
                     synchronize_session=False
                 )
-                db.session.query(MessageFile).filter(MessageFile.message_id == message.id).delete(
+                db.session.query(MessageFile).where(MessageFile.message_id == message.id).delete(
                     synchronize_session=False
                 )
-                db.session.query(SavedMessage).filter(SavedMessage.message_id == message.id).delete(
+                db.session.query(SavedMessage).where(SavedMessage.message_id == message.id).delete(
                     synchronize_session=False
                 )
-                db.session.query(Message).filter(Message.id == message.id).delete()
+                db.session.query(Message).where(Message.id == message.id).delete()
                 db.session.commit()
     end_at = time.perf_counter()
     click.echo(click.style("Cleaned messages from db success latency: {}".format(end_at - start_at), fg="green"))
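These bulk deletes stay on the legacy Query API with only the method name swapped; Query.delete() and its synchronize_session argument behave exactly as before. For comparison, a fully 2.0-style spelling (a sketch, not what this commit does) would route the same delete through a delete() statement; db, message, and MessageFeedback here come from the hunk above:

    # Hypothetical 2.0-style equivalent of one of the deletes above (sketch only;
    # db, message, and MessageFeedback come from the surrounding task code).
    from sqlalchemy import delete

    stmt = delete(MessageFeedback).where(MessageFeedback.message_id == message.id)
    db.session.execute(stmt, execution_options={"synchronize_session": False})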
@@ -27,7 +27,7 @@ def clean_unused_datasets_task():
         # Subquery for counting new documents
         document_subquery_new = (
             db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
-            .filter(
+            .where(
                 Document.indexing_status == "completed",
                 Document.enabled == True,
                 Document.archived == False,
@@ -40,7 +40,7 @@ def clean_unused_datasets_task():
         # Subquery for counting old documents
         document_subquery_old = (
             db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
-            .filter(
+            .where(
                 Document.indexing_status == "completed",
                 Document.enabled == True,
                 Document.archived == False,
@@ -55,7 +55,7 @@ def clean_unused_datasets_task():
             select(Dataset)
             .outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
             .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
-            .filter(
+            .where(
                 Dataset.created_at < plan_sandbox_clean_day,
                 func.coalesce(document_subquery_new.c.document_count, 0) == 0,
                 func.coalesce(document_subquery_old.c.document_count, 0) > 0,
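Note the difference from the other hunks in this file: this chain hangs off a 2.0-style select(Dataset) construct, where .where() is the canonical method and .filter() merely a synonym, so the rename also makes the spelling uniform between the legacy Query and new Select code paths.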
@@ -72,7 +72,7 @@ def clean_unused_datasets_task():
         for dataset in datasets:
             dataset_query = (
                 db.session.query(DatasetQuery)
-                .filter(DatasetQuery.created_at > plan_sandbox_clean_day, DatasetQuery.dataset_id == dataset.id)
+                .where(DatasetQuery.created_at > plan_sandbox_clean_day, DatasetQuery.dataset_id == dataset.id)
                 .all()
             )
             if not dataset_query or len(dataset_query) == 0:
@@ -80,7 +80,7 @@ def clean_unused_datasets_task():
                     # add auto disable log
                     documents = (
                         db.session.query(Document)
-                        .filter(
+                        .where(
                             Document.dataset_id == dataset.id,
                             Document.enabled == True,
                             Document.archived == False,
@@ -111,7 +111,7 @@ def clean_unused_datasets_task():
         # Subquery for counting new documents
         document_subquery_new = (
             db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
-            .filter(
+            .where(
                 Document.indexing_status == "completed",
                 Document.enabled == True,
                 Document.archived == False,
@@ -124,7 +124,7 @@ def clean_unused_datasets_task():
         # Subquery for counting old documents
         document_subquery_old = (
             db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
-            .filter(
+            .where(
                 Document.indexing_status == "completed",
                 Document.enabled == True,
                 Document.archived == False,
@@ -139,7 +139,7 @@ def clean_unused_datasets_task():
             select(Dataset)
             .outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
             .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
-            .filter(
+            .where(
                 Dataset.created_at < plan_pro_clean_day,
                 func.coalesce(document_subquery_new.c.document_count, 0) == 0,
                 func.coalesce(document_subquery_old.c.document_count, 0) > 0,
@@ -155,7 +155,7 @@ def clean_unused_datasets_task():
         for dataset in datasets:
             dataset_query = (
                 db.session.query(DatasetQuery)
-                .filter(DatasetQuery.created_at > plan_pro_clean_day, DatasetQuery.dataset_id == dataset.id)
+                .where(DatasetQuery.created_at > plan_pro_clean_day, DatasetQuery.dataset_id == dataset.id)
                 .all()
             )
             if not dataset_query or len(dataset_query) == 0:
@@ -20,7 +20,7 @@ def create_tidb_serverless_task():
         try:
             # check the number of idle tidb serverless
             idle_tidb_serverless_number = (
-                db.session.query(TidbAuthBinding).filter(TidbAuthBinding.active == False).count()
+                db.session.query(TidbAuthBinding).where(TidbAuthBinding.active == False).count()
             )
             if idle_tidb_serverless_number >= tidb_serverless_number:
                 break
@@ -30,7 +30,7 @@ def mail_clean_document_notify_task():
     # send document clean notify mail
     try:
         dataset_auto_disable_logs = (
-            db.session.query(DatasetAutoDisableLog).filter(DatasetAutoDisableLog.notified == False).all()
+            db.session.query(DatasetAutoDisableLog).where(DatasetAutoDisableLog.notified == False).all()
         )
         # group by tenant_id
         dataset_auto_disable_logs_map: dict[str, list[DatasetAutoDisableLog]] = defaultdict(list)
@@ -45,7 +45,7 @@ def mail_clean_document_notify_task():
             if plan != "sandbox":
                 knowledge_details = []
                 # check tenant
-                tenant = db.session.query(Tenant).filter(Tenant.id == tenant_id).first()
+                tenant = db.session.query(Tenant).where(Tenant.id == tenant_id).first()
                 if not tenant:
                     continue
                 # check current owner
@@ -54,7 +54,7 @@ def mail_clean_document_notify_task():
                 )
                 if not current_owner_join:
                     continue
-                account = db.session.query(Account).filter(Account.id == current_owner_join.account_id).first()
+                account = db.session.query(Account).where(Account.id == current_owner_join.account_id).first()
                 if not account:
                     continue

@@ -67,7 +67,7 @@ def mail_clean_document_notify_task():
                 )

                 for dataset_id, document_ids in dataset_auto_dataset_map.items():
-                    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
+                    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
                     if dataset:
                         document_count = len(document_ids)
                         knowledge_details.append(rf"Knowledge base {dataset.name}: {document_count} documents")
@@ -17,7 +17,7 @@ def update_tidb_serverless_status_task():
     # check the number of idle tidb serverless
     tidb_serverless_list = (
         db.session.query(TidbAuthBinding)
-        .filter(TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING")
+        .where(TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING")
         .all()
     )
     if len(tidb_serverless_list) == 0:
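The == True / == False comparisons these hunks keep are intentional: SQLAlchemy overloads == to build SQL boolean expressions, so rewriting them as a bare truthiness test or `not` would break the query. A fully 2.0-style read (again a sketch, not part of this commit) would express the same query with select() and Session.scalars(); db and TidbAuthBinding here come from the hunk above:

    # Hypothetical 2.0-style spelling of the query above (sketch only;
    # db and TidbAuthBinding come from the surrounding task code).
    from sqlalchemy import select

    tidb_serverless_list = db.session.scalars(
        select(TidbAuthBinding).where(
            TidbAuthBinding.active == False,  # noqa: E712 -- builds a SQL expression
            TidbAuthBinding.status == "CREATING",
        )
    ).all()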