Merge branch 'main' into feat/mcp

Author: Novice
Date: 2025-06-05 17:19:27 +08:00
476 changed files with 8040 additions and 3490 deletions

View File

@@ -5,7 +5,7 @@ import uuid

 import click
 from celery import shared_task  # type: ignore
-from sqlalchemy import func, select
+from sqlalchemy import func
 from sqlalchemy.orm import Session

 from core.model_manager import ModelManager
@@ -68,11 +68,6 @@ def batch_create_segment_to_index_task(
             model_type=ModelType.TEXT_EMBEDDING,
             model=dataset.embedding_model,
         )
-        word_count_change = 0
-        segments_to_insert: list[str] = []
-        max_position_stmt = select(func.max(DocumentSegment.position)).where(
-            DocumentSegment.document_id == dataset_document.id
-        )
         word_count_change = 0
         if embedding_model:
             tokens_list = embedding_model.get_text_embedding_num_tokens(
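The two hunks in this file belong together: once the `max_position_stmt` block is gone, nothing references `select` any more, so the import shrinks to `func` alone, and the duplicated `word_count_change = 0` collapses to a single assignment. For reference, a minimal sketch of what the removed statement computed, reusing the names from the diff above (the surrounding session handling is assumed, not shown in the hunk):

from sqlalchemy import func, select

# The removed block built a query for the highest segment position within
# one document; scalar() returns that single value, or None when the
# document has no segments yet.
max_position_stmt = select(func.max(DocumentSegment.position)).where(
    DocumentSegment.document_id == dataset_document.id
)
max_position = db.session.scalar(max_position_stmt)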

View File

@@ -114,4 +114,4 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        pass
+        logging.exception("document_indexing_sync_task failed, document_id: {}".format(document_id))

View File

@@ -81,6 +81,6 @@ def document_indexing_task(dataset_id: str, document_ids: list):
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        pass
+        logging.exception("Document indexing task failed, dataset_id: {}".format(dataset_id))
     finally:
         db.session.close()

View File

@@ -73,6 +73,6 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        pass
+        logging.exception("document_indexing_update_task failed, document_id: {}".format(document_id))
     finally:
         db.session.close()

View File

@@ -99,6 +99,6 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        pass
+        logging.exception("duplicate_document_indexing_task failed, dataset_id: {}".format(dataset_id))
     finally:
         db.session.close()

View File

@@ -43,6 +43,6 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        pass
+        logging.exception("recover_document_indexing_task failed, document_id: {}".format(document_id))
     finally:
         db.session.close()

View File

@@ -31,7 +31,7 @@ from models import (
 )
 from models.tools import WorkflowToolProvider
 from models.web import PinnedConversation, SavedMessage
-from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowNodeExecution, WorkflowRun
+from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowNodeExecutionModel, WorkflowRun


 @shared_task(queue="app_deletion", bind=True, max_retries=3)
@@ -202,9 +202,9 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):

 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
     def del_workflow_node_execution(workflow_node_execution_id: str):
-        db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution_id).delete(
-            synchronize_session=False
-        )
+        db.session.query(WorkflowNodeExecutionModel).filter(
+            WorkflowNodeExecutionModel.id == workflow_node_execution_id
+        ).delete(synchronize_session=False)

     _delete_records(
         """select id from workflow_node_executions where tenant_id=:tenant_id and app_id=:app_id limit 1000""",

View File

@@ -95,7 +95,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             db.session.commit()
             logging.info(click.style(str(ex), fg="yellow"))
             redis_client.delete(retry_indexing_cache_key)
-            pass
+            logging.exception("retry_document_indexing_task failed, document_id: {}".format(document_id))
         finally:
             db.session.close()
     end_at = time.perf_counter()

View File

@@ -87,6 +87,6 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         db.session.commit()
         logging.info(click.style(str(ex), fg="yellow"))
         redis_client.delete(sync_indexing_cache_key)
-        pass
+        logging.exception("sync_website_document_indexing_task failed, document_id: {}".format(document_id))
     end_at = time.perf_counter()
     logging.info(click.style("Sync document: {} latency: {}".format(document_id, end_at - start_at), fg="green"))