mirror of
https://github.com/langgenius/dify.git
synced 2026-03-02 22:36:41 +08:00
Signed-off-by: majiayu000 <1835304752@qq.com> Signed-off-by: dependabot[bot] <support@github.com> Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Signed-off-by: -LAN- <laipz8200@outlook.com> Signed-off-by: yihong0618 <zouzou0208@gmail.com> Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com> Co-authored-by: 盐粒 Yanli <yanli@dify.ai> Co-authored-by: wangxiaolei <fatelei@gmail.com> Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: Cursx <33718736+Cursx@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: lif <1835304752@qq.com> Co-authored-by: 非法操作 <hjlarry@163.com> Co-authored-by: Asuka Minato <i@asukaminato.eu.org> Co-authored-by: fenglin <790872612@qq.com> Co-authored-by: qiaofenglin <qiaofenglin@baidu.com> Co-authored-by: -LAN- <laipz8200@outlook.com> Co-authored-by: TomoOkuyama <49631611+TomoOkuyama@users.noreply.github.com> Co-authored-by: Tomo Okuyama <tomo.okuyama@intersystems.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: zyssyz123 <916125788@qq.com> Co-authored-by: hj24 <mambahj24@gmail.com> Co-authored-by: Coding On Star <447357187@qq.com> Co-authored-by: CodingOnStar <hanxujiang@dify.ai> Co-authored-by: yyh <92089059+lyzno1@users.noreply.github.com> Co-authored-by: Xiangxuan Qu <fghpdf@outlook.com> Co-authored-by: fghpdf <fghpdf@users.noreply.github.com> Co-authored-by: coopercoder <whitetiger0127@163.com> Co-authored-by: zhaiguangpeng <zhaiguangpeng@didiglobal.com> Co-authored-by: Junyan Qin (Chin) <rockchinq@gmail.com> Co-authored-by: E.G <146701565+GlobalStar117@users.noreply.github.com> Co-authored-by: GlobalStar117 <GlobalStar117@users.noreply.github.com> Co-authored-by: Claude Haiku 4.5 <noreply@anthropic.com> Co-authored-by: CodingOnStar 
<hanxujiang@dify.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: heyszt <270985384@qq.com> Co-authored-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: Yeuoly <45712896+Yeuoly@users.noreply.github.com> Co-authored-by: zxhlyh <jasonapring2015@outlook.com> Co-authored-by: moonpanda <chuanzegao@163.com> Co-authored-by: warlocgao <warlocgao@tencent.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: claude[bot] <41898282+claude[bot]@users.noreply.github.com> Co-authored-by: KVOJJJin <jzongcode@gmail.com> Co-authored-by: eux <euxx@users.noreply.github.com> Co-authored-by: bangjiehan <bangjiehan@gmail.com> Co-authored-by: FFXN <31929997+FFXN@users.noreply.github.com> Co-authored-by: Jyong <76649700+JohnJyong@users.noreply.github.com> Co-authored-by: Nie Ronghua <nieronghua@sf-express.com> Co-authored-by: JQSevenMiao <141806521+JQSevenMiao@users.noreply.github.com> Co-authored-by: jiasiqi <jiasiqi3@tal.com> Co-authored-by: Seokrin Taron Sung <sungsjade@gmail.com> Co-authored-by: CrabSAMA <40541269+CrabSAMA@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: yihong <zouzou0208@gmail.com> Co-authored-by: Joel <iamjoel007@gmail.com> Co-authored-by: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Co-authored-by: yessenia <yessenia.contact@gmail.com> Co-authored-by: Jax <anobaka@qq.com> Co-authored-by: niveshdandyan <155956228+niveshdandyan@users.noreply.github.com> Co-authored-by: OSS Contributor <oss-contributor@example.com> Co-authored-by: niveshdandyan <niveshdandyan@users.noreply.github.com> Co-authored-by: Sean Kenneth Doherty <Smaster7772@gmail.com>
94 lines
3.9 KiB
Python
94 lines
3.9 KiB
Python
import logging
|
|
import time
|
|
|
|
import click
|
|
from celery import shared_task
|
|
from sqlalchemy import delete, select
|
|
|
|
from core.db.session_factory import session_factory
|
|
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
|
|
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
|
|
from extensions.ext_storage import storage
|
|
from models.dataset import Dataset, DatasetMetadataBinding, DocumentSegment
|
|
from models.model import UploadFile
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
    """
    Clean up index data, segments, and associated files after documents are deleted.

    Removes, for the given documents: metadata bindings, vector/keyword index
    entries (via the doc_form-specific index processor), document segments,
    any images embedded in segment content, and the documents' own upload files.

    :param document_ids: ids of the deleted documents
    :param dataset_id: id of the dataset the documents belonged to
    :param doc_form: index form of the dataset (required; selects the index processor)
    :param file_ids: ids of the upload files backing the deleted documents

    Usage: batch_clean_document_task.delay(document_ids, dataset_id, doc_form, file_ids)

    NOTE: any failure inside the session block is logged and swallowed (the task
    never re-raises), so a partial cleanup is possible; only a missing doc_form
    raises to the caller.
    """
    logger.info(click.style("Start batch clean documents when documents deleted", fg="green"))
    start_at = time.perf_counter()
    if not doc_form:
        raise ValueError("doc_form is required")

    with session_factory.create_session() as session:
        try:
            dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()

            if not dataset:
                raise Exception("Document has no dataset")

            # Drop metadata bindings first; synchronize_session=False since no
            # in-session instances of these rows are touched afterwards.
            session.query(DatasetMetadataBinding).where(
                DatasetMetadataBinding.dataset_id == dataset_id,
                DatasetMetadataBinding.document_id.in_(document_ids),
            ).delete(synchronize_session=False)

            segments = session.scalars(
                select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
            ).all()
            # Only touch the index and segment-level resources if segments exist.
            if segments:
                index_node_ids = [segment.index_node_id for segment in segments]
                index_processor = IndexProcessorFactory(doc_form).init_index_processor()
                index_processor.clean(
                    dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
                )

                for segment in segments:
                    # Images referenced inside segment content are stored as
                    # upload files; delete both the blobs and their DB rows.
                    image_upload_file_ids = get_image_upload_file_ids(segment.content)
                    image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
                    for image_file in image_files:
                        try:
                            if image_file and image_file.key:
                                storage.delete(image_file.key)
                        except Exception:
                            # Best-effort: a missing/failed blob delete must not
                            # block cleaning the remaining segments.
                            logger.exception(
                                "Delete image_files failed when storage deleted, image_upload_file_id: %s",
                                image_file.id,
                            )
                    stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
                    session.execute(stmt)
                    session.delete(segment)
            if file_ids:
                files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
                for file in files:
                    try:
                        storage.delete(file.key)
                    except Exception:
                        # Best-effort, same rationale as the image blobs above.
                        logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
                stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
                session.execute(stmt)

            session.commit()

            end_at = time.perf_counter()
            logger.info(
                click.style(
                    f"Cleaned documents when documents deleted latency: {end_at - start_at}",
                    fg="green",
                )
            )
        except Exception:
            # Task-boundary catch-all: log with traceback and swallow so the
            # worker does not retry/crash on an unrecoverable cleanup failure.
            logger.exception("Cleaned documents when documents deleted failed")