feat: Add vision support to the summary index and standardize the code.

This commit is contained in:
FFXN
2026-01-14 17:52:27 +08:00
parent 9b7e807690
commit 7eb65b07c8
23 changed files with 569 additions and 307 deletions

View File

@@ -8,13 +8,13 @@ from celery import shared_task
from configs import dify_config
from core.entities.document_task import DocumentTask
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from tasks.generate_summary_index_task import generate_summary_index_task
from core.rag.pipeline.queue import TenantIsolatedTaskQueue
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document
from services.feature_service import FeatureService
from tasks.generate_summary_index_task import generate_summary_index_task
logger = logging.getLogger(__name__)
@@ -101,15 +101,15 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
indexing_runner.run(documents)
end_at = time.perf_counter()
logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
# Trigger summary index generation for completed documents if enabled
# Only generate for high_quality indexing technique and when summary_index_setting is enabled
# Re-query dataset to get latest summary_index_setting (in case it was updated)
dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
logger.warning(f"Dataset {dataset_id} not found after indexing")
logger.warning("Dataset %s not found after indexing", dataset_id)
return
if dataset.indexing_technique == "high_quality":
summary_index_setting = dataset.summary_index_setting
if summary_index_setting and summary_index_setting.get("enable"):
@@ -123,37 +123,46 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
)
if document:
logger.info(
f"Checking document {document_id} for summary generation: "
f"status={document.indexing_status}, doc_form={document.doc_form}"
"Checking document %s for summary generation: status=%s, doc_form=%s",
document_id,
document.indexing_status,
document.doc_form,
)
if document.indexing_status == "completed" and document.doc_form != "qa_model":
try:
generate_summary_index_task.delay(dataset.id, document_id, None)
logger.info(
f"Queued summary index generation task for document {document_id} "
f"in dataset {dataset.id} after indexing completed"
"Queued summary index generation task for document %s in dataset %s "
"after indexing completed",
document_id,
dataset.id,
)
except Exception as e:
except Exception:
logger.exception(
f"Failed to queue summary index generation task for document {document_id}: {str(e)}"
"Failed to queue summary index generation task for document %s",
document_id,
)
# Don't fail the entire indexing process if summary task queuing fails
else:
logger.info(
f"Skipping summary generation for document {document_id}: "
f"status={document.indexing_status}, doc_form={document.doc_form}"
"Skipping summary generation for document %s: status=%s, doc_form=%s",
document_id,
document.indexing_status,
document.doc_form,
)
else:
logger.warning(f"Document {document_id} not found after indexing")
logger.warning("Document %s not found after indexing", document_id)
else:
logger.info(
f"Summary index generation skipped for dataset {dataset.id}: "
f"summary_index_setting.enable={summary_index_setting.get('enable') if summary_index_setting else None}"
"Summary index generation skipped for dataset %s: summary_index_setting.enable=%s",
dataset.id,
summary_index_setting.get("enable") if summary_index_setting else None,
)
else:
logger.info(
f"Summary index generation skipped for dataset {dataset.id}: "
f"indexing_technique={dataset.indexing_technique} (not 'high_quality')"
"Summary index generation skipped for dataset %s: indexing_technique=%s (not 'high_quality')",
dataset.id,
dataset.indexing_technique,
)
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))