Refactor: empty ids means no-op operation (#13439)

### What problem does this PR solve?

Providing an empty `ids` list now results in a no-op: nothing is deleted and a success result is returned immediately.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
- [x] Documentation Update
- [x] Refactoring

---------

Co-authored-by: writinwaters <cai.keith@gmail.com>
This commit is contained in:
Yongteng Lei
2026-03-06 18:16:42 +08:00
committed by GitHub
parent 7781c51a21
commit 51be1f1442
43 changed files with 446 additions and 190 deletions

View File

@ -235,16 +235,13 @@ async def delete_chats(tenant_id):
success_count = 0
req = await get_request_json()
if not req:
ids = None
else:
ids = req.get("ids")
return get_result()
ids = req.get("ids")
if not ids:
id_list = []
dias = DialogService.query(tenant_id=tenant_id, status=StatusEnum.VALID.value)
for dia in dias:
id_list.append(dia.id)
else:
id_list = ids
return get_result()
id_list = ids
unique_id_list, duplicate_messages = check_duplicate_ids(id_list, "assistant")

View File

@ -202,10 +202,8 @@ async def delete(tenant_id):
items:
type: string
description: |
Specifies the datasets to delete:
- If `null`, all datasets will be deleted.
- If an array of IDs, only the specified datasets will be deleted.
- If an empty array, no datasets will be deleted.
List of dataset IDs to delete.
If `null` or an empty array is provided, no datasets will be deleted.
responses:
200:
description: Successful operation.
@ -218,22 +216,19 @@ async def delete(tenant_id):
try:
kb_id_instance_pairs = []
if req["ids"] is None:
kbs = KnowledgebaseService.query(tenant_id=tenant_id)
for kb in kbs:
kb_id_instance_pairs.append((kb.id, kb))
if req["ids"] is None or len(req["ids"]) == 0:
return get_result()
else:
error_kb_ids = []
for kb_id in req["ids"]:
kb = KnowledgebaseService.get_or_none(id=kb_id, tenant_id=tenant_id)
if kb is None:
error_kb_ids.append(kb_id)
continue
kb_id_instance_pairs.append((kb_id, kb))
if len(error_kb_ids) > 0:
return get_error_permission_result(
message=f"""User '{tenant_id}' lacks permission for datasets: '{", ".join(error_kb_ids)}'""")
error_kb_ids = []
for kb_id in req["ids"]:
kb = KnowledgebaseService.get_or_none(id=kb_id, tenant_id=tenant_id)
if kb is None:
error_kb_ids.append(kb_id)
continue
kb_id_instance_pairs.append((kb_id, kb))
if len(error_kb_ids) > 0:
return get_error_permission_result(
message=f"""User '{tenant_id}' lacks permission for datasets: '{", ".join(error_kb_ids)}'""")
errors = []
success_count = 0
@ -811,4 +806,4 @@ def trace_raptor(tenant_id,dataset_id):
if not ok:
return get_error_data_result(message="RAPTOR Task Not Found or Error Occurred")
return get_result(data=task.to_dict())
return get_result(data=task.to_dict())

View File

@ -727,7 +727,9 @@ async def delete(tenant_id, dataset_id):
type: array
items:
type: string
description: List of document IDs to delete.
description: |
List of document IDs to delete.
If omitted, `null`, or an empty array is provided, no documents will be deleted.
- in: header
name: Authorization
type: string
@ -743,16 +745,13 @@ async def delete(tenant_id, dataset_id):
return get_error_data_result(message=f"You don't own the dataset {dataset_id}. ")
req = await get_request_json()
if not req:
doc_ids = None
else:
doc_ids = req.get("ids")
return get_result()
doc_ids = req.get("ids")
if not doc_ids:
doc_list = []
docs = DocumentService.query(kb_id=dataset_id)
for doc in docs:
doc_list.append(doc.id)
else:
doc_list = doc_ids
return get_result()
doc_list = doc_ids
unique_doc_ids, duplicate_messages = check_duplicate_ids(doc_list, "document")
doc_list = unique_doc_ids
@ -1318,7 +1317,9 @@ async def rm_chunk(tenant_id, dataset_id, document_id):
type: array
items:
type: string
description: List of chunk IDs to remove.
description: |
List of chunk IDs to remove.
If omitted, `null`, or an empty array is provided, no chunks will be deleted.
- in: header
name: Authorization
type: string
@ -1336,17 +1337,20 @@ async def rm_chunk(tenant_id, dataset_id, document_id):
if not docs:
raise LookupError(f"Can't find the document with ID {document_id}!")
req = await get_request_json()
if not req:
return get_result()
chunk_ids = req.get("chunk_ids")
if not chunk_ids:
return get_result()
condition = {"doc_id": document_id}
if "chunk_ids" in req:
unique_chunk_ids, duplicate_messages = check_duplicate_ids(req["chunk_ids"], "chunk")
condition["id"] = unique_chunk_ids
else:
unique_chunk_ids = []
duplicate_messages = []
unique_chunk_ids, duplicate_messages = check_duplicate_ids(chunk_ids, "chunk")
condition["id"] = unique_chunk_ids
chunk_number = settings.docStoreConn.delete(condition, search.index_name(tenant_id), dataset_id)
if chunk_number != 0:
DocumentService.decrement_chunk_num(document_id, dataset_id, 1, chunk_number, 0)
if "chunk_ids" in req and chunk_number != len(unique_chunk_ids):
if chunk_number != len(unique_chunk_ids):
if len(unique_chunk_ids) == 0:
return get_result(message=f"deleted {chunk_number} chunks")
return get_error_data_result(message=f"rm_chunk deleted chunks {chunk_number}, expect {len(unique_chunk_ids)}")

View File

@ -739,18 +739,14 @@ async def delete(tenant_id, chat_id):
errors = []
success_count = 0
req = await get_request_json()
convs = ConversationService.query(dialog_id=chat_id)
if not req:
ids = None
else:
ids = req.get("ids")
return get_result()
ids = req.get("ids")
if not ids:
conv_list = []
for conv in convs:
conv_list.append(conv.id)
else:
conv_list = ids
return get_result()
conv_list = ids
unique_conv_ids, duplicate_messages = check_duplicate_ids(conv_list, "session")
conv_list = unique_conv_ids
@ -791,21 +787,14 @@ async def delete_agent_session(tenant_id, agent_id):
if not cvs:
return get_error_data_result(f"You don't own the agent {agent_id}")
convs = API4ConversationService.query(dialog_id=agent_id)
if not convs:
return get_error_data_result(f"Agent {agent_id} has no sessions")
if not req:
ids = None
else:
ids = req.get("ids")
return get_result()
ids = req.get("ids")
if not ids:
conv_list = []
for conv in convs:
conv_list.append(conv.id)
else:
conv_list = ids
return get_result()
conv_list = ids
unique_conv_ids, duplicate_messages = check_duplicate_ids(conv_list, "session")
conv_list = unique_conv_ids