mirror of
https://github.com/infiniflow/ragflow.git
synced 2026-05-06 02:07:49 +08:00
Refa: empty ids means no-op operation (#13439)
### What problem does this PR solve? Empty ids means no-op operation. ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) - [x] Documentation Update - [x] Refactoring --------- Co-authored-by: writinwaters <cai.keith@gmail.com>
This commit is contained in:
@ -58,6 +58,23 @@ def delete_datasets(auth, payload=None, *, headers=HEADERS, data=None):
|
||||
return res.json()
|
||||
|
||||
|
||||
def delete_all_datasets(auth, *, page_size=1000):
    """Delete every dataset owned by *auth*.

    The dataset DELETE endpoint treats null/empty ids as a no-op, so cleanup
    must first enumerate all explicit ids page by page, then issue a single
    delete call.
    """
    collected = []
    page_no = 1
    keep_paging = True
    while keep_paging:
        listing = list_datasets(auth, {"page": page_no, "page_size": page_size})
        batch = listing.get("data") or []
        for item in batch:
            collected.append(item["id"])
        keep_paging = len(batch) >= page_size
        page_no += 1
    if collected:
        return delete_datasets(auth, {"ids": collected})
    return {"code": 0, "message": ""}
|
||||
|
||||
|
||||
def batch_create_datasets(auth, num):
|
||||
ids = []
|
||||
for i in range(num):
|
||||
@ -127,6 +144,23 @@ def delete_documents(auth, dataset_id, payload=None):
|
||||
return res.json()
|
||||
|
||||
|
||||
def delete_all_documents(auth, dataset_id, *, page_size=1000):
    """Delete every document in *dataset_id*.

    The document DELETE endpoint treats missing/null/empty ids as a no-op,
    so cleanup has to list all documents first and delete them by explicit id.
    """
    collected = []
    page_no = 1
    while True:
        listing = list_documents(auth, dataset_id, {"page": page_no, "page_size": page_size})
        batch = (listing.get("data") or {}).get("docs") or []
        collected += [doc["id"] for doc in batch]
        if len(batch) < page_size:
            break
        page_no += 1
    if collected:
        return delete_documents(auth, dataset_id, {"ids": collected})
    return {"code": 0, "message": ""}
|
||||
|
||||
|
||||
def parse_documents(auth, dataset_id, payload=None):
|
||||
url = f"{HOST_ADDRESS}{FILE_CHUNK_API_URL}".format(dataset_id=dataset_id)
|
||||
res = requests.post(url=url, headers=HEADERS, auth=auth, json=payload)
|
||||
@ -176,6 +210,23 @@ def delete_chunks(auth, dataset_id, document_id, payload=None):
|
||||
return res.json()
|
||||
|
||||
|
||||
def delete_all_chunks(auth, dataset_id, document_id, *, page_size=1000):
    """Delete every chunk of *document_id*.

    The chunk DELETE endpoint treats missing/null/empty ids as a no-op, so
    cleanup enumerates all chunk ids before issuing one explicit delete.
    """
    collected = []
    page_no = 1
    keep_paging = True
    while keep_paging:
        listing = list_chunks(auth, dataset_id, document_id, {"page": page_no, "page_size": page_size})
        batch = (listing.get("data") or {}).get("chunks") or []
        for chunk in batch:
            collected.append(chunk["id"])
        keep_paging = len(batch) >= page_size
        page_no += 1
    if collected:
        return delete_chunks(auth, dataset_id, document_id, {"chunk_ids": collected})
    return {"code": 0, "message": ""}
|
||||
|
||||
|
||||
def retrieval_chunks(auth, payload=None):
|
||||
url = f"{HOST_ADDRESS}{RETRIEVAL_API_URL}"
|
||||
res = requests.post(url=url, headers=HEADERS, auth=auth, json=payload)
|
||||
@ -215,6 +266,23 @@ def delete_chat_assistants(auth, payload=None):
|
||||
return res.json()
|
||||
|
||||
|
||||
def delete_all_chat_assistants(auth, *, page_size=1000):
    """Delete every chat assistant owned by *auth*.

    The chat DELETE endpoint treats null/empty ids as a no-op, so cleanup
    collects all explicit ids across pages and deletes them in one call.
    """
    collected = []
    page_no = 1
    while True:
        listing = list_chat_assistants(auth, {"page": page_no, "page_size": page_size})
        batch = listing.get("data") or []
        collected += [chat["id"] for chat in batch]
        if len(batch) < page_size:
            break
        page_no += 1
    if collected:
        return delete_chat_assistants(auth, {"ids": collected})
    return {"code": 0, "message": ""}
|
||||
|
||||
|
||||
def batch_create_chat_assistants(auth, num):
|
||||
chat_assistant_ids = []
|
||||
for i in range(num):
|
||||
@ -244,12 +312,27 @@ def update_session_with_chat_assistant(auth, chat_assistant_id, session_id, payl
|
||||
|
||||
def delete_session_with_chat_assistants(auth, chat_assistant_id, payload=None):
    """Send DELETE to a chat assistant's session endpoint and return the parsed JSON body."""
    body = {} if payload is None else payload
    endpoint = f"{HOST_ADDRESS}{SESSION_WITH_CHAT_ASSISTANT_API_URL}".format(chat_id=chat_assistant_id)
    response = requests.delete(url=endpoint, headers=HEADERS, auth=auth, json=body)
    return response.json()
|
||||
|
||||
|
||||
def delete_all_sessions_with_chat_assistant(auth, chat_assistant_id, *, page_size=1000):
    """Delete every session of *chat_assistant_id*.

    The session DELETE endpoint treats missing/null/empty ids as a no-op, so
    cleanup enumerates explicit session ids before deleting them.
    """
    collected = []
    page_no = 1
    keep_paging = True
    while keep_paging:
        listing = list_session_with_chat_assistants(auth, chat_assistant_id, {"page": page_no, "page_size": page_size})
        batch = listing.get("data") or []
        for session in batch:
            collected.append(session["id"])
        keep_paging = len(batch) >= page_size
        page_no += 1
    if collected:
        return delete_session_with_chat_assistants(auth, chat_assistant_id, {"ids": collected})
    return {"code": 0, "message": ""}
|
||||
|
||||
|
||||
def batch_add_sessions_with_chat_assistant(auth, chat_assistant_id, num):
|
||||
session_ids = []
|
||||
for i in range(num):
|
||||
@ -350,12 +433,27 @@ def list_agent_sessions(auth, agent_id, params=None):
|
||||
|
||||
def delete_agent_sessions(auth, agent_id, payload=None):
    """Send DELETE to an agent's session endpoint and return the parsed JSON body."""
    body = {} if payload is None else payload
    endpoint = f"{HOST_ADDRESS}{SESSION_WITH_AGENT_API_URL}".format(agent_id=agent_id)
    response = requests.delete(url=endpoint, headers=HEADERS, auth=auth, json=body)
    return response.json()
|
||||
|
||||
|
||||
def delete_all_agent_sessions(auth, agent_id, *, page_size=1000):
    """Delete every session of *agent_id*.

    The agent-session DELETE endpoint treats missing/null/empty ids as a
    no-op, so cleanup enumerates explicit session ids first.
    """
    collected = []
    page_no = 1
    while True:
        listing = list_agent_sessions(auth, agent_id, {"page": page_no, "page_size": page_size})
        batch = listing.get("data") or []
        collected += [session["id"] for session in batch]
        if len(batch) < page_size:
            break
        page_no += 1
    if collected:
        return delete_agent_sessions(auth, agent_id, {"ids": collected})
    return {"code": 0, "message": ""}
|
||||
|
||||
|
||||
def agent_completions(auth, agent_id, payload=None):
|
||||
url = f"{HOST_ADDRESS}{AGENT_API_URL}/{agent_id}/completions"
|
||||
res = requests.post(url=url, headers=HEADERS, auth=auth, json=payload)
|
||||
|
||||
@ -21,9 +21,9 @@ from common import (
|
||||
batch_create_chat_assistants,
|
||||
batch_create_datasets,
|
||||
bulk_upload_documents,
|
||||
delete_chat_assistants,
|
||||
delete_datasets,
|
||||
delete_session_with_chat_assistants,
|
||||
delete_all_chat_assistants,
|
||||
delete_all_datasets,
|
||||
delete_all_sessions_with_chat_assistant,
|
||||
list_documents,
|
||||
parse_documents,
|
||||
)
|
||||
@ -89,7 +89,7 @@ def HttpApiAuth(token):
|
||||
@pytest.fixture(scope="function")
|
||||
def clear_datasets(request, HttpApiAuth):
|
||||
def cleanup():
|
||||
delete_datasets(HttpApiAuth, {"ids": None})
|
||||
delete_all_datasets(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -97,7 +97,7 @@ def clear_datasets(request, HttpApiAuth):
|
||||
@pytest.fixture(scope="function")
|
||||
def clear_chat_assistants(request, HttpApiAuth):
|
||||
def cleanup():
|
||||
delete_chat_assistants(HttpApiAuth)
|
||||
delete_all_chat_assistants(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -106,7 +106,7 @@ def clear_chat_assistants(request, HttpApiAuth):
|
||||
def clear_session_with_chat_assistants(request, HttpApiAuth, add_chat_assistants):
|
||||
def cleanup():
|
||||
for chat_assistant_id in chat_assistant_ids:
|
||||
delete_session_with_chat_assistants(HttpApiAuth, chat_assistant_id)
|
||||
delete_all_sessions_with_chat_assistant(HttpApiAuth, chat_assistant_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -116,7 +116,7 @@ def clear_session_with_chat_assistants(request, HttpApiAuth, add_chat_assistants
|
||||
@pytest.fixture(scope="class")
|
||||
def add_dataset(request, HttpApiAuth):
|
||||
def cleanup():
|
||||
delete_datasets(HttpApiAuth, {"ids": None})
|
||||
delete_all_datasets(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -127,7 +127,7 @@ def add_dataset(request, HttpApiAuth):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_dataset_func(request, HttpApiAuth):
|
||||
def cleanup():
|
||||
delete_datasets(HttpApiAuth, {"ids": None})
|
||||
delete_all_datasets(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -154,7 +154,7 @@ def add_chunks(HttpApiAuth, add_document):
|
||||
@pytest.fixture(scope="class")
|
||||
def add_chat_assistants(request, HttpApiAuth, add_document):
|
||||
def cleanup():
|
||||
delete_chat_assistants(HttpApiAuth)
|
||||
delete_all_chat_assistants(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import batch_create_chat_assistants, delete_chat_assistants, list_chat_assistants, list_documents, parse_documents
|
||||
from common import batch_create_chat_assistants, delete_all_chat_assistants, list_chat_assistants, list_documents, parse_documents
|
||||
from utils import wait_for
|
||||
|
||||
|
||||
@ -30,7 +30,7 @@ def condition(_auth, _dataset_id):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_chat_assistants_func(request, HttpApiAuth, add_document):
|
||||
def cleanup():
|
||||
delete_chat_assistants(HttpApiAuth)
|
||||
delete_all_chat_assistants(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -299,6 +299,15 @@ def test_update_internal_failure_paths(monkeypatch):
|
||||
def test_delete_duplicate_no_success_path(monkeypatch):
|
||||
module = _load_chat_module(monkeypatch)
|
||||
|
||||
_set_request_json(monkeypatch, module, {})
|
||||
monkeypatch.setattr(
|
||||
module.DialogService,
|
||||
"query",
|
||||
lambda **_kwargs: (_ for _ in ()).throw(AssertionError("query must not run for empty delete payload")),
|
||||
)
|
||||
res = _run(module.delete_chats.__wrapped__("tenant-1"))
|
||||
assert res["code"] == module.RetCode.SUCCESS
|
||||
|
||||
_set_request_json(monkeypatch, module, {"ids": ["chat-1", "chat-1"]})
|
||||
monkeypatch.setattr(module.DialogService, "query", lambda **_kwargs: [SimpleNamespace(id="chat-1")])
|
||||
monkeypatch.setattr(module.DialogService, "update_by_id", lambda *_args, **_kwargs: 0)
|
||||
|
||||
@ -44,8 +44,8 @@ class TestChatAssistantsDelete:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_code, expected_message, remaining",
|
||||
[
|
||||
pytest.param(None, 0, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, 0, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param(None, 0, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, 0, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["invalid_id"]}, 102, "Assistant(invalid_id) not found.", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["\n!?。;!?\"'"]}, 102, """Assistant(\n!?。;!?"\') not found.""", 5, marks=pytest.mark.p3),
|
||||
pytest.param("not json", 100, "AttributeError(\"'str' object has no attribute 'get'\")", 5, marks=pytest.mark.p3),
|
||||
|
||||
@ -18,7 +18,7 @@
|
||||
from time import sleep
|
||||
|
||||
import pytest
|
||||
from common import batch_add_chunks, delete_chunks, list_documents, parse_documents
|
||||
from common import batch_add_chunks, delete_all_chunks, list_documents, parse_documents
|
||||
from utils import wait_for
|
||||
|
||||
|
||||
@ -34,7 +34,7 @@ def condition(_auth, _dataset_id):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_chunks_func(request, HttpApiAuth, add_document):
|
||||
def cleanup():
|
||||
delete_chunks(HttpApiAuth, dataset_id, document_id, {"chunk_ids": []})
|
||||
delete_all_chunks(HttpApiAuth, dataset_id, document_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -158,12 +158,12 @@ class TestChunksDeletion:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_code, expected_message, remaining",
|
||||
[
|
||||
pytest.param(None, 100, """TypeError("argument of type \'NoneType\' is not iterable")""", 5, marks=pytest.mark.skip),
|
||||
pytest.param(None, 0, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"chunk_ids": ["invalid_id"]}, 102, "rm_chunk deleted chunks 0, expect 1", 5, marks=pytest.mark.p3),
|
||||
pytest.param("not json", 100, """UnboundLocalError("local variable \'duplicate_messages\' referenced before assignment")""", 5, marks=pytest.mark.skip(reason="pull/6376")),
|
||||
pytest.param(lambda r: {"chunk_ids": r[:1]}, 0, "", 4, marks=pytest.mark.p3),
|
||||
pytest.param(lambda r: {"chunk_ids": r}, 0, "", 1, marks=pytest.mark.p1),
|
||||
pytest.param({"chunk_ids": []}, 0, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"chunk_ids": []}, 0, "", 5, marks=pytest.mark.p3),
|
||||
],
|
||||
)
|
||||
def test_basic_scenarios(
|
||||
|
||||
@ -16,13 +16,13 @@
|
||||
|
||||
|
||||
import pytest
|
||||
from common import batch_create_datasets, delete_datasets
|
||||
from common import batch_create_datasets, delete_all_datasets
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def add_datasets(HttpApiAuth, request):
|
||||
def cleanup():
|
||||
delete_datasets(HttpApiAuth, {"ids": None})
|
||||
delete_all_datasets(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -32,7 +32,7 @@ def add_datasets(HttpApiAuth, request):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_datasets_func(HttpApiAuth, request):
|
||||
def cleanup():
|
||||
delete_datasets(HttpApiAuth, {"ids": None})
|
||||
delete_all_datasets(HttpApiAuth)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -134,7 +134,7 @@ class TestDatasetsDelete:
|
||||
assert res["code"] == 0, res
|
||||
|
||||
res = list_datasets(HttpApiAuth)
|
||||
assert len(res["data"]) == 0, res
|
||||
assert len(res["data"]) == 3, res
|
||||
|
||||
@pytest.mark.p2
|
||||
@pytest.mark.usefixtures("add_dataset_func")
|
||||
|
||||
@ -16,13 +16,13 @@
|
||||
|
||||
|
||||
import pytest
|
||||
from common import bulk_upload_documents, delete_documents
|
||||
from common import bulk_upload_documents, delete_all_documents
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def add_document_func(request, HttpApiAuth, add_dataset, ragflow_tmp_dir):
|
||||
def cleanup():
|
||||
delete_documents(HttpApiAuth, dataset_id, {"ids": None})
|
||||
delete_all_documents(HttpApiAuth, dataset_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -33,7 +33,7 @@ def add_document_func(request, HttpApiAuth, add_dataset, ragflow_tmp_dir):
|
||||
@pytest.fixture(scope="class")
|
||||
def add_documents(request, HttpApiAuth, add_dataset, ragflow_tmp_dir):
|
||||
def cleanup():
|
||||
delete_documents(HttpApiAuth, dataset_id, {"ids": None})
|
||||
delete_all_documents(HttpApiAuth, dataset_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -44,7 +44,7 @@ def add_documents(request, HttpApiAuth, add_dataset, ragflow_tmp_dir):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_documents_func(request, HttpApiAuth, add_dataset_func, ragflow_tmp_dir):
|
||||
def cleanup():
|
||||
delete_documents(HttpApiAuth, dataset_id, {"ids": None})
|
||||
delete_all_documents(HttpApiAuth, dataset_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -45,8 +45,8 @@ class TestDocumentsDeletion:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_code, expected_message, remaining",
|
||||
[
|
||||
(None, 0, "", 0),
|
||||
({"ids": []}, 0, "", 0),
|
||||
(None, 0, "", 3),
|
||||
({"ids": []}, 0, "", 3),
|
||||
({"ids": ["invalid_id"]}, 102, "Documents not found: ['invalid_id']", 3),
|
||||
(
|
||||
{"ids": ["\n!?。;!?\"'"]},
|
||||
|
||||
@ -692,6 +692,10 @@ class TestDocRoutesUnit:
|
||||
assert "don't own the dataset" in res["message"]
|
||||
|
||||
monkeypatch.setattr(module.KnowledgebaseService, "accessible", lambda **_kwargs: True)
|
||||
monkeypatch.setattr(module, "get_request_json", lambda: _AwaitableValue({}))
|
||||
res = _run(module.delete.__wrapped__("tenant-1", "ds-1"))
|
||||
assert res["code"] == module.RetCode.SUCCESS
|
||||
|
||||
monkeypatch.setattr(module, "get_request_json", lambda: _AwaitableValue({"ids": ["doc-1"]}))
|
||||
monkeypatch.setattr(module, "check_duplicate_ids", lambda ids, _kind: (ids, []))
|
||||
monkeypatch.setattr(module.FileService, "get_root_folder", lambda _tenant: {"id": "pf-1"})
|
||||
@ -871,7 +875,11 @@ class TestDocRoutesUnit:
|
||||
|
||||
monkeypatch.setattr(module.DocumentService, "get_by_ids", lambda _ids: [_DummyDoc()])
|
||||
monkeypatch.setattr(module, "get_request_json", lambda: _AwaitableValue({}))
|
||||
_patch_docstore(monkeypatch, module, delete=lambda *_args, **_kwargs: 2)
|
||||
_patch_docstore(
|
||||
monkeypatch,
|
||||
module,
|
||||
delete=lambda *_args, **_kwargs: (_ for _ in ()).throw(AssertionError("delete must not run for empty chunk ids")),
|
||||
)
|
||||
monkeypatch.setattr(module.DocumentService, "decrement_chunk_num", lambda *_args, **_kwargs: None)
|
||||
res = _run(module.rm_chunk.__wrapped__("tenant-1", "ds-1", "doc-1"))
|
||||
assert res["code"] == 0
|
||||
|
||||
@ -63,4 +63,4 @@ class TestMetadataBatchUpdate:
|
||||
assert doc["meta_fields"].get("status") == "processed", f"Expected status='processed', got {doc['meta_fields'].get('status')}"
|
||||
|
||||
# Cleanup
|
||||
delete_documents(HttpApiAuth, dataset_id, {"ids": None})
|
||||
delete_documents(HttpApiAuth, dataset_id, {"ids": document_ids})
|
||||
|
||||
@ -14,14 +14,14 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import batch_add_sessions_with_chat_assistant, delete_session_with_chat_assistants
|
||||
from common import batch_add_sessions_with_chat_assistant, delete_all_sessions_with_chat_assistant
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def add_sessions_with_chat_assistant(request, HttpApiAuth, add_chat_assistants):
|
||||
def cleanup():
|
||||
for chat_assistant_id in chat_assistant_ids:
|
||||
delete_session_with_chat_assistants(HttpApiAuth, chat_assistant_id)
|
||||
delete_all_sessions_with_chat_assistant(HttpApiAuth, chat_assistant_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -33,7 +33,7 @@ def add_sessions_with_chat_assistant(request, HttpApiAuth, add_chat_assistants):
|
||||
def add_sessions_with_chat_assistant_func(request, HttpApiAuth, add_chat_assistants):
|
||||
def cleanup():
|
||||
for chat_assistant_id in chat_assistant_ids:
|
||||
delete_session_with_chat_assistants(HttpApiAuth, chat_assistant_id)
|
||||
delete_all_sessions_with_chat_assistant(HttpApiAuth, chat_assistant_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ from common import (
|
||||
create_agent,
|
||||
create_agent_session,
|
||||
delete_agent,
|
||||
delete_agent_sessions,
|
||||
delete_all_agent_sessions,
|
||||
list_agents,
|
||||
)
|
||||
|
||||
@ -65,7 +65,7 @@ def agent_id(HttpApiAuth, request):
|
||||
agent_id = res["data"][0]["id"]
|
||||
|
||||
def cleanup():
|
||||
delete_agent_sessions(HttpApiAuth, agent_id)
|
||||
delete_all_agent_sessions(HttpApiAuth, agent_id)
|
||||
delete_agent(HttpApiAuth, agent_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -19,6 +19,7 @@ from common import (
|
||||
create_agent,
|
||||
create_agent_session,
|
||||
delete_agent,
|
||||
delete_all_agent_sessions,
|
||||
delete_agent_sessions,
|
||||
list_agent_sessions,
|
||||
list_agents,
|
||||
@ -67,7 +68,7 @@ def agent_id(HttpApiAuth, request):
|
||||
agent_id = res["data"][0]["id"]
|
||||
|
||||
def cleanup():
|
||||
delete_agent_sessions(HttpApiAuth, agent_id)
|
||||
delete_all_agent_sessions(HttpApiAuth, agent_id)
|
||||
delete_agent(HttpApiAuth, agent_id)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
@ -75,6 +76,19 @@ def agent_id(HttpApiAuth, request):
|
||||
|
||||
|
||||
class TestAgentSessions:
|
||||
@pytest.mark.p2
|
||||
def test_delete_agent_sessions_empty_ids_noop(self, HttpApiAuth, agent_id):
|
||||
res = create_agent_session(HttpApiAuth, agent_id, payload={})
|
||||
assert res["code"] == 0, res
|
||||
session_id = res["data"]["id"]
|
||||
|
||||
res = delete_agent_sessions(HttpApiAuth, agent_id, {"ids": []})
|
||||
assert res["code"] == 0, res
|
||||
|
||||
res = list_agent_sessions(HttpApiAuth, agent_id, params={"id": session_id})
|
||||
assert res["code"] == 0, res
|
||||
assert len(res["data"]) == 1, res
|
||||
|
||||
@pytest.mark.p2
|
||||
def test_create_list_delete_agent_sessions(self, HttpApiAuth, agent_id):
|
||||
res = create_agent_session(HttpApiAuth, agent_id, payload={})
|
||||
|
||||
@ -19,8 +19,8 @@ from common import (
|
||||
chat_completions,
|
||||
create_chat_assistant,
|
||||
create_session_with_chat_assistant,
|
||||
delete_chat_assistants,
|
||||
delete_session_with_chat_assistants,
|
||||
delete_all_chat_assistants,
|
||||
delete_all_sessions_with_chat_assistant,
|
||||
list_documents,
|
||||
parse_documents,
|
||||
)
|
||||
@ -52,8 +52,8 @@ class TestChatCompletions:
|
||||
res = create_chat_assistant(HttpApiAuth, {"name": "chat_completion_test", "dataset_ids": [dataset_id]})
|
||||
assert res["code"] == 0, res
|
||||
chat_id = res["data"]["id"]
|
||||
request.addfinalizer(lambda: delete_session_with_chat_assistants(HttpApiAuth, chat_id))
|
||||
request.addfinalizer(lambda: delete_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_sessions_with_chat_assistant(HttpApiAuth, chat_id))
|
||||
|
||||
res = create_session_with_chat_assistant(HttpApiAuth, chat_id, {"name": "session_for_completion"})
|
||||
assert res["code"] == 0, res
|
||||
@ -85,8 +85,8 @@ class TestChatCompletions:
|
||||
res = create_chat_assistant(HttpApiAuth, {"name": "chat_completion_invalid_session", "dataset_ids": []})
|
||||
assert res["code"] == 0, res
|
||||
chat_id = res["data"]["id"]
|
||||
request.addfinalizer(lambda: delete_session_with_chat_assistants(HttpApiAuth, chat_id))
|
||||
request.addfinalizer(lambda: delete_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_sessions_with_chat_assistant(HttpApiAuth, chat_id))
|
||||
|
||||
res = chat_completions(
|
||||
HttpApiAuth,
|
||||
@ -101,8 +101,8 @@ class TestChatCompletions:
|
||||
res = create_chat_assistant(HttpApiAuth, {"name": "chat_completion_invalid_meta", "dataset_ids": []})
|
||||
assert res["code"] == 0, res
|
||||
chat_id = res["data"]["id"]
|
||||
request.addfinalizer(lambda: delete_session_with_chat_assistants(HttpApiAuth, chat_id))
|
||||
request.addfinalizer(lambda: delete_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_sessions_with_chat_assistant(HttpApiAuth, chat_id))
|
||||
|
||||
res = create_session_with_chat_assistant(HttpApiAuth, chat_id, {"name": "session_for_meta"})
|
||||
assert res["code"] == 0, res
|
||||
|
||||
@ -18,7 +18,7 @@ from common import (
|
||||
bulk_upload_documents,
|
||||
chat_completions_openai,
|
||||
create_chat_assistant,
|
||||
delete_chat_assistants,
|
||||
delete_all_chat_assistants,
|
||||
list_documents,
|
||||
parse_documents,
|
||||
)
|
||||
@ -53,7 +53,7 @@ class TestChatCompletionsOpenAI:
|
||||
res = create_chat_assistant(HttpApiAuth, {"name": "openai_endpoint_test", "dataset_ids": [dataset_id]})
|
||||
assert res["code"] == 0, res
|
||||
chat_id = res["data"]["id"]
|
||||
request.addfinalizer(lambda: delete_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_chat_assistants(HttpApiAuth))
|
||||
|
||||
res = chat_completions_openai(
|
||||
HttpApiAuth,
|
||||
@ -92,7 +92,7 @@ class TestChatCompletionsOpenAI:
|
||||
res = create_chat_assistant(HttpApiAuth, {"name": "openai_token_count_test", "dataset_ids": [dataset_id]})
|
||||
assert res["code"] == 0, res
|
||||
chat_id = res["data"]["id"]
|
||||
request.addfinalizer(lambda: delete_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_chat_assistants(HttpApiAuth))
|
||||
|
||||
# Use a message with known token count
|
||||
# "hello" is 1 token in cl100k_base encoding
|
||||
@ -202,7 +202,7 @@ class TestChatCompletionsOpenAI:
|
||||
res = create_chat_assistant(HttpApiAuth, {"name": "openai_validation_case", "dataset_ids": []})
|
||||
assert res["code"] == 0, res
|
||||
chat_id = res["data"]["id"]
|
||||
request.addfinalizer(lambda: delete_chat_assistants(HttpApiAuth))
|
||||
request.addfinalizer(lambda: delete_all_chat_assistants(HttpApiAuth))
|
||||
|
||||
res = chat_completions_openai(HttpApiAuth, chat_id, payload)
|
||||
assert res.get("code") != 0, res
|
||||
|
||||
@ -141,12 +141,12 @@ class TestSessionWithChatAssistantDelete:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_code, expected_message, remaining",
|
||||
[
|
||||
pytest.param(None, 0, """TypeError("argument of type \'NoneType\' is not iterable")""", 0, marks=pytest.mark.skip),
|
||||
pytest.param(None, 0, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["invalid_id"]}, 102, "The chat doesn't own the session invalid_id", 5, marks=pytest.mark.p3),
|
||||
pytest.param("not json", 100, """AttributeError("\'str\' object has no attribute \'get\'")""", 5, marks=pytest.mark.skip),
|
||||
pytest.param(lambda r: {"ids": r[:1]}, 0, "", 4, marks=pytest.mark.p3),
|
||||
pytest.param(lambda r: {"ids": r}, 0, "", 0, marks=pytest.mark.p1),
|
||||
pytest.param({"ids": []}, 0, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, 0, "", 5, marks=pytest.mark.p3),
|
||||
],
|
||||
)
|
||||
def test_basic_scenarios(
|
||||
|
||||
@ -985,6 +985,10 @@ def test_delete_routes_partial_duplicate_unit(monkeypatch):
|
||||
module = _load_session_module(monkeypatch)
|
||||
|
||||
monkeypatch.setattr(module.DialogService, "query", lambda **_kwargs: [SimpleNamespace(id="chat-1")])
|
||||
monkeypatch.setattr(module, "get_request_json", lambda: _AwaitableValue({}))
|
||||
res = _run(inspect.unwrap(module.delete)("tenant-1", "chat-1"))
|
||||
assert res["code"] == 0
|
||||
|
||||
monkeypatch.setattr(module.ConversationService, "delete_by_id", lambda *_args, **_kwargs: True)
|
||||
|
||||
def _conversation_query(**kwargs):
|
||||
@ -1016,6 +1020,10 @@ def test_delete_routes_partial_duplicate_unit(monkeypatch):
|
||||
assert res["data"]["errors"] == ["Duplicate session ids: ok"]
|
||||
|
||||
monkeypatch.setattr(module.UserCanvasService, "query", lambda **_kwargs: [SimpleNamespace(id="agent-1")])
|
||||
monkeypatch.setattr(module, "get_request_json", lambda: _AwaitableValue({}))
|
||||
res = _run(inspect.unwrap(module.delete_agent_session)("tenant-1", "agent-1"))
|
||||
assert res["code"] == 0
|
||||
|
||||
monkeypatch.setattr(module, "get_request_json", lambda: _AwaitableValue({"ids": ["session-1"]}))
|
||||
monkeypatch.setattr(module, "check_duplicate_ids", lambda ids, _kind: (ids, []))
|
||||
|
||||
|
||||
@ -25,6 +25,36 @@ def batch_create_datasets(client: RAGFlow, num: int) -> list[DataSet]:
|
||||
return [client.create_dataset(name=f"dataset_{i}") for i in range(num)]
|
||||
|
||||
|
||||
def delete_all_datasets(client: RAGFlow, *, page_size: int = 1000) -> None:
    """Remove every dataset visible to *client*.

    The dataset DELETE endpoint treats null/empty ids as a no-op, so all ids
    are gathered page by page and deleted in one explicit call.
    """
    ids: list[str] = []
    current = 1
    while True:
        batch = client.list_datasets(page=current, page_size=page_size)
        ids += [item.id for item in batch]
        if len(batch) < page_size:
            break
        current += 1
    if ids:
        client.delete_datasets(ids=ids)
|
||||
|
||||
|
||||
def delete_all_chats(client: RAGFlow, *, page_size: int = 1000) -> None:
    """Remove every chat assistant visible to *client*.

    The chat DELETE endpoint treats null/empty ids as a no-op, so ids are
    enumerated across pages before a single explicit delete.
    """
    ids: list[str] = []
    current = 1
    keep_paging = True
    while keep_paging:
        batch = client.list_chats(page=current, page_size=page_size)
        for chat in batch:
            ids.append(chat.id)
        keep_paging = len(batch) >= page_size
        current += 1
    if ids:
        client.delete_chats(ids=ids)
|
||||
|
||||
|
||||
# FILE MANAGEMENT WITHIN DATASET
|
||||
def bulk_upload_documents(dataset: DataSet, num: int, tmp_path: Path) -> list[Document]:
|
||||
document_infos = []
|
||||
@ -37,6 +67,51 @@ def bulk_upload_documents(dataset: DataSet, num: int, tmp_path: Path) -> list[Do
|
||||
return dataset.upload_documents(document_infos)
|
||||
|
||||
|
||||
def delete_all_documents(dataset: DataSet, *, page_size: int = 1000) -> None:
    """Remove every document of *dataset*.

    The document DELETE endpoint treats missing/null/empty ids as a no-op,
    so ids are enumerated page by page and deleted explicitly.
    """
    ids: list[str] = []
    current = 1
    while True:
        batch = dataset.list_documents(page=current, page_size=page_size)
        ids += [doc.id for doc in batch]
        if len(batch) < page_size:
            break
        current += 1
    if ids:
        dataset.delete_documents(ids=ids)
|
||||
|
||||
|
||||
def delete_all_sessions(chat_assistant: Chat, *, page_size: int = 1000) -> None:
    """Remove every session of *chat_assistant*.

    The session DELETE endpoint treats missing/null/empty ids as a no-op, so
    all session ids are collected before issuing one explicit delete call.
    """
    ids: list[str] = []
    current = 1
    keep_paging = True
    while keep_paging:
        batch = chat_assistant.list_sessions(page=current, page_size=page_size)
        for session in batch:
            ids.append(session.id)
        keep_paging = len(batch) >= page_size
        current += 1
    if ids:
        chat_assistant.delete_sessions(ids=ids)
|
||||
|
||||
|
||||
def delete_all_chunks(document: Document, *, page_size: int = 1000) -> None:
    """Remove every chunk of *document*.

    The chunk DELETE endpoint treats missing/null/empty ids as a no-op, so
    chunk ids are enumerated across pages and deleted in one explicit call.
    """
    ids: list[str] = []
    current = 1
    while True:
        batch = document.list_chunks(page=current, page_size=page_size)
        ids += [chunk.id for chunk in batch]
        if len(batch) < page_size:
            break
        current += 1
    if ids:
        document.delete_chunks(ids=ids)
|
||||
|
||||
|
||||
# CHUNK MANAGEMENT WITHIN DATASET
|
||||
def batch_add_chunks(document: Document, num: int) -> list[Chunk]:
|
||||
return [document.add_chunk(content=f"chunk test {i}") for i in range(num)]
|
||||
|
||||
@ -23,6 +23,10 @@ from common import (
|
||||
batch_create_chat_assistants,
|
||||
batch_create_datasets,
|
||||
bulk_upload_documents,
|
||||
delete_all_chats,
|
||||
delete_all_chunks,
|
||||
delete_all_datasets,
|
||||
delete_all_sessions,
|
||||
)
|
||||
from configs import HOST_ADDRESS, VERSION
|
||||
from pytest import FixtureRequest
|
||||
@ -88,7 +92,7 @@ def client(token: str) -> RAGFlow:
|
||||
@pytest.fixture(scope="function")
|
||||
def clear_datasets(request: FixtureRequest, client: RAGFlow):
|
||||
def cleanup():
|
||||
client.delete_datasets(ids=None)
|
||||
delete_all_datasets(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -96,7 +100,7 @@ def clear_datasets(request: FixtureRequest, client: RAGFlow):
|
||||
@pytest.fixture(scope="function")
|
||||
def clear_chat_assistants(request: FixtureRequest, client: RAGFlow):
|
||||
def cleanup():
|
||||
client.delete_chats(ids=None)
|
||||
delete_all_chats(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -106,7 +110,7 @@ def clear_session_with_chat_assistants(request, add_chat_assistants):
|
||||
def cleanup():
|
||||
for chat_assistant in chat_assistants:
|
||||
try:
|
||||
chat_assistant.delete_sessions(ids=None)
|
||||
delete_all_sessions(chat_assistant)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@ -118,7 +122,7 @@ def clear_session_with_chat_assistants(request, add_chat_assistants):
|
||||
@pytest.fixture(scope="class")
|
||||
def add_dataset(request: FixtureRequest, client: RAGFlow) -> DataSet:
|
||||
def cleanup():
|
||||
client.delete_datasets(ids=None)
|
||||
delete_all_datasets(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return batch_create_datasets(client, 1)[0]
|
||||
@ -127,7 +131,7 @@ def add_dataset(request: FixtureRequest, client: RAGFlow) -> DataSet:
|
||||
@pytest.fixture(scope="function")
|
||||
def add_dataset_func(request: FixtureRequest, client: RAGFlow) -> DataSet:
|
||||
def cleanup():
|
||||
client.delete_datasets(ids=None)
|
||||
delete_all_datasets(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return batch_create_datasets(client, 1)[0]
|
||||
@ -142,7 +146,7 @@ def add_document(add_dataset: DataSet, ragflow_tmp_dir: Path) -> tuple[DataSet,
|
||||
def add_chunks(request: FixtureRequest, add_document: tuple[DataSet, Document]) -> tuple[DataSet, Document, list[Chunk]]:
|
||||
def cleanup():
|
||||
try:
|
||||
document.delete_chunks(ids=[])
|
||||
delete_all_chunks(document)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@ -161,7 +165,7 @@ def add_chunks(request: FixtureRequest, add_document: tuple[DataSet, Document])
|
||||
def add_chat_assistants(request, client, add_document) -> tuple[DataSet, Document, list[Chat]]:
|
||||
def cleanup():
|
||||
try:
|
||||
client.delete_chats(ids=None)
|
||||
delete_all_chats(client)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import batch_create_chat_assistants
|
||||
from common import batch_create_chat_assistants, delete_all_chats
|
||||
from pytest import FixtureRequest
|
||||
from ragflow_sdk import Chat, DataSet, Document, RAGFlow
|
||||
from utils import wait_for
|
||||
@ -32,7 +32,7 @@ def condition(_dataset: DataSet):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_chat_assistants_func(request: FixtureRequest, client: RAGFlow, add_document: tuple[DataSet, Document]) -> tuple[DataSet, Document, list[Chat]]:
|
||||
def cleanup():
|
||||
client.delete_chats(ids=None)
|
||||
delete_all_chats(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -23,8 +23,8 @@ class TestChatAssistantsDelete:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_message, remaining",
|
||||
[
|
||||
pytest.param(None, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param(None, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["invalid_id"]}, "Assistant(invalid_id) not found.", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["\n!?。;!?\"'"]}, """Assistant(\n!?。;!?"\') not found.""", 5, marks=pytest.mark.p3),
|
||||
pytest.param(lambda r: {"ids": r[:1]}, "", 4, marks=pytest.mark.p3),
|
||||
|
||||
@ -18,7 +18,7 @@
|
||||
from time import sleep
|
||||
|
||||
import pytest
|
||||
from common import batch_add_chunks
|
||||
from common import batch_add_chunks, delete_all_chunks
|
||||
from pytest import FixtureRequest
|
||||
from ragflow_sdk import Chunk, DataSet, Document
|
||||
from utils import wait_for
|
||||
@ -37,7 +37,7 @@ def condition(_dataset: DataSet):
|
||||
def add_chunks_func(request: FixtureRequest, add_document: tuple[DataSet, Document]) -> tuple[DataSet, Document, list[Chunk]]:
|
||||
def cleanup():
|
||||
try:
|
||||
document.delete_chunks(ids=[])
|
||||
delete_all_chunks(document)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@ -88,12 +88,12 @@ class TestChunksDeletion:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_message, remaining",
|
||||
[
|
||||
pytest.param(None, "TypeError", 5, marks=pytest.mark.skip),
|
||||
pytest.param(None, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["invalid_id"]}, "rm_chunk deleted chunks 0, expect 1", 5, marks=pytest.mark.p3),
|
||||
pytest.param("not json", "UnboundLocalError", 5, marks=pytest.mark.skip(reason="pull/6376")),
|
||||
pytest.param(lambda r: {"ids": r[:1]}, "", 4, marks=pytest.mark.p3),
|
||||
pytest.param(lambda r: {"ids": r}, "", 1, marks=pytest.mark.p1),
|
||||
pytest.param({"ids": []}, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, "", 5, marks=pytest.mark.p3),
|
||||
],
|
||||
)
|
||||
def test_basic_scenarios(self, add_chunks_func, payload, expected_message, remaining):
|
||||
@ -107,7 +107,10 @@ class TestChunksDeletion:
|
||||
document.delete_chunks(**payload)
|
||||
assert expected_message in str(exception_info.value), str(exception_info.value)
|
||||
else:
|
||||
document.delete_chunks(**payload)
|
||||
if payload is None:
|
||||
document.delete_chunks()
|
||||
else:
|
||||
document.delete_chunks(**payload)
|
||||
|
||||
remaining_chunks = document.list_chunks()
|
||||
assert len(remaining_chunks) == remaining, str(remaining_chunks)
|
||||
|
||||
@ -16,13 +16,13 @@
|
||||
|
||||
|
||||
import pytest
|
||||
from common import batch_create_datasets
|
||||
from common import batch_create_datasets, delete_all_datasets
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def add_datasets(client, request):
|
||||
def cleanup():
|
||||
client.delete_datasets(**{"ids": None})
|
||||
delete_all_datasets(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
@ -32,7 +32,7 @@ def add_datasets(client, request):
|
||||
@pytest.fixture(scope="function")
|
||||
def add_datasets_func(client, request):
|
||||
def cleanup():
|
||||
client.delete_datasets(**{"ids": None})
|
||||
delete_all_datasets(client)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
|
||||
|
||||
@ -95,7 +95,7 @@ class TestDatasetsDelete:
|
||||
client.delete_datasets(**payload)
|
||||
|
||||
datasets = client.list_datasets()
|
||||
assert len(datasets) == 0, str(datasets)
|
||||
assert len(datasets) == 3, str(datasets)
|
||||
|
||||
@pytest.mark.p2
|
||||
@pytest.mark.usefixtures("add_dataset_func")
|
||||
|
||||
@ -16,7 +16,7 @@
|
||||
|
||||
|
||||
import pytest
|
||||
from common import bulk_upload_documents
|
||||
from common import bulk_upload_documents, delete_all_documents
|
||||
from pytest import FixtureRequest
|
||||
from ragflow_sdk import DataSet, Document
|
||||
|
||||
@ -27,7 +27,7 @@ def add_document_func(request: FixtureRequest, add_dataset: DataSet, ragflow_tmp
|
||||
documents = bulk_upload_documents(dataset, 1, ragflow_tmp_dir)
|
||||
|
||||
def cleanup():
|
||||
dataset.delete_documents(ids=None)
|
||||
delete_all_documents(dataset)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return dataset, documents[0]
|
||||
@ -39,7 +39,7 @@ def add_documents(request: FixtureRequest, add_dataset: DataSet, ragflow_tmp_dir
|
||||
documents = bulk_upload_documents(dataset, 5, ragflow_tmp_dir)
|
||||
|
||||
def cleanup():
|
||||
dataset.delete_documents(ids=None)
|
||||
delete_all_documents(dataset)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return dataset, documents
|
||||
@ -51,7 +51,7 @@ def add_documents_func(request: FixtureRequest, add_dataset_func: DataSet, ragfl
|
||||
documents = bulk_upload_documents(dataset, 3, ragflow_tmp_dir)
|
||||
|
||||
def cleanup():
|
||||
dataset.delete_documents(ids=None)
|
||||
delete_all_documents(dataset)
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return dataset, documents
|
||||
|
||||
@ -24,8 +24,8 @@ class TestDocumentsDeletion:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_message, remaining",
|
||||
[
|
||||
({"ids": None}, "", 0),
|
||||
({"ids": []}, "", 0),
|
||||
({"ids": None}, "", 3),
|
||||
({"ids": []}, "", 3),
|
||||
({"ids": ["invalid_id"]}, "Documents not found: ['invalid_id']", 3),
|
||||
({"ids": ["\n!?。;!?\"'"]}, "Documents not found: ['\\n!?。;!?\"\\'']", 3),
|
||||
("not json", "must be a mapping", 3),
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import batch_add_sessions_with_chat_assistant
|
||||
from common import batch_add_sessions_with_chat_assistant, delete_all_sessions
|
||||
from pytest import FixtureRequest
|
||||
from ragflow_sdk import Chat, DataSet, Document, Session
|
||||
|
||||
@ -24,7 +24,7 @@ def add_sessions_with_chat_assistant(request: FixtureRequest, add_chat_assistant
|
||||
def cleanup():
|
||||
for chat_assistant in chat_assistants:
|
||||
try:
|
||||
chat_assistant.delete_sessions(ids=None)
|
||||
delete_all_sessions(chat_assistant)
|
||||
except Exception :
|
||||
pass
|
||||
|
||||
@ -39,7 +39,7 @@ def add_sessions_with_chat_assistant_func(request: FixtureRequest, add_chat_assi
|
||||
def cleanup():
|
||||
for chat_assistant in chat_assistants:
|
||||
try:
|
||||
chat_assistant.delete_sessions(ids=None)
|
||||
delete_all_sessions(chat_assistant)
|
||||
except Exception :
|
||||
pass
|
||||
|
||||
|
||||
@ -84,12 +84,12 @@ class TestSessionWithChatAssistantDelete:
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_message, remaining",
|
||||
[
|
||||
pytest.param(None, """TypeError("argument of type \'NoneType\' is not iterable")""", 0, marks=pytest.mark.skip),
|
||||
pytest.param(None, "", 5, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": ["invalid_id"]}, "The chat doesn't own the session invalid_id", 5, marks=pytest.mark.p3),
|
||||
pytest.param("not json", """AttributeError("\'str\' object has no attribute \'get\'")""", 5, marks=pytest.mark.skip),
|
||||
pytest.param(lambda r: {"ids": r[:1]}, "", 4, marks=pytest.mark.p3),
|
||||
pytest.param(lambda r: {"ids": r}, "", 0, marks=pytest.mark.p1),
|
||||
pytest.param({"ids": []}, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"ids": []}, "", 5, marks=pytest.mark.p3),
|
||||
],
|
||||
)
|
||||
def test_basic_scenarios(self, add_sessions_with_chat_assistant_func, payload, expected_message, remaining):
|
||||
@ -102,7 +102,10 @@ class TestSessionWithChatAssistantDelete:
|
||||
chat_assistant.delete_sessions(**payload)
|
||||
assert expected_message in str(exception_info.value)
|
||||
else:
|
||||
chat_assistant.delete_sessions(**payload)
|
||||
if payload is None:
|
||||
chat_assistant.delete_sessions()
|
||||
else:
|
||||
chat_assistant.delete_sessions(**payload)
|
||||
|
||||
sessions = chat_assistant.list_sessions()
|
||||
assert len(sessions) == remaining
|
||||
|
||||
@ -673,6 +673,20 @@ def test_rm_chunk_delete_exception_partial_compensation_and_cleanup_unit(monkeyp
|
||||
res = _run(module.rm())
|
||||
assert res["message"] == "Document not found!", res
|
||||
|
||||
_set_request_json(monkeypatch, module, {"doc_id": "doc-1", "chunk_ids": []})
|
||||
monkeypatch.setattr(
|
||||
module.DocumentService,
|
||||
"get_by_id",
|
||||
lambda _doc_id: (_ for _ in ()).throw(AssertionError("get_by_id must not run for empty delete payload")),
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
module.settings.docStoreConn,
|
||||
"delete",
|
||||
lambda *_args, **_kwargs: (_ for _ in ()).throw(AssertionError("delete must not run for empty delete payload")),
|
||||
)
|
||||
res = _run(module.rm())
|
||||
assert res["code"] == 0, res
|
||||
|
||||
monkeypatch.setattr(module.DocumentService, "get_by_id", lambda _doc_id: (True, _DummyDoc()))
|
||||
|
||||
def _raise_delete(*_args, **_kwargs):
|
||||
|
||||
@ -165,7 +165,7 @@ class TestChunksDeletion:
|
||||
pytest.param("not json", 100, """UnboundLocalError("local variable \'duplicate_messages\' referenced before assignment")""", 5, marks=pytest.mark.skip(reason="pull/6376")),
|
||||
pytest.param(lambda r: {"chunk_ids": r[:1]}, 0, "", 3, marks=pytest.mark.p3),
|
||||
pytest.param(lambda r: {"chunk_ids": r}, 0, "", 0, marks=pytest.mark.p1),
|
||||
pytest.param({"chunk_ids": []}, 0, "", 0, marks=pytest.mark.p3),
|
||||
pytest.param({"chunk_ids": []}, 0, "", 5, marks=pytest.mark.p3),
|
||||
],
|
||||
)
|
||||
def test_basic_scenarios(self, WebApiAuth, add_chunks_func, payload, expected_code, expected_message, remaining):
|
||||
|
||||
@ -472,14 +472,8 @@ def test_delete_route_error_summary_matrix_unit(monkeypatch):
|
||||
assert res["data"]["errors"], res
|
||||
|
||||
req_state["ids"] = None
|
||||
monkeypatch.setattr(
|
||||
module.KnowledgebaseService,
|
||||
"query",
|
||||
lambda **_kwargs: (_ for _ in ()).throw(module.OperationalError("db down")),
|
||||
)
|
||||
res = _run(inspect.unwrap(module.delete)("tenant-1"))
|
||||
assert res["code"] == module.RetCode.DATA_ERROR, res
|
||||
assert res["message"] == "Database operation failed", res
|
||||
assert res["code"] == module.RetCode.SUCCESS, res
|
||||
|
||||
|
||||
@pytest.mark.p2
|
||||
|
||||
@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
import pytest
|
||||
from common import batch_create_datasets
|
||||
from common import batch_create_datasets, list_kbs, rm_kb
|
||||
from libs.auth import RAGFlowWebApiAuth
|
||||
from pytest import FixtureRequest
|
||||
from ragflow_sdk import RAGFlow
|
||||
@ -22,17 +22,31 @@ from ragflow_sdk import RAGFlow
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def add_datasets(request: FixtureRequest, client: RAGFlow, WebApiAuth: RAGFlowWebApiAuth) -> list[str]:
|
||||
dataset_ids = batch_create_datasets(WebApiAuth, 5)
|
||||
|
||||
def cleanup():
|
||||
client.delete_datasets(ids=None)
|
||||
# Web KB cleanup cannot call SDK dataset bulk delete with empty ids; deletion must stay explicit.
|
||||
res = list_kbs(WebApiAuth, params={"page_size": 1000})
|
||||
existing_ids = {kb["id"] for kb in res["data"]["kbs"]}
|
||||
for dataset_id in dataset_ids:
|
||||
if dataset_id in existing_ids:
|
||||
rm_kb(WebApiAuth, {"kb_id": dataset_id})
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return batch_create_datasets(WebApiAuth, 5)
|
||||
return dataset_ids
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def add_datasets_func(request: FixtureRequest, client: RAGFlow, WebApiAuth: RAGFlowWebApiAuth) -> list[str]:
|
||||
dataset_ids = batch_create_datasets(WebApiAuth, 3)
|
||||
|
||||
def cleanup():
|
||||
client.delete_datasets(ids=None)
|
||||
# Web KB cleanup cannot call SDK dataset bulk delete with empty ids; deletion must stay explicit.
|
||||
res = list_kbs(WebApiAuth, params={"page_size": 1000})
|
||||
existing_ids = {kb["id"] for kb in res["data"]["kbs"]}
|
||||
for dataset_id in dataset_ids:
|
||||
if dataset_id in existing_ids:
|
||||
rm_kb(WebApiAuth, {"kb_id": dataset_id})
|
||||
|
||||
request.addfinalizer(cleanup)
|
||||
return batch_create_datasets(WebApiAuth, 3)
|
||||
return dataset_ids
|
||||
|
||||
Reference in New Issue
Block a user