Align p3 HTTP/SDK tests with current backend behavior (#12563)

### What problem does this PR solve?

Updates pre-existing HTTP API and SDK tests to align with current
backend behavior (validation errors, 404s, and schema defaults). This
ensures p3 regression coverage is accurate without changing production
code.

### Type of change

- [x] Other (please describe): align p3 HTTP/SDK tests with current
backend behavior

---------

Co-authored-by: Liu An <asiro@qq.com>
Author: 6ba3i
Date: 2026-01-13 19:22:47 +08:00
Committed by: GitHub
Parent: 941651a16f
Commit: 0795616b34
15 changed files with 94 additions and 66 deletions

View File

@ -265,7 +265,6 @@ async def not_found(error):
"message": error_msg,
}, 404
@app.teardown_request
def _db_close(exception):
if exception:

View File

@ -176,7 +176,7 @@ async def update(tenant_id, chat_id):
req["llm_id"] = llm.pop("model_name")
if req.get("llm_id") is not None:
llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(req["llm_id"])
model_type = llm.pop("model_type")
model_type = llm.get("model_type")
model_type = model_type if model_type in ["chat", "image2text"] else "chat"
if not TenantLLMService.query(tenant_id=tenant_id, llm_name=llm_name, llm_factory=llm_factory, model_type=model_type):
return get_error_data_result(f"`model_name` {req.get('llm_id')} doesn't exist")
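The `pop("model_type")` → `get("model_type")` switch changes two things: a missing `model_type` no longer raises `KeyError` (the next line's `"chat"` fallback applies instead), and the key is left in the `llm` dict rather than being removed. A standalone illustration with a hypothetical payload:

```python
# Hypothetical update payload that omits "model_type".
llm = {"model_name": "gpt-4o@OpenAI"}

# New behavior: get() returns None for the missing key, so the fallback applies.
model_type = llm.get("model_type")
model_type = model_type if model_type in ["chat", "image2text"] else "chat"
assert model_type == "chat"

# Old behavior: pop() without a default raises on the same payload.
try:
    llm.pop("model_type")
except KeyError:
    print("pop() raised KeyError for a payload that omits model_type")
```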

View File

@ -82,6 +82,8 @@ async def validate_and_parse_json_request(request: Request, validator: type[Base
2. Extra fields added via `extras` parameter are automatically removed
from the final output after validation
"""
if request.mimetype != "application/json":
return None, f"Unsupported content type: Expected application/json, got {request.content_type}"
try:
payload = await request.get_json() or {}
except UnsupportedMediaType:
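The new guard rejects non-JSON bodies before `get_json()` runs: it checks `request.mimetype` (the content type with parameters such as `charset` stripped) but echoes the full `request.content_type` back in the error message. A standalone sketch of the behavior, not the helper itself:

```python
# Minimal stand-in for the added guard (the real helper returns a
# (payload, error_message) tuple, as the hunk above shows).
def check_json_content_type(mimetype: str, content_type: str):
    if mimetype != "application/json":
        return None, f"Unsupported content type: Expected application/json, got {content_type}"
    return {}, None

# "application/json; charset=utf-8" still passes: request.mimetype strips
# parameters such as charset before the comparison.
assert check_json_content_type("application/json", "application/json; charset=utf-8")[1] is None
# Anything else is rejected with a descriptive message instead of an exception.
assert "Unsupported content type" in check_json_content_type("text/plain", "text/plain")[1]
```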

View File

@ -30,6 +30,7 @@ fN33jCHRoDUW81IH9zjij/vaw8IbVyb6vuwg6MX6inOEBRRzVbRYxXOu1wkWY6SsI8X70oF9aeLFp/Pz
X8f7fp9c7vUsfOCkM+gHY3PadG+QHa7KI7mzTKgUTZImK6BZtfRBATDTthEUbbaTewY4H0MnWiCeeDhcbeQao6cFy1To8pE3RpmxnGnS8BsBn8w=="""
INVALID_API_TOKEN = "invalid_key_123"
INVALID_ID_32 = "0" * 32
DATASET_NAME_LIMIT = 128
DOCUMENT_NAME_LIMIT = 255
CHAT_ASSISTANT_NAME_LIMIT = 255
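`INVALID_ID_32` gives the suite a single ID-shaped value (`"0" * 32`) that is guaranteed not to exist, so the expected error messages in the parametrized cases below can be built with f-strings instead of hard-coded literals such as `"invalid_dataset_id"`. A small illustration of the pattern:

```python
# Mirrors the constant added above.
INVALID_ID_32 = "0" * 32

# Expected messages stay in sync with the ID under test automatically.
expected = f"You don't own the dataset {INVALID_ID_32}."
assert expected == "You don't own the dataset 00000000000000000000000000000000."
```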

View File

@ -17,7 +17,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
import pytest
from common import add_chunk, delete_documents, list_chunks
from configs import INVALID_API_TOKEN
from configs import INVALID_API_TOKEN, INVALID_ID_32
from libs.auth import RAGFlowHttpApiAuth
@ -152,12 +152,7 @@ class TestAddChunk:
@pytest.mark.parametrize(
"dataset_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
(
"invalid_dataset_id",
102,
"You don't own the dataset invalid_dataset_id.",
),
(INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}."),
],
)
def test_invalid_dataset_id(
@ -177,11 +172,10 @@ class TestAddChunk:
@pytest.mark.parametrize(
"document_id, expected_code, expected_message",
[
("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
(
"invalid_document_id",
INVALID_ID_32,
102,
"You don't own the document invalid_document_id.",
f"You don't own the document {INVALID_ID_32}.",
),
],
)

View File

@ -17,7 +17,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
import pytest
from common import batch_add_chunks, delete_chunks, list_chunks
from configs import INVALID_API_TOKEN
from configs import INVALID_API_TOKEN, INVALID_ID_32
from libs.auth import RAGFlowHttpApiAuth
@ -45,12 +45,7 @@ class TestChunksDeletion:
@pytest.mark.parametrize(
"dataset_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
(
"invalid_dataset_id",
102,
"You don't own the dataset invalid_dataset_id.",
),
(INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}."),
],
)
def test_invalid_dataset_id(self, HttpApiAuth, add_chunks_func, dataset_id, expected_code, expected_message):
@ -63,8 +58,7 @@ class TestChunksDeletion:
@pytest.mark.parametrize(
"document_id, expected_code, expected_message",
[
("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
("invalid_document_id", 100, """LookupError("Can't find the document with ID invalid_document_id!")"""),
(INVALID_ID_32, 100, f"""LookupError("Can't find the document with ID {INVALID_ID_32}!")"""),
],
)
def test_invalid_document_id(self, HttpApiAuth, add_chunks_func, document_id, expected_code, expected_message):

View File

@ -18,7 +18,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
import pytest
from common import batch_add_chunks, list_chunks
from configs import INVALID_API_TOKEN
from configs import INVALID_API_TOKEN, INVALID_ID_32
from libs.auth import RAGFlowHttpApiAuth
@ -177,12 +177,7 @@ class TestChunksList:
@pytest.mark.parametrize(
"dataset_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
(
"invalid_dataset_id",
102,
"You don't own the dataset invalid_dataset_id.",
),
(INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}."),
],
)
def test_invalid_dataset_id(self, HttpApiAuth, add_chunks, dataset_id, expected_code, expected_message):
@ -195,11 +190,10 @@ class TestChunksList:
@pytest.mark.parametrize(
"document_id, expected_code, expected_message",
[
("", 102, "The dataset not own the document chunks."),
(
"invalid_document_id",
INVALID_ID_32,
102,
"You don't own the document invalid_document_id.",
f"You don't own the document {INVALID_ID_32}.",
),
],
)

View File

@ -19,7 +19,7 @@ from random import randint
import pytest
from common import delete_documents, update_chunk
from configs import INVALID_API_TOKEN
from configs import INVALID_API_TOKEN, INVALID_ID_32
from libs.auth import RAGFlowHttpApiAuth
@ -145,9 +145,8 @@ class TestUpdatedChunk:
@pytest.mark.parametrize(
"dataset_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
pytest.param("invalid_dataset_id", 102, "You don't own the dataset invalid_dataset_id.", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") == "infinity", reason="infinity")),
pytest.param("invalid_dataset_id", 102, "Can't find this chunk", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") in [None, "opensearch", "elasticsearch"], reason="elasticsearch")),
pytest.param(INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}.", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") == "infinity", reason="infinity")),
pytest.param(INVALID_ID_32, 102, "Can't find this chunk", marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") in [None, "opensearch", "elasticsearch"], reason="elasticsearch")),
],
)
def test_invalid_dataset_id(self, HttpApiAuth, add_chunks, dataset_id, expected_code, expected_message):
@ -160,11 +159,10 @@ class TestUpdatedChunk:
@pytest.mark.parametrize(
"document_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
(
"invalid_document_id",
INVALID_ID_32,
102,
"You don't own the document invalid_document_id.",
f"You don't own the document {INVALID_ID_32}.",
),
],
)
@ -178,11 +176,10 @@ class TestUpdatedChunk:
@pytest.mark.parametrize(
"chunk_id, expected_code, expected_message",
[
("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
(
"invalid_document_id",
INVALID_ID_32,
102,
"Can't find this chunk invalid_document_id",
f"Can't find this chunk {INVALID_ID_32}",
),
],
)
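For the invalid-dataset case the expected error depends on the configured document engine, so each expectation stays wrapped in `pytest.param` with a `skipif` mark keyed on the `DOC_ENGINE` environment variable; only the ID itself changed to `INVALID_ID_32`. The pattern, reduced to a standalone sketch:

```python
import os

import pytest

INVALID_ID_32 = "0" * 32  # same value as the shared configs constant

invalid_dataset_cases = [
    # Expected on elasticsearch/opensearch; skipped when DOC_ENGINE is "infinity".
    pytest.param(
        INVALID_ID_32, 102, f"You don't own the dataset {INVALID_ID_32}.",
        marks=pytest.mark.skipif(os.getenv("DOC_ENGINE") == "infinity", reason="infinity"),
    ),
    # Expected on infinity; skipped on elasticsearch/opensearch (or when unset).
    pytest.param(
        INVALID_ID_32, 102, "Can't find this chunk",
        marks=pytest.mark.skipif(
            os.getenv("DOC_ENGINE") in [None, "opensearch", "elasticsearch"], reason="elasticsearch"
        ),
    ),
]
```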

View File

@ -770,7 +770,12 @@ class TestDatasetUpdate:
res = list_datasets(HttpApiAuth)
assert res["code"] == 0, res
assert res["data"][0]["parser_config"] == {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}, res
assert res["data"][0]["parser_config"] == {
"raptor": {"use_raptor": False},
"graphrag": {"use_graphrag": False},
"image_context_size": 0,
"table_context_size": 0,
}, res
@pytest.mark.p3
def test_parser_config_unset_with_chunk_method_change(self, HttpApiAuth, add_dataset_func):
@ -781,7 +786,12 @@ class TestDatasetUpdate:
res = list_datasets(HttpApiAuth)
assert res["code"] == 0, res
assert res["data"][0]["parser_config"] == {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}, res
assert res["data"][0]["parser_config"] == {
"raptor": {"use_raptor": False},
"graphrag": {"use_graphrag": False},
"image_context_size": 0,
"table_context_size": 0,
}, res
@pytest.mark.p3
def test_parser_config_none_with_chunk_method_change(self, HttpApiAuth, add_dataset_func):
@ -792,7 +802,12 @@ class TestDatasetUpdate:
res = list_datasets(HttpApiAuth, {"id": dataset_id})
assert res["code"] == 0, res
assert res["data"][0]["parser_config"] == {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}, res
assert res["data"][0]["parser_config"] == {
"raptor": {"use_raptor": False},
"graphrag": {"use_graphrag": False},
"image_context_size": 0,
"table_context_size": 0,
}, res
@pytest.mark.p2
@pytest.mark.parametrize(
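All three assertions in this file now expect the same reset `parser_config`: alongside the `raptor` and `graphrag` flags, the dataset schema defaults `image_context_size` and `table_context_size` to `0`. Pulled out for reference (the SDK tests later in this PR assert the same dict):

```python
# Expected parser_config after a chunk-method change resets it, matching the
# updated assertions above.
EXPECTED_RESET_PARSER_CONFIG = {
    "raptor": {"use_raptor": False},
    "graphrag": {"use_graphrag": False},
    "image_context_size": 0,
    "table_context_size": 0,
}
```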

View File

@ -19,7 +19,7 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
import pytest
from common import bulk_upload_documents, download_document, upload_documents
from configs import INVALID_API_TOKEN
from configs import INVALID_API_TOKEN, INVALID_ID_32
from libs.auth import RAGFlowHttpApiAuth
from requests import codes
from utils import compare_by_hash
@ -89,9 +89,9 @@ class TestDocumentDownload:
"document_id, expected_code, expected_message",
[
(
"invalid_document_id",
INVALID_ID_32,
102,
"The dataset not own the document invalid_document_id.",
f"The dataset not own the document {INVALID_ID_32}.",
),
],
)
@ -113,11 +113,10 @@ class TestDocumentDownload:
@pytest.mark.parametrize(
"dataset_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
(
"invalid_dataset_id",
INVALID_ID_32,
102,
"You do not own the dataset invalid_dataset_id.",
f"You do not own the dataset {INVALID_ID_32}.",
),
],
)

View File

@ -17,7 +17,7 @@
import pytest
from common import list_documents, update_document
from configs import DOCUMENT_NAME_LIMIT, INVALID_API_TOKEN
from configs import DOCUMENT_NAME_LIMIT, INVALID_API_TOKEN, INVALID_ID_32
from libs.auth import RAGFlowHttpApiAuth
from configs import DEFAULT_PARSER_CONFIG
@ -97,9 +97,8 @@ class TestDocumentsUpdated:
@pytest.mark.parametrize(
"document_id, expected_code, expected_message",
[
("", 100, "<MethodNotAllowed '405: Method Not Allowed'>"),
(
"invalid_document_id",
INVALID_ID_32,
102,
"The dataset doesn't own the document.",
),
@ -115,9 +114,8 @@ class TestDocumentsUpdated:
@pytest.mark.parametrize(
"dataset_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
(
"invalid_dataset_id",
INVALID_ID_32,
102,
"You don't own the dataset.",
),

View File

@ -115,14 +115,15 @@ class TestDocumentsUpload:
dataset_id = add_dataset_func
fp = create_txt_file(tmp_path / "ragflow_test.txt")
url = f"{HOST_ADDRESS}{FILE_API_URL}".format(dataset_id=dataset_id)
fields = (("file", ("", fp.open("rb"))),)
m = MultipartEncoder(fields=fields)
res = requests.post(
url=url,
headers={"Content-Type": m.content_type},
auth=HttpApiAuth,
data=m,
)
with fp.open("rb") as file_obj:
fields = (("file", ("", file_obj)),)
m = MultipartEncoder(fields=fields)
res = requests.post(
url=url,
headers={"Content-Type": m.content_type},
auth=HttpApiAuth,
data=m,
)
assert res.json()["code"] == 101
assert res.json()["message"] == "No file selected!"

View File

@ -0,0 +1,29 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import requests
from configs import HOST_ADDRESS, VERSION
@pytest.mark.p3
def test_route_not_found_returns_json():
url = f"{HOST_ADDRESS}/api/{VERSION}/__missing_route__"
res = requests.get(url)
assert res.status_code == 404
payload = res.json()
assert payload["error"] == "Not Found"
assert f"/api/{VERSION}/__missing_route__" in payload["message"]

View File

@ -18,7 +18,7 @@ from random import randint
import pytest
from common import delete_chat_assistants, list_session_with_chat_assistants, update_session_with_chat_assistant
from configs import INVALID_API_TOKEN, SESSION_WITH_CHAT_NAME_LIMIT
from configs import INVALID_API_TOKEN, INVALID_ID_32, SESSION_WITH_CHAT_NAME_LIMIT
from libs.auth import RAGFlowHttpApiAuth
@ -72,8 +72,7 @@ class TestSessionWithChatAssistantUpdate:
@pytest.mark.parametrize(
"chat_assistant_id, expected_code, expected_message",
[
("", 100, "<NotFound '404: Not Found'>"),
pytest.param("invalid_chat_assistant_id", 102, "Session does not exist", marks=pytest.mark.skip(reason="issues/")),
(INVALID_ID_32, 102, "Session does not exist"),
],
)
def test_invalid_chat_assistant_id(self, HttpApiAuth, add_sessions_with_chat_assistant_func, chat_assistant_id, expected_code, expected_message):

View File

@ -663,6 +663,8 @@ class TestDatasetUpdate:
{
"raptor": {"use_raptor": False},
"graphrag": {"use_graphrag": False},
"image_context_size": 0,
"table_context_size": 0,
},
)
dataset.update({"chunk_method": "qa", "parser_config": {}})
@ -679,6 +681,8 @@ class TestDatasetUpdate:
{
"raptor": {"use_raptor": False},
"graphrag": {"use_graphrag": False},
"image_context_size": 0,
"table_context_size": 0,
},
)
dataset.update({"chunk_method": "qa"})
@ -695,6 +699,8 @@ class TestDatasetUpdate:
{
"raptor": {"use_raptor": False},
"graphrag": {"use_graphrag": False},
"image_context_size": 0,
"table_context_size": 0,
},
)
dataset.update({"chunk_method": "qa", "parser_config": None})