merge main

This commit is contained in:
zxhlyh
2025-07-28 16:00:38 +08:00
344 changed files with 2476 additions and 1275 deletions

View File

@@ -206,7 +206,7 @@ class ToolManager:
)
except Exception as e:
builtin_provider = None
logger.info(f"Error getting builtin provider {credential_id}:{e}", exc_info=True)
logger.info("Error getting builtin provider %s:%s", credential_id, e, exc_info=True)
# if the provider has been deleted, raise an error
if builtin_provider is None:
raise ToolProviderNotFoundError(f"provider has been deleted: {credential_id}")
@@ -237,7 +237,7 @@ class ToolManager:
if builtin_provider is None:
raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")
encrypter, _ = create_provider_encrypter(
encrypter, cache = create_provider_encrypter(
tenant_id=tenant_id,
config=[
x.to_basic_provider_config()
@@ -281,6 +281,7 @@ class ToolManager:
builtin_provider.expires_at = refreshed_credentials.expires_at
db.session.commit()
decrypted_credentials = refreshed_credentials.credentials
cache.delete()
return cast(
BuiltinTool,
@@ -569,7 +570,7 @@ class ToolManager:
yield provider
except Exception:
logger.exception(f"load builtin provider {provider_path}")
logger.exception("load builtin provider %s", provider_path)
continue
# set builtin providers loaded
cls._builtin_providers_loaded = True

View File

@@ -105,6 +105,29 @@ class ApiBasedToolSchemaParser:
# overwrite the content
interface["operation"]["requestBody"]["content"][content_type]["schema"] = root
# handle allOf reference in schema properties
for prop_dict in root.get("properties", {}).values():
for item in prop_dict.get("allOf", []):
if "$ref" in item:
ref_schema = openapi
reference = item["$ref"].split("/")[1:]
for ref in reference:
ref_schema = ref_schema[ref]
else:
ref_schema = item
for key, value in ref_schema.items():
if isinstance(value, list):
if key not in prop_dict:
prop_dict[key] = []
# extends list field
if isinstance(prop_dict[key], list):
prop_dict[key].extend(value)
elif key not in prop_dict:
# add new field
prop_dict[key] = value
if "allOf" in prop_dict:
del prop_dict["allOf"]
# parse body parameters
if "schema" in interface["operation"]["requestBody"]["content"][content_type]:
body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"]

View File

@@ -55,7 +55,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
main_content_type = mimetypes.guess_type(filename)[0]
if main_content_type not in supported_content_types:
return "Unsupported content-type [{}] of URL.".format(main_content_type)
return f"Unsupported content-type [{main_content_type}] of URL."
if main_content_type in extract_processor.SUPPORT_URL_CONTENT_TYPES:
return cast(str, ExtractProcessor.load_from_url(url, return_text=True))
@@ -67,7 +67,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) # type: ignore
if response.status_code != 200:
return "URL returned status code {}.".format(response.status_code)
return f"URL returned status code {response.status_code}."
# Detect encoding using chardet
detected_encoding = chardet.detect(response.content)

View File

@@ -194,7 +194,7 @@ class WorkflowTool(Tool):
files.append(file_dict)
except Exception:
logger.exception(f"Failed to transform file {file}")
logger.exception("Failed to transform file %s", file)
else:
parameters_result[parameter.name] = tool_parameters.get(parameter.name)