refactor: replace bare dict with dict[str, Any] in entities, workflow nodes, and tasks (#35109)

This commit is contained in:
wdeveloper16
2026-04-14 05:02:39 +02:00
committed by GitHub
parent 4ee1bd5f32
commit ed83f5369e
12 changed files with 34 additions and 26 deletions

View File

@@ -14,7 +14,7 @@ class DatasourceApiEntity(BaseModel):
description: I18nObject
parameters: list[DatasourceParameter] | None = None
labels: list[str] = Field(default_factory=list)
output_schema: dict | None = None
output_schema: dict[str, Any] | None = None
ToolProviderTypeApiLiteral = Literal["builtin", "api", "workflow"] | None
@@ -30,7 +30,7 @@ class DatasourceProviderApiEntityDict(TypedDict):
icon: str | dict
label: I18nObjectDict
type: str
team_credentials: dict | None
team_credentials: dict[str, Any] | None
is_team_authorization: bool
allow_delete: bool
datasources: list[Any]
@@ -45,8 +45,8 @@ class DatasourceProviderApiEntity(BaseModel):
icon: str | dict
label: I18nObject # label
type: str
masked_credentials: dict | None = None
original_credentials: dict | None = None
masked_credentials: dict[str, Any] | None = None
original_credentials: dict[str, Any] | None = None
is_team_authorization: bool = False
allow_delete: bool = True
plugin_id: str | None = Field(default="", description="The plugin id of the datasource")

View File

@@ -43,15 +43,20 @@ class IndexProcessorProtocol(Protocol):
original_document_id: str,
chunks: Mapping[str, Any],
batch: Any,
summary_index_setting: dict | None = None,
summary_index_setting: dict[str, Any] | None = None,
) -> IndexingResultDict: ...
def get_preview_output(
self, chunks: Any, dataset_id: str, document_id: str, chunk_structure: str, summary_index_setting: dict | None
self,
chunks: Any,
dataset_id: str,
document_id: str,
chunk_structure: str,
summary_index_setting: dict[str, Any] | None,
) -> Preview: ...
class SummaryIndexServiceProtocol(Protocol):
def generate_and_vectorize_summary(
self, dataset_id: str, document_id: str, is_preview: bool, summary_index_setting: dict | None = None
self, dataset_id: str, document_id: str, is_preview: bool, summary_index_setting: dict[str, Any] | None = None
) -> None: ...

View File

@@ -1,4 +1,4 @@
from typing import Literal, Union
from typing import Any, Literal, Union
from graphon.entities.base_node_data import BaseNodeData
from graphon.enums import NodeType
@@ -16,7 +16,7 @@ class TriggerScheduleNodeData(BaseNodeData):
mode: str = Field(default="visual", description="Schedule mode: visual or cron")
frequency: str | None = Field(default=None, description="Frequency for visual mode: hourly, daily, weekly, monthly")
cron_expression: str | None = Field(default=None, description="Cron expression for cron mode")
visual_config: dict | None = Field(default=None, description="Visual configuration details")
visual_config: dict[str, Any] | None = Field(default=None, description="Visual configuration details")
timezone: str = Field(default="UTC", description="Timezone for schedule execution")

View File

@@ -75,7 +75,7 @@ class TriggerWebhookNode(Node[WebhookData]):
outputs=outputs,
)
def generate_file_var(self, param_name: str, file: dict):
def generate_file_var(self, param_name: str, file: dict[str, Any]):
file_id = resolve_file_record_id(file.get("reference") or file.get("related_id"))
transfer_method_value = file.get("transfer_method")
if transfer_method_value:
@@ -147,7 +147,7 @@ class TriggerWebhookNode(Node[WebhookData]):
outputs[param_name] = str(webhook_data.get("body", {}).get("raw", ""))
continue
elif self.node_data.content_type == ContentType.BINARY:
raw_data: dict = webhook_data.get("body", {}).get("raw", {})
raw_data: dict[str, Any] = webhook_data.get("body", {}).get("raw", {})
file_var = self.generate_file_var(param_name, raw_data)
if file_var:
outputs[param_name] = file_var