fix: Fixed the adaptation issue of response formats for different models (#32326)

This commit is contained in:
FFXN
2026-03-01 10:23:17 +08:00
committed by GitHub
parent eb66d36ea8
commit de4dac89ae

View File

@@ -469,7 +469,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor):
if not isinstance(result, LLMResult):
raise ValueError("Expected LLMResult when stream=False")
-        summary_content = getattr(result.message, "content", "")
+        summary_content = result.message.get_text_content()
usage = result.usage
# Deduct quota for summary generation (same as workflow nodes)