feat: re-add prompt messages to result and chunks in llm (#17883)

Signed-off-by: -LAN- <laipz8200@outlook.com>
Author: -LAN-
Date: 2025-04-11 18:04:49 +09:00
Committed by: GitHub
Parent: 5f8d20b5b2
Commit: 8e6f6d64a4
5 changed files with 24 additions and 15 deletions


@@ -1,8 +1,9 @@
+from collections.abc import Sequence
 from decimal import Decimal
 from enum import StrEnum
 from typing import Optional
 
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 
 from core.model_runtime.entities.message_entities import AssistantPromptMessage, PromptMessage
 from core.model_runtime.entities.model_entities import ModelUsage, PriceInfo
@@ -107,7 +108,7 @@ class LLMResult(BaseModel):
 
     id: Optional[str] = None
     model: str
-    prompt_messages: list[PromptMessage]
+    prompt_messages: Sequence[PromptMessage] = Field(default_factory=list)
     message: AssistantPromptMessage
     usage: LLMUsage
     system_fingerprint: Optional[str] = None
@@ -130,7 +131,7 @@ class LLMResultChunk(BaseModel):
     """
 
     model: str
-    prompt_messages: list[PromptMessage]
+    prompt_messages: Sequence[PromptMessage] = Field(default_factory=list)
     system_fingerprint: Optional[str] = None
     delta: LLMResultChunkDelta
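
For context, a minimal sketch of what the restored field signature does for callers, using simplified stand-in models rather than the real Dify entities (which carry more fields such as message, usage, and delta): with Sequence[PromptMessage] = Field(default_factory=list), results and chunks can be constructed without prompt_messages and still expose an empty sequence, while callers that do pass messages keep working.

from collections.abc import Sequence

from pydantic import BaseModel, Field


class PromptMessage(BaseModel):
    # Simplified placeholder for the real PromptMessage entity.
    content: str


class LLMResult(BaseModel):
    # Simplified placeholder; the real LLMResult also has message, usage, etc.
    model: str
    prompt_messages: Sequence[PromptMessage] = Field(default_factory=list)


# Omitting prompt_messages no longer fails validation; the field defaults to an empty list.
result = LLMResult(model="example-model")
assert list(result.prompt_messages) == []

# Passing prompt messages explicitly still works as before.
result = LLMResult(model="example-model", prompt_messages=[PromptMessage(content="Hello")])
assert len(result.prompt_messages) == 1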