add finish_reason to the LLM node output (#7498)

orangeclk
2024-08-21 17:29:30 +08:00
committed by GitHub
parent 784b11ce19
commit f53454f81d
3 changed files with 15 additions and 7 deletions

@@ -428,7 +428,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOAI_API_Compat, LargeLanguageModel):
                 if new_tool_call.function.arguments:
                     tool_call.function.arguments += new_tool_call.function.arguments
 
-        finish_reason = 'Unknown'
+        finish_reason = None  # The default value of finish_reason is None
 
         for chunk in response.iter_lines(decode_unicode=True, delimiter=delimiter):
             chunk = chunk.strip()
@@ -437,6 +437,8 @@ class OAIAPICompatLargeLanguageModel(_CommonOAI_API_Compat, LargeLanguageModel):
                 if chunk.startswith(':'):
                     continue
                 decoded_chunk = chunk.strip().lstrip('data: ').lstrip()
+                if decoded_chunk == '[DONE]':  # Some provider returns "data: [DONE]"
+                    continue
 
                 try:
                     chunk_json = json.loads(decoded_chunk)
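
For context, a minimal sketch of the streaming pattern this diff touches: finish_reason starts as None and is only updated when the provider actually reports one, and a bare "[DONE]" sentinel is skipped instead of being passed to json.loads. The function name parse_stream and the sample payloads below are illustrative assumptions, not part of the committed code.

import json

def parse_stream(lines):
    """Illustrative parser for 'data: ...' chunks from an OpenAI-compatible stream."""
    finish_reason = None  # default stays None until the provider sends one
    for chunk in lines:
        chunk = chunk.strip()
        if not chunk or chunk.startswith(':'):  # skip blanks and SSE comments
            continue
        decoded_chunk = chunk.strip().lstrip('data: ').lstrip()
        if decoded_chunk == '[DONE]':  # some providers send a literal [DONE] sentinel
            continue
        chunk_json = json.loads(decoded_chunk)
        choice = chunk_json.get('choices', [{}])[0]
        finish_reason = choice.get('finish_reason') or finish_reason
        delta = choice.get('delta', {}).get('content') or ''
        if delta:
            yield delta, finish_reason
    yield '', finish_reason  # final chunk carries the last reported finish_reason (may be None)

# Example: two content chunks, then the provider's terminator.
sample = [
    'data: {"choices": [{"delta": {"content": "Hel"}, "finish_reason": null}]}',
    'data: {"choices": [{"delta": {"content": "lo"}, "finish_reason": "stop"}]}',
    'data: [DONE]',
]
for text, reason in parse_stream(sample):
    print(repr(text), reason)

Keeping the default as None (rather than the old 'Unknown' string) lets downstream consumers of the LLM node output distinguish "no finish_reason reported yet" from a real value sent by the provider.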