fix: generate not stop when pressing stop link (#1961)

Author: takatost
Date: 2024-01-06 03:03:56 +08:00
Committed by: GitHub
Parent: a8cedea15a
Commit: 0c746f5c5a

8 changed files with 68 additions and 39 deletions
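
All of the hunks below make the same kind of change: every call that publishes an event to the ApplicationQueueManager now states where the event comes from, either PublishFrom.APPLICATION_MANAGER (the producer side that generates tokens) or PublishFrom.TASK_PIPELINE (the consumer side that streams them back). That distinction is what lets the queue manager cut off producer-side publishing once the user presses the stop link while still letting pipeline-side events through. A minimal sketch of the idea follows; the enum values appear in the diff, but the class name QueueManagerSketch, the set_stop_flag/_is_stopped helpers, and ConversationTaskStoppedError are illustrative assumptions, not taken from this commit.

from enum import Enum
from queue import Queue
from typing import Any


class PublishFrom(Enum):
    APPLICATION_MANAGER = 1  # producer side: app runner / application manager
    TASK_PIPELINE = 2        # consumer side: generate task pipeline


class ConversationTaskStoppedError(Exception):
    """Illustrative: raised to break out of generation after the user presses stop."""


class QueueManagerSketch:
    """Minimal stand-in for the publish() behaviour implied by this diff."""

    def __init__(self) -> None:
        self._queue: Queue = Queue()
        self._stopped = False  # assumption: the real manager reads an external stop flag

    def set_stop_flag(self) -> None:
        self._stopped = True

    def _is_stopped(self) -> bool:
        return self._stopped

    def publish(self, event: Any, pub_from: PublishFrom) -> None:
        self._queue.put(event)

        # Only producer-side publishes should abort generation; the task pipeline
        # must still be able to publish its own stop/replace events afterwards.
        if pub_from == PublishFrom.APPLICATION_MANAGER and self._is_stopped():
            raise ConversationTaskStoppedError()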

View File

@@ -1,7 +1,7 @@
 import time
 from typing import cast, Optional, List, Tuple, Generator, Union
 
-from core.application_queue_manager import ApplicationQueueManager
+from core.application_queue_manager import ApplicationQueueManager, PublishFrom
 from core.entities.application_entities import ModelConfigEntity, PromptTemplateEntity, AppOrchestrationConfigEntity
 from core.file.file_obj import FileObj
 from core.memory.token_buffer_memory import TokenBufferMemory
@@ -183,7 +183,7 @@ class AppRunner:
                         index=index,
                         message=AssistantPromptMessage(content=token)
                     )
-                ))
+                ), PublishFrom.APPLICATION_MANAGER)
 
                 index += 1
                 time.sleep(0.01)
@@ -193,7 +193,8 @@ class AppRunner:
                 prompt_messages=prompt_messages,
                 message=AssistantPromptMessage(content=text),
                 usage=usage if usage else LLMUsage.empty_usage()
-            )
+            ),
+            pub_from=PublishFrom.APPLICATION_MANAGER
         )
 
     def _handle_invoke_result(self, invoke_result: Union[LLMResult, Generator],
@@ -226,7 +227,8 @@ class AppRunner:
         :return:
        """
         queue_manager.publish_message_end(
-            llm_result=invoke_result
+            llm_result=invoke_result,
+            pub_from=PublishFrom.APPLICATION_MANAGER
         )
 
     def _handle_invoke_result_stream(self, invoke_result: Generator,
@@ -242,7 +244,7 @@ class AppRunner:
         text = ''
         usage = None
 
         for result in invoke_result:
-            queue_manager.publish_chunk_message(result)
+            queue_manager.publish_chunk_message(result, PublishFrom.APPLICATION_MANAGER)
 
             text += result.delta.message.content
@@ -263,5 +265,6 @@ class AppRunner:
             )
 
         queue_manager.publish_message_end(
-            llm_result=llm_result
+            llm_result=llm_result,
+            pub_from=PublishFrom.APPLICATION_MANAGER
         )
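
The hunks above touch every call site in AppRunner; the convenience wrappers on the queue manager (publish_chunk_message, publish_message_end, publish_annotation_reply) presumably just forward the new pub_from argument to publish(). A hedged sketch of that shape, reusing QueueManagerSketch and PublishFrom from the sketch above; the event payloads here are simplified placeholders, not the real queue entities:

class StreamingQueueManagerSketch(QueueManagerSketch):
    """Illustrative wrappers mirroring the call sites changed in this file."""

    def publish_chunk_message(self, chunk: object, pub_from: PublishFrom) -> None:
        # wrap the chunk in a queue event and forward the origin marker
        self.publish({"event": "message", "chunk": chunk}, pub_from)

    def publish_message_end(self, llm_result: object, pub_from: PublishFrom) -> None:
        self.publish({"event": "message_end", "llm_result": llm_result}, pub_from)

    def publish_annotation_reply(self, message_annotation_id: str, pub_from: PublishFrom) -> None:
        self.publish({"event": "annotation_reply", "id": message_annotation_id}, pub_from)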

View File

@@ -5,7 +5,7 @@ from core.app_runner.app_runner import AppRunner
 from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler
 from core.entities.application_entities import ApplicationGenerateEntity, ModelConfigEntity, \
     AppOrchestrationConfigEntity, InvokeFrom, ExternalDataVariableEntity, DatasetEntity
-from core.application_queue_manager import ApplicationQueueManager
+from core.application_queue_manager import ApplicationQueueManager, PublishFrom
 from core.features.annotation_reply import AnnotationReplyFeature
 from core.features.dataset_retrieval import DatasetRetrievalFeature
 from core.features.external_data_fetch import ExternalDataFetchFeature
@@ -121,7 +121,8 @@ class BasicApplicationRunner(AppRunner):
             if annotation_reply:
                 queue_manager.publish_annotation_reply(
-                    message_annotation_id=annotation_reply.id
+                    message_annotation_id=annotation_reply.id,
+                    pub_from=PublishFrom.APPLICATION_MANAGER
                 )
 
                 self.direct_output(
                     queue_manager=queue_manager,

View File

@@ -7,7 +7,7 @@ from pydantic import BaseModel
 
 from core.app_runner.moderation_handler import OutputModerationHandler, ModerationRule
 from core.entities.application_entities import ApplicationGenerateEntity
-from core.application_queue_manager import ApplicationQueueManager
+from core.application_queue_manager import ApplicationQueueManager, PublishFrom
 from core.entities.queue_entities import QueueErrorEvent, QueueStopEvent, QueueMessageEndEvent, \
     QueueRetrieverResourcesEvent, QueueAgentThoughtEvent, QueuePingEvent, QueueMessageEvent, QueueMessageReplaceEvent, \
     AnnotationReplyEvent
@@ -312,8 +312,11 @@ class GenerateTaskPipeline:
                         index=0,
                         message=AssistantPromptMessage(content=self._task_state.llm_result.message.content)
                     )
-                ))
-                self._queue_manager.publish(QueueStopEvent(stopped_by=QueueStopEvent.StopBy.OUTPUT_MODERATION))
+                ), PublishFrom.TASK_PIPELINE)
+                self._queue_manager.publish(
+                    QueueStopEvent(stopped_by=QueueStopEvent.StopBy.OUTPUT_MODERATION),
+                    PublishFrom.TASK_PIPELINE
+                )
                 continue
             else:
                 self._output_moderation_handler.append_new_token(delta_text)
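
The output-moderation branch above publishes from inside the pipeline itself, so it tags its events with PublishFrom.TASK_PIPELINE; after the stop flag is set, those events must still be delivered while producer-side publishing is cut off. Using the sketch classes from above (names assumed, not from this commit), the difference in behaviour looks like this:

manager = StreamingQueueManagerSketch()
manager.set_stop_flag()  # roughly what pressing the stop link triggers

# Producer side: the next publish raises, which is what halts generation.
try:
    manager.publish_chunk_message("token", PublishFrom.APPLICATION_MANAGER)
except ConversationTaskStoppedError:
    print("generation loop stopped")

# Pipeline side: moderation replace/stop events are still delivered.
manager.publish({"event": "stop", "stopped_by": "output_moderation"}, PublishFrom.TASK_PIPELINE)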

View File

@@ -6,6 +6,7 @@ from typing import Any, Optional, Dict
 
 from flask import current_app, Flask
 from pydantic import BaseModel
 
+from core.application_queue_manager import PublishFrom
 from core.moderation.base import ModerationAction, ModerationOutputsResult
 from core.moderation.factory import ModerationFactory
@@ -66,7 +67,8 @@ class OutputModerationHandler(BaseModel):
             final_output = result.text
 
         if public_event:
-            self.on_message_replace_func(final_output)
+            self.on_message_replace_func(final_output, PublishFrom.TASK_PIPELINE)
 
         return final_output
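
OutputModerationHandler runs on the pipeline side, so its replace callback is now invoked with an explicit PublishFrom.TASK_PIPELINE. A sketch of how such a callback could be wired to the queue manager, again using the illustrative names from the earlier sketches; the real wiring in the task pipeline is not shown in this diff:

from typing import Callable

# The handler only needs a callable of this shape.
OnMessageReplace = Callable[[str, PublishFrom], None]


def make_replace_callback(queue_manager: StreamingQueueManagerSketch) -> OnMessageReplace:
    def on_message_replace(text: str, pub_from: PublishFrom) -> None:
        queue_manager.publish({"event": "message_replace", "text": text}, pub_from)

    return on_message_replace


# Inside the handler, the changed line then behaves like:
#     self.on_message_replace_func(final_output, PublishFrom.TASK_PIPELINE)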