fix: several extra model exception handlers do not return the error (#916)
@@ -71,7 +71,7 @@ class AzureOpenAIEmbedding(BaseEmbedding):
         elif isinstance(ex, openai.error.RateLimitError):
             return LLMRateLimitError('Azure ' + str(ex))
         elif isinstance(ex, openai.error.AuthenticationError):
-            raise LLMAuthorizationError('Azure ' + str(ex))
+            return LLMAuthorizationError('Azure ' + str(ex))
         elif isinstance(ex, openai.error.OpenAIError):
             return LLMBadRequestError('Azure ' + ex.__class__.__name__ + ":" + str(ex))
         else:
@@ -65,7 +65,7 @@ class OpenAIEmbedding(BaseEmbedding):
         elif isinstance(ex, openai.error.RateLimitError):
             return LLMRateLimitError(str(ex))
         elif isinstance(ex, openai.error.AuthenticationError):
-            raise LLMAuthorizationError(str(ex))
+            return LLMAuthorizationError(str(ex))
         elif isinstance(ex, openai.error.OpenAIError):
             return LLMBadRequestError(ex.__class__.__name__ + ":" + str(ex))
         else:
@@ -41,7 +41,7 @@ class OpenAIModeration(BaseProviderModel):
         elif isinstance(ex, openai.error.RateLimitError):
             return LLMRateLimitError(str(ex))
         elif isinstance(ex, openai.error.AuthenticationError):
-            raise LLMAuthorizationError(str(ex))
+            return LLMAuthorizationError(str(ex))
         elif isinstance(ex, openai.error.OpenAIError):
             return LLMBadRequestError(ex.__class__.__name__ + ":" + str(ex))
         else:
@@ -40,7 +40,7 @@ class OpenAIWhisper(BaseSpeech2Text):
         elif isinstance(ex, openai.error.RateLimitError):
             return LLMRateLimitError(str(ex))
         elif isinstance(ex, openai.error.AuthenticationError):
-            raise LLMAuthorizationError(str(ex))
+            return LLMAuthorizationError(str(ex))
         elif isinstance(ex, openai.error.OpenAIError):
             return LLMBadRequestError(ex.__class__.__name__ + ":" + str(ex))
         else:
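All four hunks make the same one-line change: the `openai.error.AuthenticationError` branch used `raise` while every other branch in these handlers returns the mapped error, so the caller responsible for raising never received it. The sketch below illustrates the pattern; it is a minimal, self-contained approximation, not the repository's code. The wrapper name `handle_exceptions`, the stubbed `LLM*` error classes, and the caller at the bottom are assumptions for illustration; only the isinstance branches are taken from the diff above (pre-1.0 `openai` SDK, which exposes `openai.error.*`).

# A minimal sketch of the return-the-mapped-error pattern, under the
# assumptions stated above; not the exact code from this repository.
import openai  # pre-1.0 SDK, which still exposes openai.error.*


class LLMRateLimitError(Exception):
    pass


class LLMAuthorizationError(Exception):
    pass


class LLMBadRequestError(Exception):
    pass


def handle_exceptions(ex: Exception) -> Exception:
    """Translate an OpenAI SDK error into an internal error and return it.

    Every branch must return the mapped error; the caller decides to raise.
    A stray `raise` in one branch (the bug fixed here) breaks that contract.
    """
    if isinstance(ex, openai.error.RateLimitError):
        return LLMRateLimitError(str(ex))
    elif isinstance(ex, openai.error.AuthenticationError):
        # Before this commit the real handlers raised here instead of returning.
        return LLMAuthorizationError(str(ex))
    elif isinstance(ex, openai.error.OpenAIError):
        return LLMBadRequestError(ex.__class__.__name__ + ":" + str(ex))
    else:
        return ex


# Hypothetical caller: the translated error is returned, then raised at the boundary.
mapped = handle_exceptions(openai.error.RateLimitError("quota exceeded"))
print(type(mapped).__name__)  # LLMRateLimitError -- returned, not raised

The other branches already followed this return-then-raise convention, which is presumably why the fix converts the stray `raise` to a `return` rather than changing the callers.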