chore: skip unnecessary key checks prior to accessing a dictionary (#4497)

This commit is contained in:
Bowen Liang
2024-05-19 18:30:45 +08:00
committed by GitHub
parent aa13d14019
commit 04ad46dd31
30 changed files with 45 additions and 44 deletions

View File

@@ -146,7 +146,7 @@ class AnthropicLargeLanguageModel(LargeLanguageModel):
"""
Code block mode wrapper for invoking large language model
"""
-        if 'response_format' in model_parameters and model_parameters['response_format']:
+        if model_parameters.get('response_format'):
stop = stop or []
# chat model
self._transform_chat_json_prompts(
@@ -408,7 +408,7 @@ class AnthropicLargeLanguageModel(LargeLanguageModel):
"max_retries": 1,
}
-        if 'anthropic_api_url' in credentials and credentials['anthropic_api_url']:
+        if credentials.get('anthropic_api_url'):
credentials['anthropic_api_url'] = credentials['anthropic_api_url'].rstrip('/')
credentials_kwargs['base_url'] = credentials['anthropic_api_url']

View File

@@ -89,7 +89,7 @@ class BaichuanModel:
# save stop reason temporarily
stop_reason = ''
for choice in choices:
-            if 'finish_reason' in choice and choice['finish_reason']:
+            if choice.get('finish_reason'):
stop_reason = choice['finish_reason']
if len(choice['delta']['content']) == 0:

View File

@@ -43,7 +43,7 @@ class MinimaxChatCompletionPro:
if 'top_p' in model_parameters and type(model_parameters['top_p']) == float:
extra_kwargs['top_p'] = model_parameters['top_p']
-        if 'plugin_web_search' in model_parameters and model_parameters['plugin_web_search']:
+        if model_parameters.get('plugin_web_search'):
extra_kwargs['plugins'] = [
'plugin_web_search'
]
@@ -158,7 +158,7 @@ class MinimaxChatCompletionPro:
self._handle_error(code, msg)
# final chunk
-            if data['reply'] or 'usage' in data and data['usage']:
+            if data['reply'] or data.get('usage'):
total_tokens = data['usage']['total_tokens']
minimax_message = MinimaxMessage(
role=MinimaxMessage.Role.ASSISTANT.value,

View File

@@ -25,7 +25,7 @@ class _CommonOpenAI:
"max_retries": 1,
}
-        if 'openai_api_base' in credentials and credentials['openai_api_base']:
+        if credentials.get('openai_api_base'):
credentials['openai_api_base'] = credentials['openai_api_base'].rstrip('/')
credentials_kwargs['base_url'] = credentials['openai_api_base'] + '/v1'

View File

@@ -180,7 +180,7 @@ class OpenLLMGenerate:
completion_usage += len(token_ids)
message = OpenLLMGenerateMessage(content=text, role=OpenLLMGenerateMessage.Role.ASSISTANT.value)
-            if 'finish_reason' in choice and choice['finish_reason']:
+            if choice.get('finish_reason'):
finish_reason = choice['finish_reason']
prompt_token_ids = data['prompt_token_ids']
message.stop_reason = finish_reason