improve: introduce isort for linting Python imports (#1983)

Author: Bowen Liang
Date: 2024-01-12 12:34:01 +08:00
Committed by: GitHub
Parent: cca9edc97a
Commit: cc9e74123c
413 changed files with 1635 additions and 1906 deletions
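
The change itself is mechanical: isort re-groups each file's imports into sections and alphabetizes them, which is what every hunk below shows. A minimal sketch of the same operation through isort's Python API follows; the line length and other settings here are illustrative assumptions, since the configuration this commit adds is not part of this excerpt.

# Minimal sketch -- assumed settings, not the project's actual isort configuration.
import isort

messy = (
    "from typing import Optional, List\n"
    "from core.model_runtime.entities.message_entities import PromptMessageTool, PromptMessage\n"
)

# isort.code() returns the source with imports sorted and grouped by section.
print(isort.code(messy, line_length=120))

# isort.check_code() is the lint-style call: it returns True only when imports are
# already sorted, the kind of check a CI step or pre-commit hook can run.
print(isort.check_code(messy, show_diff=True, line_length=120))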


@@ -1,8 +1,8 @@
from abc import ABC
-from typing import Optional, List
+from typing import List, Optional
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
-from core.model_runtime.entities.message_entities import PromptMessageTool, PromptMessage
+from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
from core.model_runtime.model_providers.__base.ai_model import AIModel
_TEXT_COLOR_MAPPING = {


@@ -1,11 +1,11 @@
import json
import logging
import sys
-from typing import Optional, List
+from typing import List, Optional
from core.model_runtime.callbacks.base_callback import Callback
-from core.model_runtime.entities.llm_entities import LLMResultChunk, LLMResult
-from core.model_runtime.entities.message_entities import PromptMessageTool, PromptMessage
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
+from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
from core.model_runtime.model_providers.__base.ai_model import AIModel
logger = logging.getLogger(__name__)


@@ -2,7 +2,6 @@ from typing import Dict
from core.model_runtime.entities.model_entities import DefaultParameterName
PARAMETER_RULE_TEMPLATE: Dict[DefaultParameterName, dict] = {
DefaultParameterName.TEMPERATURE: {
'label': {


@@ -2,10 +2,9 @@ from decimal import Decimal
from enum import Enum
from typing import Optional
-from pydantic import BaseModel
from core.model_runtime.entities.message_entities import AssistantPromptMessage, PromptMessage
from core.model_runtime.entities.model_entities import ModelUsage, PriceInfo
+from pydantic import BaseModel
class LLMMode(Enum):


@@ -2,9 +2,8 @@ from decimal import Decimal
from enum import Enum
from typing import Any, Optional
-from pydantic import BaseModel
from core.model_runtime.entities.common_entities import I18nObject
+from pydantic import BaseModel
class ModelType(Enum):


@@ -1,10 +1,9 @@
from enum import Enum
from typing import Optional
-from pydantic import BaseModel
from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.entities.model_entities import ModelType, ProviderModel, AIModelEntity
+from core.model_runtime.entities.model_entities import AIModelEntity, ModelType, ProviderModel
+from pydantic import BaseModel
class ConfigurateMethod(Enum):


@@ -1,8 +1,7 @@
from decimal import Decimal
-from pydantic import BaseModel
from core.model_runtime.entities.model_entities import ModelUsage
+from pydantic import BaseModel
class EmbeddingUsage(ModelUsage):


@@ -6,14 +6,13 @@ from abc import ABC, abstractmethod
from typing import Optional
import yaml
-from pydantic import ValidationError
-from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE
-from core.model_runtime.entities.model_entities import PriceInfo, AIModelEntity, PriceType, PriceConfig, \
-    DefaultParameterName, FetchFrom, ModelType
from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.errors.invoke import InvokeError, InvokeAuthorizationError
+from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE
+from core.model_runtime.entities.model_entities import (AIModelEntity, DefaultParameterName, FetchFrom, ModelType,
+    PriceConfig, PriceInfo, PriceType)
+from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError
from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenzier import GPT2Tokenizer
+from pydantic import ValidationError
class AIModel(ABC):


@@ -2,15 +2,14 @@ import logging
import os
import time
from abc import abstractmethod
-from typing import Optional, Generator, Union, List
+from typing import Generator, List, Optional, Union
from core.model_runtime.callbacks.base_callback import Callback
from core.model_runtime.callbacks.logging_callback import LoggingCallback
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage
-from core.model_runtime.entities.model_entities import ModelPropertyKey, PriceType, ParameterType, ParameterRule, \
-    ModelType
-from core.model_runtime.entities.llm_entities import LLMResult, LLMMode, LLMUsage, \
-    LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.message_entities import AssistantPromptMessage, PromptMessage, PromptMessageTool
+from core.model_runtime.entities.model_entities import (ModelPropertyKey, ModelType, ParameterRule, ParameterType,
+    PriceType)
from core.model_runtime.model_providers.__base.ai_model import AIModel
logger = logging.getLogger(__name__)


@@ -1,11 +1,10 @@
import importlib
import os
from abc import ABC, abstractmethod
-from typing import Optional, Dict
+from typing import Dict, Optional
import yaml
-from core.model_runtime.entities.model_entities import ModelType, AIModelEntity
+from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.model_runtime.model_providers.__base.ai_model import AIModel


@@ -1,6 +1,6 @@
import os
from abc import abstractmethod
-from typing import Optional, IO
+from typing import IO, Optional
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.model_providers.__base.ai_model import AIModel


@@ -1,7 +1,8 @@
-from transformers import GPT2Tokenizer as TransformerGPT2Tokenizer
-from os.path import join, abspath, dirname
-from typing import Any
+from os.path import abspath, dirname, join
from threading import Lock
+from typing import Any
+from transformers import GPT2Tokenizer as TransformerGPT2Tokenizer
_tokenizer = None
_lock = Lock()


@@ -1,18 +1,16 @@
-from typing import Optional, Generator, Union, List
+from typing import Generator, List, Optional, Union
import anthropic
from anthropic import Anthropic, Stream
-from anthropic.types import completion_create_params, Completion
-from httpx import Timeout
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, AssistantPromptMessage, \
-    SystemPromptMessage
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
-    LLMResultChunkDelta
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from anthropic.types import Completion, completion_create_params
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+from httpx import Timeout
class AnthropicLargeLanguageModel(LargeLanguageModel):


@@ -1,10 +1,8 @@
import openai
-from httpx import Timeout
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.model_providers.azure_openai._constant import AZURE_OPENAI_API_VERSION
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from httpx import Timeout
class _CommonAzureOpenAI:


@@ -1,10 +1,9 @@
-from pydantic import BaseModel
-from core.model_runtime.entities.llm_entities import LLMMode
-from core.model_runtime.entities.model_entities import ModelFeature, ModelType, FetchFrom, ParameterRule, \
-    DefaultParameterName, PriceConfig, ModelPropertyKey
-from core.model_runtime.entities.model_entities import AIModelEntity, I18nObject
from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE
+from core.model_runtime.entities.llm_entities import LLMMode
+from core.model_runtime.entities.model_entities import (AIModelEntity, DefaultParameterName, FetchFrom, I18nObject,
+    ModelFeature, ModelPropertyKey, ModelType, ParameterRule,
+    PriceConfig)
+from pydantic import BaseModel
AZURE_OPENAI_API_VERSION = '2023-12-01-preview'


@@ -1,23 +1,22 @@
import logging
-from typing import Optional, Generator, Union, List, cast
+from typing import Generator, List, Optional, Union, cast
import tiktoken
-from openai import AzureOpenAI, Stream
-from openai.types import Completion
-from openai.types.chat import ChatCompletionChunk, ChatCompletion, ChatCompletionMessageToolCall
-from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall, ChoiceDeltaFunctionCall
-from openai.types.chat.chat_completion_message import FunctionCall
-from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, \
-    LLMResultChunk, LLMResultChunkDelta
-from core.model_runtime.entities.message_entities import PromptMessageTool, PromptMessage, AssistantPromptMessage, \
-    UserPromptMessage, PromptMessageContentType, ImagePromptMessageContent, \
-    TextPromptMessageContent, SystemPromptMessage, ToolPromptMessage
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, ImagePromptMessageContent,
+    PromptMessage, PromptMessageContentType, PromptMessageTool,
+    SystemPromptMessage, TextPromptMessageContent,
+    ToolPromptMessage, UserPromptMessage)
from core.model_runtime.entities.model_entities import AIModelEntity, ModelPropertyKey
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.azure_openai._common import _CommonAzureOpenAI
from core.model_runtime.model_providers.azure_openai._constant import LLM_BASE_MODELS, AzureBaseModel
+from openai import AzureOpenAI, Stream
+from openai.types import Completion
+from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall
+from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall
+from openai.types.chat.chat_completion_message import FunctionCall
logger = logging.getLogger(__name__)


@@ -4,14 +4,13 @@ from typing import Optional, Tuple
import numpy as np
import tiktoken
-from openai import AzureOpenAI
-from core.model_runtime.entities.model_entities import PriceType, AIModelEntity
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.model_entities import AIModelEntity, PriceType
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.model_providers.azure_openai._common import _CommonAzureOpenAI
from core.model_runtime.model_providers.azure_openai._constant import EMBEDDING_BASE_MODELS, AzureBaseModel
+from openai import AzureOpenAI
class AzureOpenAITextEmbeddingModel(_CommonAzureOpenAI, TextEmbeddingModel):


@@ -1,7 +1,8 @@
-from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+import logging
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
-import logging
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
logger = logging.getLogger(__name__)


@@ -1,5 +1,6 @@
import re
class BaichuanTokenizer(object):
@classmethod
def count_chinese_characters(cls, text: str) -> int:


@@ -1,12 +1,18 @@
-from os.path import join
-from typing import List, Optional, Generator, Union, Dict, Any
-from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import \
-    InsufficientAccountBalance, InvalidAPIKeyError, InternalServerError, RateLimitReachedError, InvalidAuthenticationError, BadRequestError
from enum import Enum
-from json import dumps, loads
-from requests import post
-from time import time
from hashlib import md5
+from json import dumps, loads
+from os.path import join
+from time import time
+from typing import Any, Dict, Generator, List, Optional, Union
+from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import (BadRequestError,
+    InsufficientAccountBalance,
+    InternalServerError,
+    InvalidAPIKeyError,
+    InvalidAuthenticationError,
+    RateLimitReachedError)
+from requests import post
class BaichuanMessage:
class Role(Enum):


@@ -1,14 +1,21 @@
from typing import Generator, List, Optional, Union, cast
-from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage, LLMResultChunk, LLMResultChunkDelta
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, UserPromptMessage, SystemPromptMessage
-from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
-from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import \
-    InsufficientAccountBalance, InvalidAPIKeyError, InternalServerError, RateLimitReachedError, InvalidAuthenticationError, BadRequestError
-from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo import BaichuanModel, BaichuanMessage
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.baichuan.llm.baichuan_tokenizer import BaichuanTokenizer
+from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo import BaichuanMessage, BaichuanModel
+from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import (BadRequestError,
+    InsufficientAccountBalance,
+    InternalServerError,
+    InvalidAPIKeyError,
+    InvalidAuthenticationError,
+    RateLimitReachedError)
class BaichuanLarguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict,


@@ -1,19 +1,22 @@
+import time
+from json import dumps, loads
from typing import Optional, Tuple
from core.model_runtime.entities.model_entities import PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
-from core.model_runtime.errors.invoke import InvokeError, InvokeConnectionError, InvokeServerUnavailableError, \
-    InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError
from core.model_runtime.model_providers.baichuan.llm.baichuan_tokenizer import BaichuanTokenizer
-from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import InvalidAPIKeyError, InsufficientAccountBalance, \
-    InvalidAuthenticationError, RateLimitReachedError, InternalServerError, BadRequestError
+from core.model_runtime.model_providers.baichuan.llm.baichuan_turbo_errors import (BadRequestError,
+    InsufficientAccountBalance,
+    InternalServerError,
+    InvalidAPIKeyError,
+    InvalidAuthenticationError,
+    RateLimitReachedError)
from requests import post
-from json import dumps, loads
-import time
class BaichuanTextEmbeddingModel(TextEmbeddingModel):
"""


@@ -1,30 +1,22 @@
-from typing import Generator, List, Optional
-from requests import post
-from os.path import join
-from typing import cast
+import logging
from json import dumps
+from os.path import join
+from typing import Generator, List, Optional, cast
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, AssistantPromptMessage, \
-    SystemPromptMessage
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
-    LLMResultChunkDelta
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageFunction,
+    PromptMessageTool, SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
-from core.model_runtime.entities.message_entities import PromptMessageTool, PromptMessage, AssistantPromptMessage, \
-    PromptMessageFunction, UserPromptMessage, SystemPromptMessage
from core.model_runtime.utils import helper
-from openai import OpenAI, Stream, \
-    APIConnectionError, APITimeoutError, AuthenticationError, InternalServerError, \
-    RateLimitError, ConflictError, NotFoundError, UnprocessableEntityError, PermissionDeniedError
-from openai.types.chat import ChatCompletionChunk, ChatCompletion
-from openai.types.chat.chat_completion_message import FunctionCall
from httpx import Timeout
-import logging
+from openai import (APIConnectionError, APITimeoutError, AuthenticationError, ConflictError, InternalServerError,
+    NotFoundError, OpenAI, PermissionDeniedError, RateLimitError, Stream, UnprocessableEntityError)
+from openai.types.chat import ChatCompletion, ChatCompletionChunk
+from openai.types.chat.chat_completion_message import FunctionCall
+from requests import post
logger = logging.getLogger(__name__)


@@ -1,10 +1,9 @@
from typing import Optional
import cohere
-from core.model_runtime.entities.rerank_entities import RerankResult, RerankDocument
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError
+from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.rerank_model import RerankModel


@@ -1,24 +1,21 @@
-from typing import Optional, Generator, Union, List
+import logging
+from typing import Generator, List, Optional, Union
-import google.generativeai as genai
import google.api_core.exceptions as exceptions
+import google.generativeai as genai
import google.generativeai.client as client
-from google.generativeai.types import HarmCategory, HarmBlockThreshold
-from google.generativeai.types import GenerateContentResponse, ContentType
-from google.generativeai.types.content_types import to_part
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, AssistantPromptMessage, \
-    SystemPromptMessage, PromptMessageRole, PromptMessageContentType
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
-    LLMResultChunkDelta
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage,
+    PromptMessageContentType, PromptMessageRole,
+    PromptMessageTool, SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers import google
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+from google.generativeai.types import ContentType, GenerateContentResponse, HarmBlockThreshold, HarmCategory
+from google.generativeai.types.content_types import to_part
-import logging
logger = logging.getLogger(__name__)
class GoogleLargeLanguageModel(LargeLanguageModel):


@@ -1,6 +1,5 @@
-from huggingface_hub.utils import HfHubHTTPError, BadRequestError
from core.model_runtime.errors.invoke import InvokeBadRequestError, InvokeError
+from huggingface_hub.utils import BadRequestError, HfHubHTTPError
class _CommonHuggingfaceHub:


@@ -1,19 +1,18 @@
-from typing import Optional, List, Union, Generator
-from huggingface_hub import InferenceClient
-from huggingface_hub.hf_api import HfApi
-from huggingface_hub.utils import BadRequestError
+from typing import Generator, List, Optional, Union
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMMode
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, \
-    UserPromptMessage, SystemPromptMessage
-from core.model_runtime.entities.model_entities import ParameterRule, DefaultParameterName, AIModelEntity, ModelType, \
-    FetchFrom, ModelPropertyKey
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.entities.model_entities import (AIModelEntity, DefaultParameterName, FetchFrom,
+    ModelPropertyKey, ModelType, ParameterRule)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.huggingface_hub._common import _CommonHuggingfaceHub
+from huggingface_hub import InferenceClient
+from huggingface_hub.hf_api import HfApi
+from huggingface_hub.utils import BadRequestError
class HuggingfaceHubLargeLanguageModel(_CommonHuggingfaceHub, LargeLanguageModel):


@@ -4,15 +4,13 @@ from typing import Optional
import numpy as np
import requests
-from huggingface_hub import InferenceClient, HfApi
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.model_providers.huggingface_hub._common import _CommonHuggingfaceHub
+from huggingface_hub import HfApi, InferenceClient
HUGGINGFACE_ENDPOINT_API = 'https://api.endpoints.huggingface.cloud/v2/endpoint/'


@@ -1,5 +1,7 @@
+from os.path import abspath, dirname, join
from transformers import AutoTokenizer
-from os.path import join, abspath, dirname
class JinaTokenizer:
@staticmethod


@@ -1,17 +1,16 @@
+import time
+from json import JSONDecodeError, dumps
from typing import Optional
from core.model_runtime.entities.model_entities import PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
-from core.model_runtime.errors.invoke import InvokeError, InvokeConnectionError, InvokeServerUnavailableError, \
-    InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError
from core.model_runtime.model_providers.jina.text_embedding.jina_tokenizer import JinaTokenizer
from requests import post
-from json import dumps, JSONDecodeError
-import time
class JinaTextEmbeddingModel(TextEmbeddingModel):
"""


@@ -1,22 +1,24 @@
-from typing import Generator, List, Optional, Union, cast
-from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage, LLMResultChunk, LLMResultChunkDelta, LLMMode
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, UserPromptMessage, SystemPromptMessage
-from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule, ParameterType, FetchFrom, ModelType, ModelPropertyKey
-from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
-from core.model_runtime.errors.validate import CredentialsValidateFailedError
-from openai import OpenAI, Stream, \
-    APIConnectionError, APITimeoutError, AuthenticationError, InternalServerError, \
-    RateLimitError, ConflictError, NotFoundError, UnprocessableEntityError, PermissionDeniedError
-from openai.types.chat import ChatCompletionChunk, ChatCompletion
-from openai.types.completion import Completion
-from openai.types.chat.chat_completion_message import FunctionCall
-from httpx import Timeout
from os.path import join
+from typing import Generator, List, Optional, Union, cast
+from core.model_runtime.entities.common_entities import I18nObject
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.entities.model_entities import (AIModelEntity, FetchFrom, ModelPropertyKey, ModelType,
+    ParameterRule, ParameterType)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.utils import helper
+from httpx import Timeout
+from openai import (APIConnectionError, APITimeoutError, AuthenticationError, ConflictError, InternalServerError,
+    NotFoundError, OpenAI, PermissionDeniedError, RateLimitError, Stream, UnprocessableEntityError)
+from openai.types.chat import ChatCompletion, ChatCompletionChunk
+from openai.types.chat.chat_completion_message import FunctionCall
+from openai.types.completion import Completion
class LocalAILarguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict,


@@ -1,17 +1,16 @@
+import time
+from json import JSONDecodeError, dumps
+from os.path import join
from typing import Optional
from core.model_runtime.entities.model_entities import PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
-from core.model_runtime.errors.invoke import InvokeError, InvokeConnectionError, InvokeServerUnavailableError, \
-    InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError
from requests import post
-from json import dumps, JSONDecodeError
-from os.path import join
-import time
class LocalAITextEmbeddingModel(TextEmbeddingModel):
"""


@@ -1,12 +1,14 @@
-from core.model_runtime.model_providers.minimax.llm.errors import BadRequestError, InvalidAPIKeyError, \
-    InternalServerError, RateLimitReachedError, InvalidAuthenticationError, InsufficientAccountBalanceError
-from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
-from typing import List, Dict, Any, Generator, Union
-from json import dumps, loads
-from requests import post, Response
-from time import time
from hashlib import md5
+from json import dumps, loads
+from time import time
+from typing import Any, Dict, Generator, List, Union
+from core.model_runtime.model_providers.minimax.llm.errors import (BadRequestError, InsufficientAccountBalanceError,
+    InternalServerError, InvalidAPIKeyError,
+    InvalidAuthenticationError, RateLimitReachedError)
+from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
+from requests import Response, post
class MinimaxChatCompletion(object):
"""


@@ -1,12 +1,14 @@
-from core.model_runtime.model_providers.minimax.llm.errors import BadRequestError, InvalidAPIKeyError, \
-    InternalServerError, RateLimitReachedError, InvalidAuthenticationError, InsufficientAccountBalanceError
-from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
-from typing import List, Dict, Any, Generator, Union
-from json import dumps, loads
-from requests import post, Response
-from time import time
from hashlib import md5
+from json import dumps, loads
+from time import time
+from typing import Any, Dict, Generator, List, Union
+from core.model_runtime.model_providers.minimax.llm.errors import (BadRequestError, InsufficientAccountBalanceError,
+    InternalServerError, InvalidAPIKeyError,
+    InvalidAuthenticationError, RateLimitReachedError)
+from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
+from requests import Response, post
class MinimaxChatCompletionPro(object):
"""


@@ -1,17 +1,20 @@
from typing import Generator, List, Optional, Union
-from core.model_runtime.model_providers.minimax.llm.errors import BadRequestError, InvalidAPIKeyError, \
-    InternalServerError, RateLimitReachedError, InvalidAuthenticationError, InsufficientAccountBalanceError
-from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, ParameterRule, ParameterType
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.minimax.llm.chat_completion import MinimaxChatCompletion
from core.model_runtime.model_providers.minimax.llm.chat_completion_pro import MinimaxChatCompletionPro
+from core.model_runtime.model_providers.minimax.llm.errors import (BadRequestError, InsufficientAccountBalanceError,
+    InternalServerError, InvalidAPIKeyError,
+    InvalidAuthenticationError, RateLimitReachedError)
+from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
-from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage, LLMResultChunk, LLMResultChunkDelta, LLMMode
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, UserPromptMessage, SystemPromptMessage
-from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule, ParameterType, FetchFrom, ModelType
-from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
-from core.model_runtime.errors.validate import CredentialsValidateFailedError
class MinimaxLargeLanguageModel(LargeLanguageModel):
model_apis = {


@@ -1,5 +1,6 @@
-from typing import Dict, Any
from enum import Enum
+from typing import Any, Dict
class MinimaxMessage:
class Role(Enum):


@@ -1,7 +1,8 @@
-from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+import logging
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
-import logging
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
logger = logging.getLogger(__name__)


@@ -1,18 +1,18 @@
+import time
+from json import dumps, loads
from typing import Optional
from core.model_runtime.entities.model_entities import PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
-from core.model_runtime.errors.invoke import InvokeError, InvokeConnectionError, InvokeServerUnavailableError, \
-    InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError
-from core.model_runtime.model_providers.minimax.llm.errors import InvalidAPIKeyError, InsufficientAccountBalanceError, \
-    InvalidAuthenticationError, RateLimitReachedError, InternalServerError, BadRequestError
+from core.model_runtime.model_providers.minimax.llm.errors import (BadRequestError, InsufficientAccountBalanceError,
+    InternalServerError, InvalidAPIKeyError,
+    InvalidAuthenticationError, RateLimitReachedError)
from requests import post
-from json import dumps, loads
-import time
class MinimaxTextEmbeddingModel(TextEmbeddingModel):
"""


@@ -5,13 +5,12 @@ from collections import OrderedDict
from typing import Optional
import yaml
-from pydantic import BaseModel
from core.model_runtime.entities.model_entities import ModelType
-from core.model_runtime.entities.provider_entities import SimpleProviderEntity, ProviderConfig, ProviderEntity
+from core.model_runtime.entities.provider_entities import ProviderConfig, ProviderEntity, SimpleProviderEntity
from core.model_runtime.model_providers.__base.model_provider import ModelProvider
from core.model_runtime.schema_validators.model_credential_schema_validator import ModelCredentialSchemaValidator
from core.model_runtime.schema_validators.provider_credential_schema_validator import ProviderCredentialSchemaValidator
+from pydantic import BaseModel
logger = logging.getLogger(__name__)


@@ -1,9 +1,8 @@
import openai
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from httpx import Timeout
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
class _CommonOpenAI:
def _to_credential_kwargs(self, credentials: dict) -> dict:


@@ -1,24 +1,23 @@
import logging
-from typing import Optional, Generator, Union, List, cast
+from typing import Generator, List, Optional, Union, cast
import tiktoken
-from openai import OpenAI, Stream
-from openai.types import Completion
-from openai.types.chat import ChatCompletionChunk, ChatCompletion, ChatCompletionMessageToolCall
-from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall, ChoiceDeltaFunctionCall
-from openai.types.chat.chat_completion_message import FunctionCall
-from core.model_runtime.entities.message_entities import PromptMessageTool, PromptMessage, AssistantPromptMessage, \
-    PromptMessageFunction, UserPromptMessage, PromptMessageContentType, ImagePromptMessageContent, \
-    TextPromptMessageContent, SystemPromptMessage, ToolPromptMessage
-from core.model_runtime.entities.model_entities import AIModelEntity, I18nObject, ModelType, FetchFrom, \
-    PriceConfig, AIModelEntity, FetchFrom
-from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, \
-    LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, ImagePromptMessageContent,
+    PromptMessage, PromptMessageContentType,
+    PromptMessageFunction, PromptMessageTool, SystemPromptMessage,
+    TextPromptMessageContent, ToolPromptMessage,
+    UserPromptMessage)
+from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, I18nObject, ModelType, PriceConfig
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.openai._common import _CommonOpenAI
from core.model_runtime.utils import helper
+from openai import OpenAI, Stream
+from openai.types import Completion
+from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall
+from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall
+from openai.types.chat.chat_completion_message import FunctionCall
logger = logging.getLogger(__name__)


@@ -1,12 +1,11 @@
from typing import Optional
-from openai import OpenAI
-from openai.types import ModerationCreateResponse
from core.model_runtime.entities.model_entities import ModelPropertyKey
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.moderation_model import ModerationModel
from core.model_runtime.model_providers.openai._common import _CommonOpenAI
+from openai import OpenAI
+from openai.types import ModerationCreateResponse
class OpenAIModerationModel(_CommonOpenAI, ModerationModel):


@@ -1,10 +1,9 @@
-from typing import Optional, IO
-from openai import OpenAI
+from typing import IO, Optional
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel
from core.model_runtime.model_providers.openai._common import _CommonOpenAI
+from openai import OpenAI
class OpenAISpeech2TextModel(_CommonOpenAI, Speech2TextModel):


@@ -4,13 +4,12 @@ from typing import Optional, Tuple
import numpy as np
import tiktoken
-from openai import OpenAI
from core.model_runtime.entities.model_entities import PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.model_providers.openai._common import _CommonOpenAI
+from openai import OpenAI
class OpenAITextEmbeddingModel(_CommonOpenAI, TextEmbeddingModel):


@@ -1,13 +1,13 @@
from decimal import Decimal
-import requests
+import requests
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.llm_entities import LLMMode
-from core.model_runtime.entities.model_entities import AIModelEntity, DefaultParameterName, \
-    FetchFrom, ModelPropertyKey, ModelType, ParameterRule, ParameterType, PriceConfig
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, \
-    InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.model_entities import (AIModelEntity, DefaultParameterName, FetchFrom,
+    ModelPropertyKey, ModelType, ParameterRule, ParameterType,
+    PriceConfig)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
class _CommonOAI_API_Compat:


@@ -1,27 +1,24 @@
+import json
import logging
from decimal import Decimal
+from typing import Generator, List, Optional, Union, cast
from urllib.parse import urljoin
import requests
-import json
-from typing import Optional, Generator, Union, List, cast
from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.utils import helper
-from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessage, \
-    AssistantPromptMessage, PromptMessageContent, \
-    PromptMessageContentType, PromptMessageFunction, PromptMessageTool, UserPromptMessage, SystemPromptMessage, \
-    ToolPromptMessage
-from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType, PriceConfig, ParameterRule, \
-    DefaultParameterName, \
-    ParameterType, ModelPropertyKey, FetchFrom, AIModelEntity
from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, ImagePromptMessageContent,
+    PromptMessage, PromptMessageContent, PromptMessageContentType,
+    PromptMessageFunction, PromptMessageTool, SystemPromptMessage,
+    ToolPromptMessage, UserPromptMessage)
+from core.model_runtime.entities.model_entities import (AIModelEntity, DefaultParameterName, FetchFrom,
+    ModelPropertyKey, ModelType, ParameterRule, ParameterType,
+    PriceConfig)
from core.model_runtime.errors.invoke import InvokeError
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.openai_api_compatible._common import _CommonOAI_API_Compat
+from core.model_runtime.utils import helper
logger = logging.getLogger(__name__)


@@ -1,16 +1,15 @@
+import json
import time
from decimal import Decimal
from typing import Optional
from urllib.parse import urljoin
-import requests
-import json
import numpy as np
+import requests
from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.entities.model_entities import PriceType, ModelPropertyKey, ModelType, AIModelEntity, FetchFrom, \
-    PriceConfig
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.model_entities import (AIModelEntity, FetchFrom, ModelPropertyKey, ModelType,
+    PriceConfig, PriceType)
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.model_providers.openai_api_compatible._common import _CommonOAI_API_Compat


@@ -1,16 +1,23 @@
from typing import Generator, List, Optional, Union
-from core.model_runtime.model_providers.openllm.llm.openllm_generate_errors import BadRequestError, InvalidAPIKeyError, \
-    InternalServerError, RateLimitReachedError, InvalidAuthenticationError, InsufficientAccountBalanceError
-from core.model_runtime.model_providers.openllm.llm.openllm_generate import OpenLLMGenerate, OpenLLMGenerateMessage
-from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage, LLMResultChunk, LLMResultChunkDelta, LLMMode
from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, UserPromptMessage, SystemPromptMessage
-from core.model_runtime.entities.model_entities import AIModelEntity, ParameterRule, ParameterType, FetchFrom, ModelType, ModelPropertyKey
-from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.entities.model_entities import (AIModelEntity, FetchFrom, ModelPropertyKey, ModelType,
+    ParameterRule, ParameterType)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+from core.model_runtime.model_providers.openllm.llm.openllm_generate import OpenLLMGenerate, OpenLLMGenerateMessage
+from core.model_runtime.model_providers.openllm.llm.openllm_generate_errors import (BadRequestError,
+    InsufficientAccountBalanceError,
+    InternalServerError,
+    InvalidAPIKeyError,
+    InvalidAuthenticationError,
+    RateLimitReachedError)
class OpenLLMLargeLanguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict, prompt_messages: list[PromptMessage],


@@ -1,11 +1,16 @@
-from typing import Any, Dict, List, Union, Generator
-from requests import post, Response
-from requests.exceptions import ConnectionError, InvalidSchema, MissingSchema
-from json import dumps, loads
from enum import Enum
+from json import dumps, loads
+from typing import Any, Dict, Generator, List, Union
+from core.model_runtime.model_providers.openllm.llm.openllm_generate_errors import (BadRequestError,
+    InsufficientAccountBalanceError,
+    InternalServerError,
+    InvalidAPIKeyError,
+    InvalidAuthenticationError,
+    RateLimitReachedError)
+from requests import Response, post
+from requests.exceptions import ConnectionError, InvalidSchema, MissingSchema
-from core.model_runtime.model_providers.openllm.llm.openllm_generate_errors import BadRequestError, InvalidAPIKeyError, \
-    InternalServerError, RateLimitReachedError, InvalidAuthenticationError, InsufficientAccountBalanceError
class OpenLLMGenerateMessage:
class Role(Enum):


@@ -1,6 +1,7 @@
-from core.model_runtime.model_providers.__base.model_provider import ModelProvider
import logging
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
logger = logging.getLogger(__name__)


@@ -1,17 +1,16 @@
+import time
+from json import dumps, loads
from typing import Optional
from core.model_runtime.entities.model_entities import PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
-from core.model_runtime.errors.invoke import InvokeError, InvokeConnectionError, InvokeServerUnavailableError, \
-    InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError
from requests import post
-from requests.exceptions import InvalidSchema, MissingSchema, ConnectionError
-from json import dumps, loads
+from requests.exceptions import ConnectionError, InvalidSchema, MissingSchema
-import time
class OpenLLMTextEmbeddingModel(TextEmbeddingModel):
"""


@@ -1,6 +1,5 @@
-from replicate.exceptions import ReplicateError, ModelError
from core.model_runtime.errors.invoke import InvokeBadRequestError, InvokeError
+from replicate.exceptions import ModelError, ReplicateError
class _CommonReplicate:


@@ -1,17 +1,17 @@
-from typing import Optional, List, Union, Generator
-from replicate import Client as ReplicateClient
-from replicate.exceptions import ReplicateError
-from replicate.prediction import Prediction
+from typing import Generator, List, Optional, Union
from core.model_runtime.entities.common_entities import I18nObject
-from core.model_runtime.entities.llm_entities import LLMResult, LLMMode, LLMResultChunk, LLMResultChunkDelta
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, \
-    PromptMessageRole, UserPromptMessage, SystemPromptMessage
-from core.model_runtime.entities.model_entities import ParameterRule, AIModelEntity, FetchFrom, ModelType, ModelPropertyKey
+from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageRole,
+    PromptMessageTool, SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.entities.model_entities import (AIModelEntity, FetchFrom, ModelPropertyKey, ModelType,
+    ParameterRule)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.replicate._common import _CommonReplicate
+from replicate import Client as ReplicateClient
+from replicate.exceptions import ReplicateError
+from replicate.prediction import Prediction
class ReplicateLargeLanguageModel(_CommonReplicate, LargeLanguageModel):


@@ -2,14 +2,13 @@ import json
import time
from typing import Optional
-from replicate import Client as ReplicateClient
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, PriceType
-from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
+from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.model_providers.replicate._common import _CommonReplicate
+from replicate import Client as ReplicateClient
class ReplicateEmbeddingModel(_CommonReplicate, TextEmbeddingModel):


@@ -4,12 +4,11 @@ import hashlib
import hmac
import json
import queue
-from typing import Optional
-from urllib.parse import urlparse
import ssl
from datetime import datetime
from time import mktime
-from urllib.parse import urlencode
+from typing import Optional
+from urllib.parse import urlencode, urlparse
from wsgiref.handlers import format_date_time
import websocket


@@ -1,12 +1,11 @@
import threading
-from typing import Optional, Generator, Union, List
+from typing import Generator, List, Optional, Union
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, AssistantPromptMessage, \
-    SystemPromptMessage
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
-    LLMResultChunkDelta
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel


@@ -1,9 +1,11 @@
from typing import Generator, List, Optional, Union
from core.model_runtime.entities.llm_entities import LLMResult
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
from core.model_runtime.entities.model_entities import AIModelEntity
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel
class TogetherAILargeLanguageModel(OAIAPICompatLargeLanguageModel):
def _update_endpoint_url(self, credentials: dict):


@@ -1,4 +1,4 @@
-from typing import Dict, Any, List, Optional
+from typing import Any, Dict, List, Optional
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms import Tongyi


@@ -1,23 +1,22 @@
from http import HTTPStatus
-from typing import Optional, Generator, Union, List
+from typing import Generator, List, Optional, Union
import dashscope
-from dashscope.api_entities.dashscope_response import DashScopeAPIResponse
-from dashscope.common.error import AuthenticationError, RequestFailure, \
-    InvalidParameter, UnsupportedModel, ServiceUnavailableError, UnsupportedHTTPMethod
-from langchain.llms.tongyi import generate_with_retry, stream_generate_with_retry
-from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, AssistantPromptMessage, \
-    SystemPromptMessage
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
-    LLMResultChunkDelta
-from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
-    InvokeAuthorizationError, InvokeBadRequestError, InvokeError
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
+from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
+    SystemPromptMessage, UserPromptMessage)
+from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
+    InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+from dashscope.api_entities.dashscope_response import DashScopeAPIResponse
+from dashscope.common.error import (AuthenticationError, InvalidParameter, RequestFailure, ServiceUnavailableError,
+    UnsupportedHTTPMethod, UnsupportedModel)
+from langchain.llms.tongyi import generate_with_retry, stream_generate_with_retry
from ._client import EnhanceTongyi
class TongyiLargeLanguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict,

@@ -1,12 +1,15 @@
from datetime import datetime, timedelta
from enum import Enum
from json import dumps, loads
from requests import post, Response
from typing import Any, Dict, Union, Generator, List
from core.model_runtime.model_providers.wenxin.llm.ernie_bot_errors import BadRequestError, InvalidAPIKeyError, \
InternalServerError, RateLimitReachedError, InvalidAuthenticationError
from core.model_runtime.entities.message_entities import PromptMessageTool
from datetime import datetime, timedelta
from threading import Lock
from typing import Any, Dict, Generator, List, Union
from core.model_runtime.entities.message_entities import PromptMessageTool
from core.model_runtime.model_providers.wenxin.llm.ernie_bot_errors import (BadRequestError, InternalServerError,
InvalidAPIKeyError,
InvalidAuthenticationError,
RateLimitReachedError)
from requests import Response, post
# map api_key to access_token
baidu_access_tokens: Dict[str, 'BaiduAccessToken'] = {}

@@ -1,13 +1,18 @@
from typing import Generator, List, Optional, Union, cast
from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, UserPromptMessage, SystemPromptMessage
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
InvokeAuthorizationError, InvokeBadRequestError, InvokeError
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
SystemPromptMessage, UserPromptMessage)
from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.wenxin.llm.ernie_bot import ErnieBotModel, ErnieMessage, BaiduAccessToken
from core.model_runtime.model_providers.wenxin.llm.ernie_bot_errors import \
InsufficientAccountBalance, InvalidAPIKeyError, InternalServerError, RateLimitReachedError, InvalidAuthenticationError, BadRequestError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.wenxin.llm.ernie_bot import BaiduAccessToken, ErnieBotModel, ErnieMessage
from core.model_runtime.model_providers.wenxin.llm.ernie_bot_errors import (BadRequestError, InsufficientAccountBalance,
InternalServerError, InvalidAPIKeyError,
InvalidAuthenticationError,
RateLimitReachedError)
class ErnieBotLarguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict,

@@ -1,7 +1,8 @@
from core.model_runtime.model_providers.__base.model_provider import ModelProvider
import logging
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
import logging
from core.model_runtime.model_providers.__base.model_provider import ModelProvider
logger = logging.getLogger(__name__)

@@ -1,29 +1,27 @@
from typing import Generator, List, Optional, Union, Iterator, cast
from openai import OpenAI
from openai.types.chat import ChatCompletionChunk, ChatCompletion
from openai.types.completion import Completion
from openai.types.chat.chat_completion_message import FunctionCall
from openai.types.chat import ChatCompletionChunk, ChatCompletion, ChatCompletionMessageToolCall
from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall, ChoiceDeltaFunctionCall
from openai import OpenAI, Stream, \
APIConnectionError, APITimeoutError, AuthenticationError, InternalServerError, \
RateLimitError, ConflictError, NotFoundError, UnprocessableEntityError, PermissionDeniedError
from typing import Generator, Iterator, List, Optional, Union, cast
from xinference_client.client.restful.restful_client import \
RESTfulChatModelHandle, RESTfulGenerateModelHandle, RESTfulChatglmCppChatModelHandle, Client
from core.model_runtime.entities.model_entities import AIModelEntity
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, SystemPromptMessage, AssistantPromptMessage
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.model_entities import FetchFrom, ModelType, ParameterRule, ParameterType, ModelPropertyKey
from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageTool,
SystemPromptMessage, UserPromptMessage)
from core.model_runtime.entities.model_entities import (AIModelEntity, FetchFrom, ModelPropertyKey, ModelType,
ParameterRule, ParameterType)
from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.xinference.llm.xinference_helper import XinferenceHelper, XinferenceModelExtraParameter
from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
InvokeAuthorizationError, InvokeBadRequestError, InvokeError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.xinference.llm.xinference_helper import (XinferenceHelper,
XinferenceModelExtraParameter)
from core.model_runtime.utils import helper
from openai import (APIConnectionError, APITimeoutError, AuthenticationError, ConflictError, InternalServerError,
NotFoundError, OpenAI, PermissionDeniedError, RateLimitError, Stream, UnprocessableEntityError)
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageToolCall
from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, ChoiceDeltaToolCall
from openai.types.chat.chat_completion_message import FunctionCall
from openai.types.completion import Completion
from xinference_client.client.restful.restful_client import (Client, RESTfulChatglmCppChatModelHandle,
RESTfulChatModelHandle, RESTfulGenerateModelHandle)
class XinferenceAILargeLanguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict, prompt_messages: list[PromptMessage],

@@ -1,11 +1,13 @@
from requests import get
from requests.sessions import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import MissingSchema, ConnectionError, Timeout
from time import time
from threading import Lock
from time import time
from typing import List
from requests import get
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, MissingSchema, Timeout
from requests.sessions import Session
class XinferenceModelExtraParameter(object):
model_format: str
model_handle_type: str

@@ -1,14 +1,14 @@
from typing import Optional
from core.model_runtime.entities.rerank_entities import RerankResult, RerankDocument
from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeError, InvokeServerUnavailableError, InvokeRateLimitError, \
InvokeAuthorizationError, InvokeBadRequestError
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType
from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult
from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.rerank_model import RerankModel
from core.model_runtime.entities.model_entities import FetchFrom, ModelType, AIModelEntity
from core.model_runtime.entities.common_entities import I18nObject
from xinference_client.client.restful.restful_client import Client, RESTfulRerankModelHandle
from xinference_client.client.restful.restful_client import RESTfulRerankModelHandle, Client
class XinferenceRerankModel(RerankModel):
"""

@@ -1,16 +1,15 @@
import time
from typing import Optional
from core.model_runtime.entities.model_entities import PriceType, FetchFrom, ModelType, AIModelEntity
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType, PriceType
from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.errors.invoke import InvokeError, InvokeConnectionError, InvokeServerUnavailableError, \
InvokeRateLimitError, InvokeAuthorizationError, InvokeBadRequestError
from xinference_client.client.restful.restful_client import Client, RESTfulEmbeddingModelHandle, RESTfulModelHandle
from xinference_client.client.restful.restful_client import RESTfulEmbeddingModelHandle, RESTfulModelHandle, Client
import time
class XinferenceTextEmbeddingModel(TextEmbeddingModel):
"""

@@ -4,8 +4,7 @@ from __future__ import annotations
import logging
import posixpath
from pydantic import Extra, BaseModel
from pydantic import BaseModel, Extra
from zhipuai.model_api.api import InvokeType
from zhipuai.utils import jwt_token
from zhipuai.utils.http_client import post, stream

@@ -1,5 +1,5 @@
from core.model_runtime.errors.invoke import InvokeConnectionError, InvokeServerUnavailableError, InvokeRateLimitError, \
InvokeAuthorizationError, InvokeBadRequestError, InvokeError
from core.model_runtime.errors.invoke import (InvokeAuthorizationError, InvokeBadRequestError, InvokeConnectionError,
InvokeError, InvokeRateLimitError, InvokeServerUnavailableError)
class _CommonZhipuaiAI:

@@ -1,18 +1,9 @@
import json
from typing import (
Any,
Dict,
List,
Optional,
Generator,
Union
)
from typing import Any, Dict, Generator, List, Optional, Union
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, \
AssistantPromptMessage, \
SystemPromptMessage, PromptMessageRole
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
LLMResultChunkDelta
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageRole,
PromptMessageTool, SystemPromptMessage, UserPromptMessage)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.model_runtime.model_providers.zhipuai._client import ZhipuModelAPI

@@ -1,14 +1,13 @@
import time
from typing import Optional, List, Tuple
from langchain.schema.language_model import _get_token_ids_default_method
from typing import List, Optional, Tuple
from core.model_runtime.entities.model_entities import PriceType
from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult, EmbeddingUsage
from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from core.model_runtime.model_providers.zhipuai._client import ZhipuModelAPI
from core.model_runtime.model_providers.zhipuai._common import _CommonZhipuaiAI
from langchain.schema.language_model import _get_token_ids_default_method
class ZhipuAITextEmbeddingModel(_CommonZhipuaiAI, TextEmbeddingModel):

@@ -1,6 +1,6 @@
from typing import Optional
from core.model_runtime.entities.provider_entities import FormType, CredentialFormSchema
from core.model_runtime.entities.provider_entities import CredentialFormSchema, FormType
class CommonValidator:

@@ -1,6 +1,4 @@
from typing import (
Any
)
from typing import Any
from pydantic import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
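
Since the commit introduces isort as a lint step, the same tool can also be used purely as a checker, failing when imports are out of order instead of rewriting them. Below is a minimal sketch under isort's default settings; the check command or configuration actually wired into this repository is not shown in this excerpt.

import isort

# Old style from the hunk above: a needlessly parenthesized single-name import.
before = "from typing import (\n    Any\n)\nfrom pydantic import BaseModel\n"
# Post-commit style: collapsed import, blank line between stdlib and third-party groups.
after = "from typing import Any\n\nfrom pydantic import BaseModel\n"

# check_code returns True when the code is already isort-clean, False otherwise.
print(isort.check_code(before, show_diff=True))  # expect False, with a diff on stdout
print(isort.check_code(after))                   # expect True under default settings

On the command line the same distinction is isort (rewrite in place) versus isort --check-only (lint): the former produced the bulk of this diff, the latter is what keeps future imports from regressing.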

@@ -3,14 +3,7 @@ import datetime
from collections import defaultdict, deque
from decimal import Decimal
from enum import Enum
from ipaddress import (
IPv4Address,
IPv4Interface,
IPv4Network,
IPv6Address,
IPv6Interface,
IPv6Network,
)
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path, PurePath
from re import Pattern
from types import GeneratorType