Model Runtime (#1858)

Co-authored-by: StyleZhang <jasonapring2015@outlook.com>
Co-authored-by: Garfield Dai <dai.hai@foxmail.com>
Co-authored-by: chenhe <guchenhe@gmail.com>
Co-authored-by: jyong <jyong@dify.ai>
Co-authored-by: Joel <iamjoel007@gmail.com>
Co-authored-by: Yeuoly <admin@srmxy.cn>
Author: takatost
Date: 2024-01-02 23:42:00 +08:00
Committed by: GitHub
Commit: d069c668f8
Parent: e91dd28a76
807 changed files with 171310 additions and 23806 deletions

@@ -0,0 +1,181 @@
import os
from typing import Generator

import pytest

from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage, \
    SystemPromptMessage, PromptMessageTool
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunkDelta, \
    LLMResultChunk
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel

"""
Using Together.ai's OpenAI-compatible API as testing endpoint
"""


def test_validate_credentials():
    model = OAIAPICompatLargeLanguageModel()

    with pytest.raises(CredentialsValidateFailedError):
        model.validate_credentials(
            model='mistralai/Mixtral-8x7B-Instruct-v0.1',
            credentials={
                'api_key': 'invalid_key',
                'endpoint_url': 'https://api.together.xyz/v1/chat/completions',
                'mode': 'chat'
            }
        )

    model.validate_credentials(
        model='mistralai/Mixtral-8x7B-Instruct-v0.1',
        credentials={
            'api_key': os.environ.get('TOGETHER_API_KEY'),
            'endpoint_url': 'https://api.together.xyz/v1/chat/completions',
            'mode': 'chat'
        }
    )


def test_invoke_model():
    model = OAIAPICompatLargeLanguageModel()

    response = model.invoke(
        model='mistralai/Mixtral-8x7B-Instruct-v0.1',
        credentials={
            'api_key': os.environ.get('TOGETHER_API_KEY'),
            'endpoint_url': 'https://api.together.xyz/v1/completions',
            'mode': 'completion'
        },
        prompt_messages=[
            SystemPromptMessage(
                content='You are a helpful AI assistant.',
            ),
            UserPromptMessage(
                content='Who are you?'
            )
        ],
        model_parameters={
            'temperature': 1.0,
            'top_k': 2,
            'top_p': 0.5,
        },
        stop=['How'],
        stream=False,
        user="abc-123"
    )

    assert isinstance(response, LLMResult)
    assert len(response.message.content) > 0


def test_invoke_stream_model():
    model = OAIAPICompatLargeLanguageModel()

    response = model.invoke(
        model='mistralai/Mixtral-8x7B-Instruct-v0.1',
        credentials={
            'api_key': os.environ.get('TOGETHER_API_KEY'),
            'endpoint_url': 'https://api.together.xyz/v1/chat/completions',
            'mode': 'chat'
        },
        prompt_messages=[
            SystemPromptMessage(
                content='You are a helpful AI assistant.',
            ),
            UserPromptMessage(
                content='Who are you?'
            )
        ],
        model_parameters={
            'temperature': 1.0,
            'top_k': 2,
            'top_p': 0.5,
        },
        stop=['How'],
        stream=True,
        user="abc-123"
    )

    assert isinstance(response, Generator)

    for chunk in response:
        assert isinstance(chunk, LLMResultChunk)
        assert isinstance(chunk.delta, LLMResultChunkDelta)
        assert isinstance(chunk.delta.message, AssistantPromptMessage)


# using OpenAI's ChatGPT-3.5 as testing endpoint
def test_invoke_chat_model_with_tools():
    model = OAIAPICompatLargeLanguageModel()

    result = model.invoke(
        model='gpt-3.5-turbo',
        credentials={
            'api_key': os.environ.get('OPENAI_API_KEY'),
            'endpoint_url': 'https://api.openai.com/v1/chat/completions',
            'mode': 'chat'
        },
        prompt_messages=[
            SystemPromptMessage(
                content='You are a helpful AI assistant.',
            ),
            UserPromptMessage(
                content="what's the weather today in London?",
            )
        ],
        tools=[
            PromptMessageTool(
                name='get_weather',
                description='Determine weather in my location',
                parameters={
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state e.g. San Francisco, CA"
                        },
                        "unit": {
                            "type": "string",
                            "enum": [
                                "celsius",
                                "fahrenheit"
                            ]
                        }
                    },
                    "required": [
                        "location"
                    ]
                }
            ),
        ],
        model_parameters={
            'temperature': 0.0,
            'max_tokens': 1024
        },
        stream=False,
        user="abc-123"
    )

    assert isinstance(result, LLMResult)
    assert isinstance(result.message, AssistantPromptMessage)
    assert len(result.message.tool_calls) > 0


def test_get_num_tokens():
    model = OAIAPICompatLargeLanguageModel()

    num_tokens = model.get_num_tokens(
        model='mistralai/Mixtral-8x7B-Instruct-v0.1',
        credentials={
            'api_key': os.environ.get('OPENAI_API_KEY'),
            'endpoint_url': 'https://api.openai.com/v1/chat/completions'
        },
        prompt_messages=[
            SystemPromptMessage(
                content='You are a helpful AI assistant.',
            ),
            UserPromptMessage(
                content='Hello World!'
            )
        ]
    )

    assert isinstance(num_tokens, int)
    assert num_tokens == 21
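
The streaming test above only asserts on the types of the emitted chunks; application code would typically accumulate the streamed fragments into a full reply. A minimal sketch of that pattern follows, reusing the same provider class and credential keys exercised by the tests; the TOGETHER_API_KEY environment variable and the concatenation of chunk.delta.message.content are assumptions for illustration, not part of this commit.

import os

from core.model_runtime.entities.message_entities import SystemPromptMessage, UserPromptMessage
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel

# Sketch only: mirrors test_invoke_stream_model, but collects the streamed text
# instead of asserting on chunk types.
model = OAIAPICompatLargeLanguageModel()
response = model.invoke(
    model='mistralai/Mixtral-8x7B-Instruct-v0.1',
    credentials={
        'api_key': os.environ.get('TOGETHER_API_KEY'),  # assumed to be set in the environment
        'endpoint_url': 'https://api.together.xyz/v1/chat/completions',
        'mode': 'chat'
    },
    prompt_messages=[
        SystemPromptMessage(content='You are a helpful AI assistant.'),
        UserPromptMessage(content='Who are you?')
    ],
    model_parameters={'temperature': 1.0, 'top_p': 0.5},
    stream=True,
    user='abc-123'
)

text = ''
for chunk in response:
    # Each delta carries an AssistantPromptMessage fragment (assumed to be concatenable text).
    text += chunk.delta.message.content
print(text)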

@@ -0,0 +1,79 @@
import os

import pytest

from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import OAICompatEmbeddingModel

"""
Using OpenAI's API as testing endpoint
"""


def test_validate_credentials():
    model = OAICompatEmbeddingModel()

    with pytest.raises(CredentialsValidateFailedError):
        model.validate_credentials(
            model='text-embedding-ada-002',
            credentials={
                'api_key': 'invalid_key',
                'endpoint_url': 'https://api.openai.com/v1/embeddings',
                'context_size': 8184,
                'max_chunks': 32
            }
        )

    model.validate_credentials(
        model='text-embedding-ada-002',
        credentials={
            'api_key': os.environ.get('OPENAI_API_KEY'),
            'endpoint_url': 'https://api.openai.com/v1/embeddings',
            'context_size': 8184,
            'max_chunks': 32
        }
    )


def test_invoke_model():
    model = OAICompatEmbeddingModel()

    result = model.invoke(
        model='text-embedding-ada-002',
        credentials={
            'api_key': os.environ.get('OPENAI_API_KEY'),
            'endpoint_url': 'https://api.openai.com/v1/embeddings',
            'context_size': 8184,
            'max_chunks': 32
        },
        texts=[
            "hello",
            "world"
        ],
        user="abc-123"
    )

    assert isinstance(result, TextEmbeddingResult)
    assert len(result.embeddings) == 2
    assert result.usage.total_tokens == 2


def test_get_num_tokens():
    model = OAICompatEmbeddingModel()

    num_tokens = model.get_num_tokens(
        model='text-embedding-ada-002',
        credentials={
            'api_key': os.environ.get('OPENAI_API_KEY'),
            'endpoint_url': 'https://api.openai.com/v1/embeddings',
            'context_size': 8184,
            'max_chunks': 32
        },
        texts=[
            "hello",
            "world"
        ]
    )

    assert num_tokens == 2
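
For completeness, the embedding tests translate directly into a plain invocation pattern. A minimal sketch, assuming the same credential keys used above and an OPENAI_API_KEY environment variable; this is illustrative only and not part of the commit.

import os

from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import OAICompatEmbeddingModel

# Sketch only: mirrors test_invoke_model for the OpenAI-compatible embedding model.
model = OAICompatEmbeddingModel()
result = model.invoke(
    model='text-embedding-ada-002',
    credentials={
        'api_key': os.environ.get('OPENAI_API_KEY'),  # assumed to be set in the environment
        'endpoint_url': 'https://api.openai.com/v1/embeddings',
        'context_size': 8184,
        'max_chunks': 32
    },
    texts=['hello', 'world'],
    user='abc-123'
)

# One embedding vector per input text; usage reports token consumption.
print(len(result.embeddings), result.usage.total_tokens)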