feat(ark): add doubao-pro-256k and doubao-embedding-large (#11831)

This commit is contained in:
sino
2024-12-19 17:49:31 +08:00
committed by GitHub
parent 3388d6636c
commit 560d375e0f
3 changed files with 18 additions and 1 deletion

View File

@@ -40,6 +40,10 @@ configs: dict[str, ModelConfig] = {
properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT),
features=[ModelFeature.TOOL_CALL],
),
"Doubao-pro-256k": ModelConfig(
properties=ModelProperties(context_size=262144, max_tokens=4096, mode=LLMMode.CHAT),
features=[],
),
"Doubao-pro-128k": ModelConfig(
properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT),
features=[ModelFeature.TOOL_CALL],

View File

@@ -12,6 +12,7 @@ class ModelConfig(BaseModel):
ModelConfigs = {
"Doubao-embedding": ModelConfig(properties=ModelProperties(context_size=4096, max_chunks=32)),
"Doubao-embedding-large": ModelConfig(properties=ModelProperties(context_size=4096, max_chunks=32)),
}
@@ -21,7 +22,7 @@ def get_model_config(credentials: dict) -> ModelConfig:
if not model_configs:
return ModelConfig(
properties=ModelProperties(
context_size=int(credentials.get("context_size", 0)),
context_size=int(credentials.get("context_size", 4096)),
max_chunks=int(credentials.get("max_chunks", 1)),
)
)

View File

@@ -166,6 +166,12 @@ model_credential_schema:
show_on:
- variable: __model_type
value: llm
- label:
en_US: Doubao-pro-256k
value: Doubao-pro-256k
show_on:
- variable: __model_type
value: llm
- label:
en_US: Llama3-8B
value: Llama3-8B
@@ -220,6 +226,12 @@ model_credential_schema:
show_on:
- variable: __model_type
value: text-embedding
- label:
en_US: Doubao-embedding-large
value: Doubao-embedding-large
show_on:
- variable: __model_type
value: text-embedding
- label:
en_US: Custom
zh_Hans: 自定义