fix(typo): misspelling (#5094)

This commit is contained in:
Pika 2024-06-12 17:01:21 +08:00 committed by GitHub
parent 95443bd551
commit ecc7f130b4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 9 additions and 9 deletions

View File

@@ -50,7 +50,7 @@ from core.model_runtime.model_providers.__base.large_language_model import Large
from core.model_runtime.utils import helper
class LocalAILarguageModel(LargeLanguageModel):
class LocalAILanguageModel(LargeLanguageModel):
def _invoke(self, model: str, credentials: dict,
prompt_messages: list[PromptMessage], model_parameters: dict,
tools: list[PromptMessageTool] | None = None, stop: list[str] | None = None,

View File

@@ -13,11 +13,11 @@ from core.model_runtime.entities.message_entities import (
)
from core.model_runtime.entities.model_entities import ParameterRule
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.localai.llm.llm import LocalAILarguageModel
from core.model_runtime.model_providers.localai.llm.llm import LocalAILanguageModel
def test_validate_credentials_for_chat_model():
model = LocalAILarguageModel()
model = LocalAILanguageModel()
with pytest.raises(CredentialsValidateFailedError):
model.validate_credentials(
@@ -37,7 +37,7 @@ def test_validate_credentials_for_chat_model():
)
def test_invoke_completion_model():
model = LocalAILarguageModel()
model = LocalAILanguageModel()
response = model.invoke(
model='chinese-llama-2-7b',
@@ -65,7 +65,7 @@ def test_invoke_completion_model():
assert response.usage.total_tokens > 0
def test_invoke_chat_model():
model = LocalAILarguageModel()
model = LocalAILanguageModel()
response = model.invoke(
model='chinese-llama-2-7b',
@@ -93,7 +93,7 @@ def test_invoke_chat_model():
assert response.usage.total_tokens > 0
def test_invoke_stream_completion_model():
model = LocalAILarguageModel()
model = LocalAILanguageModel()
response = model.invoke(
model='chinese-llama-2-7b',
@@ -124,7 +124,7 @@ def test_invoke_stream_completion_model():
assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
def test_invoke_stream_chat_model():
model = LocalAILarguageModel()
model = LocalAILanguageModel()
response = model.invoke(
model='chinese-llama-2-7b',
@@ -155,7 +155,7 @@ def test_invoke_stream_chat_model():
assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
def test_get_num_tokens():
model = LocalAILarguageModel()
model = LocalAILanguageModel()
num_tokens = model.get_num_tokens(
model='????',
@@ -215,4 +215,4 @@ def test_get_num_tokens():
)
assert isinstance(num_tokens, int)
assert num_tokens == 10
assert num_tokens == 10