fix azure chatgpt o1 parameter error (#10067)
Some checks are pending
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/amd64, build-api-amd64) (push) Waiting to run
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/arm64, build-api-arm64) (push) Waiting to run
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/amd64, build-web-amd64) (push) Waiting to run
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/arm64, build-web-arm64) (push) Waiting to run
Build and Push API & Web / create-manifest (api, DIFY_API_IMAGE_NAME, merge-api-images) (push) Blocked by required conditions
Build and Push API & Web / create-manifest (web, DIFY_WEB_IMAGE_NAME, merge-web-images) (push) Blocked by required conditions

This commit is contained in:
Charlie.Wei 2024-10-30 22:08:56 +08:00 committed by GitHub
parent 0a3d51e9cf
commit f6fecb957e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -37,6 +37,17 @@ def _get_max_tokens(default: int, min_val: int, max_val: int) -> ParameterRule:
return rule return rule
def _get_o1_max_tokens(default: int, min_val: int, max_val: int) -> ParameterRule:
    """Build the token-limit rule for o1-family models.

    o1 models take ``max_completion_tokens`` instead of ``max_tokens``, so this
    reuses the standard MAX_TOKENS template but publishes it under the o1
    parameter name, with the caller-supplied default/min/max bounds.
    """
    template = PARAMETER_RULE_TEMPLATE[DefaultParameterName.MAX_TOKENS]
    rule = ParameterRule(name="max_completion_tokens", **template)
    rule.default, rule.min, rule.max = default, min_val, max_val
    return rule
class AzureBaseModel(BaseModel): class AzureBaseModel(BaseModel):
base_model_name: str base_model_name: str
entity: AIModelEntity entity: AIModelEntity
@ -1098,14 +1109,6 @@ LLM_BASE_MODELS = [
ModelPropertyKey.CONTEXT_SIZE: 128000, ModelPropertyKey.CONTEXT_SIZE: 128000,
}, },
parameter_rules=[ parameter_rules=[
ParameterRule(
name="temperature",
**PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE],
),
ParameterRule(
name="top_p",
**PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P],
),
ParameterRule( ParameterRule(
name="response_format", name="response_format",
label=I18nObject(zh_Hans="回复格式", en_US="response_format"), label=I18nObject(zh_Hans="回复格式", en_US="response_format"),
@ -1116,7 +1119,7 @@ LLM_BASE_MODELS = [
required=False, required=False,
options=["text", "json_object"], options=["text", "json_object"],
), ),
_get_max_tokens(default=512, min_val=1, max_val=32768), _get_o1_max_tokens(default=512, min_val=1, max_val=32768),
], ],
pricing=PriceConfig( pricing=PriceConfig(
input=15.00, input=15.00,
@ -1143,14 +1146,6 @@ LLM_BASE_MODELS = [
ModelPropertyKey.CONTEXT_SIZE: 128000, ModelPropertyKey.CONTEXT_SIZE: 128000,
}, },
parameter_rules=[ parameter_rules=[
ParameterRule(
name="temperature",
**PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE],
),
ParameterRule(
name="top_p",
**PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P],
),
ParameterRule( ParameterRule(
name="response_format", name="response_format",
label=I18nObject(zh_Hans="回复格式", en_US="response_format"), label=I18nObject(zh_Hans="回复格式", en_US="response_format"),
@ -1161,7 +1156,7 @@ LLM_BASE_MODELS = [
required=False, required=False,
options=["text", "json_object"], options=["text", "json_object"],
), ),
_get_max_tokens(default=512, min_val=1, max_val=65536), _get_o1_max_tokens(default=512, min_val=1, max_val=65536),
], ],
pricing=PriceConfig( pricing=PriceConfig(
input=3.00, input=3.00,