From 18dd0d569d3f99ebea110adda0346611aaf6f57e Mon Sep 17 00:00:00 2001 From: takatost Date: Sun, 20 Aug 2023 19:12:52 +0800 Subject: [PATCH] fix: xinference max_tokens alias error (#929) --- api/core/model_providers/providers/xinference_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/core/model_providers/providers/xinference_provider.py b/api/core/model_providers/providers/xinference_provider.py index 4589b3f853..5188c99f93 100644 --- a/api/core/model_providers/providers/xinference_provider.py +++ b/api/core/model_providers/providers/xinference_provider.py @@ -50,7 +50,7 @@ class XinferenceProvider(BaseModelProvider): top_p=KwargRule[float](min=0, max=1, default=0.7), presence_penalty=KwargRule[float](min=-2, max=2, default=0), frequency_penalty=KwargRule[float](min=-2, max=2, default=0), - max_tokens=KwargRule[int](alias='max_token', min=10, max=4000, default=256), + max_tokens=KwargRule[int](min=10, max=4000, default=256), ) @classmethod