diff --git a/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py b/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
index 68f12c77d84..3d5d5c90cb6 100644
--- a/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
@@ -18,7 +18,8 @@
 
 
 class OpenAILLMModelParams(BaseForm):
-    temperature = forms.SliderField(TooltipLabel(_('Temperature'), _('Higher values make the output more random, while lower values make it more focused and deterministic')),
+    temperature = forms.SliderField(TooltipLabel(_('Temperature'),
+                                                 _('Higher values make the output more random, while lower values make it more focused and deterministic')),
                                     required=True, default_value=0.7,
                                     _min=0.1,
                                     _max=1.0,
@@ -26,7 +27,8 @@ class OpenAILLMModelParams(BaseForm):
                                     precision=2)
 
     max_tokens = forms.SliderField(
-        TooltipLabel(_('Output the maximum Tokens'), _('Specify the maximum number of tokens that the model can generate')),
+        TooltipLabel(_('Output the maximum Tokens'),
+                     _('Specify the maximum number of tokens that the model can generate')),
         required=True, default_value=800,
         _min=1,
         _max=100000,
@@ -40,7 +42,8 @@ def is_valid(self, model_type: str, model_name, model_credential: Dict[str, obje
                  raise_exception=False):
         model_type_list = provider.get_model_type_list()
         if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))):
-            raise AppApiException(ValidCode.valid_error.value, __('{model_type} Model type is not supported').format(model_type=model_type))
+            raise AppApiException(ValidCode.valid_error.value,
+                                  __('{model_type} Model type is not supported').format(model_type=model_type))
 
         for key in ['api_base', 'api_key']:
             if key not in model_credential:
@@ -51,12 +54,14 @@ def is_valid(self, model_type: str, model_name, model_credential: Dict[str, obje
         try:
             model = provider.get_model(model_type, model_name, model_credential, **model_params)
-            model.invoke([HumanMessage(content=_('Hello'))])
+            model.invoke([HumanMessage(content=__('Hello'))])
         except Exception as e:
             if isinstance(e, AppApiException):
                 raise e
             if raise_exception:
-                raise AppApiException(ValidCode.valid_error.value, __('Verification failed, please check whether the parameters are correct: {error}').format(error=str(e)))
+                raise AppApiException(ValidCode.valid_error.value,
+                                      __('Verification failed, please check whether the parameters are correct: {error}').format(
+                                          error=str(e)))
             else:
                 return False
         return True
 