2 files changed: +7 −4 lines changed
File 1 of 2 (file path not shown in this view):

@@ -185,20 +185,23 @@ def get_default_llm():
     available_llms = list(get_config_models(model_type="llm").keys())
     if Settings.model_settings.DEFAULT_LLM_MODEL in available_llms:
         return Settings.model_settings.DEFAULT_LLM_MODEL
-    else:
+    elif available_llms:
         logger.warning(f"default llm model {Settings.model_settings.DEFAULT_LLM_MODEL} is not found in available llms, "
                        f"using {available_llms[0]} instead")
         return available_llms[0]
-
+    else:
+        logger.error("can not find an available llm model")

 def get_default_embedding():
     available_embeddings = list(get_config_models(model_type="embed").keys())
     if Settings.model_settings.DEFAULT_EMBEDDING_MODEL in available_embeddings:
         return Settings.model_settings.DEFAULT_EMBEDDING_MODEL
-    else:
+    elif available_embeddings:
         logger.warning(f"default embedding model {Settings.model_settings.DEFAULT_EMBEDDING_MODEL} is not found in "
                        f"available embeddings, using {available_embeddings[0]} instead")
         return available_embeddings[0]
+    else:
+        logger.error("can not find an available embedding model")


 def get_history_len() -> int:
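In both functions, the change prefers the configured default model, falls back to the first available model with a warning, and now logs an error when no model is available at all (previously that case fell through silently). The new error branch still has no return statement, so callers receive None in that case. Below is a minimal, self-contained sketch of the selection pattern this hunk implements; the helper name pick_model and the model names in the example calls are hypothetical and written only for illustration, while the real code uses get_config_models(), Settings and the project's logger as shown above.

import logging

logger = logging.getLogger(__name__)

def pick_model(preferred, available):
    """Prefer the configured default, fall back to the first available model,
    and log an error (returning None) when nothing is available."""
    if preferred in available:
        return preferred
    elif available:
        logger.warning(f"default model {preferred} is not found in available models, "
                       f"using {available[0]} instead")
        return available[0]
    else:
        logger.error("can not find an available model")
        return None  # made explicit here; the patched functions return None implicitly

# Example calls (hypothetical model names):
print(pick_model("qwen2.5-instruct", ["glm-4", "qwen2.5-instruct"]))  # -> qwen2.5-instruct
print(pick_model("qwen2.5-instruct", ["glm-4"]))                      # -> glm-4, with a warning
print(pick_model("qwen2.5-instruct", []))                             # -> None, with an error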
File 2 of 2 (file path not shown in this view):

@@ -990,7 +990,7 @@ def check_model_supports_streaming(llm_model: str):
     Returns True or False
     """
     # TODO: needs more fine-grained logic for deciding whether a model supports streaming output
-    if llm_model == "qwen2.5-instruct":
+    if llm_model.startswith("qwen"):
         return False
     else:
         return True
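This hunk broadens the special case from the single model name "qwen2.5-instruct" to every model id that starts with "qwen", so the whole qwen family is now reported as not supporting streaming. A quick illustration of the patched behaviour follows; the function body is copied from the hunk above, and the model names other than "qwen2.5-instruct" are hypothetical examples.

def check_model_supports_streaming(llm_model: str):
    # TODO: needs more fine-grained logic for deciding whether a model supports streaming output
    if llm_model.startswith("qwen"):
        return False
    else:
        return True

assert check_model_supports_streaming("qwen2.5-instruct") is False  # same result as before the patch
assert check_model_supports_streaming("qwen-plus") is False         # newly treated as non-streaming
assert check_model_supports_streaming("glm-4") is True              # non-qwen models still assumed to stream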