diff --git a/gpt4all-chat/src/chatllm.cpp b/gpt4all-chat/src/chatllm.cpp
index 9898ba22f49e..e693c03b60d1 100644
--- a/gpt4all-chat/src/chatllm.cpp
+++ b/gpt4all-chat/src/chatllm.cpp
@@ -198,7 +198,7 @@ bool ChatLLM::loadDefaultModel()
 {
     ModelInfo defaultModel = ModelList::globalInstance()->defaultModelInfo();
     if (defaultModel.filename().isEmpty()) {
-        emit modelLoadingError(u"Could not find any model to load"_qs);
+        emit modelLoadingError(u"Could not find any model to load"_s);
         return false;
     }
     return loadModel(defaultModel);