diff --git a/gpt4all-chat/chatllm.cpp b/gpt4all-chat/chatllm.cpp
index 89505ef7c293..e54301c8622e 100644
--- a/gpt4all-chat/chatllm.cpp
+++ b/gpt4all-chat/chatllm.cpp
@@ -350,6 +350,7 @@ bool ChatLLM::loadModel(const ModelInfo &modelInfo)
         }
         m_llModelInfo.model->setProgressCallback([this](float progress) -> bool {
+            progress = std::max(progress, std::numeric_limits<float>::min()); // keep progress above zero
             emit modelLoadingPercentageChanged(progress);
             return m_shouldBeLoaded;
         });