diff --git a/gpt4all-chat/chatllm.cpp b/gpt4all-chat/chatllm.cpp
index efb43ee4..2131760e 100644
--- a/gpt4all-chat/chatllm.cpp
+++ b/gpt4all-chat/chatllm.cpp
@@ -503,6 +503,11 @@ bool ChatLLM::handleRecalculate(bool isRecalc)
 }
 bool ChatLLM::prompt(const QList<QString> &collectionList, const QString &prompt)
 {
+    if (m_restoreStateFromText) {
+        Q_ASSERT(m_state.isEmpty());
+        processRestoreStateFromText();
+    }
+
     if (!m_processedSystemPrompt)
         processSystemPrompt();
     const QString promptTemplate = MySettings::globalInstance()->modelPromptTemplate(m_modelInfo);
@@ -906,11 +911,6 @@ void ChatLLM::restoreState()
         return;
     }
 
-    if (m_restoreStateFromText) {
-        Q_ASSERT(m_state.isEmpty());
-        processRestoreStateFromText();
-    }
-
#if defined(DEBUG)
     qDebug() << "restoreState" << m_llmThread.objectName() << "size:" << m_state.size();
#endif
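
In effect, the two hunks move the restore-from-text fallback out of restoreState() and to the top of prompt(): the potentially expensive replay of saved chat text is deferred until a prompt actually needs the model state, and it still asserts that no binary state was loaded. The sketch below illustrates that control flow only; it is not the project's code. Apart from the member and method names taken from the diff (m_state, m_restoreStateFromText, m_processedSystemPrompt, processRestoreStateFromText, processSystemPrompt), the class, std types, and bodies are simplified stand-ins.

    #include <cassert>
    #include <iostream>
    #include <string>

    // Minimal standalone sketch of the deferred restore-from-text flow; not the real ChatLLM.
    class ChatLLMSketch {
    public:
        void restoreState() {
            // After the change, restoreState() only handles binary state and no longer
            // triggers the text-based fallback.
            if (!m_state.empty())
                std::cout << "restoreState: binary state of size " << m_state.size() << "\n";
        }

        bool prompt(const std::string &userPrompt) {
            // New behavior: rebuild state from saved chat text lazily, right before the
            // first prompt, and only when no binary state was restored.
            if (m_restoreStateFromText) {
                assert(m_state.empty());
                processRestoreStateFromText();
            }
            if (!m_processedSystemPrompt)
                processSystemPrompt();
            std::cout << "prompting with: " << userPrompt << "\n";
            return true;
        }

    private:
        void processRestoreStateFromText() {
            std::cout << "replaying saved chat text to rebuild model state\n";
            m_restoreStateFromText = false;
        }

        void processSystemPrompt() { m_processedSystemPrompt = true; }

        std::string m_state;                 // stand-in for the serialized binary model state
        bool m_restoreStateFromText = true;  // only chat text was available when the chat loaded
        bool m_processedSystemPrompt = false;
    };

    int main() {
        ChatLLMSketch llm;
        llm.restoreState();    // no fallback work happens here anymore
        llm.prompt("Hello!");  // the deferred restore-from-text runs on the first prompt
        return 0;
    }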