Restore basic functionality.

Author: Adam Treat
Date:   2023-04-21 09:56:06 -04:00
Parent: 670bbe4db5
Commit: 3e7cf346d6


@@ -889,9 +889,9 @@ Window {
                     if (listElement.name === qsTr("Response: ")) {
                         listElement.currentResponse = true
                         listElement.value = LLM.response
-                        LLM.prompt(listElement.prompt, settingsDialog.promptTemplate, settingsDialog.maxLength,
-                            settingsDialog.topK, settingsDialog.topP, settingsDialog.temperature,
-                            settingsDialog.promptBatchSize)
+                        LLM.prompt(listElement.prompt, settings.promptTemplate, settings.maxLength,
+                            settings.topK, settings.topP, settings.temperature,
+                            settings.promptBatchSize)
                     }
                 }
             }
@@ -961,8 +961,8 @@ Window {
                 chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text})
                 chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt})
                 LLM.resetResponse()
-                LLM.prompt(prompt, settingsDialog.promptTemplate, settingsDialog.maxLength, settingsDialog.topK,
-                    settingsDialog.topP, settingsDialog.temperature, settingsDialog.promptBatchSize)
+                LLM.prompt(prompt, settings.promptTemplate, settings.maxLength, settings.topK,
+                    settings.topP, settings.temperature, settings.promptBatchSize)
                 textInput.text = ""
             }
         }
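The only change in both hunks is where the generation parameters come from: LLM.prompt(...) now reads promptTemplate, maxLength, topK, topP, temperature, and promptBatchSize from an object with id "settings" instead of from "settingsDialog". As a rough illustration, the sketch below shows one plausible shape for such a settings object using a Qt.labs.settings element; the property names follow the calls in the diff, but the element itself and all default values are assumptions, not the actual gpt4all-chat code.

// Hypothetical sketch only: a persistent object exposing the parameters
// that LLM.prompt() consumes in the diff above. Defaults are illustrative,
// not taken from the repository.
import QtQuick 2.15
import Qt.labs.settings 1.0

Settings {
    id: settings
    property string promptTemplate: "### Instruction:\n%1\n### Response:\n"
    property int maxLength: 4096        // maximum tokens to generate
    property int topK: 40               // sample from the K most likely tokens
    property real topP: 0.95            // nucleus sampling threshold
    property real temperature: 0.28     // randomness of sampling
    property int promptBatchSize: 9     // tokens ingested per batch while processing the prompt
}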