Feature: dynamic changes of language and locale at runtime issue #2644 (#2659)

This change updates the UI to allow dynamic changes of language and locale at runtime. None of the language translations are finished or in releasable shape yet, so this change also adds a new build option that enables or disables the feature. By default, no translations are built as part of a release.

Signed-off-by: Adam Treat <treat.adam@gmail.com>
Author: AT
Date: 2024-07-12 16:14:58 -04:00 (committed by GitHub)
Commit: d515ad3b18 (parent 0a94d7d55d)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in the database)

8 changed files with 3950 additions and 4170 deletions

CMakeLists.txt

@@ -33,6 +33,7 @@ set(CMAKE_AUTOMOC ON)
 set(CMAKE_AUTORCC ON)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 
+option(GPT4ALL_TRANSLATIONS OFF "Build with translations")
 option(GPT4ALL_LOCALHOST OFF "Build installer for localhost repo")
 option(GPT4ALL_OFFLINE_INSTALLER "Build an offline installer" OFF)
 option(GPT4ALL_SIGN_INSTALL "Sign installed binaries and installers (requires signing identities)" OFF)
@@ -226,9 +227,14 @@ qt_add_qml_module(chat
     icons/you.svg
 )
 
-qt_add_translations(chat
-    TS_FILES ${CMAKE_SOURCE_DIR}/translations/gpt4all_en.ts
-)
+if (GPT4ALL_TRANSLATIONS)
+    qt_add_translations(chat
+        TS_FILES
+            ${CMAKE_SOURCE_DIR}/translations/gpt4all_en.ts
+            ${CMAKE_SOURCE_DIR}/translations/gpt4all_es_MX.ts
+            ${CMAKE_SOURCE_DIR}/translations/gpt4all_zh_CN.ts
+    )
+endif()
 
 set_target_properties(chat PROPERTIES
     WIN32_EXECUTABLE TRUE

main.cpp

@@ -33,13 +33,18 @@ int main(int argc, char *argv[])
 
     QGuiApplication app(argc, argv);
 
-    QTranslator translator;
-    bool success = translator.load(":/i18n/gpt4all_en.qm");
-    Q_ASSERT(success);
-    app.installTranslator(&translator);
+    // Set the locale and language translation before the qml engine has even been started. This will
+    // use the default system locale unless the user has explicitly set it to use a different one.
+    MySettings::globalInstance()->setLanguageAndLocale();
 
     QQmlApplicationEngine engine;
 
+    // Whenever the language and locale setting changes, update the QML engine's uiLanguage property
+    // so the loaded UI retranslates.
+    QObject::connect(MySettings::globalInstance(), &MySettings::languageAndLocaleChanged, [&engine]() {
+        engine.setUiLanguage(MySettings::globalInstance()->languageAndLocale());
+    });
+
     QString llmodelSearchPaths = QCoreApplication::applicationDirPath();
     const QString libDir = QCoreApplication::applicationDirPath() + "/../lib/";
     if (LLM::directoryExists(libDir))
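For context, the pattern above relies on Qt's ability to retranslate a running QML scene: QQmlEngine re-evaluates qsTr() bindings whenever its uiLanguage property changes, so swapping the installed QTranslator and updating uiLanguage is enough to relanguage the UI without a restart. A minimal, self-contained sketch of that wiring (not part of the commit; the resource path and QML entry point are assumed):

#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QTranslator>
#include <QUrl>

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);
    QQmlApplicationEngine engine;

    // Hypothetical helper: swap the installed translator and tell the engine which
    // language is now active so its qsTr() bindings re-evaluate.
    QTranslator *translator = nullptr;
    auto applyLanguage = [&](const QString &bcp47Name) {
        if (translator) {
            QGuiApplication::removeTranslator(translator);
            delete translator;
            translator = nullptr;
        }
        auto *candidate = new QTranslator(&app);
        if (candidate->load(QStringLiteral(":/i18n/gpt4all_%1.qm").arg(bcp47Name))) {
            QGuiApplication::installTranslator(candidate);
            translator = candidate;
        } else {
            delete candidate;
        }
        engine.setUiLanguage(bcp47Name);
    };

    applyLanguage(QStringLiteral("en"));                // assumed default language
    engine.load(QUrl(QStringLiteral("qrc:/main.qml"))); // assumed entry point
    return app.exec();
}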

mysettings.cpp

@@ -7,6 +7,7 @@
 #include <QFile>
 #include <QFileInfo>
 #include <QGlobalStatic>
+#include <QGuiApplication>
 #include <QIODevice>
 #include <QMap>
 #include <QMetaObject>
@@ -23,6 +24,9 @@
 
 using namespace Qt::Literals::StringLiterals;
 
+// FIXME: All of these default strings that are shown in the UI for settings need to be marked as
+// translatable
+
 namespace defaults {
 
 static const int threadCount = std::min(4, (int32_t) std::thread::hardware_concurrency());
@@ -30,6 +34,7 @@ static const bool forceMetal = false;
 static const bool networkIsActive = false;
 static const bool networkUsageStatsActive = false;
 static const QString device = "Auto";
+static const QString languageAndLocale = "Default";
 
 } // namespace defaults
 
@@ -95,6 +100,46 @@ static QStringList getDevices(bool skipKompute = false)
     return deviceList;
 }
 
+static QString getUiLanguage(const QString directory, const QString fileName)
+{
+    QTranslator translator;
+    const QString filePath = directory + QDir::separator() + fileName;
+    if (translator.load(filePath)) {
+        // Extract the language code from a file name of the form gpt4all_<lang>.qm
+        const QString lang = fileName.mid(fileName.indexOf('_') + 1,
+            fileName.lastIndexOf('.') - fileName.indexOf('_') - 1);
+        return lang;
+    }
+    qDebug() << "ERROR: Failed to load translation file:" << filePath;
+    return QString();
+}
+
+static QStringList getUiLanguages(const QString &modelPath)
+{
+    QStringList languageList( { QObject::tr("Default") } );
+
+    // Add the language translations from the model path first. This is used by translation developers
+    // who want to load in-progress translations without having to rebuild all of GPT4All from source.
+    {
+        const QDir dir(modelPath);
+        const QStringList qmFiles = dir.entryList({"*.qm"}, QDir::Files);
+        for (const QString &fileName : qmFiles)
+            languageList << getUiLanguage(modelPath, fileName);
+    }
+
+    // Now add the internal built-in language translations
+    {
+        const QDir dir(":/i18n");
+        const QStringList qmFiles = dir.entryList({"*.qm"}, QDir::Files);
+        for (const QString &fileName : qmFiles) {
+            const QString lang = getUiLanguage(":/i18n", fileName);
+            if (!languageList.contains(lang))
+                languageList.append(lang);
+        }
+    }
+    return languageList;
+}
+
 class MyPrivateSettings: public MySettings { };
 Q_GLOBAL_STATIC(MyPrivateSettings, settingsInstance)
 MySettings *MySettings::globalInstance()
@@ -106,6 +151,7 @@ MySettings::MySettings()
     : QObject(nullptr)
     , m_deviceList(getDevices())
     , m_embeddingsDeviceList(getDevices(/*skipKompute*/ true))
+    , m_uiLanguages(getUiLanguages(modelPath()))
 {
 }
 
@@ -154,6 +200,7 @@ void MySettings::restoreApplicationDefaults()
     setUserDefaultModel(basicDefaults.value("userDefaultModel").toString());
     setForceMetal(defaults::forceMetal);
     setSuggestionMode(basicDefaults.value("suggestionMode").value<SuggestionMode>());
+    setLanguageAndLocale(defaults::languageAndLocale);
 }
 
 void MySettings::restoreLocalDocsDefaults()
@@ -521,3 +568,91 @@ void MySettings::setNetworkUsageStatsActive(bool value)
         emit networkUsageStatsActiveChanged();
     }
 }
+
+QString MySettings::languageAndLocale() const
+{
+    auto value = m_settings.value("languageAndLocale");
+    if (!value.isValid())
+        return defaults::languageAndLocale;
+    return value.toString();
+}
+
+QString MySettings::filePathForLocale(const QLocale &locale)
+{
+    // Check whether we have a translation for the chosen locale and return its file path if so;
+    // otherwise return the file path for the 'en' translation
+    const QStringList uiLanguages = locale.uiLanguages(QLocale::TagSeparator::Underscore);
+
+    // Scan the model download directory first for files named like gpt4all_%1.qm and return the first
+    // match. This lets translation developers test a translation by compiling it with the lrelease
+    // tool rather than having to recompile all of GPT4All.
+    QString directory = modelPath();
+    for (const QString &bcp47Name : uiLanguages) {
+        QString filePath = QString("%1/gpt4all_%2.qm").arg(directory).arg(bcp47Name);
+        QFileInfo filePathInfo(filePath);
+        if (filePathInfo.exists()) return filePath;
+    }
+
+    // Now scan the internal built-in translations
+    for (QString bcp47Name : uiLanguages) {
+        QString filePath = QString(":/i18n/gpt4all_%1.qm").arg(bcp47Name);
+        QFileInfo filePathInfo(filePath);
+        if (filePathInfo.exists()) return filePath;
+    }
+    return QString(":/i18n/gpt4all_en.qm");
+}
+
+void MySettings::setLanguageAndLocale(const QString &bcp47Name)
+{
+    if (!bcp47Name.isEmpty() && languageAndLocale() != bcp47Name)
+        m_settings.setValue("languageAndLocale", bcp47Name);
+
+    // When the app is started this method is called with no bcp47Name, which sets the translation
+    // to either the default (the system locale) or the one explicitly set by the user previously.
+    QLocale locale;
+    const QString l = languageAndLocale();
+    if (l == "Default")
+        locale = QLocale::system();
+    else
+        locale = QLocale(l);
+
+    // If we previously installed a translator, then remove it
+    if (m_translator) {
+        if (!qGuiApp->removeTranslator(m_translator)) {
+            qDebug() << "ERROR: Failed to remove the previous translator";
+        } else {
+            delete m_translator;
+            m_translator = nullptr;
+        }
+    }
+
+    // We expect that the translator was removed and is now a nullptr
+    Q_ASSERT(!m_translator);
+
+    const QString filePath = filePathForLocale(locale);
+
+    // Installing the default gpt4all_en.qm fails, presumably because it has no strings that differ
+    // from the ones compiled into the binary
+    if (!m_translator && !filePath.endsWith("en.qm")) {
+        // Create a new translator object on the heap
+        m_translator = new QTranslator(this);
+        bool success = m_translator->load(filePath);
+        Q_ASSERT(success);
+        if (!success) {
+            qDebug() << "ERROR: Failed to load translation file:" << filePath;
+            delete m_translator;
+            m_translator = nullptr;
+        }
+
+        // If we've successfully loaded it, then try and install it
+        if (!qGuiApp->installTranslator(m_translator)) {
+            qDebug() << "ERROR: Failed to install the translator:" << filePath;
+            delete m_translator;
+            m_translator = nullptr;
+        }
+    }
+
+    // Finally, set the locale whether we have a translation or not
+    QLocale::setDefault(locale);
+    emit languageAndLocaleChanged();
+}
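The candidate search in filePathForLocale() leans on QLocale::uiLanguages(), which returns progressively less specific BCP 47 names for the locale. A small sketch (not part of the commit) of the file names that would be probed for a Mexican Spanish locale:

#include <QDebug>
#include <QLocale>
#include <QStringList>

int main()
{
    const QLocale locale(QStringLiteral("es_MX"));
    // With Qt 6.7's TagSeparator::Underscore this yields names such as "es_MX" and "es",
    // most specific first, mirroring how filePathForLocale() builds gpt4all_<name>.qm paths.
    const QStringList candidates = locale.uiLanguages(QLocale::TagSeparator::Underscore);
    for (const QString &bcp47Name : candidates)
        qDebug().noquote() << QStringLiteral("would try gpt4all_%1.qm").arg(bcp47Name);
    return 0;
}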

mysettings.h

@@ -33,8 +33,11 @@ class MySettings : public QObject
     Q_PROPERTY(bool serverChat READ serverChat WRITE setServerChat NOTIFY serverChatChanged)
     Q_PROPERTY(QString modelPath READ modelPath WRITE setModelPath NOTIFY modelPathChanged)
     Q_PROPERTY(QString userDefaultModel READ userDefaultModel WRITE setUserDefaultModel NOTIFY userDefaultModelChanged)
+    // FIXME: This should be changed to an enum to allow translations to work
     Q_PROPERTY(QString chatTheme READ chatTheme WRITE setChatTheme NOTIFY chatThemeChanged)
+    // FIXME: This should be changed to an enum to allow translations to work
     Q_PROPERTY(QString fontSize READ fontSize WRITE setFontSize NOTIFY fontSizeChanged)
+    Q_PROPERTY(QString languageAndLocale READ languageAndLocale WRITE setLanguageAndLocale NOTIFY languageAndLocaleChanged)
     Q_PROPERTY(bool forceMetal READ forceMetal WRITE setForceMetal NOTIFY forceMetalChanged)
     Q_PROPERTY(QString lastVersionStarted READ lastVersionStarted WRITE setLastVersionStarted NOTIFY lastVersionStartedChanged)
     Q_PROPERTY(int localDocsChunkSize READ localDocsChunkSize WRITE setLocalDocsChunkSize NOTIFY localDocsChunkSizeChanged)
@@ -52,6 +55,7 @@ class MySettings : public QObject
     Q_PROPERTY(QStringList embeddingsDeviceList MEMBER m_embeddingsDeviceList CONSTANT)
     Q_PROPERTY(int networkPort READ networkPort WRITE setNetworkPort NOTIFY networkPortChanged)
     Q_PROPERTY(SuggestionMode suggestionMode READ suggestionMode WRITE setSuggestionMode NOTIFY suggestionModeChanged)
+    Q_PROPERTY(QStringList uiLanguages MEMBER m_uiLanguages CONSTANT)
 
 public:
     static MySettings *globalInstance();
@@ -142,6 +146,9 @@ public:
     SuggestionMode suggestionMode() const;
     void setSuggestionMode(SuggestionMode mode);
 
+    QString languageAndLocale() const;
+    void setLanguageAndLocale(const QString &bcp47Name = QString()); // called on startup with QString()
+
     // Release/Download settings
     QString lastVersionStarted() const;
     void setLastVersionStarted(const QString &value);
@@ -215,12 +222,15 @@ Q_SIGNALS:
     void attemptModelLoadChanged();
     void deviceChanged();
     void suggestionModeChanged();
+    void languageAndLocaleChanged();
 
 private:
     QSettings m_settings;
     bool m_forceMetal;
     const QStringList m_deviceList;
     const QStringList m_embeddingsDeviceList;
+    const QStringList m_uiLanguages;
+    QTranslator *m_translator = nullptr;
 
 private:
     explicit MySettings();
@@ -232,6 +242,7 @@ private:
     QVariant getModelSetting(const QString &name, const ModelInfo &info) const;
     void setModelSetting(const QString &name, const ModelInfo &info, const QVariant &value, bool force,
         bool signal = false);
+    QString filePathForLocale(const QLocale &locale);
 };
 
 #endif // MYSETTINGS_H
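With the new property and setter in place, a C++ caller (or a QML binding through the MySettings singleton) can enumerate the available languages and switch at runtime. A hedged sketch of the intended use, assuming the header above is included:

#include <QDebug>

#include "mysettings.h"

void demoLanguageSwitch()
{
    MySettings *settings = MySettings::globalInstance();

    // uiLanguages holds "Default" plus every discovered .qm language; it is exposed through
    // the Qt property system, so it can also be read generically from C++.
    qDebug() << "Available UI languages:" << settings->property("uiLanguages").toStringList();

    // Persist the choice, install the matching translator, and emit languageAndLocaleChanged
    // so main.cpp can push the new language into the QML engine.
    settings->setLanguageAndLocale(QStringLiteral("es_MX"));
    qDebug() << "Active language and locale:" << settings->languageAndLocale();
}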

qml/ApplicationSettings.qml

@@ -161,16 +161,46 @@ MySettingsTab {
         }
     }
     MySettingsLabel {
-        id: deviceLabel
-        text: qsTr("Device")
-        helpText: qsTr('The compute device used for text generation. "Auto" uses Vulkan or Metal.')
+        id: languageLabel
+        visible: MySettings.uiLanguages.length > 1
+        text: qsTr("Language and Locale")
+        helpText: qsTr("The language and locale you wish to use.")
         Layout.row: 4
         Layout.column: 0
     }
     MyComboBox {
-        id: deviceBox
+        id: languageBox
+        visible: MySettings.uiLanguages.length > 1
         Layout.row: 4
         Layout.column: 2
+        Layout.minimumWidth: 200
+        Layout.maximumWidth: 200
+        Layout.fillWidth: false
+        Layout.alignment: Qt.AlignRight
+        model: MySettings.uiLanguages
+        Accessible.name: fontLabel.text
+        Accessible.description: fontLabel.helpText
+        function updateModel() {
+            languageBox.currentIndex = languageBox.indexOfValue(MySettings.languageAndLocale);
+        }
+        Component.onCompleted: {
+            languageBox.updateModel()
+        }
+        onActivated: {
+            MySettings.languageAndLocale = languageBox.currentText
+        }
+    }
+    MySettingsLabel {
+        id: deviceLabel
+        text: qsTr("Device")
+        helpText: qsTr('The compute device used for text generation. "Auto" uses Vulkan or Metal.')
+        Layout.row: 5
+        Layout.column: 0
+    }
+    MyComboBox {
+        id: deviceBox
+        Layout.row: 5
+        Layout.column: 2
         Layout.minimumWidth: 400
         Layout.maximumWidth: 400
         Layout.fillWidth: false
@@ -198,12 +228,12 @@ MySettingsTab {
         id: defaultModelLabel
         text: qsTr("Default Model")
         helpText: qsTr("The preferred model for new chats. Also used as the local server fallback.")
-        Layout.row: 5
+        Layout.row: 6
         Layout.column: 0
     }
     MyComboBox {
         id: comboBox
-        Layout.row: 5
+        Layout.row: 6
         Layout.column: 2
         Layout.minimumWidth: 400
         Layout.maximumWidth: 400
@@ -231,12 +261,12 @@ MySettingsTab {
         id: suggestionModeLabel
         text: qsTr("Suggestion Mode")
         helpText: qsTr("Generate suggested follow-up questions at the end of responses.")
-        Layout.row: 6
+        Layout.row: 7
        Layout.column: 0
     }
     MyComboBox {
         id: suggestionModeBox
-        Layout.row: 6
+        Layout.row: 7
         Layout.column: 2
         Layout.minimumWidth: 400
         Layout.maximumWidth: 400
@@ -255,12 +285,12 @@ MySettingsTab {
         id: modelPathLabel
         text: qsTr("Download Path")
         helpText: qsTr("Where to store local models and the LocalDocs database.")
-        Layout.row: 7
+        Layout.row: 8
         Layout.column: 0
     }
     RowLayout {
-        Layout.row: 7
+        Layout.row: 8
         Layout.column: 2
         Layout.alignment: Qt.AlignRight
         Layout.minimumWidth: 400
@@ -297,12 +327,12 @@ MySettingsTab {
         id: dataLakeLabel
         text: qsTr("Enable Datalake")
         helpText: qsTr("Send chats and feedback to the GPT4All Open-Source Datalake.")
-        Layout.row: 8
+        Layout.row: 9
         Layout.column: 0
     }
     MyCheckBox {
         id: dataLakeBox
-        Layout.row: 8
+        Layout.row: 9
         Layout.column: 2
         Layout.alignment: Qt.AlignRight
         Component.onCompleted: { dataLakeBox.checked = MySettings.networkIsActive; }
@@ -320,7 +350,7 @@ MySettingsTab {
     }
 
     ColumnLayout {
-        Layout.row: 9
+        Layout.row: 10
         Layout.column: 0
         Layout.columnSpan: 3
         Layout.fillWidth: true
@@ -343,7 +373,7 @@ MySettingsTab {
         id: nThreadsLabel
         text: qsTr("CPU Threads")
         helpText: qsTr("The number of CPU threads used for inference and embedding.")
-        Layout.row: 10
+        Layout.row: 11
         Layout.column: 0
     }
     MyTextField {
@@ -351,7 +381,7 @@ MySettingsTab {
         color: theme.textColor
         font.pixelSize: theme.fontSizeLarge
         Layout.alignment: Qt.AlignRight
-        Layout.row: 10
+        Layout.row: 11
         Layout.column: 2
         Layout.minimumWidth: 200
         Layout.maximumWidth: 200
@@ -375,12 +405,12 @@ MySettingsTab {
         id: saveChatsContextLabel
         text: qsTr("Save Chat Context")
         helpText: qsTr("Save the chat model's state to disk for faster loading. WARNING: Uses ~2GB per chat.")
-        Layout.row: 11
+        Layout.row: 12
         Layout.column: 0
     }
     MyCheckBox {
         id: saveChatsContextBox
-        Layout.row: 11
+        Layout.row: 12
         Layout.column: 2
         Layout.alignment: Qt.AlignRight
         checked: MySettings.saveChatsContext
@@ -392,12 +422,12 @@ MySettingsTab {
         id: serverChatLabel
         text: qsTr("Enable Local Server")
         helpText: qsTr("Expose an OpenAI-Compatible server to localhost. WARNING: Results in increased resource usage.")
-        Layout.row: 12
+        Layout.row: 13
         Layout.column: 0
     }
     MyCheckBox {
         id: serverChatBox
-        Layout.row: 12
+        Layout.row: 13
         Layout.column: 2
         Layout.alignment: Qt.AlignRight
         checked: MySettings.serverChat
@@ -409,7 +439,7 @@ MySettingsTab {
         id: serverPortLabel
         text: qsTr("API Server Port")
         helpText: qsTr("The port to use for the local server. Requires restart.")
-        Layout.row: 13
+        Layout.row: 14
         Layout.column: 0
     }
     MyTextField {
@@ -417,7 +447,7 @@ MySettingsTab {
         text: MySettings.networkPort
         color: theme.textColor
         font.pixelSize: theme.fontSizeLarge
-        Layout.row: 13
+        Layout.row: 14
         Layout.column: 2
         Layout.minimumWidth: 200
         Layout.maximumWidth: 200
@@ -462,12 +492,12 @@ MySettingsTab {
         id: updatesLabel
         text: qsTr("Check For Updates")
        helpText: qsTr("Manually check for an update to GPT4All.");
-        Layout.row: 14
+        Layout.row: 15
         Layout.column: 0
     }
     MySettingsButton {
-        Layout.row: 14
+        Layout.row: 15
         Layout.column: 2
         Layout.alignment: Qt.AlignRight
         text: qsTr("Updates");
@@ -478,7 +508,7 @@ MySettingsTab {
     }
 
     Rectangle {
-        Layout.row: 15
+        Layout.row: 16
         Layout.column: 0
         Layout.columnSpan: 3
         Layout.fillWidth: true

translations/gpt4all_en.ts

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS> <!DOCTYPE TS>
<TS version="2.1"> <TS version="2.1" language="en">
<context> <context>
<name>AddCollectionView</name> <name>AddCollectionView</name>
<message> <message>
@ -486,164 +486,176 @@
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="165"/> <location filename="../qml/ApplicationSettings.qml" line="166"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="165"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="166"/>
<source>Language and Locale</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ApplicationSettings.qml" line="167"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="167"/>
<source>The language and locale you wish to use.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ApplicationSettings.qml" line="195"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="195"/>
<source>Device</source> <source>Device</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="166"/> <location filename="../qml/ApplicationSettings.qml" line="196"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="166"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="196"/>
<source>The compute device used for text generation. &quot;Auto&quot; uses Vulkan or Metal.</source> <source>The compute device used for text generation. &quot;Auto&quot; uses Vulkan or Metal.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="199"/> <location filename="../qml/ApplicationSettings.qml" line="229"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="199"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="229"/>
<source>Default Model</source> <source>Default Model</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="200"/> <location filename="../qml/ApplicationSettings.qml" line="230"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="200"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="230"/>
<source>The preferred model for new chats. Also used as the local server fallback.</source> <source>The preferred model for new chats. Also used as the local server fallback.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="232"/> <location filename="../qml/ApplicationSettings.qml" line="262"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="232"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="262"/>
<source>Suggestion Mode</source> <source>Suggestion Mode</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="233"/> <location filename="../qml/ApplicationSettings.qml" line="263"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="233"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="263"/>
<source>Generate suggested follow-up questions at the end of responses.</source> <source>Generate suggested follow-up questions at the end of responses.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="244"/> <location filename="../qml/ApplicationSettings.qml" line="274"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="244"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="274"/>
<source>When chatting with LocalDocs</source> <source>When chatting with LocalDocs</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="244"/> <location filename="../qml/ApplicationSettings.qml" line="274"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="244"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="274"/>
<source>Whenever possible</source> <source>Whenever possible</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="244"/> <location filename="../qml/ApplicationSettings.qml" line="274"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="244"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="274"/>
<source>Never</source> <source>Never</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message>
<location filename="../qml/ApplicationSettings.qml" line="256"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="256"/>
<source>Download Path</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ApplicationSettings.qml" line="257"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="257"/>
<source>Where to store local models and the LocalDocs database.</source>
<translation type="unfinished"></translation>
</message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="286"/> <location filename="../qml/ApplicationSettings.qml" line="286"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="286"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="286"/>
<source>Browse</source> <source>Download Path</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="287"/> <location filename="../qml/ApplicationSettings.qml" line="287"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="287"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="287"/>
<source>Where to store local models and the LocalDocs database.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ApplicationSettings.qml" line="316"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="316"/>
<source>Browse</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../qml/ApplicationSettings.qml" line="317"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="317"/>
<source>Choose where to save model files</source> <source>Choose where to save model files</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="298"/> <location filename="../qml/ApplicationSettings.qml" line="328"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="298"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="328"/>
<source>Enable Datalake</source> <source>Enable Datalake</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="299"/> <location filename="../qml/ApplicationSettings.qml" line="329"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="299"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="329"/>
<source>Send chats and feedback to the GPT4All Open-Source Datalake.</source> <source>Send chats and feedback to the GPT4All Open-Source Datalake.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="332"/> <location filename="../qml/ApplicationSettings.qml" line="362"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="332"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="362"/>
<source>Advanced</source> <source>Advanced</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="344"/> <location filename="../qml/ApplicationSettings.qml" line="374"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="344"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="374"/>
<source>CPU Threads</source> <source>CPU Threads</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="345"/> <location filename="../qml/ApplicationSettings.qml" line="375"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="345"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="375"/>
<source>The number of CPU threads used for inference and embedding.</source> <source>The number of CPU threads used for inference and embedding.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="376"/> <location filename="../qml/ApplicationSettings.qml" line="406"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="376"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="406"/>
<source>Save Chat Context</source> <source>Save Chat Context</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="377"/> <location filename="../qml/ApplicationSettings.qml" line="407"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="377"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="407"/>
<source>Save the chat model&apos;s state to disk for faster loading. WARNING: Uses ~2GB per chat.</source> <source>Save the chat model&apos;s state to disk for faster loading. WARNING: Uses ~2GB per chat.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="393"/> <location filename="../qml/ApplicationSettings.qml" line="423"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="393"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="423"/>
<source>Enable Local Server</source> <source>Enable Local Server</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="394"/> <location filename="../qml/ApplicationSettings.qml" line="424"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="394"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="424"/>
<source>Expose an OpenAI-Compatible server to localhost. WARNING: Results in increased resource usage.</source> <source>Expose an OpenAI-Compatible server to localhost. WARNING: Results in increased resource usage.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="410"/> <location filename="../qml/ApplicationSettings.qml" line="440"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="410"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="440"/>
<source>API Server Port</source> <source>API Server Port</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="411"/> <location filename="../qml/ApplicationSettings.qml" line="441"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="411"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="441"/>
<source>The port to use for the local server. Requires restart.</source> <source>The port to use for the local server. Requires restart.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="463"/> <location filename="../qml/ApplicationSettings.qml" line="493"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="463"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="493"/>
<source>Check For Updates</source> <source>Check For Updates</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="464"/> <location filename="../qml/ApplicationSettings.qml" line="494"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="464"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="494"/>
<source>Manually check for an update to GPT4All.</source> <source>Manually check for an update to GPT4All.</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
<message> <message>
<location filename="../qml/ApplicationSettings.qml" line="473"/> <location filename="../qml/ApplicationSettings.qml" line="503"/>
<location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="473"/> <location filename="../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line="503"/>
<source>Updates</source> <source>Updates</source>
<translation type="unfinished"></translation> <translation type="unfinished"></translation>
</message> </message>
@ -1178,6 +1190,7 @@ model to get started</source>
<source>%n file(s)</source> <source>%n file(s)</source>
<translation type="unfinished"> <translation type="unfinished">
<numerusform></numerusform> <numerusform></numerusform>
<numerusform></numerusform>
</translation> </translation>
</message> </message>
<message numerus="yes"> <message numerus="yes">
@ -1186,6 +1199,7 @@ model to get started</source>
<source>%n word(s)</source> <source>%n word(s)</source>
<translation type="unfinished"> <translation type="unfinished">
<numerusform></numerusform> <numerusform></numerusform>
<numerusform></numerusform>
</translation> </translation>
</message> </message>
<message> <message>
@ -1593,6 +1607,7 @@ model to get started</source>
<source>%n file(s)</source> <source>%n file(s)</source>
<translation type="unfinished"> <translation type="unfinished">
<numerusform></numerusform> <numerusform></numerusform>
<numerusform></numerusform>
</translation> </translation>
</message> </message>
<message numerus="yes"> <message numerus="yes">
@ -1601,6 +1616,7 @@ model to get started</source>
<source>%n word(s)</source> <source>%n word(s)</source>
<translation type="unfinished"> <translation type="unfinished">
<numerusform></numerusform> <numerusform></numerusform>
<numerusform></numerusform>
</translation> </translation>
</message> </message>
<message> <message>

The diffs for the two new translation files, translations/gpt4all_es_MX.ts and translations/gpt4all_zh_CN.ts, are suppressed because they are too large.