diff --git a/README.md b/README.md
index 4c86191e..6363b23e 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 By using this repository or any code related to it, you agree to the [legal notice](./LEGAL_NOTICE.md). The author is not responsible for any copies, forks, reuploads made by other users, or anything else related to gpt4free. This is the author's only account and repository. To prevent impersonation or irresponsible actions, please comply with the GNU GPL license this Repository uses.

-- latest pypi version: [`0.1.6.7`](https://pypi.org/project/g4f/0.1.6.7)
+- latest pypi version: [`0.1.6.8`](https://pypi.org/project/g4f/0.1.6.8)
 ```sh
 pip install -U g4f
 ```

diff --git a/g4f/__init__.py b/g4f/__init__.py
index 96b620d9..58cfc314 100644
--- a/g4f/__init__.py
+++ b/g4f/__init__.py
@@ -5,7 +5,7 @@
 from .Provider import BaseProvider, RetryProvider
 from .typing import Messages, CreateResult, Union, List
 from .debug import logging

-version = '0.1.6.7'
+version = '0.1.6.8'
 version_check = True

 def check_pypi_version() -> None:
diff --git a/g4f/gui/client/js/chat.v2.js b/g4f/gui/client/js/chat.v2.js
index 03c3a87a..5c1cbca8 100644
--- a/g4f/gui/client/js/chat.v2.js
+++ b/g4f/gui/client/js/chat.v2.js
@@ -597,15 +597,21 @@ observer.observe(message_input, { attributes: true });

 const load_models = async () => {
-    response = await fetch('/backend-api/v2/models')
-    models = await response.json()
+    models = localStorage.getItem('_models')

-    var MODELS_SELECT = document.getElementById('model');
+    if (models === null) {
+        response = await fetch('/backend-api/v2/models')
+        models = await response.json()
+        localStorage.setItem('_models', JSON.stringify(models))
+
+    } else {
+        models = JSON.parse(models)
+    }
+
+    let MODELS_SELECT = document.getElementById('model');

     for (model of models) {
-
-        // Create new option elements
-        var model_info = document.createElement('option');
+        let model_info = document.createElement('option');

         model_info.value = model
         model_info.text = model
diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py
index 714609f6..e2c445a5 100644
--- a/g4f/gui/server/backend.py
+++ b/g4f/gui/server/backend.py
@@ -27,7 +27,6 @@ class Backend_Api:

     def models(self):
         models = g4f._all_models
-        models.remove('oasst-sft-4-pythia-12b-epoch-3.5')

         return models

diff --git a/g4f/models.py b/g4f/models.py
index b34297f5..af4958cc 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -168,12 +168,12 @@ code_davinci_002 = Model(
 gpt_35_turbo_16k = Model(
     name          = 'gpt-3.5-turbo-16k',
     base_provider = 'openai',
-    best_provider = gpt_35_turbo.best_provider)
+    best_provider = gpt_35_long.best_provider)

 gpt_35_turbo_16k_0613 = Model(
     name          = 'gpt-3.5-turbo-16k-0613',
     base_provider = 'openai',
-    best_provider = gpt_35_turbo.best_provider
+    best_provider = gpt_35_long.best_provider
 )

 gpt_35_turbo_0613 = Model(
diff --git a/setup.py b/setup.py
index 56a139dd..e385df99 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ with codecs.open(os.path.join(here, "README.md"), encoding="utf-8") as fh:
 with open("requirements.txt") as f:
     required = f.read().splitlines()

-VERSION = "0.1.6.7"
+VERSION = "0.1.6.8"
 DESCRIPTION = (
     "The official gpt4free repository | various collection of powerful language models"
 )
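For reference, the `chat.v2.js` hunk above changes `load_models` so the model list fetched from `/backend-api/v2/models` is cached in `localStorage` under the `_models` key instead of being re-fetched on every page load. Below is a minimal standalone sketch of that caching pattern; the endpoint and the `_models` key come from the diff, while `clearModelCache` is an illustrative helper that is not part of the change.

```js
// Sketch of the localStorage caching pattern introduced in chat.v2.js.
// '_models' and '/backend-api/v2/models' come from the diff above;
// clearModelCache is an illustrative addition, not part of the change.
const MODELS_KEY = '_models';

const loadModels = async () => {
    const cached = localStorage.getItem(MODELS_KEY);
    if (cached !== null) {
        // Cache hit: reuse the model list stored by a previous page load.
        return JSON.parse(cached);
    }
    // Cache miss: fetch once, then persist for later visits.
    const response = await fetch('/backend-api/v2/models');
    const models = await response.json();
    localStorage.setItem(MODELS_KEY, JSON.stringify(models));
    return models;
};

// Illustrative helper (not in the diff): clear the cache so newly added models appear.
const clearModelCache = () => localStorage.removeItem(MODELS_KEY);
```

The trade-off of this change is that the model dropdown is populated from the cached list until the `_models` entry is removed from localStorage, so models added to the backend later will not appear automatically.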