Mirror of https://github.com/xtekky/gpt4free.git (synced 2024-11-23)
Update Liaobots.py (#1678)

* Update Liaobots.py: added the Claude 3 models and updated the existing model definitions

parent 2a3880ac2f
commit d6d75c28c8
@@ -10,18 +10,6 @@ from .helper import get_connector
 from ..requests import raise_for_status
 
 models = {
-    "gpt-4": {
-        "id": "gpt-4",
-        "name": "GPT-4",
-        "maxLength": 24000,
-        "tokenLimit": 8000,
-    },
-    "gpt-4-0613": {
-        "id": "gpt-4-0613",
-        "name": "GPT-4",
-        "maxLength": 32000,
-        "tokenLimit": 8000,
-    },
     "gpt-3.5-turbo": {
         "id": "gpt-3.5-turbo",
         "name": "GPT-3.5-Turbo",
@@ -29,14 +17,8 @@ models = {
         "tokenLimit": 14000,
         "context": "16K",
     },
-    "gpt-3.5-turbo-16k": {
-        "id": "gpt-3.5-turbo-16k",
-        "name": "GPT-3.5-16k",
-        "maxLength": 48000,
-        "tokenLimit": 16000,
-    },
-    "gpt-4-1106-preview": {
-        "id": "gpt-4-1106-preview",
+    "gpt-4-turbo-preview": {
+        "id": "gpt-4-turbo-preview",
         "name": "GPT-4-Turbo",
         "maxLength": 260000,
         "tokenLimit": 126000,
@@ -49,6 +31,13 @@ models = {
         "tokenLimit": 31000,
         "context": "32K",
     },
+    "gpt-4-0613": {
+        "id": "gpt-4-0613",
+        "name": "GPT-4-0613",
+        "maxLength": 60000,
+        "tokenLimit": 15000,
+        "context": "16K",
+    },
     "gemini-pro": {
         "id": "gemini-pro",
         "name": "Gemini-Pro",
@@ -56,13 +45,34 @@ models = {
         "tokenLimit": 30000,
         "context": "32K",
     },
-    "claude-2": {
-        "id": "claude-2",
-        "name": "Claude-2-200k",
+    "claude-3-opus-20240229": {
+        "id": "claude-3-opus-20240229",
+        "name": "Claude-3-Opus",
         "maxLength": 800000,
         "tokenLimit": 200000,
         "context": "200K",
     },
+    "claude-3-sonnet-20240229": {
+        "id": "claude-3-sonnet-20240229",
+        "name": "Claude-3-Sonnet",
+        "maxLength": 800000,
+        "tokenLimit": 200000,
+        "context": "200K",
+    },
+    "claude-2.1": {
+        "id": "claude-2.1",
+        "name": "Claude-2.1-200k",
+        "maxLength": 800000,
+        "tokenLimit": 200000,
+        "context": "200K",
+    },
+    "claude-2.0": {
+        "id": "claude-2.0",
+        "name": "Claude-2.0-100k",
+        "maxLength": 400000,
+        "tokenLimit": 100000,
+        "context": "100K",
+    },
     "claude-instant-1": {
         "id": "claude-instant-1",
         "name": "Claude-instant-1",
@@ -72,6 +82,7 @@ models = {
     }
 }
 
+
 class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://liaobots.site"
     working = True
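For orientation, a minimal usage sketch (not part of this commit): it assumes g4f's documented ChatCompletion.create entry point and the Liaobots provider class shown above; the model id and the tokenLimit value come from the updated models dict, and everything else is illustrative.

import g4f
from g4f.Provider import Liaobots
from g4f.Provider.Liaobots import models  # the dict updated in this commit

# One of the Claude 3 ids added by this commit.
model_id = "claude-3-opus-20240229"
print(models[model_id]["tokenLimit"])  # 200000, per the entry above

# Sketch: route a request through the Liaobots provider (assumes the
# g4f.ChatCompletion.create API available at the time of this commit).
response = g4f.ChatCompletion.create(
    model=model_id,
    provider=Liaobots,
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)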