Add labels to provider list

parent a107d3f2ff
commit f724c07f1b
@@ -6,7 +6,7 @@ import uuid
 import time
 import asyncio
 from urllib import parse
-from datetime import datetime
+from datetime import datetime, date
 from aiohttp import ClientSession, ClientTimeout, BaseConnector, WSMsgType

 from ..typing import AsyncResult, Messages, ImageType, Cookies
@@ -32,6 +32,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
     """
     Bing provider for generating responses using the Bing API.
     """
+    label = "Microsoft Copilot in Bing"
     url = "https://bing.com/chat"
     working = True
     supports_message_history = True
@@ -47,7 +48,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
         proxy: str = None,
         timeout: int = 900,
         api_key: str = None,
-        cookies: Cookies = {},
+        cookies: Cookies = None,
         connector: BaseConnector = None,
         tone: str = None,
         image: ImageType = None,
@@ -69,8 +70,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
         :return: An asynchronous result object.
         """
         prompt = messages[-1]["content"]
-        if api_key is not None:
-            cookies["_U"] = api_key
         if context is None:
             context = create_context(messages[:-1]) if len(messages) > 1 else None
         if tone is None:
@@ -79,7 +78,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
         gpt4_turbo = True if model.startswith("gpt-4-turbo") else False

         return stream_generate(
-            prompt, tone, image, context, cookies,
+            prompt, tone, image, context, api_key, cookies,
             get_connector(connector, proxy, True),
             proxy, web_search, gpt4_turbo, timeout,
             **kwargs
@@ -110,11 +109,15 @@ def get_default_cookies():
         'SUID' : '',
         'SRCHUSR' : '',
         'SRCHHPGUSR' : f'HV={int(time.time())}',
+        'BCP' : 'AD=1&AL=1&SM=1',
+        '_Rwho' : f'u=d&ts={date.today().isoformat()}',
     }

-def create_headers(cookies: Cookies = None) -> dict:
+def create_headers(cookies: Cookies = None, api_key: str = None) -> dict:
     if cookies is None:
         cookies = get_default_cookies()
+    if api_key is not None:
+        cookies["_U"] = api_key
     headers = Defaults.headers.copy()
     headers["cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
     headers["x-forwarded-for"] = get_ip_address()
@@ -364,6 +367,7 @@ async def stream_generate(
         image: ImageType = None,
         context: str = None,
         cookies: dict = None,
+        api_key: str = None,
         connector: BaseConnector = None,
         proxy: str = None,
         web_search: bool = False,
@@ -389,7 +393,7 @@ async def stream_generate(
     :param timeout: Timeout for the request.
     :return: An asynchronous generator yielding responses.
     """
-    headers = create_headers(cookies)
+    headers = create_headers(cookies, api_key)
     new_conversation = conversation is None
     max_retries = (5 if new_conversation else 0) if max_retries is None else max_retries
     async with ClientSession(
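With this change, api_key is threaded from create_async_generator through stream_generate into create_headers, which sets it as the Bing "_U" auth cookie, so callers no longer need to pre-build a cookie dict. A minimal usage sketch under that assumption; the "..." placeholder stands in for a real "_U" cookie value and is not part of this commit:

import asyncio
from g4f.Provider import Bing

async def main():
    # "..." is a placeholder for a real Bing "_U" cookie value.
    async for chunk in Bing.create_async_generator(
        model="gpt-4",
        messages=[{"role": "user", "content": "Hello"}],
        api_key="...",  # forwarded through stream_generate() into create_headers()
    ):
        print(chunk, end="")

asyncio.run(main())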
@@ -12,6 +12,7 @@ from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .bing.create_images import create_images, create_session, get_cookies_from_browser

 class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "Microsoft Designer"
     url = "https://www.bing.com/images/create"
     working = True
@@ -5,6 +5,7 @@ from ..typing import AsyncResult, Messages
 from .needs_auth.Openai import Openai

 class DeepInfra(Openai):
+    label = "DeepInfra"
     url = "https://deepinfra.com"
     working = True
     needs_auth = False
@@ -12,7 +12,7 @@ from .helper import format_prompt, get_connector
 class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co/chat"
     working = True
-    default_model = "meta-llama/Llama-2-70b-chat-hf"
+    default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     models = [
         "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "google/gemma-7b-it",
@@ -12,6 +12,7 @@ from ..providers.base_provider import AbstractProvider, ProviderModelMixin
 from ..errors import MissingRequirementsError

 class Local(AbstractProvider, ProviderModelMixin):
+    label = "gpt4all"
     working = True
     supports_message_history = True
     supports_system_message = True
@@ -4,6 +4,7 @@ from .Openai import Openai
 from ...typing import AsyncResult, Messages

 class Groq(Openai):
+    label = "Groq"
     url = "https://console.groq.com/playground"
     working = True
     default_model = "mixtral-8x7b-32768"
@@ -6,6 +6,7 @@ from .Openai import Openai
 from ...typing import AsyncResult, Messages

 class OpenRouter(Openai):
+    label = "OpenRouter"
     url = "https://openrouter.ai"
     working = True
     default_model = "openrouter/auto"
@@ -9,6 +9,7 @@ from ...requests import StreamSession, raise_for_status
 from ...errors import MissingAuthError, ResponseError

 class Openai(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "OpenAI API"
     url = "https://openai.com"
     working = True
     needs_auth = True
@@ -35,6 +35,7 @@ from ... import debug
 class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
     """A class for creating and managing conversations with OpenAI chat service"""

+    label = "OpenAI ChatGPT"
     url = "https://chat.openai.com"
     working = True
     supports_gpt_35_turbo = True
@@ -32,6 +32,7 @@ models = {
 }

 class Theb(AbstractProvider):
+    label = "TheB.AI"
     url = "https://beta.theb.ai"
     working = True
     supports_gpt_35_turbo = True
@@ -28,6 +28,7 @@ models = {
 }

 class ThebApi(Openai):
+    label = "TheB.AI API"
     url = "https://theb.ai"
     working = True
     needs_auth = True
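Taken together, the provider hunks above follow one convention: a provider class may expose an optional label attribute holding a human-readable display name. A minimal sketch of that convention; ExampleProvider and its values are illustrative only and not part of this commit:

class ExampleProvider:
    # Display name shown in the GUI provider list; when a provider
    # defines no label, the API falls back to the class name.
    label = "Example Provider"
    url = "https://example.com"
    working = True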
@@ -1026,9 +1026,10 @@ async function on_api() {
    });

    providers = await api("providers")
-    providers.forEach((provider) => {
+    Object.entries(providers).forEach(([provider, label]) => {
        let option = document.createElement("option");
-        option.value = option.text = provider;
+        option.value = provider;
+        option.text = label;
        providerSelect.appendChild(option);
    })
@@ -43,7 +43,16 @@ class Api():
        """
        Return a list of all working providers.
        """
-       return [provider.__name__ for provider in __providers__ if provider.working]
+       return {
+           provider.__name__: (provider.label
+               if hasattr(provider, "label")
+               else provider.__name__) +
+               (" (WebDriver)"
+               if "webdriver" in provider.get_parameters()
+               else "")
+           for provider in __providers__
+           if provider.working
+       }

    def get_version(self):
        """
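With the change above, the providers endpoint returns a name-to-label mapping instead of a bare list of class names, and the chat.v1.js hunk fills the provider dropdown from that mapping. A sketch of the kind of dict the new comprehension produces, using labels taken from this commit; the exact contents depend on which providers are working, and the suffixed entry is hypothetical:

providers = {
    "Bing": "Microsoft Copilot in Bing",
    "BingCreateImages": "Microsoft Designer",
    "DeepInfra": "DeepInfra",
    "Local": "gpt4all",
    "OpenRouter": "OpenRouter",
    "Openai": "OpenAI API",
    "Theb": "TheB.AI",
    "ThebApi": "TheB.AI API",
    # Providers that expose a "webdriver" parameter get a suffix, e.g.:
    # "SomeProvider": "SomeProvider (WebDriver)",
}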