Fix load model list in AirforceChat provider

Add Microsoft Copilot provider
Show image support in the model list of the GUI
Heiner Lohaus 2024-11-18 15:41:45 +01:00
parent 56beb19fef
commit f1ef23285a
11 changed files with 156 additions and 54 deletions

View File

@@ -21,7 +21,7 @@
> <sup><strong>Stats:</strong></sup> [![Downloads](https://static.pepy.tech/badge/g4f)](https://pepy.tech/project/g4f) [![Downloads](https://static.pepy.tech/badge/g4f/month)](https://pepy.tech/project/g4f)
```sh
pip install -U g4f
pip install -U g4f[all]
```
```sh

View File

@@ -10,6 +10,7 @@ import aiohttp
from ..typing import AsyncResult, Messages, ImageType
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import ImageResponse, to_data_uri
from .helper import get_random_string
class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
label = "Blackbox AI"
@@ -22,11 +23,13 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
_last_validated_value = None
default_model = 'blackboxai'
default_vision_model = default_model
default_image_model = 'generate_image'
image_models = [default_image_model, 'repomap']
text_models = [default_model, 'gpt-4o', 'gemini-pro', 'claude-sonnet-3.5', 'blackboxai-pro']
vision_models = [default_model, 'gpt-4o', 'gemini-pro', 'blackboxai-pro']
agentMode = {
'Image Generation': {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
default_image_model: {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
}
trendingAgentMode = {
"gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
@@ -111,11 +114,6 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
return cls._last_validated_value
@staticmethod
def generate_id(length=7):
characters = string.ascii_letters + string.digits
return ''.join(random.choice(characters) for _ in range(length))
@classmethod
def add_prefix_to_messages(cls, messages: Messages, model: str) -> Messages:
prefix = cls.model_prefixes.get(model, "")
@@ -143,12 +141,12 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
**kwargs
) -> AsyncResult:
model = cls.get_model(model)
message_id = cls.generate_id()
messages_with_prefix = cls.add_prefix_to_messages(messages, model)
message_id = get_random_string(7)
messages = cls.add_prefix_to_messages(messages, model)
validated_value = await cls.fetch_validated()
if image is not None:
messages_with_prefix[-1]['data'] = {
messages[-1]['data'] = {
'fileText': '',
'imageBase64': to_data_uri(image),
'title': image_name
@@ -171,9 +169,9 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36'
}
data = {
"messages": messages_with_prefix,
"messages": messages,
"id": message_id,
"previewToken": None,
"userId": None,
@@ -200,27 +198,24 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
async with ClientSession(headers=headers) as session:
async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
response_text = await response.text()
if model in cls.image_models:
image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', response_text)
if image_matches:
image_url = image_matches[0]
image_response = ImageResponse(images=[image_url], alt="Generated Image")
yield image_response
return
async for chunk in response.content.iter_any():
text_chunk = chunk.decode(errors="ignore")
if model in cls.image_models:
image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', text_chunk)
if image_matches:
image_url = image_matches[0]
image_response = ImageResponse(images=[image_url])
yield image_response
continue
response_text = re.sub(r'Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai', '', response_text, flags=re.DOTALL)
json_match = re.search(r'\$~~~\$(.*?)\$~~~\$', response_text, re.DOTALL)
if json_match:
search_results = json.loads(json_match.group(1))
answer = response_text.split('$~~~$')[-1].strip()
formatted_response = f"{answer}\n\n**Source:**"
for i, result in enumerate(search_results, 1):
formatted_response += f"\n{i}. {result['title']}: {result['link']}"
yield formatted_response
else:
yield response_text.strip()
text_chunk = re.sub(r'Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai', '', text_chunk, flags=re.DOTALL)
json_match = re.search(r'\$~~~\$(.*?)\$~~~\$', text_chunk, re.DOTALL)
if json_match:
search_results = json.loads(json_match.group(1))
answer = text_chunk.split('$~~~$')[-1].strip()
formatted_response = f"{answer}\n\n**Source:**"
for i, result in enumerate(search_results, 1):
formatted_response += f"\n{i}. {result['title']}: {result['link']}"
yield formatted_response
else:
yield text_chunk.strip()
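For orientation, the streaming change above means the markdown image link is now searched chunk by chunk instead of once over the full response body. A minimal standalone sketch of that extraction (the chunk string is illustrative, not real provider output):

```python
import re

# Illustrative streamed chunk containing a markdown image link.
chunk = "![Generated Image](https://example.com/generated.png)"

# Same pattern the provider now applies per chunk to pull out the generated image URL.
image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', chunk)
if image_matches:
    print(image_matches[0])  # -> https://example.com/generated.png
```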

g4f/Provider/Copilot.py Normal file
View File

@@ -0,0 +1,87 @@
from __future__ import annotations
import json
from http.cookiejar import CookieJar
try:
from curl_cffi.requests import Session, CurlWsFlag
has_curl_cffi = True
except ImportError:
has_curl_cffi = False
from .base_provider import AbstractProvider, BaseConversation
from .helper import format_prompt
from ..typing import CreateResult, Messages
from ..errors import MissingRequirementsError
from ..requests.raise_for_status import raise_for_status
from .. import debug
class Conversation(BaseConversation):
conversation_id: str
cookie_jar: CookieJar
def __init__(self, conversation_id: str, cookie_jar: CookieJar):
self.conversation_id = conversation_id
self.cookie_jar = cookie_jar
class Copilot(AbstractProvider):
label = "Microsoft Copilot"
url = "https://copilot.microsoft.com"
working = True
supports_stream = True
websocket_url = "wss://copilot.microsoft.com/c/api/chat?api-version=2"
conversation_url = f"{url}/c/api/conversations"
@classmethod
def create_completion(
cls,
model: str,
messages: Messages,
stream: bool = False,
proxy: str = None,
timeout: int = 900,
conversation: Conversation = None,
return_conversation: bool = False,
**kwargs
) -> CreateResult:
if not has_curl_cffi:
raise MissingRequirementsError('Install or update "curl_cffi" package | pip install -U curl_cffi')
cookies = conversation.cookie_jar if conversation is not None else None
with Session(timeout=timeout, proxy=proxy, impersonate="chrome", cookies=cookies) as session:
response = session.get(f"{cls.url}/")
raise_for_status(response)
if conversation is None:
response = session.post(cls.conversation_url)
raise_for_status(response)
conversation_id = response.json().get("id")
if return_conversation:
yield Conversation(conversation_id, session.cookies.jar)
prompt = format_prompt(messages)
if debug.logging:
print(f"Copilot: Created conversation: {conversation_id}")
else:
conversation_id = conversation.conversation_id
prompt = messages[-1]["content"]
if debug.logging:
print(f"Copilot: Use conversation: {conversation_id}")
wss = session.ws_connect(cls.websocket_url)
wss.send(json.dumps({
"event": "send",
"conversationId": conversation_id,
"content": [{
"type": "text",
"text": prompt,
}],
"mode": "chat"
}).encode(), CurlWsFlag.TEXT)
while True:
try:
msg = json.loads(wss.recv()[0])
except:
break
if msg.get("event") == "appendText":
yield msg.get("text")
elif msg.get("event") in ["done", "partCompleted"]:
break
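A rough usage sketch for the new provider, inferred only from the code above: with `return_conversation=True` the generator first yields a `Conversation` that can be passed back to continue the same chat. The import path is an assumption based on the `__init__.py` hunk further down, and `curl_cffi` must be installed:

```python
from g4f.Provider import Copilot
from g4f.Provider.Copilot import Conversation

conversation = None
# First turn: the provider creates a conversation and yields it before the text.
for chunk in Copilot.create_completion(
    model="",  # the model argument is not used by the code above
    messages=[{"role": "user", "content": "Hello, who are you?"}],
    return_conversation=True,
):
    if isinstance(chunk, Conversation):
        conversation = chunk  # keep the conversation id and cookies for the next turn
    else:
        print(chunk, end="")

# Follow-up turn: only the last message is sent, the stored conversation is reused.
for chunk in Copilot.create_completion(
    model="",
    messages=[{"role": "user", "content": "Summarize that in one sentence."}],
    conversation=conversation,
):
    print(chunk, end="")
```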

View File

@@ -4,10 +4,8 @@ from aiohttp import ClientSession
import json
from ..typing import AsyncResult, Messages, ImageType
from ..image import to_data_uri
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
class DeepInfraChat(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://deepinfra.com/chat"
api_endpoint = "https://api.deepinfra.com/v1/openai/chat/completions"

View File

@@ -19,6 +19,7 @@ from .Blackbox import Blackbox
from .ChatGpt import ChatGpt
from .ChatGptEs import ChatGptEs
from .Cloudflare import Cloudflare
from .Copilot import Copilot
from .DarkAI import DarkAI
from .DDG import DDG
from .DeepInfraChat import DeepInfraChat

View File

@@ -50,11 +50,13 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
supports_message_history = True
default_model = 'llama-3.1-70b-chat'
response = requests.get('https://api.airforce/models')
data = response.json()
text_models = [model['id'] for model in data['data']]
models = [*text_models]
@classmethod
def get_models(cls) -> list:
if not cls.models:
response = requests.get('https://api.airforce/models')
data = response.json()
cls.models = [model['id'] for model in data['data']]
return cls.models
model_aliases = {
# openchat
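Both AirforceChat (above) and OpenaiChat (further down) now resolve their model lists lazily: nothing is requested at import time, the first `get_models()` call hits the API, and the result is cached on the class. A standalone sketch of that pattern, including the fallback that OpenaiChat adds (URL and model names are illustrative):

```python
import requests

class LazyModels:
    models: list = []
    fallback_models: list = ["model-a", "model-b"]  # used when the API call fails

    @classmethod
    def get_models(cls) -> list:
        # Fetch once, cache on the class; later calls return the cached list.
        if not cls.models:
            try:
                response = requests.get("https://example.com/v1/models", timeout=10)
                response.raise_for_status()
                cls.models = [model["id"] for model in response.json()["data"]]
            except Exception:
                cls.models = cls.fallback_models
        return cls.models

print(LazyModels.get_models())
```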

View File

@@ -6,6 +6,7 @@ import uuid
import json
import base64
import time
import requests
from aiohttp import ClientWebSocketResponse
from copy import copy
@@ -62,13 +63,26 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
supports_system_message = True
default_model = "auto"
default_vision_model = "gpt-4o"
models = ["auto", "gpt-4o-mini", "gpt-4o", "gpt-4", "gpt-4-gizmo"]
fallback_models = ["auto", "gpt-4", "gpt-4o", "gpt-4o-mini", "gpt-4o-canmore", "o1-preview", "o1-mini"]
vision_models = fallback_models
_api_key: str = None
_headers: dict = None
_cookies: Cookies = None
_expires: int = None
@classmethod
def get_models(cls):
if not cls.models:
try:
response = requests.get(f"{cls.url}/backend-anon/models")
response.raise_for_status()
data = response.json()
cls.models = [model.get("slug") for model in data.get("models")]
except Exception:
cls.models = cls.fallback_models
return cls.models
@classmethod
async def create(
cls,

View File

@@ -245,6 +245,7 @@
<select name="provider" id="provider">
<option value="">Provider: Auto</option>
<option value="OpenaiChat">OpenAI ChatGPT</option>
<option value="Copilot">Microsoft Copilot</option>
<option value="ChatGpt">ChatGpt</option>
<option value="Gemini">Gemini</option>
<option value="MetaAI">Meta AI</option>

View File

@@ -1367,7 +1367,8 @@ async function load_provider_models(providerIndex=null) {
modelProvider.classList.remove("hidden");
models.forEach((model) => {
let option = document.createElement('option');
option.value = option.text = model.model;
option.value = model.model;
option.text = `${model.model}${model.image ? " (Image Generation)" : ""}${model.vision ? " (Image Upload)" : ""}`;
option.selected = model.default;
modelProvider.appendChild(option);
});
@@ -1381,7 +1382,7 @@ providerSelect.addEventListener("change", () => load_provider_models());
function save_storage() {
let filename = `chat ${new Date().toLocaleString()}.json`.replaceAll(":", "-");
let data = {"options": {"g4f": ""}};
for (let i = 0; i < appStorage.length; i++){
let key = appStorage.key(i);
let item = appStorage.getItem(key);
if (key.startsWith("conversation:")) {

View File

@@ -42,7 +42,12 @@ class Api:
provider: ProviderType = __map__[provider]
if issubclass(provider, ProviderModelMixin):
return [
{"model": model, "default": model == provider.default_model}
{
"model": model,
"default": model == provider.default_model,
"vision": getattr(provider, "default_vision_model", None) == model or model in getattr(provider, "vision_models", []),
"image": model in getattr(provider, "image_models", []),
}
for model in provider.get_models()
]
return []
@@ -65,7 +70,7 @@ class Api:
"url": parent.url,
"label": parent.label if hasattr(parent, "label") else None,
"image_model": model,
"vision_model": parent.default_vision_model if hasattr(parent, "default_vision_model") else None
"vision_model": getattr(parent, "default_vision_model", None)
})
index.append(parent.__name__)
elif hasattr(provider, "default_vision_model") and provider.__name__ not in index:
@@ -82,13 +87,11 @@ class Api:
@staticmethod
def get_providers() -> list[str]:
return {
provider.__name__: (
provider.label if hasattr(provider, "label") else provider.__name__
) + (
" (WebDriver)" if "webdriver" in provider.get_parameters() else ""
) + (
" (Auth)" if provider.needs_auth else ""
)
provider.__name__: (provider.label if hasattr(provider, "label") else provider.__name__)
+ (" (Image Generation)" if hasattr(provider, "image_models") else "")
+ (" (Image Upload)" if getattr(provider, "default_vision_model", None) else "")
+ (" (WebDriver)" if "webdriver" in provider.get_parameters() else "")
+ (" (Auth)" if provider.needs_auth else "")
for provider in __providers__
if provider.working
}
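For reference, a sketch of what the extended `Api.get_models` output looks like to the GUI after this change; the entries below are illustrative, not taken from a real provider:

```python
# Illustrative return value of Api.get_models("SomeProvider"):
models = [
    {"model": "gpt-4o", "default": True,  "vision": True,  "image": False},
    {"model": "flux",   "default": False, "vision": False, "image": True},
]

# chat.v1.js builds the <select> option labels from these flags:
for entry in models:
    label = entry["model"]
    label += " (Image Generation)" if entry["image"] else ""
    label += " (Image Upload)" if entry["vision"] else ""
    print(label)  # e.g. "gpt-4o (Image Upload)", "flux (Image Generation)"
```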

View File

@@ -109,7 +109,7 @@ def get_args_from_browser(
def get_session_from_browser(url: str, webdriver: WebDriver = None, proxy: str = None, timeout: int = 120) -> Session:
if not has_curl_cffi:
raise MissingRequirementsError('Install "curl_cffi" package')
raise MissingRequirementsError('Install "curl_cffi" package | pip install -U curl_cffi')
args = get_args_from_browser(url, webdriver, proxy, timeout)
return Session(
**args,