gpt4free/g4f/Provider/ChatGptEs.py
kqlio67 8d5d522c4e
feat(g4f): Major provider updates and new model support (#2437)
* refactor(g4f/Provider/Airforce.py): Enhance Airforce provider with dynamic model fetching

* refactor(g4f/Provider/Blackbox.py): Enhance Blackbox AI provider configuration and streamline code

* feat(g4f/Provider/RobocodersAPI.py): Add RobocodersAPI new async chat provider

* refactor(g4f/client/__init__.py): Improve provider handling in async_generate method

* refactor(g4f/models.py): Update provider configurations for multiple models

* refactor(g4f/Provider/Blackbox.py): Streamline model configuration and improve response handling

* feat(g4f/Provider/DDG.py): Enhance model support and improve conversation handling

* refactor(g4f/Provider/Copilot.py): Enhance Copilot provider with model support

* refactor(g4f/Provider/AmigoChat.py): update models and improve code structure

* chore(g4f/Provider/not_working/AIUncensored.): move AIUncensored to not_working directory

* chore(g4f/Provider/not_working/Allyfy.py): remove Allyfy provider

* Update (g4f/Provider/not_working/AIUncensored.py g4f/Provider/not_working/__init__.py)

* refactor(g4f/Provider/ChatGptEs.py): Implement format_prompt for message handling

* refactor(g4f/Provider/Blackbox.py): Update message formatting and improve code structure

* refactor(g4f/Provider/LLMPlayground.py): Enhance text generation and error handling

* refactor(g4f/Provider/needs_auth/PollinationsAI.py): move PollinationsAI to needs_auth directory

* refactor(g4f/Provider/Liaobots.py): Update Liaobots provider models and aliases

* feat(g4f/Provider/DeepInfraChat.py): Add new DeepInfra models and aliases

* Update (g4f/Provider/__init__.py)

* Update (g4f/models.py)

* g4f/models.py

* Update g4f/models.py

* Update g4f/Provider/LLMPlayground.py

* Update (g4f/models.py g4f/Provider/Airforce.py g4f/Provider/__init__.py g4f/Provider/LLMPlayground.py)

* Update g4f/Provider/__init__.py

* Update (g4f/Provider/Airforce.py)

---------

Co-authored-by: kqlio67 <kqlio67@users.noreply.github.com>
2024-11-28 17:50:24 +01:00

from __future__ import annotations
from aiohttp import ClientSession
import os
import json
import re
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt


class ChatGptEs(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://chatgpt.es"
    api_endpoint = "https://chatgpt.es/wp-admin/admin-ajax.php"

    working = True
    supports_stream = True
    supports_system_message = True
    supports_message_history = True

    default_model = 'gpt-4o'
    models = ['gpt-4o', 'gpt-4o-mini', 'chatgpt-4o-latest']
    model_aliases = {
        "gpt-4o": "chatgpt-4o-latest",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        if model in cls.models:
            return model
        elif model in cls.model_aliases:
            return cls.model_aliases[model]
        else:
            return cls.default_model

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)

        headers = {
            "authority": "chatgpt.es",
            "accept": "application/json",
            "origin": cls.url,
            "referer": f"{cls.url}/chat",
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
        }

        async with ClientSession(headers=headers) as session:
            # Scrape the WordPress nonce and post id that the chat endpoint requires.
            initial_response = await session.get(cls.url, proxy=proxy)
            initial_text = await initial_response.text()
            nonce_ = re.findall(r'data-nonce="(.+?)"', initial_text)[0]
            post_id = re.findall(r'data-post-id="(.+?)"', initial_text)[0]

            formatted_prompt = format_prompt(messages)

            # Rebuild the prior turns as the plain-text history the endpoint expects,
            # prefixed with a system-style instruction.
            conversation_history = [
                "Human: You are a helpful AI assistant. Please respond in the same language that the user uses in their message. Provide accurate, relevant and helpful information while maintaining a friendly and professional tone. If you're not sure about something, please acknowledge that and provide the best information you can while noting any uncertainties. Focus on being helpful while respecting the user's choice of language."
            ]
            for message in messages[:-1]:
                if message['role'] == "user":
                    conversation_history.append(f"Human: {message['content']}")
                else:
                    conversation_history.append(f"AI: {message['content']}")

            payload = {
                '_wpnonce': nonce_,
                'post_id': post_id,
                'url': cls.url,
                'action': 'wpaicg_chat_shortcode_message',
                'message': formatted_prompt,
                'bot_id': '0',
                'chatbot_identity': 'shortcode',
                'wpaicg_chat_client_id': os.urandom(5).hex(),
                'wpaicg_chat_history': json.dumps(conversation_history)
            }

            async with session.post(cls.api_endpoint, headers=headers, data=payload, proxy=proxy) as response:
                response.raise_for_status()
                result = await response.json()
                yield result['data']
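

# Minimal usage sketch (added for illustration, not part of the upstream provider):
# it streams a reply by iterating the async generator directly, assuming network
# access to chatgpt.es. In normal use, g4f's client layer drives this provider
# instead of calling it by hand.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        # A single-turn conversation; the last message becomes the prompt.
        messages = [{"role": "user", "content": "Hola, ¿en qué puedes ayudarme?"}]
        async for chunk in ChatGptEs.create_async_generator(model="gpt-4o", messages=messages):
            print(chunk)

    asyncio.run(_demo())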