Mirror of https://github.com/xtekky/gpt4free.git, synced 2024-12-23 19:11:48 +03:00.
Restored provider (g4f/Provider/nexra/NexraEmi.py)
This commit is contained in:
parent
f939bbfa1a
commit
c446948488
@ -1,66 +1,62 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from aiohttp import ClientSession
|
|
||||||
import json
|
import json
|
||||||
|
import requests
|
||||||
from ...typing import AsyncResult, Messages
|
from ...typing import CreateResult, Messages
|
||||||
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
|
from ..base_provider import ProviderModelMixin, AbstractProvider
|
||||||
from ...image import ImageResponse
|
from ...image import ImageResponse
|
||||||
|
|
||||||
|
class NexraEmi(AbstractProvider, ProviderModelMixin):
|
||||||
class NexraEmi(AsyncGeneratorProvider, ProviderModelMixin):
|
|
||||||
label = "Nexra Emi"
|
label = "Nexra Emi"
|
||||||
url = "https://nexra.aryahcr.cc/documentation/emi/en"
|
url = "https://nexra.aryahcr.cc/documentation/emi/en"
|
||||||
api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
|
api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
|
||||||
working = False
|
working = True
|
||||||
|
|
||||||
default_model = 'emi'
|
default_model = "emi"
|
||||||
models = [default_model]
|
models = [default_model]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_model(cls, model: str) -> str:
|
def get_model(cls, model: str) -> str:
|
||||||
return cls.default_model
|
return cls.default_model
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def create_async_generator(
|
def create_completion(
|
||||||
cls,
|
cls,
|
||||||
model: str,
|
model: str,
|
||||||
messages: Messages,
|
messages: Messages,
|
||||||
proxy: str = None,
|
|
||||||
response: str = "url", # base64 or url
|
response: str = "url", # base64 or url
|
||||||
**kwargs
|
**kwargs
|
||||||
) -> AsyncResult:
|
) -> CreateResult:
|
||||||
# Retrieve the correct model to use
|
|
||||||
model = cls.get_model(model)
|
model = cls.get_model(model)
|
||||||
|
|
||||||
# Format the prompt from the messages
|
|
||||||
prompt = messages[0]['content']
|
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
"Content-Type": "application/json"
|
'Content-Type': 'application/json'
|
||||||
}
|
}
|
||||||
payload = {
|
|
||||||
"prompt": prompt,
|
data = {
|
||||||
|
"prompt": messages[-1]["content"],
|
||||||
"model": model,
|
"model": model,
|
||||||
"response": response
|
"response": response
|
||||||
}
|
}
|
||||||
|
|
||||||
|
response = requests.post(cls.api_endpoint, headers=headers, json=data)
|
||||||
|
|
||||||
async with ClientSession(headers=headers) as session:
|
result = cls.process_response(response)
|
||||||
async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
|
yield result
|
||||||
response.raise_for_status()
|
|
||||||
text_data = await response.text()
|
|
||||||
|
|
||||||
try:
|
@classmethod
|
||||||
# Parse the JSON response
|
def process_response(cls, response):
|
||||||
json_start = text_data.find('{')
|
if response.status_code == 200:
|
||||||
json_data = text_data[json_start:]
|
try:
|
||||||
data = json.loads(json_data)
|
content = response.text.strip()
|
||||||
|
content = content.lstrip('_')
|
||||||
# Check if the response contains images
|
data = json.loads(content)
|
||||||
if 'images' in data and len(data['images']) > 0:
|
if data.get('status') and data.get('images'):
|
||||||
image_url = data['images'][0]
|
image_url = data['images'][0]
|
||||||
yield ImageResponse(image_url, prompt)
|
return ImageResponse(images=[image_url], alt="Generated Image")
|
||||||
else:
|
else:
|
||||||
yield ImageResponse("No images found in the response.", prompt)
|
return "Error: No image URL found in the response"
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError as e:
|
||||||
yield ImageResponse("Failed to parse JSON. Response might not be in JSON format.", prompt)
|
return f"Error: Unable to decode JSON response. Details: {str(e)}"
|
||||||
|
else:
|
||||||
|
return f"Error: {response.status_code}, Response: {response.text}"
|
||||||
|
Loading…
Reference in New Issue
Block a user