2024-09-29 23:38:25 +03:00
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
2024-10-11 09:33:30 +03:00
|
|
|
|
import json
|
2024-10-21 21:52:27 +03:00
|
|
|
|
import requests
|
2024-09-29 23:38:25 +03:00
|
|
|
|
|
2024-10-21 21:52:27 +03:00
|
|
|
|
from ...typing import CreateResult, Messages
|
|
|
|
|
from ..base_provider import ProviderModelMixin, AbstractProvider
|
|
|
|
|
from ..helper import format_prompt
|
2024-09-29 23:38:25 +03:00
|
|
|
|
|
2024-10-21 21:52:27 +03:00
|
|
|
|
class NexraGeminiPro(AbstractProvider, ProviderModelMixin):
    """Provider for Nexra's hosted Gemini PRO chat-completion endpoint."""

    label = "Nexra Gemini PRO"
    url = "https://nexra.aryahcr.cc/documentation/gemini-pro/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
    working = True
    supports_stream = True

    default_model = 'gemini-pro'
    models = [default_model]

    @classmethod
    def get_model(cls, model: str) -> str:
        """Resolve any requested model name to the single supported model.

        Only one model is exposed by this provider, so the argument is
        intentionally ignored.
        """
        return cls.default_model

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: Messages,
        stream: bool,
        markdown: bool = False,
        **kwargs
    ) -> CreateResult:
        """Send a chat-completion request to the Nexra API.

        Args:
            model: Requested model name (coerced to the supported model).
            messages: Conversation history, flattened into one user prompt.
            stream: Whether to request and consume a streaming response.
            markdown: Ask the API for markdown-formatted output.

        Returns:
            A generator of text deltas when streaming, otherwise the full
            response message (or an error string on failure).
        """
        model = cls.get_model(model)

        request_headers = {
            'Content-Type': 'application/json'
        }
        payload = {
            "messages": [
                {
                    "role": "user",
                    "content": format_prompt(messages)
                }
            ],
            "stream": stream,
            "markdown": markdown,
            "model": model,
        }

        response = requests.post(
            cls.api_endpoint, headers=request_headers, json=payload, stream=stream
        )

        # Pick the matching response handler for the requested mode.
        handler = (
            cls.process_streaming_response
            if stream
            else cls.process_non_streaming_response
        )
        return handler(response)

    @classmethod
    def process_non_streaming_response(cls, response):
        """Return the complete assistant message from a non-streaming response.

        On a non-200 status or undecodable body, returns an error string
        rather than raising (matching this provider's error convention).
        """
        if response.status_code != 200:
            return f"Error: {response.status_code}"
        # NOTE(review): lstrip('') is a no-op — possibly a control character
        # in the strip set was lost in transit; confirm against the API output.
        body = response.text.lstrip('')
        try:
            parsed = json.loads(body)
        except json.JSONDecodeError:
            return "Error: Unable to decode JSON response"
        return parsed.get('message', '')

    @classmethod
    def process_streaming_response(cls, response):
        """Yield incremental text deltas from a streaming JSON-lines response.

        Each line carries the message accumulated so far; only the suffix not
        yet emitted is yielded. A line with a truthy 'finish' ends the stream,
        and undecodable lines are skipped silently.
        """
        emitted = ""
        for raw_line in response.iter_lines(decode_unicode=True):
            if not raw_line:
                continue
            # NOTE(review): lstrip('') is a no-op here as well — see above.
            raw_line = raw_line.lstrip('')
            try:
                chunk = json.loads(raw_line)
            except json.JSONDecodeError:
                continue
            if chunk.get('finish'):
                break
            text = chunk.get('message', '')
            if text:
                yield text[len(emitted):]
                emitted = text
|