from __future__ import annotations

import json

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from ..requests import StreamSession


class DeepInfra(AsyncGeneratorProvider):
    url = "https://deepinfra.com"
    working = True
    supports_stream = True
    supports_message_history = True

    @staticmethod
    async def create_async_generator(
        model: str,
        messages: Messages,
        stream: bool,
        proxy: str = None,
        timeout: int = 120,
        auth: str = None,
        **kwargs
    ) -> AsyncResult:
        # Fall back to the default chat model when none is requested.
        if not model:
            model = 'meta-llama/Llama-2-70b-chat-hf'
        # Browser-style headers matching the deepinfra.com web client;
        # the API answers with a server-sent-event (SSE) stream.
        headers = {
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US',
            'Connection': 'keep-alive',
            'Content-Type': 'application/json',
            'Origin': 'https://deepinfra.com',
            'Referer': 'https://deepinfra.com/',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-site',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
            'X-Deepinfra-Source': 'web-embed',
            'accept': 'text/event-stream',
            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
        }
        if auth:
            headers['Authorization'] = f"bearer {auth}"
        async with StreamSession(
            headers=headers,
            timeout=timeout,
            proxies={"https": proxy},
            impersonate="chrome110"
        ) as session:
            json_data = {
                'model': model,
                'messages': messages,
                'stream': True
            }
            async with session.post(
                'https://api.deepinfra.com/v1/openai/chat/completions',
                json=json_data
            ) as response:
                response.raise_for_status()
                first = True
                async for line in response.iter_lines():
                    try:
                        if line.startswith(b"data: [DONE]"):
                            break
                        elif line.startswith(b"data: "):
                            # Drop the "data: " prefix and read the streamed token.
                            chunk = json.loads(line[6:])["choices"][0]["delta"].get("content")
                            if chunk:
                                # Strip leading whitespace from the first token;
                                # skip it entirely if nothing is left.
                                if first:
                                    chunk = chunk.lstrip()
                                if chunk:
                                    first = False
                                    yield chunk
                    except Exception as e:
                        raise RuntimeError(f"Response: {line}") from e
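
if __name__ == "__main__":
    # Usage sketch (an addition, not part of the provider itself): stream one
    # completion and print tokens as they arrive. Because of the relative
    # imports above, this only runs as a module inside its package, e.g.
    # `python -m g4f.Provider.DeepInfra` (that module path is an assumption).
    import asyncio

    async def demo() -> None:
        messages = [{"role": "user", "content": "Say hello in one sentence."}]
        async for token in DeepInfra.create_async_generator(
            model="meta-llama/Llama-2-70b-chat-hf",
            messages=messages,
            stream=True,
        ):
            print(token, end="", flush=True)
        print()

    asyncio.run(demo())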