from __future__ import annotations

import json

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt

2024-02-23 19:21:10 +03:00
class FreeChatgpt(AsyncGeneratorProvider, ProviderModelMixin):
    """Streaming provider for the OpenAI-compatible API at chat.chatgpt.org.uk.

    Sends a chat-completion request with ``stream=True`` and yields the
    content of each SSE ``data:`` chunk as it arrives.
    """

    url = "https://chat.chatgpt.org.uk"
    api_endpoint = "/api/openai/v1/chat/completions"
    working = True
    default_model = '@cf/qwen/qwen1.5-14b-chat-awq'
    models = [
        '@cf/qwen/qwen1.5-14b-chat-awq',
        'SparkDesk-v1.1',
        'Qwen2-7B-Instruct',
        'glm4-9B-chat',
        'chatglm3-6B',
        'Yi-1.5-9B-Chat',
    ]

    # Short, user-facing alias -> canonical upstream model name.
    model_aliases = {
        "qwen-1.5-14b": "@cf/qwen/qwen1.5-14b-chat-awq",
        "sparkdesk-v1.1": "SparkDesk-v1.1",
        "qwen-2-7b": "Qwen2-7B-Instruct",
        "glm-4-9b": "glm4-9B-chat",
        "glm-3-6b": "chatglm3-6B",
        "yi-1.5-9b": "Yi-1.5-9B-Chat",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        """Resolve ``model`` to a canonical name.

        Accepts either an exact entry of :attr:`models` or a
        case-insensitive alias from :attr:`model_aliases`; anything
        unrecognized falls back to :attr:`default_model`.
        """
        if model in cls.models:
            return model
        if model.lower() in cls.model_aliases:
            return cls.model_aliases[model.lower()]
        return cls.default_model

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield response text chunks for ``messages`` from the remote API.

        Args:
            model: Requested model name or alias (resolved via ``get_model``).
            messages: Conversation history, flattened into a single prompt.
            proxy: Optional proxy URL passed to aiohttp.

        Yields:
            str: Each streamed delta of the assistant's reply, in order.
        """
        model = cls.get_model(model)

        # Browser-like headers; the endpoint rejects requests without them.
        headers = {
            "accept": "application/json, text/event-stream",
            "accept-language": "en-US,en;q=0.9",
            "content-type": "application/json",
            "dnt": "1",
            "origin": cls.url,
            "referer": f"{cls.url}/",
            "sec-ch-ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Linux"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36",
        }
        async with ClientSession(headers=headers) as session:
            prompt = format_prompt(messages)
            data = {
                "messages": [
                    {"role": "system", "content": "\nYou are ChatGPT, a large language model trained by OpenAI.\nKnowledge cutoff: 2021-09\nCurrent model: gpt-3.5-turbo\nCurrent time: Thu Jul 04 2024 21:35:59 GMT+0300 (Eastern European Summer Time)\nLatex inline: \\(x^2\\)\nLatex block: $$e=mc^2$$\n\n"},
                    {"role": "user", "content": prompt}
                ],
                "stream": True,
                "model": model,
                "temperature": 0.5,
                "presence_penalty": 0,
                "frequency_penalty": 0,
                "top_p": 1
            }
            async with session.post(f"{cls.url}{cls.api_endpoint}", json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for line in response.content:
                    if not line:
                        continue
                    line_str = line.decode().strip()
                    if line_str == "data: [DONE]":
                        # Every delta has already been yielded; re-yielding an
                        # accumulated copy here would duplicate the whole reply.
                        break
                    if line_str.startswith("data: "):
                        try:
                            chunk = json.loads(line_str[6:])
                        except json.JSONDecodeError:
                            # Tolerate keep-alive / malformed SSE lines.
                            continue
                        delta_content = chunk.get("choices", [{}])[0].get("delta", {}).get("content", "")
                        yield delta_content