from __future__ import annotations
import json
import random
import requests
from ..typing import Any, CreateResult
from .base_provider import BaseProvider


class Theb(BaseProvider):
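    """Provider for theb.ai: streams chat completions from the beta.theb.ai conversation API."""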
    url = "https://theb.ai"
    working = True
    supports_stream = True
    supports_gpt_35_turbo = True
    needs_auth = True

    @staticmethod
    def create_completion(
        model: str,
        messages: list[dict[str, str]],
        stream: bool, **kwargs: Any) -> CreateResult:
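        # Flatten the chat history into a plain-text transcript and prompt the assistant reply.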
        conversation = "\n".join(f"{message['role']}: {message['content']}" for message in messages)
        conversation += "\nassistant: "
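        # Fall back to the default free-tier credentials when no "auth" kwarg is given.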
        auth = kwargs.get("auth", {
            "bearer_token": "free",
            "org_id": "theb",
        })
        bearer_token = auth["bearer_token"]
        org_id = auth["org_id"]
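        # Browser-like headers for beta.theb.ai; "x-ai-model" is the site's opaque model id.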
        headers = {
            'authority': 'beta.theb.ai',
            'accept': 'text/event-stream',
            'accept-language': 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
            'authorization': 'Bearer ' + bearer_token,
            'content-type': 'application/json',
            'origin': 'https://beta.theb.ai',
            'referer': 'https://beta.theb.ai/home',
            'sec-ch-ua': '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
            'x-ai-model': 'ee8d4f29cb7047f78cbe84313ed6ace8',
        }
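        # Random request id that gets appended to the query string.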
        req_rand = random.randint(100000000, 9999999999)
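        # Request payload: the flattened conversation plus sampling parameters from kwargs.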
        json_data: dict[str, Any] = {
            "text": conversation,
            "category": "04f58f64a4aa4191a957b47290fee864",
            "model": "ee8d4f29cb7047f78cbe84313ed6ace8",
            "model_params": {
                "system_prompt": "You are ChatGPT, a large language model trained by OpenAI, based on the GPT-3.5 architecture.\nKnowledge cutoff: 2021-09\nCurrent date: {{YYYY-MM-DD}}",
                "temperature": kwargs.get("temperature", 1),
                "top_p": kwargs.get("top_p", 1),
                "frequency_penalty": kwargs.get("frequency_penalty", 0),
                "presence_penalty": kwargs.get("presence_penalty", 0),
                "long_term_memory": "auto"
            }
        }
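        # Send the request and stream the server-sent events back.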
        response = requests.post(f"https://beta.theb.ai/api/conversation?org_id={org_id}&req_rand={req_rand}",
                                 headers=headers, json=json_data, stream=True)
        response.raise_for_status()
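        # Each "content" event carries the full response so far; strip the previously
        # seen text so that only the newly generated suffix is yielded.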
        content = ""
        next_content = ""
        for chunk in response.iter_lines():
            if b"content" in chunk:
                next_content = content
                data = json.loads(chunk.decode().split("data: ")[1])
                content = data["content"]
                yield data["content"].replace(next_content, "")
    # Human-readable summary of the arguments this provider accepts.
    # Note: chaining @classmethod and @property is deprecated in Python 3.11+.
    @classmethod
    @property
    def params(cls):
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("auth", "dict[str, str]"),
            ("stream", "bool"),
            ("temperature", "float"),
            ("presence_penalty", "int"),
            ("frequency_penalty", "int"),
            ("top_p", "int")
        ]
        param = ", ".join([": ".join(p) for p in params])
        return f"g4f.provider.{cls.__name__} supports: ({param})"
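
# Minimal usage sketch (kept as a comment; not part of the provider). It assumes the
# package layout implied by the params string above (g4f.provider.Theb) and that the
# default free credentials are still accepted by beta.theb.ai; otherwise pass your own
# credentials via the "auth" kwarg.
#
#     from g4f.provider import Theb
#
#     for token in Theb.create_completion(
#         model="gpt-3.5-turbo",
#         messages=[{"role": "user", "content": "Hello"}],
#         stream=True,
#     ):
#         print(token, end="", flush=True)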