from __future__ import annotations
from ..typing import Messages, CreateResult
from ..providers.base_provider import AbstractProvider, ProviderModelMixin
import time
import uuid
import random
import json
from requests import Session
from .openai.new import (
    get_config,
    get_answer_token,
    process_turnstile,
    get_requirements_token
)
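

# Builds the list of message objects the chatgpt.com web client appears to send
# to its anonymous backend: a UUID, the author role, text parts and a creation
# timestamp per message.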
def format_conversation(messages: list):
    conversation = []

    for message in messages:
        conversation.append({
            'id': str(uuid.uuid4()),
            'author': {
                'role': message['role'],
            },
            'content': {
                'content_type': 'text',
                'parts': [
                    message['content'],
                ],
            },
            'metadata': {
                'serialization_metadata': {
                    'custom_symbol_offsets': [],
                },
            },
            'create_time': round(time.time(), 3),
        })

    return conversation
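

# Primes a requests.Session by hitting the chatgpt.com landing page with
# browser-like client-hint headers, presumably so the follow-up API calls reuse
# whatever cookies the site sets.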
def init_session(user_agent):
    session = Session()
    cookies = {
        '_dd_s': '',
    }
    headers = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.8',
        'cache-control': 'no-cache',
        'pragma': 'no-cache',
        'priority': 'u=0, i',
        'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
        'sec-ch-ua-arch': '"arm"',
        'sec-ch-ua-bitness': '"64"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-model': '""',
        'sec-ch-ua-platform': '"macOS"',
        'sec-ch-ua-platform-version': '"14.4.0"',
        'sec-fetch-dest': 'document',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-site': 'none',
        'sec-fetch-user': '?1',
        'upgrade-insecure-requests': '1',
        'user-agent': user_agent,
    }

    session.get('https://chatgpt.com/', cookies=cookies, headers=headers)

    return session
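

# Provider for the anonymous chatgpt.com backend. Conversation requests carry
# sentinel tokens (proof of work plus, when demanded, a Turnstile token), which
# are produced by the helpers imported from .openai.new.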
class ChatGpt(AbstractProvider, ProviderModelMixin):
    label = "ChatGpt"
    url = "https://chatgpt.com"
    working = True
    supports_message_history = True
    supports_system_message = True
    supports_stream = True

    default_model = 'auto'
    models = [
        default_model,
        'gpt-3.5-turbo',
        'gpt-4o',
        'gpt-4o-mini',
        'gpt-4',
        'gpt-4-turbo',
        'chatgpt-4o-latest',
    ]

    model_aliases = {
        "gpt-4o": "chatgpt-4o-latest",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        if model in cls.models:
            return model
        elif model in cls.model_aliases:
            return cls.model_aliases[model]
        else:
            return cls.default_model
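
    # create_completion runs the full anonymous-backend flow: warm up a session,
    # complete the sentinel chat-requirements handshake (proof of work plus an
    # optional Turnstile challenge), then stream the conversation response as
    # server-sent events.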
    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: Messages,
        stream: bool,
        **kwargs
    ) -> CreateResult:
        model = cls.get_model(model)
        if model not in cls.models:
            raise ValueError(f"Model '{model}' is not available. Available models: {', '.join(cls.models)}")

        user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36'
        session: Session = init_session(user_agent)

        # get_config / get_requirements_token are the sentinel helpers from
        # .openai.new; pow_req appears to be the initial proof-of-work
        # requirements token sent in the handshake below.
        config = get_config(user_agent)
        pow_req = get_requirements_token(config)
        headers = {
            'accept': '*/*',
            'accept-language': 'en-US,en;q=0.8',
            'content-type': 'application/json',
            'oai-device-id': f'{uuid.uuid4()}',
            'oai-language': 'en-US',
            'origin': 'https://chatgpt.com',
            'priority': 'u=1, i',
            'referer': 'https://chatgpt.com/',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Linux"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'sec-gpc': '1',
            'user-agent': f'{user_agent}'
        }
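
        # Ask the sentinel endpoint what this conversation requires; 'p' carries
        # the proof-of-work requirements token generated above.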
        response = session.post('https://chatgpt.com/backend-anon/sentinel/chat-requirements',
                                headers=headers, json={'p': pow_req})

        if response.status_code != 200:
            return

        response_data = response.json()
        if "detail" in response_data and "Unusual activity" in response_data["detail"]:
            return
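
        # The requirements response appears to carry three things: a 'token' to
        # echo back on the conversation request, 'proofofwork' parameters for
        # get_answer_token, and an optional Turnstile challenge.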
        turnstile = response_data.get('turnstile', {})
        turnstile_required = turnstile.get('required')
        pow_conf = response_data.get('proofofwork', {})

        # Keep turnstile_token defined even when no challenge was issued, so the
        # header merge below cannot hit an undefined name.
        turnstile_token = None
        if turnstile_required:
            turnstile_dx = turnstile.get('dx')
            turnstile_token = process_turnstile(turnstile_dx, pow_req)

        headers = {
            **headers,
            'openai-sentinel-turnstile-token': turnstile_token,
            'openai-sentinel-chat-requirements-token': response_data.get('token'),
            'openai-sentinel-proof-token': get_answer_token(
                pow_conf.get('seed'), pow_conf.get('difficulty'), config
            ),
        }

        json_data = {
            'action': 'next',
            'messages': format_conversation(messages),
            'parent_message_id': str(uuid.uuid4()),
            'model': model,
            'timezone_offset_min': -120,
            'suggestions': [
                'Can you help me create a personalized morning routine that would help increase my productivity throughout the day? Start by asking me about my current habits and what activities energize me in the morning.',
                'Could you help me plan a relaxing day that focuses on activities for rejuvenation? To start, can you ask me what my favorite forms of relaxation are?',
                'I have a photoshoot tomorrow. Can you recommend me some colors and outfit options that will look good on camera?',
                'Make up a 5-sentence story about "Sharky", a tooth-brushing shark superhero. Make each sentence a bullet point.',
            ],
            'history_and_training_disabled': False,
            'conversation_mode': {
                'kind': 'primary_assistant',
            },
            'force_paragen': False,
            'force_paragen_model_slug': '',
            'force_nulligen': False,
            'force_rate_limit': False,
            'reset_rate_limits': False,
            'websocket_request_id': str(uuid.uuid4()),
            'system_hints': [],
            'force_use_sse': True,
            'conversation_origin': None,
            'client_contextual_info': {
                'is_dark_mode': True,
                'time_since_loaded': random.randint(22, 33),
                'page_height': random.randint(600, 900),
                'page_width': random.randint(500, 800),
                'pixel_ratio': 2,
                'screen_height': random.randint(800, 1200),
                'screen_width': random.randint(1200, 2000),
            },
        }
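
        # Brief pause before the conversation request, presumably to look less
        # like an immediate automated follow-up; the response is consumed as a
        # server-sent event stream.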
        time.sleep(2)

        response = session.post('https://chatgpt.com/backend-anon/conversation',
                                headers=headers, json=json_data, stream=True)

        replace = ''
        for line in response.iter_lines():
            if line:
                decoded_line = line.decode()

                if decoded_line.startswith('data: '):
                    json_string = decoded_line[6:].strip()

                    if json_string == '[DONE]':
                        break

                    if json_string:
                        try:
                            data = json.loads(json_string)
                        except json.JSONDecodeError:
                            continue

                        if data.get('message') and data['message'].get('author'):
                            role = data['message']['author'].get('role')
                            if role == 'assistant':
                                tokens = data['message']['content'].get('parts', [])
                                if tokens:
                                    # Each event carries the full assistant text
                                    # so far; strip the previously seen text and
                                    # yield only the new tail.
                                    yield tokens[0].replace(replace, '')
                                    replace = tokens[0]
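

# A minimal usage sketch (assuming this module is imported as part of the
# provider package and that chatgpt.com still accepts anonymous requests):
#
#     for chunk in ChatGpt.create_completion(
#         model='gpt-4o',
#         messages=[{'role': 'user', 'content': 'Hello!'}],
#         stream=True,
#     ):
#         print(chunk, end='', flush=True)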