2023-09-23 12:58:13 +03:00
from __future__ import annotations
2024-03-07 15:31:30 +03:00
import json , base64 , requests , random , os
2024-01-26 09:54:13 +03:00
try :
import execjs
has_requirements = True
except ImportError :
has_requirements = False
2023-09-23 12:58:13 +03:00
2024-03-12 04:06:06 +03:00
from . . typing import Messages , CreateResult
2024-01-01 19:48:57 +03:00
from . base_provider import AbstractProvider
2024-03-12 04:06:06 +03:00
from . . requests import raise_for_status
from . . errors import MissingRequirementsError , RateLimitError , ResponseStatusError
2023-09-23 12:58:13 +03:00
2024-01-01 19:48:57 +03:00
class Vercel(AbstractProvider):
    """Provider that streams chat completions from chat.vercel.ai."""

    url = 'https://chat.vercel.ai'
    working = True
    supports_message_history = True
    supports_system_message = True
    supports_gpt_35_turbo = True
    supports_stream = True

    @staticmethod
    def create_completion(
        model: str,
        messages: Messages,
        stream: bool,
        proxy: str = None,
        max_retries: int = 6,
        **kwargs
    ) -> CreateResult:
        """Stream a completion from chat.vercel.ai.

        Args:
            model: Model name (not sent to the API by this provider).
            messages: Conversation history forwarded as the request payload.
            stream: Accepted for interface compatibility; the response is
                always consumed as a stream.
            proxy: Optional HTTPS proxy URL.
            max_retries: Number of POST attempts before giving up.

        Yields:
            Decoded response chunks as they arrive.

        Raises:
            MissingRequirementsError: If the optional PyExecJS package
                (needed by get_anti_bot_token) is not installed.
            Exceptions from raise_for_status when every attempt failed.
        """
        if not has_requirements:
            raise MissingRequirementsError('Install "PyExecJS" package')

        headers = {
            'authority': 'chat.vercel.ai',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            # Solved anti-bot challenge token; a fresh one per request.
            'custom-encoding': get_anti_bot_token(),
            'origin': 'https://chat.vercel.ai',
            'pragma': 'no-cache',
            'referer': 'https://chat.vercel.ai/',
            'sec-ch-ua': '"Chromium";v="122", "Not(A:Brand";v="24", "Google Chrome";v="122"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
        }
        json_data = {
            'messages': messages,
            # Random 7-char hex-ish conversation id, e.g. "a1b2c3a".
            'id': f'{os.urandom(3).hex()}a',
        }

        response = None
        for _ in range(max_retries):
            response = requests.post('https://chat.vercel.ai/api/chat',
                headers=headers, json=json_data, stream=True, proxies={"https": proxy})
            if not response.ok:
                continue
            for token in response.iter_content(chunk_size=None):
                # errors="ignore" makes decoding infallible, so no
                # UnicodeDecodeError handling is needed here.
                yield token.decode(errors="ignore")
            break
        # No-op on a successful response; raises if all retries failed.
        raise_for_status(response)
def get_anti_bot_token() -> str:
    """Fetch and solve the anti-bot challenge used by chat.vercel.ai.

    Downloads the base64-encoded challenge from /openai.jpeg, evaluates the
    embedded JavaScript with PyExecJS (stubbing the browser globals the
    script expects), and returns the solved token as a base64-encoded
    JSON blob suitable for the 'custom-encoding' request header.

    Returns:
        The base64-encoded token string.
    """
    headers = {
        'authority': 'sdk.vercel.ai',
        'accept': '*/*',
        'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
        'cache-control': 'no-cache',
        'pragma': 'no-cache',
        'referer': 'https://sdk.vercel.ai/',
        'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        # Randomized Chrome build numbers to vary the fingerprint per call.
        'user-agent': f'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{random.randint(99, 999)}.{random.randint(99, 999)} Safari/537.36',
    }

    # The challenge endpoint returns base64-encoded JSON despite the
    # .jpeg extension.
    response = requests.get('https://chat.vercel.ai/openai.jpeg',
        headers=headers).text
    raw_data = json.loads(base64.b64decode(response,
        validate=True))

    # Stub the browser environment the challenge script relies on, then
    # call the challenge function 'c' with its argument 'a'.
    js_script = '''const globalThis={marker:"mark"};String.prototype.fontcolor=function(){return `<font>${this}</font>`};
        return (%s)(%s)''' % (raw_data['c'], raw_data['a'])

    sec_list = [execjs.compile(js_script).call('')[0], [], "sentinel"]

    # Compact separators: the server verifies the exact serialized form.
    raw_token = json.dumps({'r': sec_list, 't': raw_data['t']},
        separators=(",", ":"))
    return base64.b64encode(raw_token.encode('utf-8')).decode()