from __future__ import annotations
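
# Provider for the Vercel AI SDK playground (https://sdk.vercel.ai). Requests
# to /api/generate must carry a 'custom-encoding' header whose value comes
# from solving a JavaScript bot check served at /openai.jpeg; see
# get_anti_bot_token() below.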
import json, base64, requests, execjs, random, uuid

from ..typing import Messages, TypedDict, CreateResult, Any
from .base_provider import BaseProvider
from ..debug import logging

class Vercel(BaseProvider):
    url = 'https://sdk.vercel.ai'
    working = False
    supports_message_history = True
    supports_gpt_35_turbo = True
    supports_stream = True
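
    # NOTE: `working = False` flags this provider as currently non-functional;
    # the request flow below documents its last known behaviour.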
    @staticmethod
    def create_completion(
        model: str,
        messages: Messages,
        stream: bool,
        proxy: str = None,
        **kwargs
    ) -> CreateResult:
        if not model:
            model = "gpt-3.5-turbo"
        elif model not in model_info:
            raise ValueError(f"Vercel does not support {model}")

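        # Browser-like headers: 'custom-encoding' carries the anti-bot token,
        # and the Chrome build number in the user-agent is randomized.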
        headers = {
            'authority': 'sdk.vercel.ai',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            'custom-encoding': get_anti_bot_token(),
            'origin': 'https://sdk.vercel.ai',
            'pragma': 'no-cache',
            'referer': 'https://sdk.vercel.ai/',
            'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': f'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{random.randint(99, 999)}.{random.randint(99, 999)} Safari/537.36',
        }
        json_data = {
            'model': model_info[model]['id'],
            'messages': messages,
            'playgroundId': str(uuid.uuid4()),
            'chatIndex': 0,
            **model_info[model]['default_params'],
            **kwargs
        }

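        # Retry on non-2xx responses (e.g. when the bot check rejects the
        # request), streaming the body of the first successful attempt.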
        max_retries = kwargs.get('max_retries', 20)
        for _ in range(max_retries):
            response = requests.post('https://sdk.vercel.ai/api/generate',
                headers=headers, json=json_data, stream=True, proxies={"https": proxy})
            try:
                response.raise_for_status()
            except requests.HTTPError:
                continue
            for token in response.iter_content(chunk_size=None):
                yield token.decode()
            break


def get_anti_bot_token() -> str:
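    """Return the value for the 'custom-encoding' anti-bot header.

    GET /openai.jpeg returns a base64-encoded JSON challenge of the form
    {'c': <JS function source>, 'a': <its argument>, 't': <server token>}.
    The function is evaluated with execjs; its result and 't' are then
    JSON-encoded, UTF-16LE encoded and base64'd into the header value.
    """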
    headers = {
        'authority': 'sdk.vercel.ai',
        'accept': '*/*',
        'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
        'cache-control': 'no-cache',
        'pragma': 'no-cache',
        'referer': 'https://sdk.vercel.ai/',
        'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': f'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{random.randint(99, 999)}.{random.randint(99, 999)} Safari/537.36',
    }
    response = requests.get('https://sdk.vercel.ai/openai.jpeg',
        headers=headers).text
    raw_data = json.loads(base64.b64decode(response, validate=True))
    # The challenge expects a browser-like environment: stub globalThis and
    # String.prototype.fontcolor before calling the challenge function.
    js_script = '''const globalThis={marker:"mark"};String.prototype.fontcolor=function(){return `<font>${this}</font>`};
        return (%s)(%s)''' % (raw_data['c'], raw_data['a'])
    raw_token = json.dumps({'r': execjs.compile(js_script).call(''), 't': raw_data['t']},
        separators=(",", ":"))
    return base64.b64encode(raw_token.encode('utf-16le')).decode()


class ModelInfo(TypedDict):
    id: str
    default_params: dict[str, Any]


model_info: dict[str, ModelInfo] = {
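    # Maps the model names accepted by create_completion to the Vercel
    # playground id and that model's default sampling parameters.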
# 'claude-instant-v1': {
# 'id': 'anthropic:claude-instant-v1',
# 'default_params': {
# 'temperature': 1,
# 'maximumLength': 1024,
# 'topP': 1,
# 'topK': 1,
# 'presencePenalty': 1,
# 'frequencyPenalty': 1,
# 'stopSequences': ['\n\nHuman:'],
# },
# },
# 'claude-v1': {
# 'id': 'anthropic:claude-v1',
# 'default_params': {
# 'temperature': 1,
# 'maximumLength': 1024,
# 'topP': 1,
# 'topK': 1,
# 'presencePenalty': 1,
# 'frequencyPenalty': 1,
# 'stopSequences': ['\n\nHuman:'],
# },
# },
# 'claude-v2': {
# 'id': 'anthropic:claude-v2',
# 'default_params': {
# 'temperature': 1,
# 'maximumLength': 1024,
# 'topP': 1,
# 'topK': 1,
# 'presencePenalty': 1,
# 'frequencyPenalty': 1,
# 'stopSequences': ['\n\nHuman:'],
# },
# },
    'replicate/llama70b-v2-chat': {
        'id': 'replicate:replicate/llama-2-70b-chat',
        'default_params': {
            'temperature': 0.75,
            'maximumLength': 3000,
            'topP': 1,
            'repetitionPenalty': 1,
        },
    },
    'a16z-infra/llama7b-v2-chat': {
        'id': 'replicate:a16z-infra/llama7b-v2-chat',
        'default_params': {
            'temperature': 0.75,
            'maximumLength': 3000,
            'topP': 1,
            'repetitionPenalty': 1,
        },
    },
    'a16z-infra/llama13b-v2-chat': {
        'id': 'replicate:a16z-infra/llama13b-v2-chat',
        'default_params': {
            'temperature': 0.75,
            'maximumLength': 3000,
            'topP': 1,
            'repetitionPenalty': 1,
        },
    },
    'replicate/llama-2-70b-chat': {
        'id': 'replicate:replicate/llama-2-70b-chat',
        'default_params': {
            'temperature': 0.75,
            'maximumLength': 3000,
            'topP': 1,
            'repetitionPenalty': 1,
        },
    },
    'bigscience/bloom': {
        'id': 'huggingface:bigscience/bloom',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 0.95,
            'topK': 4,
            'repetitionPenalty': 1.03,
        },
    },
    'google/flan-t5-xxl': {
        'id': 'huggingface:google/flan-t5-xxl',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 0.95,
            'topK': 4,
            'repetitionPenalty': 1.03,
        },
    },
    'EleutherAI/gpt-neox-20b': {
        'id': 'huggingface:EleutherAI/gpt-neox-20b',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 0.95,
            'topK': 4,
            'repetitionPenalty': 1.03,
            'stopSequences': [],
        },
    },
    'OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5': {
        'id': 'huggingface:OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5',
        'default_params': {
            'maximumLength': 1024,
            'typicalP': 0.2,
            'repetitionPenalty': 1,
        },
    },
    'OpenAssistant/oasst-sft-1-pythia-12b': {
        'id': 'huggingface:OpenAssistant/oasst-sft-1-pythia-12b',
        'default_params': {
            'maximumLength': 1024,
            'typicalP': 0.2,
            'repetitionPenalty': 1,
        },
    },
    'bigcode/santacoder': {
        'id': 'huggingface:bigcode/santacoder',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 0.95,
            'topK': 4,
            'repetitionPenalty': 1.03,
        },
    },
    'command-light-nightly': {
        'id': 'cohere:command-light-nightly',
        'default_params': {
            'temperature': 0.9,
            'maximumLength': 1024,
            'topP': 1,
            'topK': 0,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'command-nightly': {
        'id': 'cohere:command-nightly',
        'default_params': {
            'temperature': 0.9,
            'maximumLength': 1024,
            'topP': 1,
            'topK': 0,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
# 'gpt-4': {
# 'id': 'openai:gpt-4',
# 'default_params': {
# 'temperature': 0.7,
# 'maximumLength': 8192,
# 'topP': 1,
# 'presencePenalty': 0,
# 'frequencyPenalty': 0,
# 'stopSequences': [],
# },
# },
# 'gpt-4-0613': {
# 'id': 'openai:gpt-4-0613',
# 'default_params': {
# 'temperature': 0.7,
# 'maximumLength': 8192,
# 'topP': 1,
# 'presencePenalty': 0,
# 'frequencyPenalty': 0,
# 'stopSequences': [],
# },
# },
    'code-davinci-002': {
        'id': 'openai:code-davinci-002',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'gpt-3.5-turbo': {
        'id': 'openai:gpt-3.5-turbo',
        'default_params': {
            'temperature': 0.7,
            'maximumLength': 4096,
            'topP': 1,
            'topK': 1,
            'presencePenalty': 1,
            'frequencyPenalty': 1,
            'stopSequences': [],
        },
    },
    'gpt-3.5-turbo-16k': {
        'id': 'openai:gpt-3.5-turbo-16k',
        'default_params': {
            'temperature': 0.7,
            'maximumLength': 16280,
            'topP': 1,
            'topK': 1,
            'presencePenalty': 1,
            'frequencyPenalty': 1,
            'stopSequences': [],
        },
    },
    'gpt-3.5-turbo-16k-0613': {
        'id': 'openai:gpt-3.5-turbo-16k-0613',
        'default_params': {
            'temperature': 0.7,
            'maximumLength': 16280,
            'topP': 1,
            'topK': 1,
            'presencePenalty': 1,
            'frequencyPenalty': 1,
            'stopSequences': [],
        },
    },
    'text-ada-001': {
        'id': 'openai:text-ada-001',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-babbage-001': {
        'id': 'openai:text-babbage-001',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-curie-001': {
        'id': 'openai:text-curie-001',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-davinci-002': {
        'id': 'openai:text-davinci-002',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 1024,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
    'text-davinci-003': {
        'id': 'openai:text-davinci-003',
        'default_params': {
            'temperature': 0.5,
            'maximumLength': 4097,
            'topP': 1,
            'presencePenalty': 0,
            'frequencyPenalty': 0,
            'stopSequences': [],
        },
    },
}
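

# A minimal usage sketch (hypothetical, assuming the module is imported as
# part of its package); create_completion is a generator yielding decoded
# text chunks:
#
#     for chunk in Vercel.create_completion(
#         model='gpt-3.5-turbo',
#         messages=[{'role': 'user', 'content': 'Hello'}],
#         stream=True,
#     ):
#         print(chunk, end='')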