Mirror of https://github.com/xtekky/gpt4free.git, synced 2024-11-10 06:54:22 +03:00
add provider and helper

This commit is contained in: commit 25870e7523 (parent e594500c4e)
.vscode/settings.json (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
{
    "[python]": {
        "editor.defaultFormatter": "ms-python.autopep8"
    },
    "python.formatting.provider": "none"
}
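Not part of the commit: a minimal sanity-check sketch, assuming the file sits at .vscode/settings.json relative to the working directory, confirming the new settings parse as JSON and pin autopep8 as the Python formatter.

import json

# Hypothetical check; path is an assumption, not from the commit.
with open('.vscode/settings.json', encoding='utf-8') as f:
    settings = json.load(f)

# The commit selects ms-python.autopep8 and disables the legacy provider setting.
assert settings['[python]']['editor.defaultFormatter'] == 'ms-python.autopep8'
assert settings['python.formatting.provider'] == 'none'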
testing/binghuan/BingHuan.py (new file, 49 lines)
@@ -0,0 +1,49 @@
import os,sys
import json
import subprocess
# from ...typing import sha256, Dict, get_type_hints

url = 'https://b.ai-huan.xyz'
model = ['gpt-3.5-turbo', 'gpt-4']
supports_stream = True
needs_auth = False

def _create_completion(model: str, messages: list, stream: bool, **kwargs):
    path = os.path.dirname(os.path.realpath(__file__))
    config = json.dumps({
        'messages': messages,
        'model': model}, separators=(',', ':'))

    cmd = ['python', f'{path}/helpers/binghuan.py', config]

    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    for line in iter(p.stdout.readline, b''):
        yield line.decode('cp1252') #[:-1]


# params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
#     '(%s)' % ', '.join(
#         [f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])


# Temporary For ChatCompletion Class
class ChatCompletion:
    @staticmethod
    def create(model: str, messages: list, provider: None or str, stream: bool = False, auth: str = False, **kwargs):
        kwargs['auth'] = auth

        if provider and needs_auth and not auth:
            print(
                f'ValueError: {provider} requires authentication (use auth="cookie or token or jwt ..." param)', file=sys.stderr)
            sys.exit(1)

        try:
            return (_create_completion(model, messages, stream, **kwargs)
                    if stream else ''.join(_create_completion(model, messages, stream, **kwargs)))
        except TypeError as e:
            print(e)
            arg: str = str(e).split("'")[1]
            print(
                f"ValueError: {provider} does not support '{arg}' argument", file=sys.stderr)
            sys.exit(1)
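Not part of the commit: a minimal usage sketch for the temporary ChatCompletion class above, assuming BingHuan.py is importable from the current directory (e.g. run from testing/binghuan/); the model name and prompt are illustrative.

import BingHuan

# Streaming call: _create_completion yields decoded chunks from the helper process.
for chunk in BingHuan.ChatCompletion.create(
        model='gpt-3.5-turbo',
        messages=[{'role': 'user', 'content': 'Hello, who are you?'}],
        provider='BingHuan',
        stream=True):
    print(chunk, end='')

With stream=False the same call joins the yielded chunks and returns a single string.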
testing/binghuan/helpers/binghuan.py (new file, 206 lines)
@@ -0,0 +1,206 @@
import sys
import ssl
import uuid
import json
import time
import random
import asyncio
import certifi
# import requests
from curl_cffi import requests
import websockets
import browser_cookie3

config = json.loads(sys.argv[1])

ssl_context = ssl.create_default_context()
ssl_context.load_verify_locations(certifi.where())


conversationstyles = {
    'gpt-4': [ #'precise'
        "nlu_direct_response_filter",
        "deepleo",
        "disable_emoji_spoken_text",
        "responsible_ai_policy_235",
        "enablemm",
        "h3precise",
        "rcsprtsalwlst",
        "dv3sugg",
        "autosave",
        "clgalileo",
        "gencontentv3"
    ],
    'balanced': [
        "nlu_direct_response_filter",
        "deepleo",
        "disable_emoji_spoken_text",
        "responsible_ai_policy_235",
        "enablemm",
        "harmonyv3",
        "rcsprtsalwlst",
        "dv3sugg",
        "autosave"
    ],
    'gpt-3.5-turbo': [ #'precise'
        "nlu_direct_response_filter",
        "deepleo",
        "disable_emoji_spoken_text",
        "responsible_ai_policy_235",
        "enablemm",
        "h3imaginative",
        "rcsprtsalwlst",
        "dv3sugg",
        "autosave",
        "gencontentv3"
    ]
}

def format(msg: dict) -> str:
    return json.dumps(msg) + '\x1e'

def get_token():
    return

    try:
        cookies = {c.name: c.value for c in browser_cookie3.edge(domain_name='bing.com')}
        return cookies['_U']
    except:
        print('Error: could not find bing _U cookie in edge browser.')
        exit(1)

class AsyncCompletion:
    async def create(
            prompt : str = None,
            optionSets : list = None,
            token : str = None): # No auth required anymore

        create = None
        for _ in range(5):
            try:
                create = requests.get('https://b.ai-huan.xyz/turing/conversation/create',
                    headers = {
                        'host': 'b.ai-huan.xyz',
                        'accept-encoding': 'gzip, deflate, br',
                        'connection': 'keep-alive',
                        'authority': 'b.ai-huan.xyz',
                        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
                        'accept-language': 'en-US,en;q=0.9',
                        'cache-control': 'max-age=0',
                        'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                        'sec-ch-ua-arch': '"x86"',
                        'sec-ch-ua-bitness': '"64"',
                        'sec-ch-ua-full-version': '"110.0.1587.69"',
                        'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
                        'sec-ch-ua-mobile': '?0',
                        'sec-ch-ua-model': '""',
                        'sec-ch-ua-platform': '"Windows"',
                        'sec-ch-ua-platform-version': '"15.0.0"',
                        'sec-fetch-dest': 'document',
                        'sec-fetch-mode': 'navigate',
                        'sec-fetch-site': 'none',
                        'sec-fetch-user': '?1',
                        'upgrade-insecure-requests': '1',
                        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
                        'x-edge-shopping-flag': '1',
                        'x-forwarded-for': f'13.{random.randint(104, 107)}.{random.randint(0, 255)}.{random.randint(0, 255)}'
                    }
                )

                conversationId = create.json()['conversationId']
                clientId = create.json()['clientId']
                conversationSignature = create.json()['conversationSignature']

            except Exception as e:
                time.sleep(0.5)
                continue

        if create == None: raise Exception('Failed to create conversation.')

        wss: websockets.WebSocketClientProtocol or None = None

        wss = await websockets.connect('wss://sydney.vcanbb.chat/sydney/ChatHub', max_size = None, ssl = ssl_context,
            extra_headers = {
                'accept': 'application/json',
                'accept-language': 'en-US,en;q=0.9',
                'content-type': 'application/json',
                'sec-ch-ua': '"Not_A Brand";v="99", Microsoft Edge";v="110", "Chromium";v="110"',
                'sec-ch-ua-arch': '"x86"',
                'sec-ch-ua-bitness': '"64"',
                'sec-ch-ua-full-version': '"109.0.1518.78"',
                'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-model': "",
                'sec-ch-ua-platform': '"Windows"',
                'sec-ch-ua-platform-version': '"15.0.0"',
                'sec-fetch-dest': 'empty',
                'sec-fetch-mode': 'cors',
                'sec-fetch-site': 'same-origin',
                'x-ms-client-request-id': str(uuid.uuid4()),
                'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
                'Referer': 'https://b.ai-huan.xyz/search?q=Bing+AI&showconv=1&FORM=hpcodx',
                'Referrer-Policy': 'origin-when-cross-origin',
                'x-forwarded-for': f'13.{random.randint(104, 107)}.{random.randint(0, 255)}.{random.randint(0, 255)}'
            }
        )

        await wss.send(format({'protocol': 'json', 'version': 1}))
        await wss.recv()

        struct = {
            'arguments': [
                {
                    'source': 'cib',
                    'optionsSets': optionSets,
                    'isStartOfSession': True,
                    'message': {
                        'author': 'user',
                        'inputMethod': 'Keyboard',
                        'text': prompt,
                        'messageType': 'Chat'
                    },
                    'conversationSignature': conversationSignature,
                    'participant': {
                        'id': clientId
                    },
                    'conversationId': conversationId
                }
            ],
            'invocationId': '0',
            'target': 'chat',
            'type': 4
        }

        await wss.send(format(struct))

        base_string = ''

        final = False
        while not final:
            objects = str(await wss.recv()).split('\x1e')
            for obj in objects:
                if obj is None or obj == '':
                    continue

                response = json.loads(obj)
                #print(response, flush=True, end='')
                if response.get('type') == 1 and response['arguments'][0].get('messages',):
                    response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')

                    yield (response_text.replace(base_string, ''))
                    base_string = response_text

                elif response.get('type') == 2:
                    final = True

        await wss.close()

async def run(optionSets, messages):
    async for value in AsyncCompletion.create(prompt=messages[-1]['content'],
                                              optionSets=optionSets):

        print(value, flush=True, end = '')

optionSet = conversationstyles[config['model']]
asyncio.run(run(optionSet, config['messages']))
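Not part of the commit: the helper takes its whole job as compact JSON in sys.argv[1], which is exactly the payload _create_completion in BingHuan.py builds before spawning it. A minimal sketch of driving the helper directly, assuming it is run from testing/binghuan/ and with an illustrative prompt:

import json
import subprocess

# Payload mirrors what BingHuan._create_completion passes on the command line.
config = json.dumps({
    'messages': [{'role': 'user', 'content': 'Say hello'}],
    'model': 'gpt-3.5-turbo'}, separators=(',', ':'))

# Assumes the current working directory is testing/binghuan/.
p = subprocess.Popen(['python', 'helpers/binghuan.py', config],
                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

# The helper prints streamed chunks to stdout; relay them as they arrive.
for line in iter(p.stdout.readline, b''):
    print(line.decode('utf-8', errors='replace'), end='')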
testing/binghuan/testing.py (new file, 0 lines)
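testing.py is added empty in this commit. Purely as an assumption about how it might be filled in, a minimal end-to-end sketch using the provider above, with an illustrative prompt:

# Hypothetical contents for testing/binghuan/testing.py; not from the commit.
import BingHuan

messages = [{'role': 'user', 'content': 'Write a short greeting.'}]

# Non-streaming call: ChatCompletion.create joins the streamed chunks into one string.
response = BingHuan.ChatCompletion.create(model='gpt-3.5-turbo',
                                          messages=messages,
                                          provider='BingHuan')
print(response)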