Added new provider PI (Hacky way to use) (#1291)

* Added new provider PI (Hacky way to use)

* Updated the models endpoint to show real info about the models

* Added cloudscraper to the requirements

* Fixed some bugs: streaming responses now also include the role field
This commit is contained in:
madonchik123 2023-12-02 01:11:52 +03:00 committed by GitHub
parent 1ade1d959c
commit b0276f6c9e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 170 additions and 12 deletions

111
g4f/Provider/PI.py Normal file
View File

@ -0,0 +1,111 @@
from __future__ import annotations
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
import json
import cloudscraper
class PI(AsyncGeneratorProvider):
    """Provider for pi.ai (Inflection's Pi assistant).

    NOTE(review): depends on the ``Start_Conversation`` / ``Ask_PI`` helpers
    and the shared cloudscraper session defined elsewhere in this module to
    get past Cloudflare.
    """
    url = "https://chat-gpt.com"  # NOTE(review): looks wrong for pi.ai — confirm
    working = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        # Accept an optional pre-established conversation. Previously
        # kwargs['conversation'] raised KeyError when the caller did not
        # pass one; .get() makes the kwarg genuinely optional.
        conversation = kwargs.get('conversation')

        # Use the newest user message; if the final entry is not a user
        # turn, fall back to the message before it.
        last = messages[-1]
        if last['role'] == 'user':
            prompt = last['content']
        else:
            prompt = messages[-2]['content']

        # Lazily open a new pi.ai session when none was supplied.
        if conversation is None:
            conversation = PI.Start_Conversation()

        answer = Ask_PI(prompt, conversation['sid'], conversation['cookies'])
        yield answer[0]['text']
def Start_Conversation():
    """Open a new pi.ai chat session.

    Returns a dict carrying the conversation ``sid`` and the session
    ``cookies``, or an ``error`` entry when Cloudflare blocks the request.
    """
    # Deliberately overwrites the shared scraper's headers for this call.
    scraper.headers = {'accept-type': 'application/json'}
    response = scraper.post(
        'https://pi.ai/api/chat/start',
        data="{}",
        headers={'x-api-version': '3'},
    )
    if 'Just a moment' in response.text:
        # Got the Cloudflare challenge page instead of JSON.
        return {
            'error': 'cloudflare detected',
            'sid': None,
            'cookies': None,
        }
    payload = response.json()
    return {
        'sid': payload['conversations'][0]['sid'],
        'cookies': response.cookies,
    }
def GetConversationTitle(Conversation):
    """Fetch a title for the given conversation.

    NOTE(review): this posts to the chat *start* endpoint with the session
    cookies — confirm it returns the existing conversation's title rather
    than opening a fresh one.
    """
    response = scraper.post(
        'https://pi.ai/api/chat/start',
        data="{}",
        headers={'x-api-version': '3'},
        cookies=Conversation['cookies'],
    )
    if 'Just a moment' in response.text:
        # Blocked by the Cloudflare challenge page.
        return {
            'error': 'cloudflare detected',
            'title': 'Couldnt get the title',
        }
    return {'title': response.json()['conversations'][0]['title']}
def GetChatHistory(Conversation):
    """Return the message history for *Conversation* as parsed JSON."""
    response = scraper.get(
        'https://pi.ai/api/chat/history',
        params={'conversation': Conversation['sid']},
        cookies=Conversation['cookies'],
    )
    if 'Just a moment' in response.text:
        # Blocked by the Cloudflare challenge page.
        return {
            'error': 'cloudflare detected',
            'traceback': 'Couldnt get the chat history'
        }
    return response.json()
# Shared HTTP session for the whole module: cloudscraper impersonates
# Chrome-on-Windows so requests pass Cloudflare's browser check on pi.ai.
# NOTE(review): cloudscraper.session() is a legacy alias and wrapping it
# again via sess= is redundant — kept as-is to preserve behavior.
session = cloudscraper.session()
scraper = cloudscraper.create_scraper(
    browser={'browser': 'chrome', 'platform': 'windows', 'desktop': True},
    sess=session,
)
# Baseline headers; individual helpers may overwrite these per call.
scraper.headers = {
    'Accept': '*/*',
    'Accept-Encoding': 'deflate,gzip,br',
}
def Ask_PI(message, sid, cookies):
    """Send *message* to conversation *sid* and collect streamed events.

    Returns a list of parsed JSON event dicts (the ``text`` chunks and the
    final ``title`` event), or a single-element error list when Cloudflare
    blocks the request.
    """
    json_data = {
        'text': message,
        'conversation': sid,
        'mode': 'BASE',
    }
    response = scraper.post('https://pi.ai/api/chat', json=json_data, cookies=cookies)
    if 'Just a moment' in response.text:
        return [{
            'error': 'cloudflare detected',
            'text': 'Couldnt generate the answer because we got detected by cloudflare please try again later'
        }]
    result = []
    prefix = 'data: '
    for line in response.iter_lines(chunk_size=1024, decode_unicode=True):
        # Server-sent-events stream: keep only the text/title payload lines.
        # startswith accepts a tuple, replacing two duplicated branches.
        if line.startswith(('data: {"text":', 'data: {"title":')):
            # Slice off the fixed prefix instead of split('data: ')[1],
            # which corrupted payloads that themselves contained "data: ".
            # json.loads accepts str directly; the .encode() was needless.
            result.append(json.loads(line[len(prefix):]))
    return result

View File

@ -70,4 +70,5 @@ __map__: dict[str, BaseProvider] = dict([
])
class ProviderUtils:
convert: dict[str, BaseProvider] = __map__
convert: dict[str, BaseProvider] = __map__
from .PI import PI

View File

@ -118,4 +118,4 @@ class Completion:
return result if stream else ''.join(result)
if version_check:
check_pypi_version()
check_pypi_version()

View File

@ -40,12 +40,15 @@ class Api:
@self.app.get("/v1/models")
async def models():
model_list = [{
model_list = []
for model in g4f.Model.__all__():
model_info = (g4f.ModelUtils.convert[model])
model_list.append({
'id': model,
'object': 'model',
'created': 0,
'owned_by': 'g4f'} for model in g4f.Model.__all__()]
'owned_by': model_info.base_provider}
)
return Response(content=json.dumps({
'object': 'list',
'data': model_list}, indent=4), media_type="application/json")
@ -80,17 +83,25 @@ class Api:
model = item_data.get('model')
stream = True if item_data.get("stream") == "True" else False
messages = item_data.get('messages')
conversation = item_data.get('conversation') if item_data.get('conversation') != None else None
try:
response = g4f.ChatCompletion.create(
model=model,
stream=stream,
messages=messages,
ignored=self.list_ignored_providers)
if model == 'pi':
response = g4f.ChatCompletion.create(
model=model,
stream=stream,
messages=messages,
conversation=conversation,
ignored=self.list_ignored_providers)
else:
response = g4f.ChatCompletion.create(
model=model,
stream=stream,
messages=messages,
ignored=self.list_ignored_providers)
except Exception as e:
logging.exception(e)
return Response(content=json.dumps({"error": "An error occurred while generating the response."}, indent=4), media_type="application/json")
completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28))
completion_timestamp = int(time.time())
@ -134,6 +145,7 @@ class Api:
{
'index': 0,
'delta': {
'role': 'assistant',
'content': chunk,
},
'finish_reason': None,

View File

@ -3,4 +3,4 @@ import g4f.api
if __name__ == "__main__":
print(f'Starting server... [g4f v-{g4f.version}]')
g4f.api.Api(engine = g4f, debug = True).run(ip = "127.0.0.1:1337")
g4f.api.Api(engine = g4f, debug = True).run(ip = "0.0.0.0:10000")

View File

@ -27,6 +27,7 @@ from .Provider import (
Bing,
You,
H2o,
PI,
)
@dataclass(unsafe_hash=True)
@ -260,6 +261,11 @@ llama70b_v2_chat = Model(
base_provider = 'replicate',
best_provider = Vercel)
pi = Model(
name = 'pi',
base_provider = 'inflection',
best_provider=PI
)
class ModelUtils:
convert: dict[str, Model] = {
@ -315,6 +321,8 @@ class ModelUtils:
'oasst-sft-1-pythia-12b' : oasst_sft_1_pythia_12b,
'oasst-sft-4-pythia-12b-epoch-3.5' : oasst_sft_4_pythia_12b_epoch_35,
'command-light-nightly' : command_light_nightly,
'pi': pi
}
_all_models = list(ModelUtils.convert.keys())

25
piexample.py Normal file
View File

@ -0,0 +1,25 @@
"""Example: drive the PI provider directly with a reusable conversation."""
from g4f import Provider
import g4f

# Open a pi.ai session and fetch its (initially empty) history.
Conversation = Provider.PI.Start_Conversation()
Chat_History = Provider.PI.GetChatHistory(Conversation)

# Ask a question inside that conversation.
response = g4f.ChatCompletion.create(
    model="pi",
    provider=g4f.Provider.PI,
    messages=[{"role": "user", "content": 'Hello who are you?'}],
    stream=False,
    conversation=Conversation,
)

for message in response:
    print(message, flush=True, end='')

# Fetch the server-assigned title for the conversation.
Chat_Title = Provider.PI.GetConversationTitle(Conversation)

View File

@ -26,3 +26,4 @@ async-property
undetected-chromedriver
asyncstdlib
async_property
cloudscraper