diff --git a/README.md b/README.md
index 0fcbfc79..1062ff71 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ This repository provides reverse-engineered language models from various sources
## To-Do List
-- [ ] implement poe.com create bot feature (4)
+- [x] implement poe.com create bot feature (4) AVAILABLE NOW
- [ ] poe.com chat history management (3)
- [x] renaming the 'poe' module to 'quora' (2)
- [x] add you.com api (1)
@@ -47,31 +47,41 @@ These sites will be reverse engineered but need account access:
### Example: `quora (poe)` (use like openai pypi package) - GPT-4
```python
-# Import quora (poe)
+# quora model names (pass the key on the left as the model argument)
+models = {
+ 'sage' : 'capybara',
+ 'gpt-4' : 'beaver',
+ 'claude-v1.2' : 'a2_2',
+ 'claude-instant-v1.0' : 'a2',
+ 'gpt-3.5-turbo' : 'chinchilla'
+}
+```
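+the value on the right is poe.com's internal name for that model.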
+
+#### !! new: bot creation
+
+```python
+# import quora (poe) package
import quora
-# quora.Account.create
-# quora.Completion.create
-# quora.StreamCompletion.create
+# create account
+# make sure to set enable_bot_creation to True
+token = quora.Account.create(logging = True, enable_bot_creation = True)
-[...]
+model = quora.Model.create(
+ token = token,
+ model = 'gpt-3.5-turbo', # or claude-instant-v1.0
+ system_prompt = 'you are ChatGPT a large language model ...'
+)
-```
+print(model.name) # gptx....
-#### Create Token (3-6s)
-```python
-token = quora.Account.create(logging = True)
-print('token', token)
-```
-
-#### Streaming Response
-```python
-
-for response in quora.StreamingCompletion.create(model = 'gpt-4',
- prompt = 'hello world',
- token = token):
+# streaming response
+for response in quora.StreamingCompletion.create(
+ custom_model = model.name,
+    prompt = 'hello world',
+    token = token):
     print(response.completion.choices[0].text, end="", flush=True)
```
#### Normal Response:
@@ -84,8 +94,6 @@ response = quora.Completion.create(model = 'gpt-4',
print(response.completion.choices[0].text)
```
-
-
### Example: `t3nsor` (use like openai pypi package)
```python
diff --git a/quora/__init__.py b/quora/__init__.py
index 5cae5905..175077ab 100644
--- a/quora/__init__.py
+++ b/quora/__init__.py
@@ -3,11 +3,12 @@ from quora.mail import Mail
from requests import Session
from re import search, findall
from json import loads
-from time import sleep, time
+from time import sleep
from pathlib import Path
-from random import choice
+from random import choice, choices, randint
+from string import ascii_letters, digits
from urllib import parse
-
+
class PoeResponse:
class Completion:
@@ -48,8 +49,88 @@ class PoeResponse:
def json(self) -> dict:
return self.response_dict
+
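+# holds the bot fields returned by the poeBotCreate mutation (id, display name, daily message limit, deletion state)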
+class ModelResponse:
+ def __init__(self, json_response: dict) -> None:
+ self.id = json_response['data']['poeBotCreate']['bot']['id']
+ self.name = json_response['data']['poeBotCreate']['bot']['displayName']
+ self.limit = json_response['data']['poeBotCreate']['bot']['messageLimit']['dailyLimit']
+ self.deleted = json_response['data']['poeBotCreate']['bot']['deletionState']
+
+class Model:
+ def create(
+ token: str,
+ model: str = 'gpt-3.5-turbo', # claude-instant
+        system_prompt: str = 'You are ChatGPT, a large language model developed by OpenAI. Answer as concisely as possible',
+ description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
+ handle: str = None) -> ModelResponse:
+
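+        # bot creation currently supports these two base models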
+ models = {
+ 'gpt-3.5-turbo' : 'chinchilla',
+ 'claude-instant-v1.0': 'a2'
+ }
+
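+        # fall back to a random 'gptx' handle when none is supplied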
+ if not handle:
+ handle = f'gptx{randint(1111111, 9999999)}'
+
+ client = Session()
+ client.cookies['p-b'] = token
+
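+        # poe's settings endpoint provides the formkey and tchannel data required on every gql_POST request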
+ settings = client.get('https://poe.com/api/settings').json()
+
+ client.headers = {
+ "host" : "poe.com",
+ "origin" : "https://poe.com",
+ "referer" : "https://poe.com/",
+ "content-type" : "application/json",
+ "poe-formkey" : settings['formkey'],
+ "poe-tchannel" : settings['tchannelData']['channel'],
+ "user-agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
+ "connection" : "keep-alive",
+ "sec-ch-ua" : "\"Chromium\";v=\"112\", \"Google Chrome\";v=\"112\", \"Not:A-Brand\";v=\"99\"",
+ "sec-ch-ua-mobile" : "?0",
+ "sec-ch-ua-platform": "\"macOS\"",
+ "sec-fetch-site" : "same-origin",
+ "sec-fetch-mode" : "cors",
+ "sec-fetch-dest" : "empty",
+ "accept" : "*/*",
+ "accept-encoding" : "gzip, deflate, br",
+ "accept-language" : "en-GB,en-US;q=0.9,en;q=0.8",
+ }
+
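+        # create the bot through poe's CreateBotMain_poeBotCreate_Mutation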
+ response = client.post("https://poe.com/api/gql_POST", json = {
+ 'queryName': 'CreateBotMain_poeBotCreate_Mutation',
+ 'variables': {
+ 'model' : models[model],
+ 'handle' : handle,
+ 'prompt' : system_prompt,
+ 'isPromptPublic' : True,
+ 'introduction' : '',
+ 'description' : description,
+ 'profilePictureUrl' : 'https://qph.fs.quoracdn.net/main-qimg-24e0b480dcd946e1cc6728802c5128b6',
+ 'apiUrl' : None,
+ 'apiKey' : ''.join(choices(ascii_letters + digits, k = 32)),
+ 'isApiBot' : False,
+ 'hasLinkification' : False,
+ 'hasMarkdownRendering' : False,
+ 'hasSuggestedReplies' : False,
+ 'isPrivateBot' : False
+ },
+ 'query': 'mutation CreateBotMain_poeBotCreate_Mutation(\n $model: String!\n $handle: String!\n $prompt: String!\n $isPromptPublic: Boolean!\n $introduction: String!\n $description: String!\n $profilePictureUrl: String\n $apiUrl: String\n $apiKey: String\n $isApiBot: Boolean\n $hasLinkification: Boolean\n $hasMarkdownRendering: Boolean\n $hasSuggestedReplies: Boolean\n $isPrivateBot: Boolean\n) {\n poeBotCreate(model: $model, handle: $handle, promptPlaintext: $prompt, isPromptPublic: $isPromptPublic, introduction: $introduction, description: $description, profilePicture: $profilePictureUrl, apiUrl: $apiUrl, apiKey: $apiKey, isApiBot: $isApiBot, hasLinkification: $hasLinkification, hasMarkdownRendering: $hasMarkdownRendering, hasSuggestedReplies: $hasSuggestedReplies, isPrivateBot: $isPrivateBot) {\n status\n bot {\n id\n ...BotHeader_bot\n }\n }\n}\n\nfragment BotHeader_bot on Bot {\n displayName\n messageLimit {\n dailyLimit\n }\n ...BotImage_bot\n ...BotLink_bot\n ...IdAnnotation_node\n ...botHelpers_useViewerCanAccessPrivateBot\n ...botHelpers_useDeletion_bot\n}\n\nfragment BotImage_bot on Bot {\n displayName\n ...botHelpers_useDeletion_bot\n ...BotImage_useProfileImage_bot\n}\n\nfragment BotImage_useProfileImage_bot on Bot {\n image {\n __typename\n ... on LocalBotImage {\n localName\n }\n ... on UrlBotImage {\n url\n }\n }\n ...botHelpers_useDeletion_bot\n}\n\nfragment BotLink_bot on Bot {\n displayName\n}\n\nfragment IdAnnotation_node on Node {\n __isNode: __typename\n id\n}\n\nfragment botHelpers_useDeletion_bot on Bot {\n deletionState\n}\n\nfragment botHelpers_useViewerCanAccessPrivateBot on Bot {\n isPrivateBot\n viewerIsCreator\n}\n',
+ })
+
+        if 'success' not in response.text:
+            raise Exception('''
+                Bot creation failed
+                !! Important !!
+                Bot creation is not enabled on this account,
+                please create the account with quora.Account.create(enable_bot_creation = True)
+            ''')
+
+ return ModelResponse(response.json())
+
class Account:
- def create(proxy: None or str = None, logging: bool = False):
+ def create(proxy: None or str = None, logging: bool = False, enable_bot_creation: bool = False):
client = Session()
client.proxies = {
@@ -133,6 +214,13 @@ class Account:
with open(Path(__file__).resolve().parent / 'cookies.txt', 'a') as f:
f.write(f'{token}\n')
+ if enable_bot_creation:
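+            # marking the multiplayer NUX as completed is what enables bot creation for this account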
+ client.post("https://poe.com/api/gql_POST", json = {
+ "queryName": "UserProfileConfigurePreviewModal_markMultiplayerNuxCompleted_Mutation",
+ "variables": {},
+ "query": "mutation UserProfileConfigurePreviewModal_markMultiplayerNuxCompleted_Mutation {\n markMultiplayerNuxCompleted {\n viewer {\n hasCompletedMultiplayerNux\n id\n }\n }\n}\n"
+ })
+
return token
def get():
@@ -142,17 +230,20 @@ class Account:
class StreamingCompletion:
def create(
model : str = 'gpt-4',
+ custom_model : str = None,
prompt: str = 'hello world',
token : str = ''):
models = {
'sage' : 'capybara',
'gpt-4' : 'beaver',
- 'claude+': 'a2_2',
- 'claude' : 'a2',
- 'gpt-3.5': 'chinchilla'
+ 'claude-v1.2' : 'a2_2',
+ 'claude-instant-v1.0' : 'a2',
+ 'gpt-3.5-turbo' : 'chinchilla'
}
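+        # a custom bot handle (from quora.Model.create) takes priority over the built-in model names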
+ _model = models[model] if not custom_model else custom_model
+
client = PoeClient(token)
-        for chunk in client.send_message(models[model], prompt):
+        for chunk in client.send_message(_model, prompt):
@@ -161,7 +252,7 @@ class StreamingCompletion:
'id' : chunk["messageId"],
'object' : 'text_completion',
'created': chunk['creationTime'],
- 'model' : models[model],
+ 'model' : _model,
'choices': [{
'text' : chunk["text_new"],
'index' : 0,
@@ -178,17 +269,20 @@ class StreamingCompletion:
class Completion:
def create(
model : str = 'gpt-4',
+ custom_model : str = None,
prompt: str = 'hello world',
token : str = ''):
models = {
'sage' : 'capybara',
'gpt-4' : 'beaver',
- 'claude+': 'a2_2',
- 'claude' : 'a2',
- 'gpt-3.5': 'chinchilla'
+ 'claude-v1.2' : 'a2_2',
+ 'claude-instant-v1.0' : 'a2',
+ 'gpt-3.5-turbo' : 'chinchilla'
}
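+        # same as in StreamingCompletion: a custom bot handle overrides the built-in model mapping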
+ _model = models[model] if not custom_model else custom_model
+
client = PoeClient(token)
-        for chunk in client.send_message(models[model], prompt):
+        for chunk in client.send_message(_model, prompt):
@@ -198,7 +292,7 @@ class Completion:
'id' : chunk["messageId"],
'object' : 'text_completion',
'created': chunk['creationTime'],
- 'model' : models[model],
+ 'model' : _model,
'choices': [{
'text' : chunk["text"],
'index' : 0,
diff --git a/quora/cookies.txt b/quora/cookies.txt
index 14d298a3..2734991f 100644
--- a/quora/cookies.txt
+++ b/quora/cookies.txt
@@ -8,3 +8,8 @@ pUEbtxobN_QUSpLIR8RGww==
9_dUWxKkHHhpQRSvCvBk2Q==
UV45rvGwUwi2qV9QdIbMcw==
cVIN0pK1Wx-F7zCdUxlYqA==
+UP2wQVds17VFHh6IfCQFrA==
+18eKr0ME2Tzifdfqat38Aw==
+FNgKEpc2r-XqWe0rHBfYpg==
+juCAh6kB0sUpXHvKik2woA==
+nBvuNYRLaE4xE4HuzBPiIQ==
diff --git a/requirements.txt b/requirements.txt
index 3d83438e..ef39ac10 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,3 @@
websocket-client
requests
-tls-client
+tls-client
\ No newline at end of file
diff --git a/testing/quora_test_2.py b/testing/quora_test_2.py
new file mode 100644
index 00000000..c51b8478
--- /dev/null
+++ b/testing/quora_test_2.py
@@ -0,0 +1,18 @@
+import quora
+
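+# create an account with bot creation enabled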
+token = quora.Account.create(logging = True, enable_bot_creation = True)
+
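+# create a custom gpt-3.5-turbo bot with its own system prompt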
+model = quora.Model.create(
+ token = token,
+ model = 'gpt-3.5-turbo', # or claude-instant-v1.0
+ system_prompt = 'you are ChatGPT a large language model ...'
+)
+
+print(model.name)
+
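+# stream a completion from the newly created bot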
+for response in quora.StreamingCompletion.create(
+ custom_model = model.name,
+    prompt = 'hello world',
+    token = token):
+
+    print(response.completion.choices[0].text, end="", flush=True)
\ No newline at end of file