Merge pull request #1958 from hlohaus/leech

Add needs auth to providers, Add PerplexityApi provider
H Lohaus committed 2024-05-16 20:06:57 +02:00 (committed by GitHub)
commit 0332c0c0dd
10 changed files with 73 additions and 44 deletions

View File

@@ -8,8 +8,7 @@ class DeepInfra(Openai):
     label = "DeepInfra"
     url = "https://deepinfra.com"
     working = True
-    needs_auth = False
-    has_auth = True
+    needs_auth = True
     supports_stream = True
     supports_message_history = True
     default_model = "meta-llama/Meta-Llama-3-70b-instruct"

View File

@@ -9,6 +9,7 @@ from ..image import to_bytes
 class Reka(AbstractProvider):
     url = "https://chat.reka.ai/"
     working = True
+    needs_auth = True
     supports_stream = True
     default_vision_model = "reka"
     cookies = {}
@@ -20,13 +21,12 @@ class Reka(AbstractProvider):
         messages: Messages,
         stream: bool,
         proxy: str = None,
-        timeout: int = 180,
         api_key: str = None,
         image: ImageType = None,
         **kwargs
     ) -> CreateResult:
         cls.proxy = proxy

         if not api_key:
             cls.cookies = get_cookies("chat.reka.ai")
             if not cls.cookies:
@@ -34,19 +34,19 @@ class Reka(AbstractProvider):
             elif "appSession" not in cls.cookies:
                 raise ValueError("No appSession found in cookies for chat.reka.ai, log in or provide bearer_auth")
             api_key = cls.get_access_token(cls)

         conversation = []
         for message in messages:
             conversation.append({
                 "type": "human",
                 "text": message["content"],
             })

         if image:
             image_url = cls.upload_image(cls, api_key, image)
             conversation[-1]["image_url"] = image_url
             conversation[-1]["media_type"] = "image"

         headers = {
             'accept': '*/*',
             'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
@@ -64,7 +64,7 @@ class Reka(AbstractProvider):
             'sec-fetch-site': 'same-origin',
             'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36',
         }

         json_data = {
             'conversation_history': conversation,
             'stream': True,
@@ -73,7 +73,7 @@ class Reka(AbstractProvider):
             'model_name': 'reka-core',
             'random_seed': int(time.time() * 1000),
         }

         tokens = ''

         response = requests.post('https://chat.reka.ai/api/chat',
@@ -82,11 +82,11 @@ class Reka(AbstractProvider):
         for completion in response.iter_lines():
             if b'data' in completion:
                 token_data = json.loads(completion.decode('utf-8')[5:])['text']

                 yield (token_data.replace(tokens, ''))

                 tokens = token_data

     def upload_image(cls, access_token, image: ImageType) -> str:
         boundary_token = os.urandom(8).hex()
@@ -120,7 +120,7 @@ class Reka(AbstractProvider):
             cookies=cls.cookies, headers=headers, proxies=cls.proxy, data=data.encode('latin-1'))

         return response.json()['media_url']

     def get_access_token(cls):
         headers = {
             'accept': '*/*',
@@ -141,8 +141,8 @@ class Reka(AbstractProvider):
         try:
             response = requests.get('https://chat.reka.ai/bff/auth/access_token',
                 cookies=cls.cookies, headers=headers, proxies=cls.proxy)

             return response.json()['accessToken']

         except Exception as e:
             raise ValueError(f"Failed to get access token: {e}, refresh your cookies / log in into chat.reka.ai")

View File

@@ -10,6 +10,7 @@ from ..errors import ResponseError, MissingAuthError
 class Replicate(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://replicate.com"
     working = True
+    needs_auth = True
     default_model = "meta/meta-llama-3-70b-instruct"
     model_aliases = {
         "meta-llama/Meta-Llama-3-70B-Instruct": default_model

View File

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import re
 import json
 import base64
@@ -42,7 +44,6 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
     ]
     model_aliases = {
         "claude-v2": "claude-2",
-        "gpt-4o": "gpt-4o",
     }
     _cookies = None
     _cookies_used = 0
@@ -185,15 +186,7 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
     @classmethod
     async def create_cookies(cls, client: StreamSession) -> Cookies:
         if not cls._telemetry_ids:
-            try:
-                cls._telemetry_ids = await get_telemetry_ids()
-            except RuntimeError as e:
-                if str(e) == "Event loop is closed":
-                    if debug.logging:
-                        print("Event loop is closed error occurred in create_cookies.")
-                else:
-                    raise
+            cls._telemetry_ids = await get_telemetry_ids()
         user_uuid = str(uuid.uuid4())
         telemetry_id = cls._telemetry_ids.pop()
         if debug.logging:

View File

@@ -59,7 +59,7 @@ class Gemini(AsyncGeneratorProvider):
     _cookies: Cookies = None

     @classmethod
-    async def nodriver_login(cls) -> AsyncIterator[str]:
+    async def nodriver_login(cls, proxy: str = None) -> AsyncIterator[str]:
         try:
             import nodriver as uc
         except ImportError:
@@ -71,7 +71,10 @@ class Gemini(AsyncGeneratorProvider):
             user_data_dir = None
         if debug.logging:
             print(f"Open nodriver with user_dir: {user_data_dir}")
-        browser = await uc.start(user_data_dir=user_data_dir)
+        browser = await uc.start(
+            user_data_dir=user_data_dir,
+            browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
+        )
         login_url = os.environ.get("G4F_LOGIN_URL")
         if login_url:
             yield f"Please login: [Google Gemini]({login_url})\n\n"
@@ -134,7 +137,7 @@ class Gemini(AsyncGeneratorProvider):
         ) as session:
             snlm0e = await cls.fetch_snlm0e(session, cls._cookies) if cls._cookies else None
             if not snlm0e:
-                async for chunk in cls.nodriver_login():
+                async for chunk in cls.nodriver_login(proxy):
                     yield chunk
                 if cls._cookies is None:
                     async for chunk in cls.webdriver_login(proxy):
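The interesting part of this change is how the proxy reaches the browser: it is not passed as a dedicated nodriver argument here, but translated into Chromium's `--proxy-server` switch via `browser_args`. The same construction in isolation (URL and proxy are placeholders):

```python
import nodriver as uc

async def open_with_proxy(url: str, proxy: str = None):
    # No proxy -> no extra Chromium flags; otherwise pass --proxy-server.
    browser = await uc.start(
        browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
    )
    return await browser.get(url)

# e.g. page = await open_with_proxy("https://gemini.google.com", "http://127.0.0.1:8080")
```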

View File

@@ -403,7 +403,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
             except NoValidHarFileError as e:
                 error = e
             if cls._api_key is None:
-                await cls.nodriver_access_token()
+                await cls.nodriver_access_token(proxy)
             if cls._api_key is None and cls.needs_auth:
                 raise error
             cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))
@@ -625,7 +625,7 @@ this.fetch = async (url, options) => {
         cls._update_cookie_header()

     @classmethod
-    async def nodriver_access_token(cls):
+    async def nodriver_access_token(cls, proxy: str = None):
         try:
             import nodriver as uc
         except ImportError:
@@ -637,7 +637,10 @@ this.fetch = async (url, options) => {
             user_data_dir = None
         if debug.logging:
             print(f"Open nodriver with user_dir: {user_data_dir}")
-        browser = await uc.start(user_data_dir=user_data_dir)
+        browser = await uc.start(
+            user_data_dir=user_data_dir,
+            browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
+        )
         page = await browser.get("https://chatgpt.com/")
         await page.select("[id^=headlessui-menu-button-]", 240)
         api_key = await page.evaluate(
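Since `nodriver_access_token` now receives the proxy, a `proxy=` argument supplied at the top level also covers the headless login step. A hedged usage sketch (the proxy URL is a placeholder):

```python
import g4f
from g4f.Provider import OpenaiChat

response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hi"}],
    provider=OpenaiChat,
    proxy="http://127.0.0.1:8080",  # placeholder; now also used by the nodriver login
)
print(response)
```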

View File

@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from .Openai import Openai
+from ...typing import AsyncResult, Messages
+
+class PerplexityApi(Openai):
+    label = "Perplexity API"
+    url = "https://www.perplexity.ai"
+    working = True
+    default_model = "llama-3-sonar-large-32k-online"
+    models = [
+        "llama-3-sonar-small-32k-chat",
+        "llama-3-sonar-small-32k-online",
+        "llama-3-sonar-large-32k-chat",
+        "llama-3-sonar-large-32k-online",
+        "llama-3-8b-instruct",
+        "llama-3-70b-instruct",
+        "mixtral-8x7b-instruct"
+    ]
+
+    @classmethod
+    def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_base: str = "https://api.perplexity.ai",
+        **kwargs
+    ) -> AsyncResult:
+        return super().create_async_generator(
+            model, messages, api_base=api_base, **kwargs
+        )
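The new provider simply points the OpenAI-compatible `Openai` base class at Perplexity's endpoint, so using it comes down to supplying a Perplexity API key. A minimal sketch, assuming the class is re-exported from `g4f.Provider` like the other `needs_auth` providers (the key is a placeholder):

```python
import g4f
from g4f.Provider import PerplexityApi

response = g4f.ChatCompletion.create(
    model="llama-3-sonar-large-32k-online",
    messages=[{"role": "user", "content": "Summarize today's AI news."}],
    provider=PerplexityApi,
    api_key="YOUR_PERPLEXITY_API_KEY",  # placeholder; issued in Perplexity's API settings
)
print(response)
```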

View File

@@ -7,4 +7,5 @@ from .Poe import Poe
 from .Openai import Openai
 from .Groq import Groq
 from .OpenRouter import OpenRouter
 from .OpenaiAccount import OpenaiAccount
+from .PerplexityApi import PerplexityApi

View File

@@ -88,36 +88,34 @@ async def get_telemetry_ids(proxy: str = None) -> list:
     except NoValidHarFileError as e:
         if debug.logging:
             logging.error(e)
-        if debug.logging:
-            logging.error('Getting telemetry_id for you.com with nodriver')
     try:
         from nodriver import start
     except ImportError:
         raise MissingRequirementsError('Add .har file from you.com or install "nodriver" package | pip install -U nodriver')
-    page = None
-    try:
-        browser = await start()
-        page = await browser.get("https://you.com")
+    if debug.logging:
+        logging.error('Getting telemetry_id for you.com with nodriver')
+    browser = page = None
+    try:
+        browser = await start(
+            browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
+        )
+        page = await browser.get("https://you.com")
         while not await page.evaluate('"GetTelemetryID" in this'):
             await page.sleep(1)

         async def get_telemetry_id():
             return await page.evaluate(
                 f'this.GetTelemetryID("{public_token}", "{telemetry_url}");',
                 await_promise=True
             )

         return [await get_telemetry_id()]
     finally:
         try:
             if page is not None:
                 await page.close()
             if browser is not None:
                 await browser.close()
         except Exception as e:
             if debug.logging:
                 logging.error(e)
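Beyond threading the proxy through, the rewritten fallback initializes `browser = page = None` before the `try` so the `finally` block can close whichever handle was actually created, even if `start()` or `get()` failed part-way. The same defensive pattern in isolation (URL and proxy are placeholders):

```python
import logging
import nodriver as uc

async def fetch_title(url: str, proxy: str = None) -> str:
    browser = page = None  # both names exist for the finally block even on early failure
    try:
        browser = await uc.start(
            browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
        )
        page = await browser.get(url)
        return await page.evaluate("document.title")
    finally:
        try:
            # Close only what was opened; log and swallow shutdown errors,
            # mirroring the provider's cleanup.
            if page is not None:
                await page.close()
            if browser is not None:
                await browser.close()
        except Exception as e:
            logging.error(e)
```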

View File

@@ -133,7 +133,7 @@ class NewBaseRetryProvider(BaseRetryProvider):
             if not stream:
                 yield await provider.create_async(model, messages, **kwargs)
             elif hasattr(provider, "create_async_generator"):
-                async for token in provider.create_async_generator(model, messages, stream, **kwargs):
+                async for token in provider.create_async_generator(model, messages, stream=stream, **kwargs):
                     yield token
             else:
                 for token in provider.create_completion(model, messages, stream, **kwargs):
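Passing `stream` by keyword matters because the async-generator providers do not share one positional layout: for many of them the third positional parameter is `proxy`, so a positional `stream` would silently bind to the wrong name. A hypothetical signature, for illustration only:

```python
# Hypothetical provider signature, for illustration only.
async def create_async_generator(model: str, messages: list, proxy: str = None,
                                 stream: bool = True, **kwargs):
    # A positional call create_async_generator(model, messages, True) would
    # bind True to proxy, not stream; stream=stream avoids that.
    yield f"stream={stream}, proxy={proxy}"
```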