Fix DuckDuckGo Provider issues

Fix PerplexityLabs, FlowGpt Provider
Update Bing, Gemini Provider
Heiner Lohaus 2024-04-17 10:33:23 +02:00
parent 8ec942040c
commit a2b5446b2e
11 changed files with 122 additions and 27 deletions

View File

@ -107,7 +107,7 @@ def get_default_cookies():
'PPLState' : '1',
'KievRPSSecAuth': '',
'SUID' : '',
'SRCHUSR' : '',
'SRCHUSR' : f'DOB={date.today().strftime("%Y%m%d")}&T={int(time.time())}',
'SRCHHPGUSR' : f'HV={int(time.time())}',
'BCP' : 'AD=1&AL=1&SM=1',
'_Rwho' : f'u=d&ts={date.today().isoformat()}',
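
For reference, a standalone sketch of how these new default cookie values are built; the helper name is hypothetical, the format strings are taken from the hunk above:

import time
from datetime import date

def default_search_cookies() -> dict:
    # Hypothetical helper mirroring the defaults above: SRCHUSR now carries a
    # DOB date stamp plus a Unix timestamp, SRCHHPGUSR a visit timestamp.
    now = int(time.time())
    today = date.today()
    return {
        "SRCHUSR": f"DOB={today.strftime('%Y%m%d')}&T={now}",
        "SRCHHPGUSR": f"HV={now}",
        "BCP": "AD=1&AL=1&SM=1",
        "_Rwho": f"u=d&ts={today.isoformat()}",
    }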

View File

@ -6,6 +6,7 @@ from aiohttp import ClientSession
from ..typing import Messages, AsyncResult
from ..requests import get_args_from_browser
from ..webdriver import WebDriver
from .base_provider import AsyncGeneratorProvider
from .helper import get_random_string
@ -23,9 +24,10 @@ class Chatgpt4Online(AsyncGeneratorProvider):
model: str,
messages: Messages,
proxy: str = None,
webdriver: WebDriver = None,
**kwargs
) -> AsyncResult:
args = get_args_from_browser(f"{cls.url}/chat/", proxy=proxy)
args = get_args_from_browser(f"{cls.url}/chat/", webdriver, proxy=proxy)
async with ClientSession(**args) as session:
if not cls._wpnonce:
async with session.get(f"{cls.url}/chat/", proxy=proxy) as response:

View File

@ -4,8 +4,10 @@ import json
import aiohttp
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector
from ..typing import AsyncResult, Messages
from ..requests.raise_for_status import raise_for_status
from ..providers.conversation import BaseConversation
class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://duckduckgo.com/duckchat"
@ -42,23 +44,39 @@ class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin):
cls,
model: str,
messages: Messages,
proxy: str = None,
connector: aiohttp.BaseConnector = None,
conversation: Conversation = None,
return_conversation: bool = False,
**kwargs
) -> AsyncResult:
async with aiohttp.ClientSession(headers=cls.headers) as session:
async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response:
await raise_for_status(response)
vqd_4 = response.headers.get("x-vqd-4")
async with aiohttp.ClientSession(headers=cls.headers, connector=get_connector(connector, proxy)) as session:
if conversation is not None and len(messages) > 1:
vqd_4 = conversation.vqd_4
messages = [*conversation.messages, messages[-2], messages[-1]]
else:
async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response:
await raise_for_status(response)
vqd_4 = response.headers.get("x-vqd-4")
messages = [messages[-1]]
payload = {
'model': cls.get_model(model),
'messages': messages
}
async with session.post(cls.chat_url, json=payload, headers={"x-vqd-4": vqd_4}) as response:
await raise_for_status(response)
if return_conversation:
yield Conversation(response.headers.get("x-vqd-4"), messages)
async for line in response.content:
if line.startswith(b"data: "):
chunk = line[6:]
if chunk.startswith(b"[DONE]"):
break
data = json.loads(chunk)
if "message" in data:
yield data["message"]
if "message" in data and data["message"]:
yield data["message"]
class Conversation(BaseConversation):
def __init__(self, vqd_4: str, messages: Messages) -> None:
self.vqd_4 = vqd_4
self.messages = messages
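
A hedged sketch of how the new conversation handling could be driven from client code; the import path and model name are assumptions, not part of the diff:

import asyncio
from g4f.Provider.DuckDuckGo import DuckDuckGo, Conversation  # import path assumed

async def demo():
    history = [{"role": "user", "content": "Hello"}]
    conversation = None
    # First turn: return_conversation=True makes the provider yield a
    # Conversation holding the x-vqd-4 token before streaming the answer.
    async for chunk in DuckDuckGo.create_async_generator(
        "gpt-3.5-turbo", history, return_conversation=True  # model name assumed
    ):
        if isinstance(chunk, Conversation):
            conversation = chunk
        else:
            print(chunk, end="")
    # Follow-up turn: with a conversation and more than one message, the
    # provider reuses conversation.vqd_4 and sends conversation.messages
    # plus the last two entries of the new history.
    history += [
        {"role": "assistant", "content": "..."},
        {"role": "user", "content": "Tell me more."},
    ]
    async for chunk in DuckDuckGo.create_async_generator(
        "gpt-3.5-turbo", history, conversation=conversation
    ):
        if not isinstance(chunk, Conversation):
            print(chunk, end="")

asyncio.run(demo())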

View File

@ -1,10 +1,13 @@
from __future__ import annotations
import json
import time
import hashlib
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_random_hex, get_random_string
from ..requests.raise_for_status import raise_for_status
class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
@ -17,9 +20,17 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
models = [
"gpt-3.5-turbo",
"gpt-3.5-long",
"gpt-4-turbo",
"google-gemini",
"claude-instant",
"claude-v1",
"claude-v2",
"llama2-13b"
"llama2-13b",
"mythalion-13b",
"pygmalion-13b",
"chronos-hermes-13b",
"Mixtral-8x7B",
"Dolphin-2.6-8x7B"
]
model_aliases = {
"gemini": "google-gemini",
@ -36,6 +47,12 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
**kwargs
) -> AsyncResult:
model = cls.get_model(model)
timestamp = str(int(time.time()))
auth = "Bearer null"
nonce = get_random_hex()
data = f"{timestamp}-{nonce}-{auth}"
signature = hashlib.md5(data.encode()).hexdigest()
headers = {
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:122.0) Gecko/20100101 Firefox/122.0",
"Accept": "*/*",
@ -49,7 +66,12 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-site",
"TE": "trailers"
"TE": "trailers",
"Authorization": auth,
"x-flow-device-id": f"f-{get_random_string(19)}",
"x-nonce": nonce,
"x-signature": signature,
"x-timestamp": timestamp
}
async with ClientSession(headers=headers) as session:
history = [message for message in messages[:-1] if message["role"] != "system"]
@ -69,7 +91,7 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
"generateImage": False,
"generateAudio": False
}
async with session.post("https://backend-k8s.flowgpt.com/v2/chat-anonymous", json=data, proxy=proxy) as response:
async with session.post("https://backend-k8s.flowgpt.com/v2/chat-anonymous-encrypted", json=data, proxy=proxy) as response:
await raise_for_status(response)
async for chunk in response.content:
if chunk.strip():
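
The notable part of this change is the new request signature. A minimal standalone sketch of the same scheme, with the secrets module standing in for the get_random_hex/get_random_string helpers:

import hashlib
import secrets
import time

def signed_flowgpt_headers() -> dict:
    # Same scheme as the hunk above: MD5 over "timestamp-nonce-auth",
    # with the pieces sent in separate headers alongside the signature.
    timestamp = str(int(time.time()))
    auth = "Bearer null"
    nonce = secrets.token_hex(16)                            # stand-in for get_random_hex()
    signature = hashlib.md5(f"{timestamp}-{nonce}-{auth}".encode()).hexdigest()
    return {
        "Authorization": auth,
        "x-flow-device-id": f"f-{secrets.token_hex(16)[:19]}",  # stand-in for get_random_string(19)
        "x-nonce": nonce,
        "x-signature": signature,
        "x-timestamp": timestamp,
    }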

View File

@ -2,14 +2,13 @@ from __future__ import annotations
import random
import json
from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from ..requests import StreamSession, raise_for_status
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector
API_URL = "https://labs-api.perplexity.ai/socket.io/"
WS_URL = "wss://labs-api.perplexity.ai/socket.io/"
API_URL = "https://www.perplexity.ai/socket.io/"
WS_URL = "wss://www.perplexity.ai/socket.io/"
class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://labs.perplexity.ai"
@ -35,7 +34,6 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
model: str,
messages: Messages,
proxy: str = None,
connector: BaseConnector = None,
**kwargs
) -> AsyncResult:
headers = {
@ -51,21 +49,22 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
"Sec-Fetch-Site": "same-site",
"TE": "trailers",
}
async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
async with StreamSession(headers=headers, proxies={"all": proxy}) as session:
t = format(random.getrandbits(32), "08x")
async with session.get(
f"{API_URL}?EIO=4&transport=polling&t={t}"
) as response:
await raise_for_status(response)
text = await response.text()
assert text.startswith("0")
sid = json.loads(text[1:])["sid"]
post_data = '40{"jwt":"anonymous-ask-user"}'
async with session.post(
f"{API_URL}?EIO=4&transport=polling&t={t}&sid={sid}",
data=post_data
) as response:
assert await response.text() == "OK"
await raise_for_status(response)
assert await response.text() == "OK"
async with session.ws_connect(f"{WS_URL}?EIO=4&transport=websocket&sid={sid}", autoping=False) as ws:
await ws.send_str("2probe")
assert(await ws.receive_str() == "3probe")
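
For orientation, a hedged sketch of the Engine.IO handshake the hunk performs, written against plain aiohttp rather than StreamSession; endpoints and payloads are copied from the diff:

import json
import random
import aiohttp

API_URL = "https://www.perplexity.ai/socket.io/"

async def engineio_handshake(session: aiohttp.ClientSession) -> str:
    # Step 1: long-polling handshake; the body is "0" followed by JSON containing the sid.
    t = format(random.getrandbits(32), "08x")
    async with session.get(f"{API_URL}?EIO=4&transport=polling&t={t}") as response:
        text = await response.text()
        sid = json.loads(text[1:])["sid"]
    # Step 2: send the anonymous auth packet on the same sid; the server replies "OK".
    async with session.post(
        f"{API_URL}?EIO=4&transport=polling&t={t}&sid={sid}",
        data='40{"jwt":"anonymous-ask-user"}',
    ) as response:
        assert await response.text() == "OK"
    # Step 3: the caller upgrades to WS_URL with transport=websocket&sid=<sid>
    # and completes the "2probe"/"3probe" exchange shown above.
    return sid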

View File

@ -34,9 +34,9 @@ async def create_conversation(session: ClientSession, headers: dict, tone: str)
Conversation: An instance representing the created conversation.
"""
if tone == "Copilot":
url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1686.0"
url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1690.0"
else:
url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1686.0"
url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1690.0"
async with session.get(url, headers=headers) as response:
if response.status == 404:
raise RateLimitError("Response 404: Do less requests and reuse conversations")

View File

@ -60,6 +60,7 @@ class Gemini(AsyncGeneratorProvider):
model: str,
messages: Messages,
proxy: str = None,
api_key: str = None,
cookies: Cookies = None,
connector: BaseConnector = None,
image: ImageType = None,
@ -67,6 +68,10 @@ class Gemini(AsyncGeneratorProvider):
**kwargs
) -> AsyncResult:
prompt = format_prompt(messages)
if api_key is not None:
if cookies is None:
cookies = {}
cookies["__Secure-1PSID"] = api_key
cookies = cookies if cookies else get_cookies(".google.com", False, True)
base_connector = get_connector(connector, proxy)
async with ClientSession(
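
A short sketch of what the new api_key parameter enables; the import path is an assumption and the cookie value is a placeholder:

from g4f.Provider import Gemini  # import path assumed

async def ask_gemini(prompt: str, psid_cookie: str):
    # The provider now maps api_key onto the "__Secure-1PSID" cookie, so the
    # raw cookie value can be passed where an API key would normally go.
    async for chunk in Gemini.create_async_generator(
        "", [{"role": "user", "content": prompt}], api_key=psid_cookie
    ):
        print(chunk, end="")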

View File

@ -38,6 +38,9 @@
</script>
<script src="https://unpkg.com/gpt-tokenizer/dist/cl100k_base.js" async></script>
<script src="/static/js/text_to_speech/index.js" async></script>
<!--
<script src="/static/js/whisper-web/index.js" async></script>
-->
<script>
const user_image = '<img src="/static/img/user.png" alt="your avatar">';
const gpt_image = '<img src="/static/img/gpt.png" alt="your avatar">';
@ -89,6 +92,7 @@
</div>
<div class="settings hidden">
<div class="paper">
<h3>Settings</h3>
<div class="field">
<span class="label">Web Access</span>
<input type="checkbox" id="switch" />
@ -127,7 +131,7 @@
</div>
<div class="field box">
<label for="Gemini-api_key" class="label" title="">Gemini:</label>
<textarea id="Gemini-api_key" name="Gemini[api_key]" placeholder="Cookies"></textarea>
<textarea id="Gemini-api_key" name="Gemini[api_key]" placeholder="&quot;__Secure-1PSID&quot; cookie"></textarea>
</div>
<div class="field box">
<label for="GeminiPro-api_key" class="label" title="">GeminiPro:</label>

View File

@ -602,7 +602,8 @@ label[for="camera"] {
width: 100%;
}
.buttons input:checked+label:after {
.buttons input:checked+label:after,
.settings input:checked+label:after {
left: calc(100% - 5px - 20px);
}
@ -844,13 +845,17 @@ ul {
max-width: none;
}
.settings h3 {
padding-left: 50px;
}
.buttons {
align-items: flex-start;
flex-wrap: wrap;
gap: 15px;
}
.mobile-sidebar {
display: flex;
}
@ -1099,10 +1104,15 @@ a:-webkit-any-link {
width: 100%;
display: flex;
flex-direction: column;
overflow: auto;
}
.settings h3 {
padding-left: 10px;
padding-top: 10px;
}
.settings .paper {
overflow: auto;
flex-direction: column;
min-width: 400px;
}

View File

@ -49,6 +49,9 @@ class Api():
else provider.__name__) +
(" (WebDriver)"
if "webdriver" in provider.get_parameters()
else "") +
(" (Auth)"
if provider.needs_auth
else "")
for provider in __providers__
if provider.working

View File

@ -6,6 +6,11 @@ try:
has_curl_mime = True
except ImportError:
has_curl_mime = False
try:
from curl_cffi.requests import CurlWsFlag
has_curl_ws = True
except ImportError:
has_curl_ws = False
from typing import AsyncGenerator, Any
from functools import partialmethod
import json
@ -73,6 +78,12 @@ class StreamSession(AsyncSession):
"""Create and return a StreamResponse object for the given HTTP request."""
return StreamResponse(super().request(method, url, stream=True, **kwargs))
def ws_connect(self, url, *args, **kwargs):
return WebSocket(self, url)
def _ws_connect(self, url):
return super().ws_connect(url)
# Defining HTTP methods as partial methods of the request method.
head = partialmethod(request, "HEAD")
get = partialmethod(request, "GET")
@ -88,4 +99,25 @@ if has_curl_mime:
else:
class FormData():
def __init__(self) -> None:
raise RuntimeError("CurlMimi in curl_cffi is missing | pip install -U g4f[curl_cffi]")
class WebSocket():
def __init__(self, session, url) -> None:
if not has_curl_ws:
raise RuntimeError("CurlWsFlag in curl_cffi is missing | pip install -U g4f[curl_cffi]")
self.session: StreamSession = session
self.url: str = url
async def __aenter__(self):
self.inner = await self.session._ws_connect(self.url)
return self
async def __aexit__(self, *args):
self.inner.aclose()
async def receive_str(self) -> str:
bytes, _ = await self.inner.arecv()
return bytes.decode(errors="ignore")
async def send_str(self, data: str):
await self.inner.asend(data.encode(), CurlWsFlag.TEXT)
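
A hedged usage sketch of the new ws_connect support on StreamSession; the import mirrors how StreamSession is used elsewhere in this commit, and the echo URL is illustrative only:

import asyncio
from g4f.requests import StreamSession

async def demo():
    async with StreamSession() as session:
        # ws_connect returns the WebSocket wrapper; entering the context
        # performs the real curl_cffi connect via _ws_connect.
        async with session.ws_connect("wss://echo.websocket.events") as ws:  # illustrative URL
            await ws.send_str("hello")
            print(await ws.receive_str())

asyncio.run(demo())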