Fix issue with get_cookies from nodriver in OpenaiChat

This commit is contained in:
Heiner Lohaus 2024-12-18 02:28:26 +01:00
parent bbb858249b
commit af677717ee
7 changed files with 66 additions and 29 deletions

View File

@ -3,10 +3,16 @@ from __future__ import annotations
import asyncio import asyncio
import json import json
try:
import nodriver
has_nodriver = True
except ImportError:
has_nodriver = False
from ..typing import AsyncResult, Messages, Cookies from ..typing import AsyncResult, Messages, Cookies
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin, get_running_loop from .base_provider import AsyncGeneratorProvider, ProviderModelMixin, get_running_loop
from ..requests import Session, StreamSession, get_args_from_nodriver, raise_for_status, merge_cookies, DEFAULT_HEADERS from ..requests import Session, StreamSession, get_args_from_nodriver, raise_for_status, merge_cookies, DEFAULT_HEADERS
from ..errors import ResponseStatusError, MissingRequirementsError from ..errors import ResponseStatusError
class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin): class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
label = "Cloudflare AI" label = "Cloudflare AI"
@ -35,12 +41,15 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
def get_models(cls) -> str: def get_models(cls) -> str:
if not cls.models: if not cls.models:
if cls._args is None: if cls._args is None:
get_running_loop(check_nested=True) if has_nodriver:
args = get_args_from_nodriver(cls.url) get_running_loop(check_nested=True)
cls._args = asyncio.run(args) args = get_args_from_nodriver(cls.url)
cls._args = asyncio.run(args)
else:
cls._args = {"headers": DEFAULT_HEADERS, "cookies": {}}
with Session(**cls._args) as session: with Session(**cls._args) as session:
response = session.get(cls.models_url) response = session.get(cls.models_url)
cls._args["cookies"] = merge_cookies(cls._args["cookies"] , response) cls._args["cookies"] = merge_cookies(cls._args["cookies"], response)
try: try:
raise_for_status(response) raise_for_status(response)
except ResponseStatusError: except ResponseStatusError:
@ -62,10 +71,10 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
**kwargs **kwargs
) -> AsyncResult: ) -> AsyncResult:
if cls._args is None: if cls._args is None:
try: if has_nodriver:
cls._args = await get_args_from_nodriver(cls.url, proxy, timeout, cookies) cls._args = await get_args_from_nodriver(cls.url, proxy, timeout, cookies)
except MissingRequirementsError: else:
cls._args = {"headers": DEFAULT_HEADERS, cookies: {}} cls._args = {"headers": DEFAULT_HEADERS, "cookies": {}}
model = cls.get_model(model) model = cls.get_model(model)
data = { data = {
"messages": messages, "messages": messages,

View File

@ -1,5 +1,6 @@
from __future__ import annotations from __future__ import annotations
import os
import json import json
import asyncio import asyncio
import base64 import base64
@ -76,10 +77,13 @@ class Copilot(AbstractProvider, ProviderModelMixin):
cls._access_token, cls._cookies = readHAR(cls.url) cls._access_token, cls._cookies = readHAR(cls.url)
except NoValidHarFileError as h: except NoValidHarFileError as h:
debug.log(f"Copilot: {h}") debug.log(f"Copilot: {h}")
try: if has_nodriver:
login_url = os.environ.get("G4F_LOGIN_URL")
if login_url:
yield f"[Login to {cls.label}]({login_url})\n\n"
get_running_loop(check_nested=True) get_running_loop(check_nested=True)
cls._access_token, cls._cookies = asyncio.run(get_access_token_and_cookies(cls.url, proxy)) cls._access_token, cls._cookies = asyncio.run(get_access_token_and_cookies(cls.url, proxy))
except MissingRequirementsError: else:
raise h raise h
debug.log(f"Copilot: Access token: {cls._access_token[:7]}...{cls._access_token[-5:]}") debug.log(f"Copilot: Access token: {cls._access_token[:7]}...{cls._access_token[-5:]}")
websocket_url = f"{websocket_url}&accessToken={quote(cls._access_token)}" websocket_url = f"{websocket_url}&accessToken={quote(cls._access_token)}"

View File

@ -81,7 +81,7 @@ class Gemini(AsyncGeneratorProvider, ProviderModelMixin):
browser = await get_nodriver(proxy=proxy, user_data_dir="gemini") browser = await get_nodriver(proxy=proxy, user_data_dir="gemini")
login_url = os.environ.get("G4F_LOGIN_URL") login_url = os.environ.get("G4F_LOGIN_URL")
if login_url: if login_url:
yield f"Please login: [Google Gemini]({login_url})\n\n" yield f"[Login to {cls.label}]({login_url})\n\n"
page = await browser.get(f"{cls.url}/app") page = await browser.get(f"{cls.url}/app")
await page.select("div.ql-editor.textarea", 240) await page.select("div.ql-editor.textarea", 240)
cookies = {} cookies = {}

View File

@ -1,5 +1,6 @@
from __future__ import annotations from __future__ import annotations
import os
import re import re
import asyncio import asyncio
import uuid import uuid
@ -8,6 +9,7 @@ import base64
import time import time
import requests import requests
import random import random
from typing import AsyncIterator
from copy import copy from copy import copy
try: try:
@ -314,7 +316,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
RuntimeError: If an error occurs during processing. RuntimeError: If an error occurs during processing.
""" """
if cls.needs_auth: if cls.needs_auth:
await cls.login(proxy) async for message in cls.login(proxy):
yield message
async with StreamSession( async with StreamSession(
proxy=proxy, proxy=proxy,
impersonate="chrome", impersonate="chrome",
@ -504,7 +507,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
@classmethod @classmethod
async def synthesize(cls, params: dict) -> AsyncIterator[bytes]: async def synthesize(cls, params: dict) -> AsyncIterator[bytes]:
await cls.login() async for _ in cls.login():
pass
async with StreamSession( async with StreamSession(
impersonate="chrome", impersonate="chrome",
timeout=0 timeout=0
@ -519,23 +523,27 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
yield chunk yield chunk
@classmethod @classmethod
async def login(cls, proxy: str = None): async def login(cls, proxy: str = None) -> AsyncIterator[str]:
if cls._expires is not None and cls._expires < time.time(): if cls._expires is not None and cls._expires < time.time():
cls._headers = cls._api_key = None cls._headers = cls._api_key = None
try: try:
await get_request_config(proxy) await get_request_config(proxy)
cls._create_request_args(RequestConfig.cookies, RequestConfig.headers) cls._create_request_args(RequestConfig.cookies, RequestConfig.headers)
cls._set_api_key(RequestConfig.access_token) if RequestConfig.access_token is not None:
cls._set_api_key(RequestConfig.access_token)
except NoValidHarFileError: except NoValidHarFileError:
if has_nodriver: if has_nodriver:
if cls._api_key is None: if cls._api_key is None:
login_url = os.environ.get("G4F_LOGIN_URL")
if login_url:
yield f"[Login to {cls.label}]({login_url})\n\n"
await cls.nodriver_auth(proxy) await cls.nodriver_auth(proxy)
else: else:
raise raise
@classmethod @classmethod
async def nodriver_auth(cls, proxy: str = None): async def nodriver_auth(cls, proxy: str = None):
browser = await get_nodriver(proxy=proxy, user_data_dir="chatgpt") browser = await get_nodriver(proxy=proxy)
page = browser.main_tab page = browser.main_tab
def on_request(event: nodriver.cdp.network.RequestWillBeSent): def on_request(event: nodriver.cdp.network.RequestWillBeSent):
if event.request.url == start_url or event.request.url.startswith(conversation_url): if event.request.url == start_url or event.request.url.startswith(conversation_url):
@ -548,7 +556,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
if "OpenAI-Sentinel-Turnstile-Token" in event.request.headers: if "OpenAI-Sentinel-Turnstile-Token" in event.request.headers:
RequestConfig.turnstile_token = event.request.headers["OpenAI-Sentinel-Turnstile-Token"] RequestConfig.turnstile_token = event.request.headers["OpenAI-Sentinel-Turnstile-Token"]
if "Authorization" in event.request.headers: if "Authorization" in event.request.headers:
cls._set_api_key(event.request.headers["Authorization"].split()[-1]) cls._api_key = event.request.headers["Authorization"].split()[-1]
elif event.request.url == arkose_url: elif event.request.url == arkose_url:
RequestConfig.arkose_request = arkReq( RequestConfig.arkose_request = arkReq(
arkURL=event.request.url, arkURL=event.request.url,
@ -569,7 +577,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
if body: if body:
match = re.search(r'"accessToken":"(.*?)"', body) match = re.search(r'"accessToken":"(.*?)"', body)
if match: if match:
cls._set_api_key(match.group(1)) cls._api_key = match.group(1)
break break
await asyncio.sleep(1) await asyncio.sleep(1)
while True: while True:
@ -577,10 +585,11 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
break break
await asyncio.sleep(1) await asyncio.sleep(1)
RequestConfig.data_build = await page.evaluate("document.documentElement.getAttribute('data-build')") RequestConfig.data_build = await page.evaluate("document.documentElement.getAttribute('data-build')")
for c in await page.send(nodriver.cdp.network.get_cookies([cls.url])): for c in await page.send(get_cookies([cls.url])):
RequestConfig.cookies[c.name] = c.value RequestConfig.cookies[c["name"]] = c["value"]
await page.close() await page.close()
cls._create_request_args(RequestConfig.cookies, RequestConfig.headers, user_agent=user_agent) cls._create_request_args(RequestConfig.cookies, RequestConfig.headers, user_agent=user_agent)
cls._set_api_key(cls._api_key)
@staticmethod @staticmethod
def get_default_headers() -> dict: def get_default_headers() -> dict:
@ -623,4 +632,17 @@ class Conversation(BaseConversation):
self.conversation_id = conversation_id self.conversation_id = conversation_id
self.message_id = message_id self.message_id = message_id
self.finish_reason = finish_reason self.finish_reason = finish_reason
self.is_recipient = False self.is_recipient = False
def get_cookies(
    urls: list[str] = None
):
    """Build and yield a CDP ``Network.getCookies`` command, returning the cookies.

    Generator-based command in the nodriver/CDP style: it yields the command
    dict to the driver (via ``page.send``), receives the raw CDP response back
    through ``send()``, and returns the ``cookies`` list from that response.

    Args:
        urls: Optional list of URLs to restrict the cookie lookup to.
              When ``None``, the ``urls`` parameter is omitted and the
              browser returns cookies for the current browsing context.

    Returns:
        The ``cookies`` entries from the CDP response (list of dicts with
        ``name``/``value`` keys, among others).
    """
    params = {}
    if urls is not None:
        # Copy so later mutation of the caller's list cannot affect the command.
        params['urls'] = list(urls)
    cmd_dict = {
        'method': 'Network.getCookies',
        'params': params,
    }
    # Renamed from `json` to avoid shadowing the stdlib json module
    # imported at the top of this file.
    response = yield cmd_dict
    return response['cookies']

View File

@ -81,6 +81,7 @@ body:not(.white) a:visited{
transform: translate(-50%, -50%); transform: translate(-50%, -50%);
filter: blur(var(--blur)) opacity(var(--opacity)); filter: blur(var(--blur)) opacity(var(--opacity));
animation: zoom_gradient 6s infinite alternate; animation: zoom_gradient 6s infinite alternate;
display: none;
} }
@keyframes zoom_gradient { @keyframes zoom_gradient {
@ -116,6 +117,8 @@ body:not(.white) a:visited{
font-weight: 500; font-weight: 500;
background-color: rgba(0, 0, 0, 0.5); background-color: rgba(0, 0, 0, 0.5);
color: var(--colour-3); color: var(--colour-3);
border: var(--colour-1) 1px solid;
border-radius: var(--border-radius-1);
} }
.white .new_version { .white .new_version {
@ -174,10 +177,6 @@ body:not(.white) a:visited{
color: var(--user-input) color: var(--user-input)
} }
body.white .gradient{
display: none;
}
.conversations { .conversations {
display: flex; display: flex;
flex-direction: column; flex-direction: column;
@ -826,6 +825,9 @@ select:hover,
.count_total { .count_total {
padding-left: 98px; padding-left: 98px;
} }
body:not(.white) .gradient{
display: block;
}
} }
.input-box { .input-box {

View File

@ -601,6 +601,7 @@ const ask_gpt = async (message_id, message_index = -1, regenerate = false, provi
api_key: api_key, api_key: api_key,
ignored: ignored, ignored: ignored,
}, files, message_id); }, files, message_id);
if (content_map.inner.dataset.timeout) clearTimeout(content_map.inner.dataset.timeout);
if (!error_storage[message_id]) { if (!error_storage[message_id]) {
html = markdown_render(message_storage[message_id]); html = markdown_render(message_storage[message_id]);
content_map.inner.innerHTML = html; content_map.inner.innerHTML = html;
@ -629,10 +630,9 @@ const ask_gpt = async (message_id, message_index = -1, regenerate = false, provi
regenerate regenerate
); );
await safe_load_conversation(window.conversation_id, message_index == -1); await safe_load_conversation(window.conversation_id, message_index == -1);
} else {
let cursorDiv = message_el.querySelector(".cursor");
if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv);
} }
let cursorDiv = message_el.querySelector(".cursor");
if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv);
if (message_index == -1) { if (message_index == -1) {
await scroll_to_bottom(); await scroll_to_bottom();
} }

View File

@ -181,7 +181,7 @@ class Api:
def handle_provider(self, provider_handler, model): def handle_provider(self, provider_handler, model):
if isinstance(provider_handler, IterListProvider) and provider_handler.last_provider is not None: if isinstance(provider_handler, IterListProvider) and provider_handler.last_provider is not None:
provider_handler = provider_handler.last_provider provider_handler = provider_handler.last_provider
if hasattr(provider_handler, "last_model") and provider_handler.last_model is not None: if not model and hasattr(provider_handler, "last_model") and provider_handler.last_model is not None:
model = provider_handler.last_model model = provider_handler.last_model
return self._format_json("provider", {**provider_handler.get_dict(), "model": model}) return self._format_json("provider", {**provider_handler.get_dict(), "model": model})