2024-02-09 00:02:52 +03:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2024-02-09 05:31:05 +03:00
|
|
|
import os
|
2024-02-09 00:02:52 +03:00
|
|
|
import json
|
|
|
|
import random
|
|
|
|
import re
|
|
|
|
|
2024-04-05 17:16:12 +03:00
|
|
|
from aiohttp import ClientSession, BaseConnector
|
|
|
|
|
|
|
|
from ..helper import get_connector
|
2024-02-09 00:02:52 +03:00
|
|
|
|
2024-02-09 05:31:05 +03:00
|
|
|
try:
|
|
|
|
from selenium.webdriver.common.by import By
|
|
|
|
from selenium.webdriver.support.ui import WebDriverWait
|
|
|
|
from selenium.webdriver.support import expected_conditions as EC
|
|
|
|
except ImportError:
|
|
|
|
pass
|
|
|
|
|
2024-04-22 02:27:48 +03:00
|
|
|
from ... import debug
|
2024-05-15 03:27:51 +03:00
|
|
|
from ...typing import Messages, Cookies, ImageType, AsyncResult, AsyncIterator
|
2024-05-21 21:39:31 +03:00
|
|
|
from ..base_provider import AsyncGeneratorProvider, BaseConversation
|
2024-02-09 00:02:52 +03:00
|
|
|
from ..helper import format_prompt, get_cookies
|
2024-04-07 11:36:13 +03:00
|
|
|
from ...requests.raise_for_status import raise_for_status
|
2024-02-09 05:31:05 +03:00
|
|
|
from ...errors import MissingAuthError, MissingRequirementsError
|
2024-05-21 21:39:31 +03:00
|
|
|
from ...image import ImageResponse, to_bytes
|
2024-02-09 05:31:05 +03:00
|
|
|
from ...webdriver import get_browser, get_driver_cookies
|
2024-02-09 00:02:52 +03:00
|
|
|
|
|
|
|
# Base headers sent with every request to the Gemini web frontend.
REQUEST_HEADERS = {
    "authority": "gemini.google.com",
    "origin": "https://gemini.google.com",
    "referer": "https://gemini.google.com/",
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
    'x-same-domain': '1',
}
# "bl" build-label parameter; tied to a specific Gemini web release and
# typically needs bumping when Google deploys a new frontend build.
REQUEST_BL_PARAM = "boq_assistant-bard-web-server_20240519.16_p0"
# Streaming generate endpoint of the Bard/Gemini web UI.
REQUEST_URL = "https://gemini.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate"
# Resumable-upload endpoint used to attach images to a prompt.
UPLOAD_IMAGE_URL = "https://content-push.googleapis.com/upload/"
# Headers mirroring the web client's image-upload requests.
UPLOAD_IMAGE_HEADERS = {
    "authority": "content-push.googleapis.com",
    "accept": "*/*",
    "accept-language": "en-US,en;q=0.7",
    # Static basic-auth token used by the Gemini web client for uploads.
    "authorization": "Basic c2F2ZXM6cyNMdGhlNmxzd2F2b0RsN3J1d1U=",
    "content-type": "application/x-www-form-urlencoded;charset=UTF-8",
    "origin": "https://gemini.google.com",
    "push-id": "feeds/mcudyrk2a4khkz",
    "referer": "https://gemini.google.com/",
    "x-goog-upload-command": "start",
    "x-goog-upload-header-content-length": "",
    "x-goog-upload-protocol": "resumable",
    "x-tenant-id": "bard-storage",
}
|
|
|
|
|
|
|
|
class Gemini(AsyncGeneratorProvider):
    """Provider for Google Gemini's web frontend.

    Requires Google account cookies (notably "__Secure-1PSID"); they can be
    supplied by the caller, read from a local browser, or obtained
    interactively via nodriver/webdriver login helpers.
    """
    url = "https://gemini.google.com"
    needs_auth = True           # Google auth cookies are required
    working = True
    image_models = ["gemini"]   # same model also serves image generation
    default_vision_model = "gemini"
    # Class-level caches shared across calls:
    _cookies: Cookies = None    # cached Google cookies
    _snlm0e: str = None         # "SNlM0e" request token scraped from the start page
    _sid: str = None            # "FdrFJe" session id scraped from the start page
|
2024-04-22 02:27:48 +03:00
|
|
|
|
|
|
|
@classmethod
|
2024-05-15 22:07:49 +03:00
|
|
|
async def nodriver_login(cls, proxy: str = None) -> AsyncIterator[str]:
|
2024-04-22 02:27:48 +03:00
|
|
|
try:
|
|
|
|
import nodriver as uc
|
|
|
|
except ImportError:
|
|
|
|
return
|
|
|
|
try:
|
|
|
|
from platformdirs import user_config_dir
|
|
|
|
user_data_dir = user_config_dir("g4f-nodriver")
|
|
|
|
except:
|
|
|
|
user_data_dir = None
|
|
|
|
if debug.logging:
|
|
|
|
print(f"Open nodriver with user_dir: {user_data_dir}")
|
2024-05-15 22:07:49 +03:00
|
|
|
browser = await uc.start(
|
|
|
|
user_data_dir=user_data_dir,
|
|
|
|
browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
|
|
|
|
)
|
2024-05-15 03:27:51 +03:00
|
|
|
login_url = os.environ.get("G4F_LOGIN_URL")
|
|
|
|
if login_url:
|
|
|
|
yield f"Please login: [Google Gemini]({login_url})\n\n"
|
2024-04-22 02:27:48 +03:00
|
|
|
page = await browser.get(f"{cls.url}/app")
|
|
|
|
await page.select("div.ql-editor.textarea", 240)
|
|
|
|
cookies = {}
|
|
|
|
for c in await page.browser.cookies.get_all():
|
|
|
|
if c.domain.endswith(".google.com"):
|
|
|
|
cookies[c.name] = c.value
|
|
|
|
await page.close()
|
2024-05-15 03:27:51 +03:00
|
|
|
cls._cookies = cookies
|
2024-04-22 02:27:48 +03:00
|
|
|
|
|
|
|
@classmethod
|
2024-05-15 03:27:51 +03:00
|
|
|
async def webdriver_login(cls, proxy: str) -> AsyncIterator[str]:
|
2024-04-22 02:27:48 +03:00
|
|
|
driver = None
|
|
|
|
try:
|
|
|
|
driver = get_browser(proxy=proxy)
|
|
|
|
try:
|
|
|
|
driver.get(f"{cls.url}/app")
|
|
|
|
WebDriverWait(driver, 5).until(
|
|
|
|
EC.visibility_of_element_located((By.CSS_SELECTOR, "div.ql-editor.textarea"))
|
|
|
|
)
|
|
|
|
except:
|
|
|
|
login_url = os.environ.get("G4F_LOGIN_URL")
|
|
|
|
if login_url:
|
|
|
|
yield f"Please login: [Google Gemini]({login_url})\n\n"
|
|
|
|
WebDriverWait(driver, 240).until(
|
|
|
|
EC.visibility_of_element_located((By.CSS_SELECTOR, "div.ql-editor.textarea"))
|
|
|
|
)
|
|
|
|
cls._cookies = get_driver_cookies(driver)
|
|
|
|
except MissingRequirementsError:
|
|
|
|
pass
|
|
|
|
finally:
|
|
|
|
if driver:
|
|
|
|
driver.close()
|
2024-02-09 00:02:52 +03:00
|
|
|
|
|
|
|
    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        cookies: Cookies = None,
        connector: BaseConnector = None,
        image: ImageType = None,
        image_name: str = None,
        response_format: str = None,
        return_conversation: bool = False,
        conversation: Conversation = None,
        language: str = "en",
        **kwargs
    ) -> AsyncResult:
        """Stream a Gemini response for *messages*.

        Yields text chunks as they arrive; may additionally yield a
        ``Conversation`` (when *return_conversation* is set), login-hint
        strings during interactive authentication, and an ``ImageResponse``
        when the reply contains generated images.

        Raises ``MissingAuthError`` when no usable Google cookies are found
        and ``RuntimeError`` when the SNlM0e token cannot be scraped.
        """
        # With an existing conversation only the newest user message is sent;
        # otherwise the whole history is flattened into one prompt.
        prompt = format_prompt(messages) if conversation is None else messages[-1]["content"]
        # Cookie priority: explicit argument > class cache > local browser cookies.
        cls._cookies = cookies or cls._cookies or get_cookies(".google.com", False, True)
        base_connector = get_connector(connector, proxy)
        async with ClientSession(
            headers=REQUEST_HEADERS,
            connector=base_connector
        ) as session:
            # Auth cascade: try cached cookies, then nodriver login, then
            # webdriver login, then cookies re-check — each step only runs
            # if the SNlM0e token is still missing.
            if not cls._snlm0e:
                # NOTE(review): conditional-expression statement — only fetches
                # when cookies are already present.
                await cls.fetch_snlm0e(session, cls._cookies) if cls._cookies else None
            if not cls._snlm0e:
                async for chunk in cls.nodriver_login(proxy):
                    yield chunk
                if cls._cookies is None:
                    async for chunk in cls.webdriver_login(proxy):
                        yield chunk
            if not cls._snlm0e:
                if cls._cookies is None or "__Secure-1PSID" not in cls._cookies:
                    raise MissingAuthError('Missing "__Secure-1PSID" cookie')
                await cls.fetch_snlm0e(session, cls._cookies)
            if not cls._snlm0e:
                raise RuntimeError("Invalid cookies. SNlM0e not found")

            # Optional image attachment: upload first, reference by URL in the request.
            image_url = await cls.upload_image(base_connector, to_bytes(image), image_name) if image else None

            # Second session carries the auth cookies for the generate call.
            async with ClientSession(
                cookies=cls._cookies,
                headers=REQUEST_HEADERS,
                connector=base_connector,
            ) as client:
                params = {
                    'bl': REQUEST_BL_PARAM,
                    'hl': language,
                    '_reqid': random.randint(1111, 9999),
                    'rt': 'c',
                    "f.sid": cls._sid,
                }
                data = {
                    'at': cls._snlm0e,  # anti-CSRF token scraped by fetch_snlm0e
                    # batchexecute-style envelope: double-JSON-encoded payload.
                    'f.req': json.dumps([None, json.dumps(cls.build_request(
                        prompt,
                        language=language,
                        conversation=conversation,
                        image_url=image_url,
                        image_name=image_name
                    ))])
                }
                async with client.post(
                    REQUEST_URL,
                    data=data,
                    params=params,
                ) as response:
                    await raise_for_status(response)
                    image_prompt = response_part = None
                    last_content_len = 0
                    # The stream repeats the full answer so far; only the new
                    # suffix (past last_content_len) is yielded each round.
                    async for line in response.content:
                        try:
                            try:
                                line = json.loads(line)
                            except ValueError:
                                # Framing lines (lengths, ")]}'") are not JSON; skip.
                                continue
                            if not isinstance(line, list):
                                continue
                            if len(line[0]) < 3 or not line[0][2]:
                                continue
                            # line[0][2] is itself a JSON document with the payload.
                            response_part = json.loads(line[0][2])
                            if not response_part[4]:
                                continue
                            if return_conversation:
                                # response_part[1] holds conversation/response ids,
                                # response_part[4][0][0] the chosen candidate id.
                                yield Conversation(response_part[1][0], response_part[1][1], response_part[4][0][0])
                            # Candidate text — index path per observed wire format.
                            content = response_part[4][0][1][0]
                        except (ValueError, KeyError, TypeError, IndexError) as e:
                            # Malformed/unexpected chunk: log and keep streaming.
                            print(f"{cls.__name__}:{e.__class__.__name__}:{e}")
                            continue
                        # "[Imagen of ...]" marks an image-generation reply; strip
                        # the marker and remember the prompt for the image block below.
                        match = re.search(r'\[Imagen of (.*?)\]', content)
                        if match:
                            image_prompt = match.group(1)
                            content = content.replace(match.group(0), '')
                        yield content[last_content_len:]
                        last_content_len = len(content)
                    if image_prompt:
                        # Image URLs — index path per observed wire format; TODO confirm.
                        images = [image[0][3][3] for image in response_part[4][0][12][7][0]]
                        if response_format == "b64_json":
                            yield ImageResponse(images, image_prompt, {"cookies": cls._cookies})
                        else:
                            resolved_images = []
                            preview = []
                            # Each URL is a double redirect to the final image host;
                            # follow both hops manually to capture the target URL.
                            for image in images:
                                async with client.get(image, allow_redirects=False) as fetch:
                                    image = fetch.headers["location"]
                                async with client.get(image, allow_redirects=False) as fetch:
                                    image = fetch.headers["location"]
                                resolved_images.append(image)
                                # "=s512" / "=s200" are size selectors in the image URL.
                                preview.append(image.replace('=s512', '=s200'))
                            # NOTE(review): "orginal_links" typo is a published dict key;
                            # left unchanged to avoid breaking consumers.
                            yield ImageResponse(resolved_images, image_prompt, {"orginal_links": images, "preview": preview})
|
2024-02-09 00:02:52 +03:00
|
|
|
|
|
|
|
def build_request(
|
|
|
|
prompt: str,
|
2024-05-21 21:39:31 +03:00
|
|
|
language: str,
|
|
|
|
conversation: Conversation = None,
|
2024-02-09 00:02:52 +03:00
|
|
|
image_url: str = None,
|
|
|
|
image_name: str = None,
|
|
|
|
tools: list[list[str]] = []
|
|
|
|
) -> list:
|
|
|
|
image_list = [[[image_url, 1], image_name]] if image_url else []
|
|
|
|
return [
|
|
|
|
[prompt, 0, None, image_list, None, None, 0],
|
2024-05-21 21:39:31 +03:00
|
|
|
[language],
|
|
|
|
[
|
|
|
|
None if conversation is None else conversation.conversation_id,
|
|
|
|
None if conversation is None else conversation.response_id,
|
|
|
|
None if conversation is None else conversation.choice_id,
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
[]
|
|
|
|
],
|
2024-02-09 00:02:52 +03:00
|
|
|
None,
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
[1],
|
|
|
|
0,
|
|
|
|
[],
|
|
|
|
tools,
|
|
|
|
1,
|
|
|
|
0,
|
|
|
|
]
|
|
|
|
|
2024-04-05 17:16:12 +03:00
|
|
|
    async def upload_image(connector: BaseConnector, image: bytes, image_name: str = None):
        """Upload image bytes via Google's resumable-upload protocol.

        Performs the start → upload → finalize handshake against
        ``content-push.googleapis.com`` and returns the response body text,
        which identifies the uploaded image for use in a prompt.
        Statement order matters: each step depends on the previous response.
        """
        async with ClientSession(
            headers=UPLOAD_IMAGE_HEADERS,
            connector=connector
        ) as session:
            # Preflight OPTIONS, mirroring the browser client.
            async with session.options(UPLOAD_IMAGE_URL) as response:
                await raise_for_status(response)

            headers = {
                "size": str(len(image)),
                "x-goog-upload-command": "start"
            }
            data = f"File name: {image_name}" if image_name else None
            # "start" step: returns the per-upload session URL in a header.
            async with session.post(
                UPLOAD_IMAGE_URL, headers=headers, data=data
            ) as response:
                await raise_for_status(response)
                upload_url = response.headers["X-Goog-Upload-Url"]

            async with session.options(upload_url, headers=headers) as response:
                await raise_for_status(response)

            # Single-shot upload: all bytes at offset 0, finalized in one request.
            headers["x-goog-upload-command"] = "upload, finalize"
            headers["X-Goog-Upload-Offset"] = "0"
            async with session.post(
                upload_url, headers=headers, data=image
            ) as response:
                await raise_for_status(response)
                # Body text is the handle later embedded in build_request's image list.
                return await response.text()
|
|
|
|
|
|
|
|
@classmethod
|
2024-04-05 17:16:12 +03:00
|
|
|
async def fetch_snlm0e(cls, session: ClientSession, cookies: Cookies):
|
|
|
|
async with session.get(cls.url, cookies=cookies) as response:
|
2024-04-07 01:15:53 +03:00
|
|
|
await raise_for_status(response)
|
2024-05-21 21:39:31 +03:00
|
|
|
response_text = await response.text()
|
|
|
|
match = re.search(r'SNlM0e\":\"(.*?)\"', response_text)
|
2024-04-05 17:16:12 +03:00
|
|
|
if match:
|
2024-05-21 21:39:31 +03:00
|
|
|
cls._snlm0e = match.group(1)
|
|
|
|
sid_match = re.search(r'"FdrFJe":"([\d-]+)"', response_text)
|
|
|
|
if sid_match:
|
|
|
|
cls._sid = sid_match.group(1)
|
|
|
|
|
|
|
|
class Conversation(BaseConversation):
    """State carried between Gemini requests: the ids of the last exchange."""

    def __init__(
        self,
        conversation_id: str = "",
        response_id: str = "",
        choice_id: str = ""
    ) -> None:
        # Stored verbatim; build_request echoes them back to continue the chat.
        self.conversation_id, self.response_id, self.choice_id = (
            conversation_id,
            response_id,
            choice_id,
        )
|