Mirror of https://github.com/xtekky/gpt4free.git (synced 2024-11-22 15:05:57 +03:00)
Add get/set cookies dir, hide prompt option in gui
This commit is contained in:
parent 96e378e9e2
commit 9ddac1715f

README.md: 30 changed lines
@@ -236,19 +236,39 @@ set_cookies(".google.com", {
 })
 ```

-Alternatively, you can place your .har and cookie files in the `/har_and_cookies` directory. To export a cookie file, use the EditThisCookie extension available on the Chrome Web Store: [EditThisCookie Extension](https://chromewebstore.google.com/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg).
+#### Using .har and Cookie Files

-You can also create .har files to capture cookies. If you need further assistance, refer to the next section.
+You can place `.har` and cookie files in the default `./har_and_cookies` directory. To export a cookie file, use the [EditThisCookie Extension](https://chromewebstore.google.com/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg) available on the Chrome Web Store.

-```bash
-python -m g4f.cli api --debug
+#### Creating .har Files to Capture Cookies
+
+To capture cookies, you can also create `.har` files. For more details, refer to the next section.
+
+#### Changing the Cookies Directory and Loading Cookie Files in Python
+
+You can change the cookies directory and load cookie files in your Python environment. To set the cookies directory relative to your Python file, use the following code:
+
+```python
+import os.path
+from g4f.cookies import set_cookies_dir, read_cookie_files
+
+import g4f.debug
+g4f.debug.logging = True
+
+cookies_dir = os.path.join(os.path.dirname(__file__), "har_and_cookies")
+set_cookies_dir(cookies_dir)
+read_cookie_files(cookies_dir)
 ```

+### Debug Mode
+
+If you enable debug mode, you will see logs similar to the following:
+
 ```
 Read .har file: ./har_and_cookies/you.com.har
 Cookies added: 10 from .you.com
 Read cookie file: ./har_and_cookies/google.json
 Cookies added: 16 from .google.com
-Starting server... [g4f v-0.0.0] (debug)
 ```

 #### .HAR File for OpenaiChat Provider
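As a quick sanity check of the workflow the new README section describes, the sketch below loads the cookie files and then queries the in-memory store. It is not part of the commit: `set_cookies_dir`, `read_cookie_files`, and `get_cookies` are taken from this diff, while the `.google.com` domain and the directory layout are only examples.

```python
import os.path
from g4f.cookies import set_cookies_dir, read_cookie_files, get_cookies

# Point g4f at the cookie directory and load every .har/.json file in it.
cookies_dir = os.path.join(os.path.dirname(__file__), "har_and_cookies")
set_cookies_dir(cookies_dir)
read_cookie_files(cookies_dir)

# get_cookies() checks the in-memory store populated above before falling back
# to browser cookies; raise_requirements_error=False keeps it from raising
# MissingRequirementsError when no browser cookie backend is installed.
google_cookies = get_cookies(".google.com", raise_requirements_error=False)
print(f"{len(google_cookies)} cookies available for .google.com")
```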
@@ -1,18 +1,14 @@
 from __future__ import annotations

-import asyncio
-import os
-from typing import Iterator, Union
-
 from ..cookies import get_cookies
 from ..image import ImageResponse
-from ..errors import MissingRequirementsError, MissingAuthError
+from ..errors import MissingAuthError
 from ..typing import AsyncResult, Messages, Cookies
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .bing.create_images import create_images, create_session, get_cookies_from_browser
+from .bing.create_images import create_images, create_session

 class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
-    label = "Microsoft Designer"
+    label = "Microsoft Designer in Bing"
     parent = "Bing"
     url = "https://www.bing.com/images/create"
     working = True
@@ -38,30 +34,9 @@ class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
         **kwargs
     ) -> AsyncResult:
         session = BingCreateImages(cookies, proxy, api_key)
-        yield await session.create_async(messages[-1]["content"])
+        yield await session.generate(messages[-1]["content"])

-    def create(self, prompt: str) -> Iterator[Union[ImageResponse, str]]:
-        """
-        Generator for creating imagecompletion based on a prompt.
-
-        Args:
-            prompt (str): Prompt to generate images.
-
-        Yields:
-            Generator[str, None, None]: The final output as markdown formatted string with images.
-        """
-        cookies = self.cookies or get_cookies(".bing.com", False)
-        if cookies is None or "_U" not in cookies:
-            login_url = os.environ.get("G4F_LOGIN_URL")
-            if login_url:
-                yield f"Please login: [Bing]({login_url})\n\n"
-            try:
-                self.cookies = get_cookies_from_browser(self.proxy)
-            except MissingRequirementsError as e:
-                raise MissingAuthError(f'Missing "_U" cookie. {e}')
-        yield asyncio.run(self.create_async(prompt))
-
-    async def create_async(self, prompt: str) -> ImageResponse:
+    async def generate(self, prompt: str) -> ImageResponse:
         """
         Asynchronously creates a markdown formatted string with images based on the prompt.

@@ -74,7 +49,6 @@ class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
         cookies = self.cookies or get_cookies(".bing.com", False)
         if cookies is None or "_U" not in cookies:
             raise MissingAuthError('Missing "_U" cookie')
-        proxy = self.proxy or os.environ.get("G4F_PROXY")
-        async with create_session(cookies, proxy) as session:
-            images = await create_images(session, prompt, proxy)
+        async with create_session(cookies, self.proxy) as session:
+            images = await create_images(session, prompt)
             return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"} if len(images) > 1 else {})
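The provider now exposes a single asynchronous `generate` coroutine instead of the old `create`/`create_async` pair. Below is a minimal sketch, not from the repository, of driving it directly: the constructor arguments mirror the `BingCreateImages(cookies, proxy, api_key)` call in the hunk above, while the import path and the placeholder `_U` cookie value are assumptions.

```python
import asyncio
from g4f.Provider import BingCreateImages  # assumed import path

async def main():
    # cookies, proxy, api_key - the same positional arguments as the call shown above
    session = BingCreateImages({"_U": "<your _U cookie>"}, None, None)
    # generate() returns an ImageResponse wrapping the generated image URLs
    image_response = await session.generate("a watercolor fox")
    print(image_response)

asyncio.run(main())
```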
@@ -4,7 +4,8 @@ import uuid
 import secrets
 from aiohttp import ClientSession

-from ..typing import AsyncResult, Messages
+from ..typing import AsyncResult, Messages, ImageType
+from ..image import to_data_uri
 from .base_provider import AsyncGeneratorProvider

 class Blackbox(AsyncGeneratorProvider):
@@ -17,8 +18,15 @@ class Blackbox(AsyncGeneratorProvider):
         model: str,
         messages: Messages,
         proxy: str = None,
+        image: ImageType = None,
+        image_name: str = None,
         **kwargs
     ) -> AsyncResult:
+        if image is not None:
+            messages[-1]["data"] = {
+                "fileText": image_name,
+                "imageBase64": to_data_uri(image)
+            }
         headers = {
             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
             "Accept": "*/*",
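For context, the block added to `Blackbox.create_async_generator` simply attaches the uploaded image to the last message as a data URI. The snippet below illustrates that shape outside the provider; it is not repository code, the file name and the way the bytes are read are invented, and only `to_data_uri` comes from the import added above.

```python
from g4f.image import to_data_uri

messages = [{"role": "user", "content": "What is shown in this picture?"}]
image_name = "photo.jpg"                 # hypothetical upload name
with open(image_name, "rb") as f:        # hypothetical image bytes
    image = f.read()

# Mirrors the added block: the upload travels inside the last message payload.
if image is not None:
    messages[-1]["data"] = {
        "fileText": image_name,
        "imageBase64": to_data_uri(image),
    }
```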
@@ -1,7 +1,3 @@
-"""
-This module provides functionalities for creating and managing images using Bing's service.
-It includes functions for user login, session creation, image creation, and processing.
-"""
 from __future__ import annotations

 import asyncio
@@ -17,9 +13,7 @@ try:
 except ImportError:
     has_requirements = False

-from ...providers.create_images import CreateImagesProvider
 from ..helper import get_connector
-from ...providers.types import ProviderType
 from ...errors import MissingRequirementsError, RateLimitError
 from ...webdriver import WebDriver, get_driver_cookies, get_browser

@@ -101,7 +95,7 @@ def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseCo
         headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
     return ClientSession(headers=headers, connector=get_connector(connector, proxy))

-async def create_images(session: ClientSession, prompt: str, proxy: str = None, timeout: int = TIMEOUT_IMAGE_CREATION) -> List[str]:
+async def create_images(session: ClientSession, prompt: str, timeout: int = TIMEOUT_IMAGE_CREATION) -> List[str]:
     """
     Creates images based on a given prompt using Bing's service.

@@ -132,7 +126,7 @@ async def create_images(session: ClientSession, prompt: str, proxy: str = None,
                 raise RuntimeError(f"Create images failed: {error}")
     if response.status != 302:
         url = f"{BING_URL}/images/create?q={url_encoded_prompt}&rt=3&FORM=GENCRE"
-        async with session.post(url, allow_redirects=False, proxy=proxy, timeout=timeout) as response:
+        async with session.post(url, allow_redirects=False, timeout=timeout) as response:
             if response.status != 302:
                 raise RuntimeError(f"Create images failed. Code: {response.status}")

@@ -185,22 +179,4 @@ def read_images(html_content: str) -> List[str]:
         raise RuntimeError("Bad images found")
     if not images:
         raise RuntimeError("No images found")
     return images
-
-def patch_provider(provider: ProviderType) -> CreateImagesProvider:
-    """
-    Patches a provider to include image creation capabilities.
-
-    Args:
-        provider (ProviderType): The provider to be patched.
-
-    Returns:
-        CreateImagesProvider: The patched provider with image creation capabilities.
-    """
-    from ..BingCreateImages import BingCreateImages
-    service = BingCreateImages()
-    return CreateImagesProvider(
-        provider,
-        service.create,
-        service.create_async
-    )
@@ -26,7 +26,7 @@ from ...webdriver import get_browser
 from ...typing import AsyncResult, Messages, Cookies, ImageType, AsyncIterator
 from ...requests import get_args_from_browser, raise_for_status
 from ...requests.aiohttp import StreamSession
-from ...image import to_image, to_bytes, ImageResponse, ImageRequest
+from ...image import ImageResponse, ImageRequest, to_image, to_bytes, is_accepted_format
 from ...errors import MissingAuthError, ResponseError
 from ...providers.conversation import BaseConversation
 from ..helper import format_cookies
@@ -138,23 +138,22 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
            An ImageRequest object that contains the download URL, file name, and other data
         """
         # Convert the image to a PIL Image object and get the extension
-        image = to_image(image)
-        extension = image.format.lower()
-        # Convert the image to a bytes object and get the size
         data_bytes = to_bytes(image)
+        image = to_image(data_bytes)
+        extension = image.format.lower()
         data = {
-            "file_name": image_name if image_name else f"{image.width}x{image.height}.{extension}",
+            "file_name": "" if image_name is None else image_name,
             "file_size": len(data_bytes),
             "use_case": "multimodal"
         }
         # Post the image data to the service and get the image data
         async with session.post(f"{cls.url}/backend-api/files", json=data, headers=headers) as response:
-            cls._update_request_args()
+            cls._update_request_args(session)
             await raise_for_status(response)
             image_data = {
                 **data,
                 **await response.json(),
-                "mime_type": f"image/{extension}",
+                "mime_type": is_accepted_format(data_bytes),
                 "extension": extension,
                 "height": image.height,
                 "width": image.width
@@ -275,7 +274,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         first_part = line["message"]["content"]["parts"][0]
         if "asset_pointer" not in first_part or "metadata" not in first_part:
             return
-        if first_part["metadata"] is None:
+        if first_part["metadata"] is None or first_part["metadata"]["dalle"] is None:
             return
         prompt = first_part["metadata"]["dalle"]["prompt"]
         file_id = first_part["asset_pointer"].split("file-service://", 1)[1]
@@ -365,49 +364,17 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         ) as session:
             if cls._expires is not None and cls._expires < time.time():
                 cls._headers = cls._api_key = None
-            if cls._headers is None or cookies is not None:
-                cls._create_request_args(cookies)
-            api_key = kwargs["access_token"] if "access_token" in kwargs else api_key
-            if api_key is not None:
-                cls._set_api_key(api_key)
-
-            if cls.default_model is None and (not cls.needs_auth or cls._api_key is not None):
-                if cls._api_key is None:
-                    cls._create_request_args(cookies)
-                async with session.get(
-                    f"{cls.url}/",
-                    headers=DEFAULT_HEADERS
-                ) as response:
-                    cls._update_request_args(session)
-                    await raise_for_status(response)
-                try:
-                    if not model:
-                        cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))
-                    else:
-                        cls.default_model = cls.get_model(model)
-                except MissingAuthError:
-                    pass
-                except Exception as e:
-                    api_key = cls._api_key = None
-                    cls._create_request_args()
-                    if debug.logging:
-                        print("OpenaiChat: Load default model failed")
-                        print(f"{e.__class__.__name__}: {e}")
-
             arkose_token = None
             proofTokens = None
-            if cls.default_model is None:
-                error = None
-                try:
-                    arkose_token, api_key, cookies, headers, proofTokens = await getArkoseAndAccessToken(proxy)
-                    cls._create_request_args(cookies, headers)
-                    cls._set_api_key(api_key)
-                except NoValidHarFileError as e:
-                    error = e
-                if cls._api_key is None:
-                    await cls.nodriver_access_token(proxy)
+            try:
+                arkose_token, api_key, cookies, headers, proofTokens = await getArkoseAndAccessToken(proxy)
+                cls._create_request_args(cookies, headers)
+                cls._set_api_key(api_key)
+            except NoValidHarFileError as e:
                 if cls._api_key is None and cls.needs_auth:
-                    raise error
-
+                    raise e
+            if cls.default_model is None:
                 cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))

             try:
@@ -461,7 +428,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
             )
             ws = None
             if need_arkose:
-                async with session.post("https://chatgpt.com/backend-api/register-websocket", headers=cls._headers) as response:
+                async with session.post(f"{cls.url}/backend-api/register-websocket", headers=cls._headers) as response:
                     wss_url = (await response.json()).get("wss_url")
                     if wss_url:
                         ws = await session.ws_connect(wss_url)
@@ -490,7 +457,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                 if proofofwork is not None:
                     headers["Openai-Sentinel-Proof-Token"] = proofofwork
                 async with session.post(
-                    f"{cls.url}/backend-anon/conversation" if cls._api_key is None else
+                    f"{cls.url}/backend-anon/conversation"
+                    if cls._api_key is None else
                     f"{cls.url}/backend-api/conversation",
                     json=data,
                     headers=headers
@@ -580,12 +548,9 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
             raise RuntimeError(line["error"])
         if "message_type" not in line["message"]["metadata"]:
             return
-        try:
-            image_response = await cls.get_generated_image(session, cls._headers, line)
-            if image_response is not None:
-                yield image_response
-        except Exception as e:
-            yield e
+        image_response = await cls.get_generated_image(session, cls._headers, line)
+        if image_response is not None:
+            yield image_response
         if line["message"]["author"]["role"] != "assistant":
             return
         if line["message"]["content"]["content_type"] != "text":
@@ -12,6 +12,7 @@ from copy import deepcopy

 from .crypt import decrypt, encrypt
 from ...requests import StreamSession
+from ...cookies import get_cookies_dir
 from ... import debug

 class NoValidHarFileError(Exception):
@@ -36,17 +37,14 @@ proofTokens: list = []

 def readHAR():
     global proofTokens
-    dirPath = "./"
     harPath = []
     chatArks = []
     accessToken = None
     cookies = {}
-    for root, dirs, files in os.walk(dirPath):
+    for root, dirs, files in os.walk(get_cookies_dir()):
         for file in files:
             if file.endswith(".har"):
                 harPath.append(os.path.join(root, file))
-        if harPath:
-            break
     if not harPath:
         raise NoValidHarFileError("No .har file found")
     for path in harPath:
@@ -7,6 +7,7 @@ import random
 import logging

 from ...requests import StreamSession, raise_for_status
+from ...cookies import get_cookies_dir
 from ...errors import MissingRequirementsError
 from ... import debug

@@ -28,10 +29,9 @@ public_token = "public-token-live-507a52ad-7e69-496b-aee0-1c9863c7c819"
 chatArks: list = None

 def readHAR():
-    dirPath = "./"
     harPath = []
     chatArks = []
-    for root, dirs, files in os.walk(dirPath):
+    for root, dirs, files in os.walk(get_cookies_dir()):
         for file in files:
             if file.endswith(".har"):
                 harPath.append(os.path.join(root, file))
@@ -65,16 +65,10 @@ def parseHAREntry(entry) -> arkReq:
     return tmpArk

 async def sendRequest(tmpArk: arkReq, proxy: str = None):
-    try:
-        async with StreamSession(headers=tmpArk.arkHeaders, cookies=tmpArk.arkCookies, proxy=proxy) as session:
-            async with session.post(tmpArk.arkURL, data=tmpArk.arkBody) as response:
-                await raise_for_status(response)
-                return await response.text()
-    except RuntimeError as e:
-        if str(e) == "Event loop is closed":
-            print("Event loop is closed error occurred in sendRequest.")
-        else:
-            raise
+    async with StreamSession(headers=tmpArk.arkHeaders, cookies=tmpArk.arkCookies, proxy=proxy) as session:
+        async with session.post(tmpArk.arkURL, data=tmpArk.arkBody) as response:
+            await raise_for_status(response)
+            return await response.text()

 async def create_telemetry_id(proxy: str = None):
     global chatArks
@@ -23,8 +23,9 @@ from .typing import Dict, Cookies
 from .errors import MissingRequirementsError
 from . import debug

-# Global variable to store cookies
-_cookies: Dict[str, Cookies] = {}
+class CookiesConfig():
+    cookies: Dict[str, Cookies] = {}
+    cookies_dir: str = "./har_and_cookies"

 DOMAINS = [
     ".bing.com",
@@ -48,20 +49,18 @@ def get_cookies(domain_name: str = '', raise_requirements_error: bool = True, si
     Returns:
         Dict[str, str]: A dictionary of cookie names and values.
     """
-    global _cookies
-    if domain_name in _cookies:
-        return _cookies[domain_name]
+    if domain_name in CookiesConfig.cookies:
+        return CookiesConfig.cookies[domain_name]

     cookies = load_cookies_from_browsers(domain_name, raise_requirements_error, single_browser)
-    _cookies[domain_name] = cookies
+    CookiesConfig.cookies[domain_name] = cookies
     return cookies

 def set_cookies(domain_name: str, cookies: Cookies = None) -> None:
-    global _cookies
     if cookies:
-        _cookies[domain_name] = cookies
-    elif domain_name in _cookies:
-        _cookies.pop(domain_name)
+        CookiesConfig.cookies[domain_name] = cookies
+    elif domain_name in CookiesConfig.cookies:
+        CookiesConfig.cookies.pop(domain_name)

 def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True, single_browser: bool = False) -> Cookies:
     """
@@ -96,7 +95,13 @@ def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool
             print(f"Error reading cookies from {cookie_fn.__name__} for {domain_name}: {e}")
     return cookies

-def read_cookie_files(dirPath: str = "./har_and_cookies"):
+def set_cookies_dir(dir: str) -> None:
+    CookiesConfig.cookies_dir = dir
+
+def get_cookies_dir() -> str:
+    return CookiesConfig.cookies_dir
+
+def read_cookie_files(dirPath: str = None):
     def get_domain(v: dict) -> str:
         host = [h["value"] for h in v['request']['headers'] if h["name"].lower() in ("host", ":authority")]
         if not host:
@@ -106,16 +111,16 @@ def read_cookie_files(dirPath: str = "./har_and_cookies"):
             if d in host:
                 return d

-    global _cookies
     harFiles = []
     cookieFiles = []
-    for root, dirs, files in os.walk(dirPath):
+    for root, dirs, files in os.walk(CookiesConfig.cookies_dir if dirPath is None else dirPath):
         for file in files:
             if file.endswith(".har"):
                 harFiles.append(os.path.join(root, file))
             elif file.endswith(".json"):
                 cookieFiles.append(os.path.join(root, file))
-    _cookies = {}
+    CookiesConfig.cookies = {}
     for path in harFiles:
         with open(path, 'rb') as file:
             try:
@@ -134,7 +139,7 @@ def read_cookie_files(dirPath: str = "./har_and_cookies"):
             for c in v['request']['cookies']:
                 v_cookies[c['name']] = c['value']
             if len(v_cookies) > 0:
-                _cookies[domain] = v_cookies
+                CookiesConfig.cookies[domain] = v_cookies
                 new_cookies[domain] = len(v_cookies)
         if debug.logging:
             for domain, new_values in new_cookies.items():
@@ -159,7 +164,7 @@ def read_cookie_files(dirPath: str = "./har_and_cookies"):
         for domain, new_values in new_cookies.items():
             if debug.logging:
                 print(f"Cookies added: {len(new_values)} from {domain}")
-            _cookies[domain] = new_values
+            CookiesConfig.cookies[domain] = new_values

 def _g4f(domain_name: str) -> list:
     """
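The module-level `_cookies` dict is folded into `CookiesConfig`, and `set_cookies_dir`/`get_cookies_dir` are the new knobs that the README section and the HAR readers above rely on. A minimal sketch of the new surface, with an example directory path that is not from the repository:

```python
from g4f.cookies import set_cookies_dir, get_cookies_dir, read_cookie_files

set_cookies_dir("/data/g4f_cookies")       # example path; stored on CookiesConfig.cookies_dir
assert get_cookies_dir() == "/data/g4f_cookies"

# With no argument, read_cookie_files() now walks CookiesConfig.cookies_dir,
# and readHAR() in the provider HAR parsers resolves the same directory
# through get_cookies_dir() instead of scanning "./".
read_cookie_files()
```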
@@ -93,22 +93,22 @@
     <div class="paper">
         <h3>Settings</h3>
         <div class="field">
-            <span class="label">Web Access</span>
+            <span class="label">Web Access with DuckDuckGo</span>
             <input type="checkbox" id="switch" />
             <label for="switch" class="toogle" title="Add the pages of the first 5 search results to the query."></label>
         </div>
         <div class="field">
-            <span class="label">Disable History</span>
+            <span class="label">Disable Conversation History</span>
             <input type="checkbox" id="history" />
             <label for="history" class="toogle" title="To improve the reaction time or if you have trouble with large conversations."></label>
         </div>
         <div class="field">
-            <span class="label">Hide System prompt</span>
+            <span class="label">Hide System-prompt</span>
             <input type="checkbox" id="hide-systemPrompt" />
             <label for="hide-systemPrompt" class="toogle" title="For more space on phones"></label>
         </div>
         <div class="field">
-            <span class="label">Auto continue</span>
+            <span class="label">Auto continue in ChatGPT</span>
             <input id="auto_continue" type="checkbox" name="auto_continue" checked/>
             <label for="auto_continue" class="toogle" title="Continue large responses in OpenaiChat"></label>
         </div>
@@ -121,8 +121,8 @@
             <input type="text" id="recognition-language" value="" placeholder="navigator.language"/>
         </div>
         <div class="field box">
-            <label for="Bing-api_key" class="label" title="">Bing:</label>
-            <textarea id="Bing-api_key" name="Bing[api_key]" class="BingCreateImages-api_key" placeholder="&quot;_U&quot; cookie"></textarea>
+            <label for="BingCreateImages-api_key" class="label" title="">Microsoft Designer in Bing:</label>
+            <textarea id="BingCreateImages-api_key" name="BingCreateImages[api_key]" placeholder="&quot;_U&quot; cookie"></textarea>
         </div>
         <div class="field box">
             <label for="DeepInfra-api_key" class="label" title="">DeepInfra:</label>
@@ -144,14 +144,14 @@
             <label for="Openai-api_key" class="label" title="">OpenAI API:</label>
             <textarea id="Openai-api_key" name="Openai[api_key]" placeholder="api_key"></textarea>
         </div>
-        <div class="field box">
-            <label for="OpenaiAccount-api_key" class="label" title="">OpenAI ChatGPT:</label>
-            <textarea id="OpenaiAccount-api_key" name="OpenaiAccount[api_key]" class="OpenaiChat-api_key" placeholder="access_key"></textarea>
-        </div>
         <div class="field box">
             <label for="OpenRouter-api_key" class="label" title="">OpenRouter:</label>
             <textarea id="OpenRouter-api_key" name="OpenRouter[api_key]" placeholder="api_key"></textarea>
         </div>
+        <div class="field box">
+            <label for="PerplexityApi-api_key" class="label" title="">Perplexity API:</label>
+            <textarea id="PerplexityApi-api_key" name="PerplexityApi[api_key]" placeholder="api_key"></textarea>
+        </div>
         <div class="field box">
             <label for="Replicate-api_key" class="label" title="">Replicate:</label>
             <textarea id="Replicate-api_key" name="Replicate[api_key]" class="ReplicateImage-api_key" placeholder="api_key"></textarea>
@@ -173,7 +173,10 @@
     <div id="messages" class="box"></div>
     <div class="toolbar">
         <div id="input-count" class="">
+            <button class="hide-input">
+                <i class="fa-solid fa-angles-down"></i>
+            </button>
+            <span class="text"></span>
         </div>
         <div class="stop_generating stop_generating-hidden">
             <button id="cancelButton">
@@ -457,7 +457,11 @@ body {
 #input-count {
     width: fit-content;
     font-size: 12px;
-    padding: 6px var(--inner-gap);
+    padding: 6px 6px;
+}
+
+#input-count .text {
+    padding: 0 4px;
 }

 .stop_generating, .toolbar .regenerate {
@@ -491,6 +495,13 @@ body {
     animation: show_popup 0.4s;
 }

+.toolbar .hide-input {
+    background: transparent;
+    border: none;
+    color: var(--colour-3);
+    cursor: pointer;
+}
+
 @keyframes show_popup {
     from {
         opacity: 0;
@@ -11,7 +11,7 @@ const imageInput = document.getElementById("image");
 const cameraInput = document.getElementById("camera");
 const fileInput = document.getElementById("file");
 const microLabel = document.querySelector(".micro-label");
-const inputCount = document.getElementById("input-count");
+const inputCount = document.getElementById("input-count").querySelector(".text");
 const providerSelect = document.getElementById("provider");
 const modelSelect = document.getElementById("model");
 const modelProvider = document.getElementById("model2");
@@ -41,9 +41,7 @@ appStorage = window.localStorage || {
     length: 0
 }

-const markdown = window.markdownit({
-    html: true,
-});
+const markdown = window.markdownit();
 const markdown_render = (content) => {
     return markdown.render(content
         .replaceAll(/<!-- generated images start -->|<!-- generated images end -->/gm, "")
@@ -813,6 +811,17 @@ document.getElementById("regenerateButton").addEventListener("click", async () =
     await ask_gpt();
 });

+const hide_input = document.querySelector(".toolbar .hide-input");
+hide_input.addEventListener("click", async (e) => {
+    const icon = hide_input.querySelector("i");
+    const func = icon.classList.contains("fa-angles-down") ? "add" : "remove";
+    const remv = icon.classList.contains("fa-angles-down") ? "remove" : "add";
+    icon.classList[func]("fa-angles-up");
+    icon.classList[remv]("fa-angles-down");
+    document.querySelector(".conversation .user-input").classList[func]("hidden");
+    document.querySelector(".conversation .buttons").classList[func]("hidden");
+});
+
 const uuid = () => {
     return `xxxxxxxx-xxxx-4xxx-yxxx-${Date.now().toString(16)}`.replace(
         /[xy]/g,
@@ -1016,7 +1025,7 @@ const count_input = async () => {
         if (countFocus.value) {
             inputCount.innerText = count_words_and_tokens(countFocus.value, get_selected_model());
         } else {
-            inputCount.innerHTML = " "
+            inputCount.innerText = "";
         }
     }, 100);
 };
@@ -1060,6 +1069,8 @@ async function on_api() {
     messageInput.addEventListener("keydown", async (evt) => {
         if (prompt_lock) return;

+        // If not mobile
+        if (!window.matchMedia("(pointer:coarse)").matches)
         if (evt.keyCode === 13 && !evt.shiftKey) {
             evt.preventDefault();
             console.log("pressed enter");
@@ -1262,6 +1273,7 @@ async function load_provider_models(providerIndex=null) {
     if (!providerIndex) {
         providerIndex = providerSelect.selectedIndex;
     }
+    modelProvider.innerHTML = '';
     const provider = providerSelect.options[providerIndex].value;
     if (!provider) {
         modelProvider.classList.add("hidden");
@@ -1269,7 +1281,6 @@ async function load_provider_models(providerIndex=null) {
         return;
     }
     const models = await api('models', provider);
-    modelProvider.innerHTML = '';
     if (models.length > 0) {
         modelSelect.classList.add("hidden");
         modelProvider.classList.remove("hidden");
|
@ -210,8 +210,8 @@ def format_images_markdown(images: Union[str, list], alt: str, preview: Union[st
|
|||||||
if not isinstance(preview, list):
|
if not isinstance(preview, list):
|
||||||
preview = [preview.replace('{image}', image) if preview else image for image in images]
|
preview = [preview.replace('{image}', image) if preview else image for image in images]
|
||||||
result = "\n".join(
|
result = "\n".join(
|
||||||
#f"[![#{idx+1} {alt}]({preview[idx]})]({image})"
|
f"[![#{idx+1} {alt}]({preview[idx]})]({image})"
|
||||||
f'[<img src="{preview[idx]}" width="200" alt="#{idx+1} {alt}">]({image})'
|
#f'[<img src="{preview[idx]}" width="200" alt="#{idx+1} {alt}">]({image})'
|
||||||
for idx, image in enumerate(images)
|
for idx, image in enumerate(images)
|
||||||
)
|
)
|
||||||
start_flag = "<!-- generated images start -->\n"
|
start_flag = "<!-- generated images start -->\n"
|
||||||
|