Add aiohttp_socks support

Heiner Lohaus 2024-01-23 23:48:11 +01:00
parent 91feb34054
commit 8864b70ee4
6 changed files with 61 additions and 23 deletions
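
In short: each provider below gains an optional connector: BaseConnector parameter that is handed to aiohttp's ClientSession, and when only a proxy URL is supplied, an aiohttp_socks ProxyConnector is built from it. A minimal sketch of that shared fallback, factored into a helper purely for illustration (the get_connector name is hypothetical and not part of this commit):

    from aiohttp import BaseConnector

    def get_connector(connector: BaseConnector = None, proxy: str = None) -> BaseConnector:
        # Prefer an explicitly supplied connector; otherwise derive one from the proxy URL.
        if proxy and not connector:
            try:
                from aiohttp_socks import ProxyConnector  # optional dependency
                connector = ProxyConnector.from_url(proxy)
            except ImportError:
                raise RuntimeError('Install "aiohttp_socks" package for proxy support')
        return connector

The same "if proxy and not connector" block is repeated inline in each provider changed below.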

View File

@@ -126,6 +126,7 @@ def analyze_code(pull: PullRequest, diff: str)-> list[dict]:
     for line in diff.split('\n'):
         if line.startswith('+++ b/'):
             current_file_path = line[6:]
+            changed_lines = []
         elif line.startswith('@@'):
             match = re.search(r'\+([0-9]+?),', line)
             if match:
@@ -137,9 +138,10 @@ def analyze_code(pull: PullRequest, diff: str)-> list[dict]:
                 for review in response.get('reviews', []):
                     review['path'] = current_file_path
                     comments.append(review)
+                changed_lines = []
                 current_file_path = None
-            elif not line.startswith('-'):
+            elif line.startswith('-'):
+                changed_lines.append(line)
+            else:
                 changed_lines.append(f"{offset_line}:{line}")
                 offset_line += 1

View File

@@ -6,7 +6,7 @@ import os
 import uuid
 import time
 from urllib import parse
-from aiohttp import ClientSession, ClientTimeout
+from aiohttp import ClientSession, ClientTimeout, BaseConnector
 from ..typing import AsyncResult, Messages, ImageType
 from ..image import ImageResponse
@@ -39,6 +39,7 @@ class Bing(AsyncGeneratorProvider):
         proxy: str = None,
         timeout: int = 900,
         cookies: dict = None,
+        connector: BaseConnector = None,
         tone: str = Tones.balanced,
         image: ImageType = None,
         web_search: bool = False,
@@ -68,7 +69,14 @@ class Bing(AsyncGeneratorProvider):
         gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
-        return stream_generate(prompt, tone, image, context, proxy, cookies, web_search, gpt4_turbo, timeout)
+        if proxy and not connector:
+            try:
+                from aiohttp_socks import ProxyConnector
+                connector = ProxyConnector.from_url(proxy)
+            except ImportError:
+                raise RuntimeError('Install "aiohttp_socks" package for proxy support')
+        return stream_generate(prompt, tone, image, context, cookies, connector, web_search, gpt4_turbo, timeout)
@@ -253,8 +261,8 @@ async def stream_generate(
     tone: str,
     image: ImageType = None,
     context: str = None,
-    proxy: str = None,
     cookies: dict = None,
+    connector: BaseConnector = None,
     web_search: bool = False,
     gpt4_turbo: bool = False,
     timeout: int = 900
@@ -266,7 +274,6 @@ async def stream_generate(
     :param tone: The desired tone for the response.
     :param image: The image type involved in the response.
     :param context: Additional context for the prompt.
-    :param proxy: Proxy settings for the request.
     :param cookies: Cookies for the session.
     :param web_search: Flag to enable web search.
     :param gpt4_turbo: Flag to enable GPT-4 Turbo.
@@ -278,10 +285,10 @@ async def stream_generate(
         headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
     async with ClientSession(
-        timeout=ClientTimeout(total=timeout), headers=headers
+        timeout=ClientTimeout(total=timeout), headers=headers, connector=connector
     ) as session:
-        conversation = await create_conversation(session, proxy)
-        image_response = await upload_image(session, image, tone, proxy) if image else None
+        conversation = await create_conversation(session)
+        image_response = await upload_image(session, image, tone) if image else None
         if image_response:
             yield image_response
@@ -289,8 +296,7 @@ async def stream_generate(
             async with session.ws_connect(
                 'wss://sydney.bing.com/sydney/ChatHub',
                 autoping=False,
-                params={'sec_access_token': conversation.conversationSignature},
-                proxy=proxy
+                params={'sec_access_token': conversation.conversationSignature}
             ) as wss:
                 await wss.send_str(format_message({'protocol': 'json', 'version': 1}))
                 await wss.receive(timeout=timeout)
@@ -322,7 +328,7 @@ async def stream_generate(
                     elif message.get('contentType') == "IMAGE":
                         prompt = message.get('text')
                         try:
-                            image_response = ImageResponse(await create_images(session, prompt, proxy), prompt)
+                            image_response = ImageResponse(await create_images(session, prompt), prompt)
                         except:
                             response_txt += f"\nhttps://www.bing.com/images/create?q={parse.quote(prompt)}"
                             final = True
@@ -342,4 +348,4 @@ async def stream_generate(
                             raise Exception(f"{result['value']}: {result['message']}")
                         return
         finally:
-            await delete_conversation(session, conversation, proxy)
+            await delete_conversation(session, conversation)
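
A usage sketch for the updated stream_generate signature, passing a prebuilt SOCKS connector instead of a proxy URL. The import path and the socks5 address are assumptions for illustration; only the stream_generate and Tones names come from the hunks above.

    import asyncio
    from aiohttp_socks import ProxyConnector
    from g4f.Provider.Bing import stream_generate, Tones  # assumed module path

    async def main():
        # Route all Bing traffic through a local SOCKS5 proxy (example address).
        connector = ProxyConnector.from_url("socks5://127.0.0.1:1080")
        async for chunk in stream_generate("Hello", Tones.balanced, connector=connector):
            print(chunk, end="")

    asyncio.run(main())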

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
 import json, uuid
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
 from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -33,6 +33,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
         messages: Messages,
         stream: bool = True,
         proxy: str = None,
+        connector: BaseConnector = None,
         web_search: bool = False,
         cookies: dict = None,
         **kwargs
@@ -43,9 +44,16 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
         headers = {
             'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
         }
+        if proxy and not connector:
+            try:
+                from aiohttp_socks import ProxyConnector
+                connector = ProxyConnector.from_url(proxy)
+            except ImportError:
+                raise RuntimeError('Install "aiohttp_socks" package for proxy support')
         async with ClientSession(
             cookies=cookies,
-            headers=headers
+            headers=headers,
+            connector=connector
         ) as session:
             async with session.post(f"{cls.url}/conversation", json={"model": cls.get_model(model)}, proxy=proxy) as response:
                 conversation_id = (await response.json())["conversationId"]
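
A caller-side sketch for a provider that now accepts either proxy or connector. The create_async_generator entry point is assumed from the AsyncGeneratorProvider base class (the method name is cropped out of the hunk headers above), and the model and proxy values are placeholders.

    import asyncio
    from g4f.Provider import HuggingChat  # assumed import path

    async def main():
        messages = [{"role": "user", "content": "Hello"}]
        # Passing proxy alone triggers the ProxyConnector fallback added above;
        # alternatively, pass an explicit connector=... to bypass it.
        async for token in HuggingChat.create_async_generator(
            "", messages, proxy="socks5://127.0.0.1:1080"
        ):
            print(token, end="")

    asyncio.run(main())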

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
 import uuid
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
 from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -91,6 +91,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
         messages: Messages,
         auth: str = None,
         proxy: str = None,
+        connector: BaseConnector = None,
         **kwargs
     ) -> AsyncResult:
         headers = {
@@ -100,9 +101,16 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
             "referer": f"{cls.url}/",
             "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
         }
+        if proxy and not connector:
+            try:
+                from aiohttp_socks import ProxyConnector
+                connector = ProxyConnector.from_url(proxy)
+            except ImportError:
+                raise RuntimeError('Install "aiohttp_socks" package for proxy support')
         async with ClientSession(
             headers=headers,
-            cookie_jar=cls._cookie_jar
+            cookie_jar=cls._cookie_jar,
+            connector=connector
         ) as session:
             cls._auth_code = auth if isinstance(auth, str) else cls._auth_code
             if not cls._auth_code:

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
 import random
 import json
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
 from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -32,6 +32,7 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
         model: str,
         messages: Messages,
         proxy: str = None,
+        connector: BaseConnector = None,
         **kwargs
     ) -> AsyncResult:
         headers = {
@@ -47,7 +48,13 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
             "Sec-Fetch-Site": "same-site",
             "TE": "trailers",
         }
-        async with ClientSession(headers=headers) as session:
+        if proxy and not connector:
+            try:
+                from aiohttp_socks import ProxyConnector
+                connector = ProxyConnector.from_url(proxy)
+            except ImportError:
+                raise RuntimeError('Install "aiohttp_socks" package for proxy support')
+        async with ClientSession(headers=headers, connector=connector) as session:
             t = format(random.getrandbits(32), '08x')
             async with session.get(
                 f"{API_URL}?EIO=4&transport=polling&t={t}",

View File

@@ -7,7 +7,7 @@ import asyncio
 import time
 import json
 import os
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
 from bs4 import BeautifulSoup
 from urllib.parse import quote
 from typing import Generator, List, Dict
@@ -50,7 +50,7 @@ def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
             raise RuntimeError("Timeout error")
         time.sleep(0.5)

-def create_session(cookies: Dict[str, str]) -> ClientSession:
+def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseConnector = None) -> ClientSession:
     """
     Creates a new client session with specified cookies and headers.
@@ -79,7 +79,13 @@ def create_session(cookies: Dict[str, str]) -> ClientSession:
     }
     if cookies:
         headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
-    return ClientSession(headers=headers)
+    if proxy and not connector:
+        try:
+            from aiohttp_socks import ProxyConnector
+            connector = ProxyConnector.from_url(proxy)
+        except ImportError:
+            raise RuntimeError('Install "aiohttp_socks" package for proxy support')
+    return ClientSession(headers=headers, connector=connector)

 async def create_images(session: ClientSession, prompt: str, proxy: str = None, timeout: int = TIMEOUT_IMAGE_CREATION) -> List[str]:
     """
@@ -214,7 +220,8 @@ class CreateImagesBing:
         cookies = self.cookies or get_cookies(".bing.com")
         if "_U" not in cookies:
             raise RuntimeError('"_U" cookie is missing')
-        async with create_session(cookies) as session:
+        proxy = os.environ.get("G4F_PROXY")
+        async with create_session(cookies, proxy) as session:
            images = await create_images(session, prompt, self.proxy)
            return ImageResponse(images, prompt)
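
Finally, a sketch of calling the updated create_session directly with a SOCKS proxy. The module path and cookie value are placeholders; the "_U" requirement and the create_images signature come from the hunks above.

    import asyncio
    from g4f.Provider.bing.create_images import create_session, create_images  # assumed path

    async def main():
        cookies = {"_U": "<_U cookie from bing.com>"}
        # proxy is converted to an aiohttp_socks ProxyConnector inside create_session.
        async with create_session(cookies, proxy="socks5://127.0.0.1:1080") as session:
            images = await create_images(session, "a lighthouse at night")
            print(images)

    asyncio.run(main())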