Merge pull request #1389 from hlohaus/gpt6

Add Gpt6 Provider
H Lohaus 2023-12-27 16:55:23 +01:00 committed by GitHub
commit 648b322a15
4 changed files with 61 additions and 1 deletion


g4f/Provider/ChatgptDemo.py

@@ -10,7 +10,7 @@ from .helper import format_prompt
 class ChatgptDemo(AsyncGeneratorProvider):
     url = "https://chat.chatgptdemo.net"
     supports_gpt_35_turbo = True
-    working = False
+    working = True

     @classmethod
     async def create_async_generator(

g4f/Provider/Gpt6.py  (new file, 55 lines)

@@ -0,0 +1,55 @@
from __future__ import annotations

import json
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt


class Gpt6(AsyncGeneratorProvider):
    url = "https://gpt6.ai"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        # Mimic a regular browser request to the gpt6.ai backend
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/119.0",
            "Accept": "*/*",
            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Content-Type": "application/json",
            "Origin": "https://gpt6.ai",
            "Connection": "keep-alive",
            "Referer": "https://gpt6.ai/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "cross-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            data = {
                "prompts": messages,
                # The backend expects a geoInfo block; these values are hard-coded placeholders
                "geoInfo": {"ip": "100.90.100.222", "hostname": "ip-100-090-100-222.um36.pools.vodafone-ip.de", "city": "Muenchen", "region": "North Rhine-Westphalia", "country": "DE", "loc": "44.0910,5.5827", "org": "AS3209 Vodafone GmbH", "postal": "41507", "timezone": "Europe/Berlin"},
                "paid": False,
                "character": {"textContent": "", "id": "52690ad6-22e4-4674-93d4-1784721e9944", "name": "GPT6", "htmlContent": ""}
            }
            async with session.post(f"https://seahorse-app-d29hu.ondigitalocean.app/api/v1/query", json=data, proxy=proxy) as response:
                response.raise_for_status()
                # The endpoint streams OpenAI-style server-sent events, terminated by "data: [DONE]"
                async for line in response.content:
                    if line.startswith(b"data: [DONE]"):
                        break
                    elif line.startswith(b"data: "):
                        # Strip the "data: " prefix and trailing newline, then decode the JSON chunk
                        line = json.loads(line[6:-1])
                        chunk = line["choices"][0]["delta"].get("content")
                        if chunk:
                            yield chunk
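
For context, a minimal sketch of driving the new provider directly through its async generator interface; the asyncio wrapper and the example message are illustrative and not part of this commit:

import asyncio

from g4f.Provider import Gpt6

async def main():
    # create_async_generator streams the reply text chunk by chunk
    messages = [{"role": "user", "content": "Hello, what can you do?"}]
    async for chunk in Gpt6.create_async_generator("gpt-3.5-turbo", messages):
        print(chunk, end="", flush=True)

asyncio.run(main())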


g4f/Provider/__init__.py

@@ -29,6 +29,7 @@ from .Chatxyz import Chatxyz
 from .DeepInfra import DeepInfra
 from .FakeGpt import FakeGpt
 from .FreeGpt import FreeGpt
+from .Gpt6 import Gpt6
 from .GPTalk import GPTalk
 from .GptChatly import GptChatly
 from .GptForLove import GptForLove


g4f/models.py

@@ -8,6 +8,7 @@ from .Provider import (
     ChatAnywhere,
     ChatgptNext,
     HuggingChat,
+    ChatgptDemo,
     GptForLove,
     ChatgptAi,
     DeepInfra,
@@ -23,6 +24,7 @@ from .Provider import (
     Phind,
     Koala,
     GptGo,
+    Gpt6,
     Bard,
     Bing,
     You,
@@ -65,6 +67,8 @@ gpt_35_long = Model(
         ChatgptDemoAi,
         OnlineGpt,
         ChatgptNext,
+        ChatgptDemo,
+        Gpt6,
     ])
 )
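
With these registrations, Gpt6 is importable from g4f.Provider and joins the gpt_35_long provider list, so it can also be pinned explicitly. A rough usage sketch, assuming the g4f.ChatCompletion.create entry point as it existed around this commit:

import g4f
from g4f.Provider import Gpt6

# Force the Gpt6 provider instead of letting the model's provider list pick one
response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=Gpt6,
    messages=[{"role": "user", "content": "Say hello"}],
)
print(response)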