Mirror of https://github.com/xtekky/gpt4free.git (synced 2025-01-03 08:47:29 +03:00)
Commit 75cb6163ae: Use other model for copilot
Parent commit: 5b76f2cde0
@@ -18,7 +18,7 @@ g4f.debug.version_check = False
 GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
 GITHUB_REPOSITORY = os.getenv('GITHUB_REPOSITORY')
 G4F_PROVIDER = os.getenv('G4F_PROVIDER')
-G4F_MODEL = os.getenv('G4F_MODEL') or g4f.models.default
+G4F_MODEL = os.getenv('G4F_MODEL') or g4f.models.gpt_4

 def get_pr_details(github: Github) -> PullRequest:
     """
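For context, a minimal sketch of how this fallback is typically consumed through g4f's blocking API. The prompt below is illustrative only; the actual script builds its review prompt from the pull-request details (see get_pr_details above).

import os
import g4f

# Fall back to gpt_4 when G4F_MODEL is not set in the environment, as in the hunk above.
G4F_MODEL = os.getenv('G4F_MODEL') or g4f.models.gpt_4

# Illustrative call only; not taken from the review script itself.
response = g4f.ChatCompletion.create(
    model=G4F_MODEL,
    messages=[{"role": "user", "content": "Review this pull request diff."}],
)
print(response)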
@@ -15,16 +15,14 @@ from .helper import format_prompt
 class PollinationsAI(OpenaiAPI):
     label = "Pollinations AI"
     url = "https://pollinations.ai"
-
     working = True
     needs_auth = False
     supports_stream = True
+    api_base = "https://text.pollinations.ai/openai"

     default_model = "openai"
-
     additional_models_image = ["midjourney", "dall-e-3"]
     additional_models_text = ["sur", "sur-mistral", "claude"]
-
     model_aliases = {
         "gpt-4o": "openai",
         "mistral-nemo": "mistral",
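The endpoint that used to be threaded through each call is now a class attribute. A minimal sketch of that pattern, using made-up class names rather than g4f's actual base class:

# Sketch only: OpenaiCompatibleBase and PollinationsLike are illustrative names,
# not the real g4f classes.
class OpenaiCompatibleBase:
    api_base: str = None

    @classmethod
    def endpoint(cls, api_base: str = None) -> str:
        # Prefer an explicit argument, otherwise fall back to the class attribute.
        return f"{api_base or cls.api_base}/chat/completions"

class PollinationsLike(OpenaiCompatibleBase):
    api_base = "https://text.pollinations.ai/openai"

# Callers no longer need to pass api_base on every call.
assert PollinationsLike.endpoint() == "https://text.pollinations.ai/openai/chat/completions"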
@@ -66,7 +64,6 @@ class PollinationsAI(OpenaiAPI):
         model: str,
         messages: Messages,
         prompt: str = None,
-        api_base: str = "https://text.pollinations.ai/openai",
         api_key: str = None,
         proxy: str = None,
         seed: str = None,
@@ -76,25 +73,28 @@ class PollinationsAI(OpenaiAPI):
     ) -> AsyncResult:
         model = cls.get_model(model)
         if model in cls.image_models:
-            async for response in cls._generate_image(model, messages, prompt, seed, width, height):
+            async for response in cls._generate_image(model, messages, prompt, proxy, seed, width, height):
                 yield response
         elif model in cls.models:
-            async for response in cls._generate_text(model, messages, api_base, api_key, proxy, **kwargs):
+            async for response in cls._generate_text(model, messages, api_key, proxy, **kwargs):
                 yield response
         else:
             raise ValueError(f"Unknown model: {model}")

     @classmethod
-    async def _generate_image(cls, model: str, messages: Messages, prompt: str = None, seed: str = None, width: int = 1024, height: int = 1024):
+    async def _generate_image(cls, model: str, messages: Messages, prompt: str = None, proxy: str = None, seed: str = None, width: int = 1024, height: int = 1024):
         if prompt is None:
             prompt = messages[-1]["content"]
         if seed is None:
             seed = random.randint(0, 100000)
         image = f"https://image.pollinations.ai/prompt/{quote(prompt)}?width={width}&height={height}&seed={int(seed)}&nofeed=true&nologo=true&model={quote(model)}"
+        async with ClientSession(connector=get_connector(proxy=proxy), headers=cls.headers) as session:
+            async with session.get(image) as response:
+                await raise_for_status(response)
         yield ImageResponse(image, prompt)

     @classmethod
-    async def _generate_text(cls, model: str, messages: Messages, api_base: str, api_key: str = None, proxy: str = None, **kwargs):
+    async def _generate_text(cls, model: str, messages: Messages, api_key: str = None, proxy: str = None, **kwargs):
         if api_key is None:
             async with ClientSession(connector=get_connector(proxy=proxy), headers=cls.headers) as session:
                 prompt = format_prompt(messages)
@@ -104,6 +104,6 @@ class PollinationsAI(OpenaiAPI):
                         yield line.decode(errors="ignore")
         else:
             async for chunk in super().create_async_generator(
-                model, messages, api_base=api_base, proxy=proxy, **kwargs
+                model, messages, proxy=proxy, **kwargs
             ):
                 yield chunk
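Taken together, the provider now resolves its endpoint from the class attribute and passes the proxy through to image requests. A hypothetical usage sketch of the updated call signature; the model name and message content are illustrative:

import asyncio
from g4f.Provider import PollinationsAI

async def main():
    messages = [{"role": "user", "content": "Say hello"}]
    # api_base is no longer passed per call; the class attribute supplies it.
    async for chunk in PollinationsAI.create_async_generator("openai", messages, proxy=None):
        print(chunk, end="")

asyncio.run(main())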
@@ -24,7 +24,6 @@ from .Provider import (
     HuggingFace,
     Liaobots,
     Airforce,
-    Mhystical,
     MetaAI,
     MicrosoftDesigner,
     OpenaiChat,
@@ -68,7 +67,6 @@ default = Model(
     best_provider = IterListProvider([
         DDG,
         Pizzagpt,
-        ReplicateHome,
         Blackbox2,
         Blackbox,
         Copilot,
@@ -78,7 +76,7 @@ default = Model(
         Cloudflare,
         PollinationsAI,
         ChatGptEs,
-        ChatGpt,
+        OpenaiChat,
     ])
 )

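For reference, a short sketch of what the change to the default provider list means in practice: leaving the model at its default lets IterListProvider rotate through the listed providers (now including OpenaiChat instead of ChatGpt) until one answers. The prompt is illustrative only.

import g4f

# Illustrative call; provider rotation is handled by the default model's IterListProvider.
response = g4f.ChatCompletion.create(
    model=g4f.models.default,
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)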