From 75cb6163ae691bad1072ae334500f8a8a44d099e Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Mon, 16 Dec 2024 19:34:22 +0100
Subject: [PATCH] Use other model for copilot

---
 etc/tool/copilot.py            |  2 +-
 g4f/Provider/PollinationsAI.py | 22 +++++++++++-----------
 g4f/models.py                  |  4 +---
 3 files changed, 13 insertions(+), 15 deletions(-)

diff --git a/etc/tool/copilot.py b/etc/tool/copilot.py
index 4732e341..df4dd796 100644
--- a/etc/tool/copilot.py
+++ b/etc/tool/copilot.py
@@ -18,7 +18,7 @@ g4f.debug.version_check = False
 GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
 GITHUB_REPOSITORY = os.getenv('GITHUB_REPOSITORY')
 G4F_PROVIDER = os.getenv('G4F_PROVIDER')
-G4F_MODEL = os.getenv('G4F_MODEL') or g4f.models.default
+G4F_MODEL = os.getenv('G4F_MODEL') or g4f.models.gpt_4
 
 def get_pr_details(github: Github) -> PullRequest:
     """
diff --git a/g4f/Provider/PollinationsAI.py b/g4f/Provider/PollinationsAI.py
index 31a7e7e4..20f3e0c9 100644
--- a/g4f/Provider/PollinationsAI.py
+++ b/g4f/Provider/PollinationsAI.py
@@ -15,16 +15,14 @@ from .helper import format_prompt
 class PollinationsAI(OpenaiAPI):
     label = "Pollinations AI"
     url = "https://pollinations.ai"
-
     working = True
     needs_auth = False
     supports_stream = True
-
+    api_base = "https://text.pollinations.ai/openai"
+
     default_model = "openai"
-
     additional_models_image = ["midjourney", "dall-e-3"]
     additional_models_text = ["sur", "sur-mistral", "claude"]
-
     model_aliases = {
         "gpt-4o": "openai",
         "mistral-nemo": "mistral",
@@ -66,7 +64,6 @@ class PollinationsAI(OpenaiAPI):
         model: str,
         messages: Messages,
         prompt: str = None,
-        api_base: str = "https://text.pollinations.ai/openai",
         api_key: str = None,
         proxy: str = None,
         seed: str = None,
@@ -76,25 +73,28 @@ class PollinationsAI(OpenaiAPI):
     ) -> AsyncResult:
         model = cls.get_model(model)
         if model in cls.image_models:
-            async for response in cls._generate_image(model, messages, prompt, seed, width, height):
+            async for response in cls._generate_image(model, messages, prompt, proxy, seed, width, height):
                 yield response
         elif model in cls.models:
-            async for response in cls._generate_text(model, messages, api_base, api_key, proxy, **kwargs):
+            async for response in cls._generate_text(model, messages, api_key, proxy, **kwargs):
                 yield response
         else:
             raise ValueError(f"Unknown model: {model}")
 
     @classmethod
-    async def _generate_image(cls, model: str, messages: Messages, prompt: str = None, seed: str = None, width: int = 1024, height: int = 1024):
+    async def _generate_image(cls, model: str, messages: Messages, prompt: str = None, proxy: str = None, seed: str = None, width: int = 1024, height: int = 1024):
         if prompt is None:
             prompt = messages[-1]["content"]
         if seed is None:
             seed = random.randint(0, 100000)
         image = f"https://image.pollinations.ai/prompt/{quote(prompt)}?width={width}&height={height}&seed={int(seed)}&nofeed=true&nologo=true&model={quote(model)}"
+        async with ClientSession(connector=get_connector(proxy=proxy), headers=cls.headers) as session:
+            async with session.get(image) as response:
+                await raise_for_status(response)
         yield ImageResponse(image, prompt)
 
     @classmethod
-    async def _generate_text(cls, model: str, messages: Messages, api_base: str, api_key: str = None, proxy: str = None, **kwargs):
+    async def _generate_text(cls, model: str, messages: Messages, api_key: str = None, proxy: str = None, **kwargs):
         if api_key is None:
             async with ClientSession(connector=get_connector(proxy=proxy), headers=cls.headers) as session:
                 prompt = format_prompt(messages)
@@ -104,6 +104,6 @@ class PollinationsAI(OpenaiAPI):
                     yield line.decode(errors="ignore")
         else:
             async for chunk in super().create_async_generator(
-                model, messages, api_base=api_base, proxy=proxy, **kwargs
+                model, messages, proxy=proxy, **kwargs
             ):
-                yield chunk
+                yield chunk
\ No newline at end of file
diff --git a/g4f/models.py b/g4f/models.py
index 96fead58..857119af 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -24,7 +24,6 @@ from .Provider import (
     HuggingFace,
     Liaobots,
     Airforce,
-    Mhystical,
     MetaAI,
     MicrosoftDesigner,
     OpenaiChat,
@@ -68,7 +67,6 @@ default = Model(
     best_provider = IterListProvider([
         DDG,
         Pizzagpt,
-        ReplicateHome,
         Blackbox2,
         Blackbox,
         Copilot,
@@ -78,7 +76,7 @@ default = Model(
         Cloudflare,
         PollinationsAI,
         ChatGptEs,
-        ChatGpt,
+        OpenaiChat,
     ])
 )