import re
from urllib.parse import urlparse
import asyncio

from g4f import models, ChatCompletion
from g4f.providers.types import BaseRetryProvider, ProviderType
from etc.testing._providers import get_providers
from g4f import debug

# Enable g4f debug logging so test_async prints each provider failure.
debug.logging = True
async def test_async(provider: ProviderType):
    """Probe a single provider with a short chat request.

    Returns True when the provider is marked working and answers the
    probe with a non-empty response within 30 seconds; False otherwise
    (not-working providers, errors and timeouts all yield False).
    """
    # Skip providers already flagged as broken — no point probing them.
    if not provider.working:
        return False

    try:
        answer = await asyncio.wait_for(
            ChatCompletion.create_async(
                model=models.default,
                messages=[{"role": "user", "content": "Hello Assistant!"}],
                provider=provider,
            ),
            30,
        )
    except Exception as exc:
        # Any failure (network, auth, timeout, provider bug) just marks
        # the provider as non-responsive; log it when debugging is on.
        if debug.logging:
            print(f"{provider.__name__}: {exc.__class__.__name__}: {exc}")
        return False
    return bool(answer)
def test_async_list(providers: list[ProviderType]):
    """Probe every provider and return one boolean per provider.

    The result list is in the same order as *providers*, so
    ``responses[i]`` corresponds to ``providers[i]``.

    The original implementation called ``asyncio.run`` once per provider,
    which created a fresh event loop each time and ran the probes strictly
    sequentially — up to 30 s timeout apiece. Running them all in a single
    loop with ``asyncio.gather`` lets the requests overlap.
    """
    async def _probe_all() -> list:
        # gather preserves argument order, and test_async catches all of
        # its own exceptions, so every slot resolves to a plain bool.
        return await asyncio.gather(*(test_async(p) for p in providers))

    return asyncio.run(_probe_all())
def print_providers():
    """Print one markdown table per section (GPT-4 / GPT-3.5 / Other).

    Each row lists a provider's website, class name, model support,
    streaming support, live status (from a probe via test_async_list)
    and whether authentication is required. Working providers are
    listed before broken ones within each section.
    """
    providers = get_providers()
    # responses[idx] is True when providers[idx] answered the live probe.
    responses = test_async_list(providers)

    # NOTE: renamed the original loop variable `type` — it shadowed the builtin.
    for section in ("GPT-4", "GPT-3.5", "Other"):
        lines = [
            "",
            f"### {section}",
            "",
            "| Website | Provider | GPT-3.5 | GPT-4 | Stream | Status | Auth |",
            "| ------ | ------- | ------- | ----- | ------ | ------ | ---- |",
        ]
        for is_working in (True, False):
            for idx, _provider in enumerate(providers):
                if is_working != _provider.working:
                    continue
                # Each provider belongs to exactly one section.
                if section == "GPT-4":
                    include = _provider.supports_gpt_4
                elif section == "GPT-3.5":
                    include = (not _provider.supports_gpt_4
                               and _provider.supports_gpt_35_turbo)
                else:  # "Other"
                    include = (not _provider.supports_gpt_4
                               and not _provider.supports_gpt_35_turbo)
                if not include:
                    continue

                netloc = urlparse(_provider.url).netloc.replace("www.", "")
                website = f"[{netloc}]({_provider.url})"
                provider_name = f"`g4f.Provider.{_provider.__name__}`"
                has_gpt_35 = "✔️" if _provider.supports_gpt_35_turbo else "❌"
                has_gpt_4 = "✔️" if _provider.supports_gpt_4 else "❌"
                stream = "✔️" if _provider.supports_stream else "❌"
                # Fix: the original assigned an "Active" badge and then
                # unconditionally overwrote it — that dead store is removed.
                if not _provider.working:
                    status = '![Inactive](https://img.shields.io/badge/Inactive-red)'
                elif responses[idx]:
                    status = '![Active](https://img.shields.io/badge/Active-brightgreen)'
                else:
                    status = '![Unknown](https://img.shields.io/badge/Unknown-grey)'
                auth = "✔️" if _provider.needs_auth else "❌"

                lines.append(
                    f"| {website} | {provider_name} | {has_gpt_35} | {has_gpt_4} | {stream} | {status} | {auth} |"
                )
        print("\n".join(lines))
def print_models():
    """Print a markdown table listing every registered model.

    Columns: model name, base provider (company), the g4f provider class
    used (or a provider count for retry providers) and the company website.
    """
    base_provider_names = {
        "google": "Google",
        "openai": "OpenAI",
        "huggingface": "Huggingface",
        "anthropic": "Anthropic",
        "inflection": "Inflection"
    }
    provider_urls = {
        "google": "https://gemini.google.com/",
        "openai": "https://openai.com/",
        "huggingface": "https://huggingface.co/",
        "anthropic": "https://www.anthropic.com/",
        "inflection": "https://inflection.ai/",
    }

    rows = [
        "| Model | Base Provider | Provider | Website |",
        "| ----- | ------------- | -------- | ------- |",
    ]
    for key, model in models.ModelUtils.convert.items():
        # Of the many gpt-3.5 / gpt-4 aliases, keep only the canonical ones.
        if key.startswith("gpt-3.5") or key.startswith("gpt-4"):
            if key not in ("gpt-3.5-turbo", "gpt-4", "gpt-4-turbo"):
                continue
        # Display the bare model name, without any "org/" or ":tag" prefix.
        display_name = re.split(r":|/", model.name)[-1]
        base_provider = base_provider_names[model.base_provider]
        best = model.best_provider
        provider_name = (
            f"{len(best.providers)}+ Providers"
            if isinstance(best, BaseRetryProvider)
            else f"g4f.Provider.{best.__name__}"
        )
        provider_url = provider_urls[model.base_provider]
        netloc = urlparse(provider_url).netloc.replace("www.", "")
        website = f"[{netloc}]({provider_url})"

        rows.append(f"| {display_name} | {base_provider} | {provider_name} | {website} |")

    print("\n".join(rows))
# Script entry point: only the model table is printed by default; the
# provider table (which live-probes every provider and is slow) is left
# commented out for manual use.
if __name__ == "__main__":
    #print_providers()
    #print("\n", "-" * 50, "\n")
    print_models()