# gpt4free/tool/readme_table.py
# Generates the provider and model markdown tables printed for the README.
import re
import sys
from pathlib import Path
from urllib.parse import urlparse
sys.path.append(str(Path(__file__).parent.parent))
import asyncio
from g4f import models
from g4f.Provider.base_provider import AsyncProvider, BaseProvider
2023-09-23 03:29:55 +03:00
from g4f.Provider.retry_provider import RetryProvider
from testing.test_providers import get_providers
logging = False
2023-07-28 13:07:17 +03:00
def print_imports():
print("##### Providers:")
print("```py")
print("from g4f.Provider import (")
for _provider in get_providers():
if _provider.working:
print(f" {_provider.__name__},")
2023-09-23 03:29:55 +03:00
print(")")
print("# Usage:")
print("response = g4f.ChatCompletion.create(..., provider=ProviderName)")
print("```")
print()
print()
def print_async():
print("##### Async support:")
print("```py")
2023-09-12 01:49:05 +03:00
print("_providers = [")
for _provider in get_providers():
if _provider.working and issubclass(_provider, AsyncProvider):
2023-09-12 01:49:05 +03:00
print(f" g4f.Provider.{_provider.__name__},")
print("]")
print("```")
print()
print()
2023-07-28 13:07:17 +03:00
async def test_async(provider: type[BaseProvider]):
if not provider.working:
return False
model = models.gpt_35_turbo.name if provider.supports_gpt_35_turbo else models.default.name
messages = [{"role": "user", "content": "Hello Assistant!"}]
try:
if issubclass(provider, AsyncProvider):
response = await provider.create_async(model=model, messages=messages)
else:
response = provider.create_completion(model=model, messages=messages, stream=False)
return True if response else False
except Exception as e:
if logging:
print(f"{provider.__name__}: {e.__class__.__name__}: {e}")
return False
async def test_async_list(providers: list[type[BaseProvider]]):
responses: list = [
test_async(_provider)
for _provider in providers
]
return await asyncio.gather(*responses)
2023-07-28 13:07:17 +03:00
def print_providers():
lines = [
"| Website| Provider| gpt-3.5 | gpt-4 | Streaming | Asynchron | Status | Auth |",
"| ------ | ------- | ------- | ----- | --------- | --------- | ------ | ---- |",
2023-07-28 13:07:17 +03:00
]
2023-07-28 13:07:17 +03:00
providers = get_providers()
2023-09-23 03:30:45 +03:00
responses = asyncio.run(test_async_list(providers))
2023-08-24 22:32:22 +03:00
for is_working in (True, False):
for idx, _provider in enumerate(providers):
2023-08-24 22:32:22 +03:00
if is_working != _provider.working:
continue
2023-09-23 03:29:55 +03:00
if _provider == RetryProvider:
continue
2023-08-24 22:32:22 +03:00
netloc = urlparse(_provider.url).netloc
website = f"[{netloc}]({_provider.url})"
2023-09-23 03:29:55 +03:00
provider_name = f"`g4f.Provider.{_provider.__name__}`"
2023-08-24 22:32:22 +03:00
has_gpt_35 = "✔️" if _provider.supports_gpt_35_turbo else ""
has_gpt_4 = "✔️" if _provider.supports_gpt_4 else ""
stream = "✔️" if _provider.supports_stream else ""
can_async = "✔️" if issubclass(_provider, AsyncProvider) else ""
2023-08-24 22:32:22 +03:00
if _provider.working:
2023-09-23 03:29:55 +03:00
status = '![Active](https://img.shields.io/badge/Active-brightgreen)'
2023-09-23 03:30:45 +03:00
if responses[idx]:
status = '![Active](https://img.shields.io/badge/Active-brightgreen)'
else:
status = '![Unknown](https://img.shields.io/badge/Unknown-grey)'
2023-08-24 22:32:22 +03:00
else:
status = '![Inactive](https://img.shields.io/badge/Inactive-red)'
auth = "✔️" if _provider.needs_auth else ""
lines.append(
f"| {website} | {provider_name} | {has_gpt_35} | {has_gpt_4} | {stream} | {can_async} | {status} | {auth} |"
2023-08-24 22:32:22 +03:00
)
2023-07-28 13:07:17 +03:00
print("\n".join(lines))
def print_models():
base_provider_names = {
"cohere": "Cohere",
"google": "Google",
"openai": "OpenAI",
"anthropic": "Anthropic",
"replicate": "Replicate",
"huggingface": "Huggingface",
}
provider_urls = {
"Bard": "https://bard.google.com/",
"H2o": "https://www.h2o.ai/",
"Vercel": "https://sdk.vercel.ai/",
}
lines = [
"| Model | Base Provider | Provider | Website |",
"| ----- | ------------- | -------- | ------- |",
]
_models = get_models()
for model in _models:
if not model.best_provider or model.best_provider.__name__ not in provider_urls:
2023-08-24 22:32:22 +03:00
continue
2023-07-28 13:07:17 +03:00
name = re.split(r":|/", model.name)[-1]
2023-07-28 13:07:17 +03:00
base_provider = base_provider_names[model.base_provider]
2023-09-18 00:23:54 +03:00
provider_name = f"g4f.provider.{model.best_provider.__name__}"
2023-07-28 13:07:17 +03:00
provider_url = provider_urls[model.best_provider.__name__]
netloc = urlparse(provider_url).netloc
website = f"[{netloc}]({provider_url})"
2023-07-28 13:07:17 +03:00
lines.append(f"| {name} | {base_provider} | {provider_name} | {website} |")
print("\n".join(lines))
def get_models():
_models = [item[1] for item in models.__dict__.items()]
_models = [model for model in _models if type(model) is models.Model]
return [model for model in _models if model.name not in ["gpt-3.5-turbo", "gpt-4"]]
if __name__ == "__main__":
2023-09-23 03:30:45 +03:00
print_imports()
print_async()
print_providers()
2023-09-23 03:30:45 +03:00
print("\n", "-" * 50, "\n")
print_models()