gpt4free/etc/testing/test_providers.py

99 lines
3.0 KiB
Python
Raw Normal View History

2023-10-16 02:46:58 +03:00
# from g4f.Provider import __all__, ProviderUtils
# from g4f import ChatCompletion
# import concurrent.futures
# _ = [
# 'BaseProvider',
# 'AsyncProvider',
# 'AsyncGeneratorProvider',
# 'RetryProvider'
# ]
# def test_provider(provider):
# try:
# provider = (ProviderUtils.convert[provider])
# if provider.working and not provider.needs_auth:
# print('testing', provider.__name__)
# completion = ChatCompletion.create(model='gpt-3.5-turbo',
# messages=[{"role": "user", "content": "hello"}], provider=provider)
# return completion, provider.__name__
# except Exception as e:
# #print(f'Failed to test provider: {provider} | {e}')
# return None
# with concurrent.futures.ThreadPoolExecutor() as executor:
# futures = []
# for provider in __all__:
# if provider not in _:
# futures.append(executor.submit(test_provider, provider))
# for future in concurrent.futures.as_completed(futures):
# result = future.result()
# if result:
# print(f'{result[1]} | {result[0]}')
2023-07-28 13:07:17 +03:00
import sys
from pathlib import Path
from typing import Union

from colorama import Fore, Style
2023-07-28 13:07:17 +03:00
sys.path.append(str(Path(__file__).parent.parent))
2023-08-23 00:27:34 +03:00
from g4f import BaseProvider, models, Provider
2023-07-28 13:07:17 +03:00
2023-08-23 00:27:34 +03:00
# Set to True to print provider exceptions while testing.
# NOTE(review): this flag shadows the stdlib `logging` module name — consider
# renaming (would require updating its use in test()).
logging = False
2023-07-28 13:07:17 +03:00
2023-07-28 13:07:17 +03:00
def main():
    """Smoke-test every auth-free provider and report which ones failed.

    A provider counts as failed when it advertises ``working`` but the test
    call produced no usable response.
    """
    failures = []

    for provider in get_providers():
        # Providers that need credentials cannot be exercised here.
        if provider.needs_auth:
            continue

        print("Provider:", provider.__name__)
        outcome = test(provider)
        print("Result:", outcome)

        if provider.working and not outcome:
            failures.append(provider)

    print()
    if failures:
        print(f"{Fore.RED + Style.BRIGHT}Failed providers:{Style.RESET_ALL}")
        for provider in failures:
            print(f"{Fore.RED}{provider.__name__}")
    else:
        print(f"{Fore.GREEN + Style.BRIGHT}All providers are working")
2023-07-28 13:07:17 +03:00
def get_providers() -> list[type[BaseProvider]]:
    """Collect every provider class exported by the ``Provider`` module.

    Skips the ``RetryProvider`` wrapper and any attribute that is not a
    class derived from ``BaseProvider``.
    """
    found = []
    for attr_name in dir(Provider):
        if attr_name == "RetryProvider":
            continue
        attr = getattr(Provider, attr_name)
        if isinstance(attr, type) and issubclass(attr, BaseProvider):
            found.append(attr)
    return found
2023-07-28 13:07:17 +03:00
def create_response(_provider: type[BaseProvider]) -> str:
    """Request one non-streamed completion from *_provider* and return it as text."""
    # Prefer GPT-3.5 where supported; otherwise fall back to the default model.
    if _provider.supports_gpt_35_turbo:
        model = models.gpt_35_turbo.name
    else:
        model = models.default.name

    chunks = _provider.create_completion(
        model=model,
        messages=[{"role": "user", "content": "Hello, who are you? Answer in detail much as possible."}],
        stream=False,
    )
    # create_completion yields string chunks even when stream=False.
    return "".join(chunks)
2023-08-24 22:32:22 +03:00
def test(_provider: type[BaseProvider]) -> Union[str, bool]:
    """Smoke-test a single provider.

    Returns the non-empty response text on success, or ``False`` when the
    provider raised or produced an empty/non-string result. (The annotation
    previously claimed ``bool``, but the success path returns the response
    string, which main() prints and truth-tests.)
    """
    try:
        response = create_response(_provider)
        # Explicit checks instead of `assert`, which is stripped under -O.
        if not isinstance(response, str) or not response:
            return False
        return response
    except Exception as e:
        # Deliberately best-effort: any provider failure just marks it broken.
        if logging:
            print(e)
        return False
# Allow running this file directly as a script.
if __name__ == "__main__":
    main()