from __future__ import annotations

from .Openai import Openai
from ...typing import AsyncResult, Messages
class PerplexityApi(Openai):
    """OpenAI-compatible provider that targets the Perplexity AI API.

    Reuses the request logic from :class:`Openai`, only swapping in the
    Perplexity base URL and model list.
    """

    label = "Perplexity API"
    url = "https://www.perplexity.ai"
    working = True
    default_model = "llama-3-sonar-large-32k-online"
    models = [
        "llama-3-sonar-small-32k-chat",
        "llama-3-sonar-small-32k-online",
        "llama-3-sonar-large-32k-chat",
        "llama-3-sonar-large-32k-online",
        "llama-3-8b-instruct",
        "llama-3-70b-instruct",
    ]

    @classmethod
    def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        api_base: str = "https://api.perplexity.ai",
        **kwargs
    ) -> AsyncResult:
        """Create an async completion stream via the Perplexity endpoint.

        Args:
            model: Model identifier (one of ``cls.models`` or empty for default).
            messages: Chat history to send.
            api_base: API root; defaults to the official Perplexity endpoint.
            **kwargs: Forwarded verbatim to ``Openai.create_async_generator``.

        Returns:
            The async result produced by the parent provider.
        """
        # Delegate entirely to the OpenAI-compatible base implementation,
        # overriding only the endpoint. (A stray VCS timestamp line that had
        # been pasted into this call has been removed.)
        return super().create_async_generator(
            model, messages, api_base=api_base, **kwargs
        )