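"""Pydantic models for chat messages and questions."""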
from typing import List, Optional, Tuple
from uuid import UUID

from pydantic import BaseModel


class ChatMessage(BaseModel):
    model: str = "gpt-3.5-turbo-16k"
    question: str
    # A list of tuples where each tuple is (speaker, text)
    history: List[Tuple[str, str]]
    temperature: float = 0.0
    max_tokens: int = 256
    use_summarization: bool = False
    chat_id: Optional[UUID] = None
    chat_name: Optional[str] = None


class ChatQuestion(BaseModel):
    question: str
    # Explicit None defaults keep these fields optional (Pydantic v2 treats
    # Optional fields without a default as required).
    model: Optional[str] = None
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None
    brain_id: Optional[UUID] = None
    prompt_id: Optional[UUID] = None
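

# Minimal usage sketch: builds one instance of each request model. The field
# values below are illustrative only; they are not defaults of the module.
if __name__ == "__main__":
    example_message = ChatMessage(
        question="What is in my knowledge base?",
        history=[("user", "Hello"), ("assistant", "Hi, how can I help?")],
    )
    example_question = ChatQuestion(question="Summarize the previous answer")
    print(example_message)
    print(example_question)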