import logging
from typing import List, Optional
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, Query, Request
from fastapi.responses import StreamingResponse

from auth import AuthBearer, get_current_user
from llm.qa_base import QABaseBrainPicking
from llm.qa_headless import HeadlessQA
from models import (
    Brain,
    BrainEntity,
    Chat,
    ChatQuestion,
    UserIdentity,
    UserUsage,
    get_supabase_db,
)
from models.databases.supabase.chats import QuestionAndAnswer
from repository.chat import (
    ChatUpdatableProperties,
    CreateChatProperties,
    GetChatHistoryOutput,
    create_chat,
    get_chat_by_id,
    get_user_chats,
    update_chat,
)
from repository.chat.add_question_and_answer import add_question_and_answer
from repository.chat.get_chat_history_with_notifications import (
    ChatItem,
    get_chat_history_with_notifications,
)
from repository.notification.remove_chat_notifications import remove_chat_notifications
from routes.chat.factory import get_chat_strategy
from routes.chat.utils import (
    NullableUUID,
    check_user_requests_limit,
    delete_chat_from_db,
)

# Module-level logger for this router.
logger = logging.getLogger(__name__)

chat_router = APIRouter()


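# Illustrative sketch — how this router would typically be mounted on the main FastAPI
# application. The module path and `app` variable below are assumptions for illustration,
# not definitions taken from this file:
#
#     from fastapi import FastAPI
#     from routes.chat_routes import chat_router
#
#     app = FastAPI()
#     app.include_router(chat_router)

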
@chat_router.get("/chat/healthz", tags=["Health"])
|
|
|
|
async def healthz():
|
|
|
|
return {"status": "ok"}
|
|
|
|
|
|
|
|
|
2023-06-11 00:59:16 +03:00
|
|
|
# get all chats
@chat_router.get("/chat", dependencies=[Depends(AuthBearer())], tags=["Chat"])
async def get_chats(current_user: UserIdentity = Depends(get_current_user)):
    """
    Retrieve all chats for the current user.

    - `current_user`: The current authenticated user.
    - Returns a list of all chats for the user.

    This endpoint retrieves all chats associated with the current authenticated
    user and returns a list of chat objects containing each chat's ID and name.
    """
    chats = get_user_chats(str(current_user.id))
    return {"chats": chats}


# delete one chat
@chat_router.delete(
    "/chat/{chat_id}", dependencies=[Depends(AuthBearer())], tags=["Chat"]
)
async def delete_chat(chat_id: UUID):
    """
    Delete a specific chat by chat ID.
    """
    supabase_db = get_supabase_db()
    remove_chat_notifications(chat_id)

    delete_chat_from_db(supabase_db=supabase_db, chat_id=chat_id)
    return {"message": f"{chat_id} has been deleted."}


# update existing chat metadata
@chat_router.put(
    "/chat/{chat_id}/metadata", dependencies=[Depends(AuthBearer())], tags=["Chat"]
)
async def update_chat_metadata_handler(
    chat_data: ChatUpdatableProperties,
    chat_id: UUID,
    current_user: UserIdentity = Depends(get_current_user),
) -> Chat:
    """
    Update chat attributes.
    """
    chat = get_chat_by_id(chat_id)  # pyright: ignore reportPrivateUsage=none
    if str(current_user.id) != chat.user_id:
        raise HTTPException(
            status_code=403,  # pyright: ignore reportPrivateUsage=none
            detail="You must be the owner of the chat to update it.",  # pyright: ignore reportPrivateUsage=none
        )
    return update_chat(chat_id=chat_id, chat_data=chat_data)


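# Illustrative sketch — calling the metadata endpoint above. The base URL, bearer token,
# and the `chat_name` field are assumptions (`chat_id` and `token` are placeholders;
# ChatUpdatableProperties is defined elsewhere):
#
#     import requests
#
#     resp = requests.put(
#         f"http://localhost:5050/chat/{chat_id}/metadata",
#         headers={"Authorization": f"Bearer {token}"},
#         json={"chat_name": "Renamed chat"},
#     )
#     resp.raise_for_status()

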
# create new chat
@chat_router.post("/chat", dependencies=[Depends(AuthBearer())], tags=["Chat"])
async def create_chat_handler(
    chat_data: CreateChatProperties,
    current_user: UserIdentity = Depends(get_current_user),
):
    """
    Create a new chat with initial chat messages.
    """
    return create_chat(user_id=current_user.id, chat_data=chat_data)


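# Illustrative sketch — creating a chat via the endpoint above. The base URL, bearer
# token, and the `name` field are assumptions (CreateChatProperties is defined elsewhere):
#
#     import requests
#
#     resp = requests.post(
#         "http://localhost:5050/chat",
#         headers={"Authorization": f"Bearer {token}"},
#         json={"name": "My new chat"},
#     )
#     print(resp.json())

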
# add new question to chat
@chat_router.post(
    "/chat/{chat_id}/question",
    dependencies=[
        Depends(
            AuthBearer(),
        ),
    ],
    tags=["Chat"],
)
async def create_question_handler(
    request: Request,
    chat_question: ChatQuestion,
    chat_id: UUID,
    brain_id: NullableUUID
    | UUID
    | None = Query(..., description="The ID of the brain"),
    current_user: UserIdentity = Depends(get_current_user),
) -> GetChatHistoryOutput:
    """
    Add a new question to the chat.
    """
    chat_instance = get_chat_strategy(brain_id)
    chat_instance.validate_authorization(user_id=current_user.id, brain_id=brain_id)

    current_user.openai_api_key = request.headers.get("Openai-Api-Key")
    brain = Brain(id=brain_id)
    brain_details: BrainEntity | None = None

    userDailyUsage = UserUsage(
        id=current_user.id,
        email=current_user.email,
        openai_api_key=current_user.openai_api_key,
    )
    userSettings = userDailyUsage.get_user_settings()

    # Only allow models enabled for this user; otherwise fall back to gpt-3.5-turbo below.
    is_model_ok = (brain_details or chat_question).model in userSettings.get("models", ["gpt-3.5-turbo"])  # type: ignore

    if not current_user.openai_api_key:
        current_user.openai_api_key = chat_instance.get_openai_api_key(
            brain_id=brain_id, user_id=current_user.id
        )

    # Retrieve chat model (temperature, max_tokens, model)
    if (
        not chat_question.model
        or not chat_question.temperature
        or not chat_question.max_tokens
    ):
        # TODO: create ChatConfig class (pick config from brain or user or chat) and use it here
        chat_question.model = chat_question.model or brain.model or "gpt-3.5-turbo"
        chat_question.temperature = (
            chat_question.temperature or brain.temperature or 0.1
        )
        chat_question.max_tokens = chat_question.max_tokens or brain.max_tokens or 512

    try:
        check_user_requests_limit(current_user)
        gpt_answer_generator = chat_instance.get_answer_generator(
            chat_id=str(chat_id),
            model=chat_question.model if is_model_ok else "gpt-3.5-turbo",  # type: ignore
            max_tokens=chat_question.max_tokens,
            temperature=chat_question.temperature,
            brain_id=str(brain_id),
            user_openai_api_key=current_user.openai_api_key,  # pyright: ignore reportPrivateUsage=none
            streaming=False,
            prompt_id=chat_question.prompt_id,
            user_id=current_user.id,
        )

        chat_answer = gpt_answer_generator.generate_answer(chat_id, chat_question)

        return chat_answer
    except HTTPException as e:
        raise e


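# Illustrative sketch — asking a question through the endpoint above. The base URL,
# bearer token, and minimal request body are assumptions (`chat_id`, `brain_id`, and
# `token` are placeholders; ChatQuestion is defined in models):
#
#     import requests
#
#     resp = requests.post(
#         f"http://localhost:5050/chat/{chat_id}/question",
#         params={"brain_id": str(brain_id)},
#         headers={"Authorization": f"Bearer {token}"},
#         json={"question": "What do my documents say about pricing?"},
#     )
#     resp.raise_for_status()
#     print(resp.json())

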
# stream new question response from chat
@chat_router.post(
    "/chat/{chat_id}/question/stream",
    dependencies=[
        Depends(
            AuthBearer(),
        ),
    ],
    tags=["Chat"],
)
async def create_stream_question_handler(
    request: Request,
    chat_question: ChatQuestion,
    chat_id: UUID,
    brain_id: NullableUUID
    | UUID
    | None = Query(..., description="The ID of the brain"),
    current_user: UserIdentity = Depends(get_current_user),
) -> StreamingResponse:
    """
    Stream the answer to a new question in the chat as server-sent events.
    """
    chat_instance = get_chat_strategy(brain_id)
    chat_instance.validate_authorization(user_id=current_user.id, brain_id=brain_id)

    # Retrieve user's OpenAI API key
    current_user.openai_api_key = request.headers.get("Openai-Api-Key")
    brain = Brain(id=brain_id)
    brain_details: BrainEntity | None = None

    userDailyUsage = UserUsage(
        id=current_user.id,
        email=current_user.email,
        openai_api_key=current_user.openai_api_key,
    )
    userSettings = userDailyUsage.get_user_settings()

    if not current_user.openai_api_key:
        current_user.openai_api_key = chat_instance.get_openai_api_key(
            brain_id=brain_id, user_id=current_user.id
        )

    # Retrieve chat model (temperature, max_tokens, model)
    if (
        not chat_question.model
        or chat_question.temperature is None
        or not chat_question.max_tokens
    ):
        # TODO: create ChatConfig class (pick config from brain or user or chat) and use it here
        chat_question.model = chat_question.model or brain.model or "gpt-3.5-turbo"
        chat_question.temperature = chat_question.temperature or brain.temperature or 0
        chat_question.max_tokens = chat_question.max_tokens or brain.max_tokens or 256

    try:
        logger.info(f"Streaming request for {chat_question.model}")
        check_user_requests_limit(current_user)
        gpt_answer_generator: HeadlessQA | QABaseBrainPicking

        # TODO: check if the model is in the list of models available for the user
        is_model_ok = (brain_details or chat_question).model in userSettings.get("models", ["gpt-3.5-turbo"])  # type: ignore

        gpt_answer_generator = chat_instance.get_answer_generator(
            chat_id=str(chat_id),
            model=(brain_details or chat_question).model if is_model_ok else "gpt-3.5-turbo",  # type: ignore
            max_tokens=(brain_details or chat_question).max_tokens,  # type: ignore
            temperature=(brain_details or chat_question).temperature,  # type: ignore
            user_openai_api_key=current_user.openai_api_key,  # pyright: ignore reportPrivateUsage=none
            streaming=True,
            prompt_id=chat_question.prompt_id,
            brain_id=str(brain_id),
            user_id=current_user.id,
        )

        return StreamingResponse(
            gpt_answer_generator.generate_stream(chat_id, chat_question),
            media_type="text/event-stream",
        )
    except HTTPException as e:
        raise e


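# Illustrative sketch — consuming the text/event-stream response of the endpoint above.
# The base URL, bearer token, and request body are assumptions (`chat_id`, `brain_id`,
# and `token` are placeholders):
#
#     import requests
#
#     with requests.post(
#         f"http://localhost:5050/chat/{chat_id}/question/stream",
#         params={"brain_id": str(brain_id)},
#         headers={"Authorization": f"Bearer {token}"},
#         json={"question": "Summarise my latest upload"},
#         stream=True,
#     ) as resp:
#         for line in resp.iter_lines():
#             if line:
#                 print(line.decode())

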
# get chat history
@chat_router.get(
    "/chat/{chat_id}/history", dependencies=[Depends(AuthBearer())], tags=["Chat"]
)
async def get_chat_history_handler(
    chat_id: UUID,
) -> List[ChatItem]:
    # TODO: RBAC with current_user
    return get_chat_history_with_notifications(chat_id)


@chat_router.post(
    "/chat/{chat_id}/question/answer",
    dependencies=[Depends(AuthBearer())],
    tags=["Chat"],
)
async def add_question_and_answer_handler(
    chat_id: UUID,
    question_and_answer: QuestionAndAnswer,
) -> Optional[Chat]:
    """
    Add a new question and answer to the chat.
    """
    return add_question_and_answer(chat_id, question_and_answer)


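# Illustrative sketch — recording a pre-computed question/answer pair via the endpoint
# above. The base URL, bearer token, and the `question`/`answer` fields are assumptions
# (`chat_id` and `token` are placeholders; QuestionAndAnswer is defined in
# models.databases.supabase.chats):
#
#     import requests
#
#     resp = requests.post(
#         f"http://localhost:5050/chat/{chat_id}/question/answer",
#         headers={"Authorization": f"Bearer {token}"},
#         json={"question": "Example question", "answer": "Example answer"},
#     )
#     resp.raise_for_status()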