2023-09-20 10:35:37 +03:00
|
|
|
import os
|
2023-09-08 12:03:14 +03:00
|
|
|
from typing import Optional
|
2023-06-28 20:39:27 +03:00
|
|
|
from uuid import UUID
|
2023-06-11 00:59:16 +03:00
|
|
|
|
2023-07-04 18:56:54 +03:00
|
|
|
from auth import AuthBearer, get_current_user
|
2023-09-14 12:56:59 +03:00
|
|
|
from celery_worker import process_file_and_notify
|
2023-09-20 10:35:37 +03:00
|
|
|
from fastapi import APIRouter, Depends, HTTPException, Query, Request, UploadFile
|
|
|
|
from logger import get_logger
|
2023-09-18 22:28:07 +03:00
|
|
|
from models import Brain, UserIdentity, UserUsage
|
2023-09-20 10:35:37 +03:00
|
|
|
from models.databases.supabase.knowledge import CreateKnowledgeProperties
|
|
|
|
from models.databases.supabase.notifications import CreateNotificationProperties
|
2023-09-07 14:22:06 +03:00
|
|
|
from models.notifications import NotificationsStatusEnum
|
2023-08-21 13:25:16 +03:00
|
|
|
from repository.brain import get_brain_details
|
2023-09-14 12:56:59 +03:00
|
|
|
from repository.files.upload_file import upload_file_storage
|
2023-09-20 10:35:37 +03:00
|
|
|
from repository.knowledge.add_knowledge import add_knowledge
|
2023-09-07 14:22:06 +03:00
|
|
|
from repository.notification.add_notification import add_notification
|
2023-08-21 13:25:16 +03:00
|
|
|
from repository.user_identity import get_user_identity
|
2023-07-19 14:41:46 +03:00
|
|
|
from routes.authorizations.brain_authorization import (
|
|
|
|
RoleEnum,
|
|
|
|
validate_brain_authorization,
|
|
|
|
)
|
2023-09-20 10:35:37 +03:00
|
|
|
from utils.file import convert_bytes, get_file_size
|
2023-07-19 14:41:46 +03:00
|
|
|
|
2023-09-20 10:35:37 +03:00
|
|
|
# Module-level logger, namespaced to this module's import path.
logger = get_logger(__name__)

# Router collecting the upload endpoints; mounted by the application elsewhere.
upload_router = APIRouter()
|
|
|
|
|
|
|
|
|
2023-08-21 00:20:57 +03:00
|
|
|
@upload_router.get("/upload/healthz", tags=["Health"])
async def healthz():
    """Liveness probe for the upload router; always reports a healthy status."""
    return dict(status="ok")
|
|
|
|
|
|
|
|
|
2023-06-15 15:43:40 +03:00
|
|
|
@upload_router.post("/upload", dependencies=[Depends(AuthBearer())], tags=["Upload"])
async def upload_file(
    request: Request,
    uploadFile: UploadFile,
    brain_id: UUID = Query(..., description="The ID of the brain"),
    chat_id: Optional[UUID] = Query(None, description="The ID of the chat"),
    enable_summarization: bool = False,
    current_user: UserIdentity = Depends(get_current_user),
):
    """Upload a file into a brain's storage and queue it for async processing.

    Flow: authorize the caller on the brain, check remaining capacity,
    optionally create a chat notification, resolve an OpenAI API key,
    push the raw bytes to storage, record a knowledge entry, then hand
    processing off to a Celery task.

    Raises:
        HTTPException: 403 if the file already exists in storage,
            500 if the storage upload fails for any other reason.

    Returns:
        A dict message; note the over-capacity case returns an error-shaped
        dict with HTTP 200 rather than raising (presumably consumed by the
        frontend as-is — NOTE(review): inconsistent with the HTTPException
        paths below; confirm before changing).
    """
    # Caller must be at least an Editor on the target brain.
    validate_brain_authorization(
        brain_id, current_user.id, [RoleEnum.Editor, RoleEnum.Owner]
    )
    brain = Brain(id=brain_id)
    userDailyUsage = UserUsage(
        id=current_user.id,
        email=current_user.email,
        openai_api_key=current_user.openai_api_key,
    )

    userSettings = userDailyUsage.get_user_settings()

    # When the caller supplies their own OpenAI key, lift the brain's size cap
    # to the user's configured maximum (default 1 GB).
    if request.headers.get("Openai-Api-Key"):
        brain.max_brain_size = userSettings.get("max_brain_size", 1000000000)

    # NOTE(review): free space is taken as the full max_brain_size — current
    # brain usage does not appear to be subtracted here; confirm intended.
    remaining_free_space = userSettings.get("max_brain_size", 1000000000)

    file_size = get_file_size(uploadFile)
    if remaining_free_space - file_size < 0:
        # Over capacity: report an error payload instead of raising (see docstring).
        message = {
            "message": f"❌ UserIdentity's brain will exceed maximum capacity with this upload. Maximum file allowed is : {convert_bytes(remaining_free_space)}",
            "type": "error",
        }
        return message

    # If the upload originated from a chat, surface a Pending notification there.
    upload_notification = None
    if chat_id:
        upload_notification = add_notification(
            CreateNotificationProperties(
                action="UPLOAD",
                chat_id=chat_id,
                status=NotificationsStatusEnum.Pending,
            )
        )
    # OpenAI key resolution order: request header -> brain settings -> user identity.
    openai_api_key = request.headers.get("Openai-Api-Key", None)

    if openai_api_key is None:
        brain_details = get_brain_details(brain_id)
        if brain_details:
            openai_api_key = brain_details.openai_api_key

    if openai_api_key is None:
        openai_api_key = get_user_identity(current_user.id).openai_api_key

    file_content = await uploadFile.read()

    # Storage path is namespaced by brain id to avoid cross-brain collisions.
    filename_with_brain_id = str(brain_id) + "/" + str(uploadFile.filename)

    try:
        fileInStorage = upload_file_storage(file_content, filename_with_brain_id)
        logger.info(f"File {fileInStorage} uploaded successfully")

    except Exception as e:
        # Storage backend signals duplicates via its error message text;
        # map that to 403, anything else to a generic 500.
        # NOTE(review): a Pending upload_notification created above is never
        # marked failed on this path — confirm whether that is intentional.
        if "The resource already exists" in str(e):
            raise HTTPException(
                status_code=403,
                detail=f"File {uploadFile.filename} already exists in storage.",
            )
        else:
            raise HTTPException(
                status_code=500, detail="Failed to upload file to storage."
            )

    # Record the file as a knowledge entry for this brain.
    knowledge_to_add = CreateKnowledgeProperties(
        brain_id=brain_id,
        file_name=uploadFile.filename,
        extension=os.path.splitext(
            uploadFile.filename  # pyright: ignore reportPrivateUsage=none
        )[-1].lower(),
    )

    added_knowledge = add_knowledge(knowledge_to_add)
    logger.info(f"Knowledge {added_knowledge} added successfully")

    # Heavy parsing/embedding happens asynchronously in the Celery worker;
    # the notification id (if any) lets the worker update the chat when done.
    process_file_and_notify.delay(
        file_name=filename_with_brain_id,
        file_original_name=uploadFile.filename,
        enable_summarization=enable_summarization,
        brain_id=brain_id,
        openai_api_key=openai_api_key,
        notification_id=upload_notification.id if upload_notification else None,
    )
    return {"message": "File processing has started."}
|