feat(settings): refactored

This commit is contained in:
Stan Girard 2023-06-19 22:46:25 +02:00
parent b8ec6fd342
commit 72a6ae3dc0
28 changed files with 60 additions and 60 deletions

View File

@ -2,8 +2,8 @@
from datetime import datetime
from fastapi import HTTPException
from models.settings import CommonsDep
from pydantic import DateError
from utils.common import CommonsDep
async def verify_api_key(api_key: str, commons: CommonsDep):

View File

@ -5,8 +5,8 @@ from auth.api_key_handler import get_user_from_api_key, verify_api_key
from auth.jwt_token_handler import decode_access_token, verify_token
from fastapi import Depends, HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from models.settings import CommonsDep
from models.users import User
from utils.common import CommonsDep
class AuthBearer(HTTPBearer):

View File

@ -1,4 +1,10 @@
from typing import Annotated, Any, Dict, List, Tuple, Union
from fastapi import Depends
from langchain.embeddings.openai import OpenAIEmbeddings
from pydantic import BaseSettings
from supabase import Client, create_client
from vectorstore.supabase import SupabaseVectorStore
class BrainSettings(BaseSettings):
@ -6,3 +12,22 @@ class BrainSettings(BaseSettings):
anthropic_api_key: str
supabase_url: str
supabase_service_key: str
def common_dependencies() -> dict:
    """Assemble the shared service clients used by the API routes.

    Returns a dict with the keys:
      - "supabase": Supabase ``Client``
      - "embeddings": ``OpenAIEmbeddings`` configured from ``BrainSettings``
      - "documents_vector_store": ``SupabaseVectorStore`` over the "vectors" table
      - "summaries_vector_store": ``SupabaseVectorStore`` over the "summaries" table

    The result is memoized on the function object: this function is wired into
    FastAPI via ``Depends`` (and called directly by several routes), so without
    memoization it would re-read settings and rebuild the OpenAI/Supabase
    clients on every request.  The pre-refactor code built these once at module
    import; this restores that behavior while keeping the dependency-injection
    interface.  NOTE(review): settings are therefore read only once per
    process — restart to pick up env-var changes.
    """
    cached = getattr(common_dependencies, "_cache", None)
    if cached is not None:
        return cached

    settings = BrainSettings()  # reads env vars via pydantic BaseSettings
    embeddings = OpenAIEmbeddings(openai_api_key=settings.openai_api_key)
    supabase_client: Client = create_client(
        settings.supabase_url, settings.supabase_service_key
    )
    documents_vector_store = SupabaseVectorStore(
        supabase_client, embeddings, table_name="vectors"
    )
    summaries_vector_store = SupabaseVectorStore(
        supabase_client, embeddings, table_name="summaries"
    )

    common_dependencies._cache = {
        "supabase": supabase_client,
        "embeddings": embeddings,
        "documents_vector_store": documents_vector_store,
        "summaries_vector_store": summaries_vector_store,
    }
    return common_dependencies._cache


# FastAPI dependency alias: routes declare `commons: CommonsDep` to receive the dict.
CommonsDep = Annotated[dict, Depends(common_dependencies)]

View File

@ -10,7 +10,7 @@ from langchain.document_loaders import TextLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.schema import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter
from utils.common import documents_vector_store
from models.settings import CommonsDep
from utils.file import compute_sha1_from_content
# # Create a function to transcribe audio using Whisper
@ -32,7 +32,7 @@ from utils.file import compute_sha1_from_content
# return transcript
# async def process_audio(upload_file: UploadFile, stats_db):
async def process_audio(upload_file: UploadFile, enable_summarization: bool, user, user_openai_api_key):
async def process_audio(commons: CommonsDep, upload_file: UploadFile, enable_summarization: bool, user, user_openai_api_key):
file_sha = ""
dateshort = time.strftime("%Y%m%d-%H%M%S")
@ -70,6 +70,6 @@ async def process_audio(upload_file: UploadFile, enable_summarization: bool, use
# if st.secrets.self_hosted == "false":
# add_usage(stats_db, "embedding", "audio", metadata={"file_name": file_meta_name,"file_type": ".txt", "chunk_size": chunk_size, "chunk_overlap": chunk_overlap})
documents_vector_store.add_documents(docs_with_metadata)
commons.documents_vector_store.add_documents(docs_with_metadata)
return documents_vector_store

View File

@ -8,7 +8,7 @@ from typing import Optional
from fastapi import UploadFile
from langchain.schema import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter
from utils.common import CommonsDep
from models.settings import CommonsDep
from utils.file import compute_sha1_from_content, compute_sha1_from_file
from utils.vectors import Neurons, create_summary

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders.csv_loader import CSVLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import Docx2txtLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders.epub import UnstructuredEPubLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -4,8 +4,8 @@ import time
from langchain.document_loaders import GitLoader
from langchain.schema import Document
from langchain.text_splitter import RecursiveCharacterTextSplitter
from models.settings import CommonsDep
from parsers.common import file_already_exists_from_content
from utils.common import CommonsDep
from utils.file import compute_sha1_from_content
from utils.vectors import Neurons

View File

@ -6,7 +6,7 @@ import unicodedata
import requests
from fastapi import UploadFile
from langchain.document_loaders import UnstructuredHTMLLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import UnstructuredMarkdownLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import NotebookLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import UnstructuredODTLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import PyMuPDFLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import UnstructuredPowerPointLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -1,6 +1,6 @@
from fastapi import UploadFile
from langchain.document_loaders import TextLoader
from utils.common import CommonsDep
from models.settings import CommonsDep
from .common import process_file

View File

@ -8,9 +8,9 @@ from asyncpg.exceptions import UniqueViolationError
from auth.auth_bearer import AuthBearer, get_current_user
from fastapi import APIRouter, Depends
from logger import get_logger
from models.settings import CommonsDep
from models.users import User
from pydantic import BaseModel
from utils.common import CommonsDep
from utils.users import fetch_user_id_from_credentials
logger = get_logger(__name__)

View File

@ -7,8 +7,8 @@ from auth.auth_bearer import AuthBearer, get_current_user
from fastapi import APIRouter, Depends, Request
from logger import get_logger
from models.brains import Brain, BrainToUpdate
from models.settings import CommonsDep
from models.users import User
from utils.common import CommonsDep
from utils.users import fetch_user_id_from_credentials
logger = get_logger(__name__)

View File

@ -5,10 +5,10 @@ from uuid import UUID
from auth.auth_bearer import AuthBearer, get_current_user
from fastapi import APIRouter, Depends, Request
from models.chats import ChatMessage
from models.settings import CommonsDep, common_dependencies
from models.users import User
from utils.chats import (create_chat, get_chat_name_from_first_question,
update_chat)
from utils.common import CommonsDep
from utils.users import (create_user, fetch_user_id_from_credentials,
update_user_request_count)
from utils.vectors import get_answer
@ -33,7 +33,7 @@ def fetch_user_stats(commons, user, date):
# get all chats
@chat_router.get("/chat", dependencies=[Depends(AuthBearer())], tags=["Chat"])
async def get_chats(commons: CommonsDep, current_user: User = Depends(get_current_user)):
async def get_chats(current_user: User = Depends(get_current_user)):
"""
Retrieve all chats for the current user.
@ -43,13 +43,14 @@ async def get_chats(commons: CommonsDep, current_user: User = Depends(get_curren
This endpoint retrieves all the chats associated with the current authenticated user. It returns a list of chat objects
containing the chat ID and chat name for each chat.
"""
commons = common_dependencies()
user_id = fetch_user_id_from_credentials(commons, {"email": current_user.email})
chats = get_user_chats(commons, user_id)
return {"chats": chats}
# get one chat
@chat_router.get("/chat/{chat_id}", dependencies=[Depends(AuthBearer())], tags=["Chat"])
async def get_chats(commons: CommonsDep, chat_id: UUID):
async def get_chats( chat_id: UUID):
"""
Retrieve details of a specific chat by chat ID.
@ -59,6 +60,7 @@ async def get_chats(commons: CommonsDep, chat_id: UUID):
This endpoint retrieves the details of a specific chat identified by the provided chat ID. It returns the chat ID and its
history, which includes the chat messages exchanged in the chat.
"""
commons = common_dependencies()
chats = get_chat_details(commons, chat_id)
if len(chats) > 0:
return {"chatId": chat_id, "history": chats[0]['history']}
@ -67,10 +69,11 @@ async def get_chats(commons: CommonsDep, chat_id: UUID):
# delete one chat
@chat_router.delete("/chat/{chat_id}", dependencies=[Depends(AuthBearer())], tags=["Chat"])
async def delete_chat(commons: CommonsDep, chat_id: UUID):
async def delete_chat( chat_id: UUID):
"""
Delete a specific chat by chat ID.
"""
commons = common_dependencies()
delete_chat_from_db(commons, chat_id)
return {"message": f"{chat_id} has been deleted."}

View File

@ -5,9 +5,9 @@ from tempfile import SpooledTemporaryFile
from auth.auth_bearer import AuthBearer, get_current_user
from crawl.crawler import CrawlWebsite
from fastapi import APIRouter, Depends, Request, UploadFile
from models.settings import CommonsDep
from models.users import User
from parsers.github import process_github
from utils.common import CommonsDep
from utils.file import convert_bytes
from utils.processors import filter_file

View File

@ -1,7 +1,7 @@
from auth.auth_bearer import AuthBearer, get_current_user
from fastapi import APIRouter, Depends
from models.settings import CommonsDep
from models.users import User
from utils.common import CommonsDep
explore_router = APIRouter()

View File

@ -2,8 +2,8 @@ import os
from auth.auth_bearer import AuthBearer, get_current_user
from fastapi import APIRouter, Depends, Request, UploadFile
from models.settings import CommonsDep
from models.users import User
from utils.common import CommonsDep
from utils.file import convert_bytes, get_file_size
from utils.processors import filter_file

View File

@ -3,8 +3,8 @@ import time
from auth.auth_bearer import AuthBearer, get_current_user
from fastapi import APIRouter, Depends, Request
from models.settings import CommonsDep
from models.users import User
from utils.common import CommonsDep
user_router = APIRouter()

View File

@ -1,6 +1,6 @@
from logger import get_logger
from models.chats import ChatMessage
from utils.common import CommonsDep
from models.settings import CommonsDep
logger = get_logger(__name__)

View File

@ -2,33 +2,7 @@ import os
from typing import Annotated
from fastapi import Depends
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import SupabaseVectorStore
from logger import get_logger
from supabase import Client, create_client
from models.settings import common_dependencies
# Module-level logger for this utilities module.
logger = get_logger(__name__)

# Credentials and endpoints come straight from the environment.  A missing
# variable yields None here and only fails later, when the client is built.
openai_api_key = os.environ.get("OPENAI_API_KEY")
anthropic_api_key = os.environ.get("ANTHROPIC_API_KEY")
supabase_url = os.environ.get("SUPABASE_URL")
supabase_key = os.environ.get("SUPABASE_SERVICE_KEY")

# Shared clients, built once at import time and reused by every route.
embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
supabase_client: Client = create_client(supabase_url, supabase_key)


def _vector_store(table: str) -> SupabaseVectorStore:
    """Build a SupabaseVectorStore over *table* using the shared clients."""
    return SupabaseVectorStore(supabase_client, embeddings, table_name=table)


documents_vector_store = _vector_store("vectors")
summaries_vector_store = _vector_store("summaries")


def common_dependencies():
    """Bundle the shared clients into a dict for FastAPI dependency injection."""
    return {
        "supabase": supabase_client,
        "embeddings": embeddings,
        "documents_vector_store": documents_vector_store,
        "summaries_vector_store": summaries_vector_store,
    }


# Annotated alias so route handlers can declare `commons: CommonsDep`.
CommonsDep = Annotated[dict, Depends(common_dependencies)]

View File

@ -1,6 +1,7 @@
import os
from fastapi import Depends, FastAPI, UploadFile
from models.settings import CommonsDep
from models.users import User
from parsers.audio import process_audio
from parsers.common import file_already_exists
@ -14,8 +15,6 @@ from parsers.odt import process_odt
from parsers.pdf import process_pdf
from parsers.powerpoint import process_powerpoint
from parsers.txt import process_txt
from utils.common import CommonsDep
from supabase import Client
file_processors = {

View File

@ -1,8 +1,8 @@
import time
from logger import get_logger
from models.settings import CommonsDep
from models.users import User
from utils.common import CommonsDep
logger = get_logger(__name__)

View File

@ -4,9 +4,8 @@ from llm.qa import BrainPicking, BrainSettings
from llm.summarization import llm_evaluate_summaries, llm_summerize
from logger import get_logger
from models.chats import ChatMessage
from models.settings import BrainSettings
from models.settings import BrainSettings, CommonsDep
from pydantic import BaseModel
from utils.common import CommonsDep
logger = get_logger(__name__)