quivr/backend/utils/vectors.py

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.schema import Document
from llm.summarization import llm_summerize
from logger import get_logger
from models.settings import BrainSettings, CommonsDep
from pydantic import BaseModel

logger = get_logger(__name__)

class Neurons(BaseModel):
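    """Helpers around the shared vector stores, embeddings and Supabase client
    provided through the CommonsDep dependencies."""
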
    commons: CommonsDep
    settings = BrainSettings()

    def create_vector(self, doc, user_openai_api_key=None):
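        """Embed `doc` and store it in the documents vector store.

        When `user_openai_api_key` is given, the store's embedder is swapped
        for one using that key. Returns the inserted vector ids on success,
        otherwise None."""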
        logger.info("Creating vector for document")
        logger.info(f"Document: {doc}")
        if user_openai_api_key:
            self.commons["documents_vector_store"]._embedding = OpenAIEmbeddings(
                openai_api_key=user_openai_api_key
            )

        try:
            sids = self.commons["documents_vector_store"].add_documents([doc])
            if sids and len(sids) > 0:
                return sids
        except Exception as e:
            logger.error(f"Error creating vector for document: {e}")

    def create_embedding(self, content):
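        """Return the embedding vector for `content` using the shared embedder."""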
        return self.commons["embeddings"].embed_query(content)

    def similarity_search(self, query, table="match_summaries", top_k=5, threshold=0.5):
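        """Run a similarity search through a Supabase RPC function.

        `table` is the name of the RPC (e.g. "match_summaries"); the query is
        embedded locally and matched server-side using `top_k` and the given
        similarity `threshold`."""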
        query_embedding = self.create_embedding(query)
        summaries = (
            self.commons["supabase"]
            .rpc(
                table,
                {
                    "query_embedding": query_embedding,
                    "match_count": top_k,
                    "match_threshold": threshold,
                },
            )
            .execute()
        )
        return summaries.data


def create_summary(commons: CommonsDep, document_id, content, metadata):
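    """Summarize `content` with the LLM, store the summary in the summaries
    vector store, and link the created summary row back to `document_id`."""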
    logger.info(f"Summarizing document {content[:100]}")
    summary = llm_summerize(content)
    logger.info(f"Summary: {summary}")
    metadata["document_id"] = document_id
    summary_doc_with_metadata = Document(page_content=summary, metadata=metadata)
    sids = commons["summaries_vector_store"].add_documents([summary_doc_with_metadata])

    if sids and len(sids) > 0:
        commons["supabase"].table("summaries").update(
            {"document_id": document_id}
        ).match({"id": sids[0]}).execute()