2023-05-13 00:05:31 +03:00
|
|
|
import streamlit as st
|
2023-05-13 00:58:19 +03:00
|
|
|
import os
|
2023-05-13 00:22:21 +03:00
|
|
|
from loaders.audio import process_audio
|
2023-05-13 00:05:31 +03:00
|
|
|
from loaders.txt import process_txt
|
|
|
|
from loaders.csv import process_csv
|
|
|
|
from loaders.markdown import process_markdown
|
|
|
|
from utils import compute_sha1_from_content
|
2023-05-13 01:25:12 +03:00
|
|
|
from loaders.pdf import process_pdf
|
2023-05-13 00:05:31 +03:00
|
|
|
|
|
|
|
def file_uploader(supabase, openai_key, vector_store):
    """Render a Streamlit uploader and ingest the selected files into the vector store.

    Args:
        supabase: Supabase client, used only for duplicate detection
            (via ``file_already_exists``).
        openai_key: OpenAI API key. Unused in this function's visible body;
            kept for interface compatibility with callers.
        vector_store: Destination store passed to each per-extension processor.

    Side effects:
        Writes status lines to the Streamlit page and, for new non-empty
        files, invokes the matching processor.
    """
    # Dispatch table: extension -> processor. Its keys double as the
    # allow-list handed to st.file_uploader below.
    file_processors = {
        ".txt": process_txt,
        ".csv": process_csv,
        ".md": process_markdown,
        ".markdown": process_markdown,
        ".m4a": process_audio,
        ".mp3": process_audio,
        ".webm": process_audio,
        ".mp4": process_audio,
        ".mpga": process_audio,
        ".wav": process_audio,
        ".mpeg": process_audio,
        ".pdf": process_pdf,
    }

    files = st.file_uploader("Upload a file", accept_multiple_files=True, type=list(file_processors.keys()))

    if st.button("Add to Database"):
        if files is not None:
            for file in files:
                if file_already_exists(supabase, file):
                    st.write(f"😎 {file.name} is already in the database.")
                elif file.size < 1:
                    # Skip zero-byte uploads; nothing to index.
                    st.write(f"💨 {file.name} is empty.")
                else:
                    # Fix: normalize case so "REPORT.PDF" matches the ".pdf"
                    # processor — the lookup was previously case-sensitive.
                    file_extension = os.path.splitext(file.name)[-1].lower()
                    if file_extension in file_processors:
                        file_processors[file_extension](vector_store, file)
                        st.write(f"✅ {file.name} ")
                    else:
                        st.write(f"❌ {file.name} is not a valid file type.")
|
|
|
|
|
|
|
|
def file_already_exists(supabase, file):
    """Return True when a document with this file's SHA-1 is already stored.

    The uploaded file's content is hashed and compared against the
    ``metadata->>file_sha1`` column of the ``documents`` table.
    """
    content_sha1 = compute_sha1_from_content(file.getvalue())
    matches = (
        supabase.table("documents")
        .select("id")
        .eq("metadata->>file_sha1", content_sha1)
        .execute()
    )
    return len(matches.data) > 0
|