quivr/backend/core/examples/chatbot/main.py
Stan Girard 5ff8d4ee81
feat: Add Quivr chatbot example (#2827)
The commit adds a new Quivr chatbot example to the repository. The
example demonstrates how to create a simple chatbot using Quivr and
Chainlit. Users can upload a text file and ask questions about its
content. The commit includes the necessary files, installation
instructions, and usage guidelines.
2024-07-10 12:42:49 -07:00
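A minimal sketch of how such a Chainlit example is typically installed and launched, assuming the published package names quivr-core and chainlit:

    pip install quivr-core chainlit
    chainlit run main.py

Once the app is open in the browser, upload a .txt file when prompted, then ask questions about its content.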


import tempfile

import chainlit as cl

from quivr_core import Brain


@cl.on_chat_start
async def on_chat_start():
    files = None

    # Wait for the user to upload a file
    while files is None:
        files = await cl.AskFileMessage(
            content="Please upload a text .txt file to begin!",
            accept=["text/plain"],
            max_size_mb=20,
            timeout=180,
        ).send()

    file = files[0]

    msg = cl.Message(content=f"Processing `{file.name}`...", disable_feedback=True)
    await msg.send()

    with open(file.path, "r", encoding="utf-8") as f:
        text = f.read()

    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".txt", delete=False
    ) as temp_file:
        temp_file.write(text)
        temp_file.flush()
        temp_file_path = temp_file.name

    brain = Brain.from_files(name="user_brain", file_paths=[temp_file_path])

    # Store the file path in the session
    cl.user_session.set("file_path", temp_file_path)

    # Let the user know that the system is ready
    msg.content = f"Processing `{file.name}` done. You can now ask questions!"
    await msg.update()

    cl.user_session.set("brain", brain)


@cl.on_message
async def main(message: cl.Message):
    brain = cl.user_session.get("brain")  # type: Brain

    if brain is None:
        await cl.Message(content="Please upload a file first.").send()
        return

    # Prepare the message for streaming
    msg = cl.Message(content="")
    await msg.send()

    # Use the ask_streaming method for streaming responses
    async for chunk in brain.ask_streaming(message.content):
        await msg.stream_token(chunk.answer)

    await msg.send()