Mirror of https://github.com/StanGirard/quivr.git (synced 2024-11-24 05:55:13 +03:00)

refactor(chat): separate logic into hooks (#237)
Commit 59c02228b6, parent f2164db12c
frontend/app/chat/ChatMessages.tsx (deleted, 98 lines)
@@ -1,98 +0,0 @@
```tsx
"use client";
import { cn } from "@/lib/utils";
import { AnimatePresence, motion } from "framer-motion";
import { FC, Ref, forwardRef, useEffect, useRef } from "react";
import ReactMarkdown from "react-markdown";

interface ChatMessagesProps {
  history: Array<[string, string]>;
}

const ChatMessages: FC<ChatMessagesProps> = ({ history }) => {
  const lastChatRef = useRef<HTMLDivElement | null>(null);

  useEffect(() => {
    lastChatRef.current?.scrollIntoView({ behavior: "auto", block: "start" });
  }, [history]);

  return (
    <div className="overflow-hidden flex flex-col gap-5 scrollbar scroll-smooth">
      {history.length === 0 ? (
        <div className="text-center opacity-50">
          Ask a question, or describe a task.
        </div>
      ) : (
        <AnimatePresence initial={false}>
          {history.map(([speaker, text], idx) => {
            if (idx % 2 === 0)
              return (
                <ChatMessage
                  ref={idx === history.length - 1 ? lastChatRef : null}
                  key={idx}
                  speaker={speaker}
                  text={text}
                />
              );
            else {
              return (
                <ChatMessage
                  ref={idx === history.length - 1 ? lastChatRef : null}
                  key={idx}
                  speaker={speaker}
                  text={text}
                  left
                />
              );
            }
          })}
        </AnimatePresence>
      )}
    </div>
  );
};

const ChatMessage = forwardRef(
  (
    {
      speaker,
      text,
      left = false,
    }: {
      speaker: string;
      text: string;
      left?: boolean;
    },
    ref
  ) => {
    return (
      <motion.div
        ref={ref as Ref<HTMLDivElement>}
        initial={{ y: -24, opacity: 0 }}
        animate={{
          y: 0,
          opacity: 1,
          transition: { duration: 0.2, ease: "easeOut" },
        }}
        exit={{ y: -24, opacity: 0 }}
        className={cn(
          "py-3 px-3 rounded-lg border border-black/10 dark:border-white/25 flex flex-col max-w-4xl overflow-hidden scroll-pt-32",
          left ? "self-start mr-20" : "self-end ml-20"
        )}
      >
        <span className={cn("capitalize text-xs")}>{speaker}</span>
        <>
          <ReactMarkdown
            // remarkRehypeOptions={{}}
            className="prose dark:prose-invert"
          >
            {text}
          </ReactMarkdown>
        </>
      </motion.div>
    );
  }
);

ChatMessage.displayName = "ChatMessage";

export default ChatMessages;
```
frontend/app/chat/components/ChatMessage.tsx (new file, 50 lines)
@@ -0,0 +1,50 @@
```tsx
import { cn } from "@/lib/utils";
import { motion } from "framer-motion";
import { forwardRef, Ref } from "react";
import ReactMarkdown from "react-markdown";

const ChatMessage = forwardRef(
  (
    {
      speaker,
      text,
      left = false,
    }: {
      speaker: string;
      text: string;
      left?: boolean;
    },
    ref
  ) => {
    return (
      <motion.div
        ref={ref as Ref<HTMLDivElement>}
        initial={{ y: -24, opacity: 0 }}
        animate={{
          y: 0,
          opacity: 1,
          transition: { duration: 0.2, ease: "easeOut" },
        }}
        exit={{ y: -24, opacity: 0 }}
        className={cn(
          "py-3 px-3 rounded-lg border border-black/10 dark:border-white/25 flex flex-col max-w-4xl overflow-hidden scroll-pt-32",
          left ? "self-start mr-20" : "self-end ml-20"
        )}
      >
        <span className={cn("capitalize text-xs")}>{speaker}</span>
        <>
          <ReactMarkdown
            // remarkRehypeOptions={{}}
            className="prose dark:prose-invert"
          >
            {text}
          </ReactMarkdown>
        </>
      </motion.div>
    );
  }
);

ChatMessage.displayName = "ChatMessage";

export default ChatMessage;
```
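For orientation, a minimal usage sketch of the extracted component; the prop values here are invented, not taken from the diff:

```tsx
// Hypothetical usage sketch: a right-aligned user bubble (the default)
// and an assistant bubble pushed left via the `left` flag.
const Example = () => (
  <>
    <ChatMessage speaker="user" text="What is Quivr?" />
    <ChatMessage speaker="assistant" text="Quivr stores your *second brain*." left />
  </>
);
```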
frontend/app/chat/components/ChatMessages.tsx (new file, 41 lines)
@@ -0,0 +1,41 @@
```tsx
"use client";
import { AnimatePresence } from "framer-motion";
import { FC, useEffect, useRef } from "react";
import ChatMessage from "./ChatMessage";

interface ChatMessagesProps {
  history: Array<[string, string]>;
}

const ChatMessages: FC<ChatMessagesProps> = ({ history }) => {
  const lastChatRef = useRef<HTMLDivElement | null>(null);

  useEffect(() => {
    lastChatRef.current?.scrollIntoView({ behavior: "auto", block: "start" });
  }, [history]);

  return (
    <div className="overflow-hidden flex flex-col gap-5 scrollbar scroll-smooth">
      {history.length === 0 ? (
        <div className="text-center opacity-50">
          Ask a question, or describe a task.
        </div>
      ) : (
        <AnimatePresence initial={false}>
          {history.map(([speaker, text], idx) => {
            return (
              <ChatMessage
                ref={idx === history.length - 1 ? lastChatRef : null}
                key={idx}
                speaker={speaker}
                text={text}
                left={idx % 2 !== 0}
              />
            );
          })}
        </AnimatePresence>
      )}
    </div>
  );
};
export default ChatMessages;
```
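Compared with the deleted file, the duplicated if/else collapses into the single `left={idx % 2 !== 0}` expression. A hedged illustration of the history convention this assumes (the values are made up):

```tsx
// Hypothetical history value: alternating [speaker, text] tuples.
// Even indices (user turns) get left={false} and render right-aligned;
// odd indices (assistant turns) get left={true} and render left-aligned.
const history: Array<[string, string]> = [
  ["user", "Hello"],
  ["assistant", "Hi! How can I help?"],
];

<ChatMessages history={history} />;
```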
frontend/app/chat/helpers/isSpeechRecognitionSupported.ts (path as imported by useSpeech.ts)

```diff
@@ -1,6 +1,6 @@
 export function isSpeechRecognitionSupported() {
   if (
-    typeof window !== undefined &&
+    typeof window !== "undefined" &&
     ("SpeechRecognition" in window || "webkitSpeechRecognition" in window)
   ) {
     return true;
```
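The quoted string here is a genuine bug fix rather than part of the refactor: `typeof` always evaluates to a string, so the old guard compared that string against the undefined value and was always true. In short:

```ts
// `typeof x` yields a string such as "undefined" or "object", so
// comparing it to the undefined *value* can never be false:
typeof window !== undefined;   // always true, even where window doesn't exist
typeof window !== "undefined"; // true in the browser, false during SSR
```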
frontend/app/chat/hooks/useQuestion.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
```ts
import { useState } from "react";

import { useSupabase } from "@/app/supabase-provider";
import { useBrainConfig } from "@/lib/context/BrainConfigProvider/hooks/useBrainConfig";
import { useAxios } from "@/lib/useAxios";
import { redirect } from "next/navigation";
export const useQuestion = () => {
  const [question, setQuestion] = useState("");
  const [history, setHistory] = useState<Array<[string, string]>>([]);
  const [isPending, setIsPending] = useState(false);
  const { session } = useSupabase();
  const { axiosInstance } = useAxios();
  const {
    config: { maxTokens, model, temperature },
  } = useBrainConfig();
  if (session === null) {
    redirect("/login");
  }

  const askQuestion = async () => {
    setHistory((hist) => [...hist, ["user", question]]);
    setIsPending(true);
    // TODO:

    const response = await axiosInstance.post(`/chat/`, {
      model,
      question,
      history,
      temperature,
      max_tokens: maxTokens,
    });
    setHistory(response.data.history);
    setQuestion("");
    setIsPending(false);
  };

  return {
    isPending,
    history,
    question,
    setQuestion,
    askQuestion,
  };
};
```
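A minimal consumer sketch, assuming the hook is wired up the way page.tsx does below; the component name and markup are invented:

```tsx
// Hypothetical consumer of useQuestion: bind the hook's state to an
// input and submit through askQuestion.
const AskForm = () => {
  const { question, setQuestion, askQuestion, isPending } = useQuestion();

  return (
    <form
      onSubmit={(e) => {
        e.preventDefault();
        void askQuestion();
      }}
    >
      <input value={question} onChange={(e) => setQuestion(e.target.value)} />
      <button type="submit" disabled={isPending}>
        Ask
      </button>
    </form>
  );
};
```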
frontend/app/chat/hooks/useSpeech.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
```ts
import { useEffect, useState } from "react";
import { isSpeechRecognitionSupported } from "../helpers/isSpeechRecognitionSupported";
import { useQuestion } from "./useQuestion";

export const useSpeech = () => {
  const [isListening, setIsListening] = useState(false);
  const [speechSupported, setSpeechSupported] = useState(false);
  const { setQuestion } = useQuestion();

  useEffect(() => {
    if (isSpeechRecognitionSupported()) {
      setSpeechSupported(true);
      const SpeechRecognition =
        window.SpeechRecognition || window.webkitSpeechRecognition;

      const mic = new SpeechRecognition();

      mic.continuous = true;
      mic.interimResults = false;
      mic.lang = "en-US";

      mic.onstart = () => {
        console.log("Mics on");
      };

      mic.onend = () => {
        console.log("Mics off");
      };

      mic.onerror = (event: SpeechRecognitionErrorEvent) => {
        console.log(event.error);
        setIsListening(false);
      };

      mic.onresult = (event: SpeechRecognitionEvent) => {
        const interimTranscript =
          event.results[event.results.length - 1][0].transcript;
        setQuestion((prevQuestion) => prevQuestion + interimTranscript);
      };

      if (isListening) {
        mic.start();
      }

      return () => {
        if (mic) {
          mic.stop();
        }
      };
    }
  }, [isListening]);

  const startListening = () => {
    setIsListening((prevIsListening) => !prevIsListening);
  };

  return { startListening, speechSupported, isListening };
};
```
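One caveat worth flagging: the webkit-prefixed Web Speech API constructors and the SpeechRecognition event types are not in TypeScript's stock DOM lib, so a file like this presumably leans on a typings package such as @types/dom-speech-recognition or on ambient declarations roughly like the following sketch:

```ts
// Sketch of assumed ambient declarations; the repo may instead pull in
// @types/dom-speech-recognition, which provides the full typed surface.
declare global {
  interface Window {
    SpeechRecognition: any;
    webkitSpeechRecognition: any;
  }
}
export {};
```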
frontend/app/chat/page.tsx

```diff
@@ -1,93 +1,17 @@
 "use client";
-import { useBrainConfig } from "@/lib/context/BrainConfigProvider/hooks/useBrainConfig";
-import { useAxios } from "@/lib/useAxios";
 import Link from "next/link";
-import { redirect } from "next/navigation";
-import { useEffect, useState } from "react";
 import { MdMic, MdMicOff, MdSettings } from "react-icons/md";
 import Button from "../components/ui/Button";
 import Card from "../components/ui/Card";
 import PageHeading from "../components/ui/PageHeading";
-import { useSupabase } from "../supabase-provider";
-import ChatMessages from "./ChatMessages";
-import { isSpeechRecognitionSupported } from "./helpers";
+import ChatMessages from "./components/ChatMessages";
+import { useQuestion } from "./hooks/useQuestion";
+import { useSpeech } from "./hooks/useSpeech";
 
 export default function ChatPage() {
-  const [question, setQuestion] = useState("");
-  const [history, setHistory] = useState<Array<[string, string]>>([]);
-  const [isPending, setIsPending] = useState(false);
-  const [isListening, setIsListening] = useState(false);
-  const { session } = useSupabase();
-  const { axiosInstance } = useAxios();
-  const {
-    config: { maxTokens, model, temperature },
-  } = useBrainConfig();
-  if (session === null) {
-    redirect("/login");
-  }
-
-  useEffect(() => {
-    if (isSpeechRecognitionSupported()) {
-      const SpeechRecognition =
-        window.SpeechRecognition || window.webkitSpeechRecognition;
-
-      const mic = new SpeechRecognition();
-
-      mic.continuous = true;
-      mic.interimResults = false;
-      mic.lang = "en-US";
-
-      mic.onstart = () => {
-        console.log("Mics on");
-      };
-
-      mic.onend = () => {
-        console.log("Mics off");
-      };
-
-      mic.onerror = (event: SpeechRecognitionErrorEvent) => {
-        console.log(event.error);
-        setIsListening(false);
-      };
-
-      mic.onresult = (event: SpeechRecognitionEvent) => {
-        const interimTranscript =
-          event.results[event.results.length - 1][0].transcript;
-        setQuestion((prevQuestion) => prevQuestion + interimTranscript);
-      };
-
-      if (isListening) {
-        mic.start();
-      }
-
-      return () => {
-        if (mic) {
-          mic.stop();
-        }
-      };
-    }
-  }, [isListening]);
-
-  const askQuestion = async () => {
-    setHistory((hist) => [...hist, ["user", question]]);
-    setIsPending(true);
-    setIsListening(false);
-
-    const response = await axiosInstance.post(`/chat/`, {
-      model,
-      question,
-      history,
-      temperature,
-      max_tokens: maxTokens,
-    });
-    setHistory(response.data.history);
-    setQuestion("");
-    setIsPending(false);
-  };
-
-  const handleListen = () => {
-    setIsListening((prevIsListening) => !prevIsListening);
-  };
+  const { history, isPending, question, askQuestion, setQuestion } =
+    useQuestion();
+  const { isListening, speechSupported, startListening } = useSpeech();
 
   return (
     <main className="min-h-screen w-full flex flex-col pt-32">
```
```diff
@@ -123,8 +47,8 @@ export default function ChatPage() {
             className="px-3"
             variant={"tertiary"}
             type="button"
-            onClick={handleListen}
-            disabled={!isSpeechRecognitionSupported()}
+            onClick={startListening}
+            disabled={!speechSupported}
           >
             {isListening ? (
               <MdMicOff className="text-2xl" />
```
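Worth noting when reading the new button wiring: despite its name, startListening in useSpeech toggles the listening state, which is why this single button can also switch the mic off (the MdMicOff icon shows while listening):

```ts
// From useSpeech.ts above: a toggle rather than a one-way start.
const startListening = () => {
  setIsListening((prevIsListening) => !prevIsListening);
};
```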