Mirror of https://github.com/StanGirard/quivr.git (synced 2024-11-27 18:32:55 +03:00)

feat: remove mic button (#1075)

parent: bd2358f425
commit: e769eddd76
MicButton.tsx (deleted)
@@ -1,33 +0,0 @@
/* eslint-disable */
"use client";
import { MdMic, MdMicOff } from "react-icons/md";

import Button from "@/lib/components/ui/Button";
import { useSpeech } from "./hooks/useSpeech";

type MicButtonProps = {
  setMessage: (newValue: string | ((prevValue: string) => string)) => void;
};

export const MicButton = ({ setMessage }: MicButtonProps): JSX.Element => {
  const { isListening, speechSupported, startListening } = useSpeech({
    setMessage,
  });

  return (
    <Button
      className="p-2 sm:px-3"
      variant={"tertiary"}
      type="button"
      onClick={startListening}
      disabled={!speechSupported}
      data-testid="mic-button"
    >
      {isListening ? (
        <MdMicOff className="text-lg sm:text-xl lg:text-2xl" />
      ) : (
        <MdMic className="text-lg sm:text-xl lg:text-2xl" />
      )}
    </Button>
  );
};
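The setMessage prop is typed as (newValue: string | ((prevValue: string) => string)) => void, which has the same shape as React's Dispatch<SetStateAction<string>>, so a parent component can pass its useState setter straight through. A minimal wiring sketch, not part of this commit; the Example component and its message state are assumed for illustration only:

// Hypothetical sketch, not from the diff: a parent hands its useState setter
// to MicButton, which appends transcripts with the functional-update form
// setMessage((prev) => prev + transcript).
import { useState } from "react";

import { MicButton } from "./components/MicButton/MicButton";

const Example = (): JSX.Element => {
  const [message, setMessage] = useState("");

  return (
    <div>
      <MicButton setMessage={setMessage} />
      <p>{message}</p>
    </div>
  );
};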
hooks/useSpeech.ts (deleted)
@@ -1,64 +0,0 @@
/* eslint-disable */
import { useEffect, useState } from "react";
import { useTranslation } from "react-i18next";

import { isSpeechRecognitionSupported } from "@/lib/helpers/isSpeechRecognitionSupported";

type useSpeechProps = {
  setMessage: (newValue: string | ((prevValue: string) => string)) => void;
};

export const useSpeech = ({ setMessage }: useSpeechProps) => {
  const [isListening, setIsListening] = useState(false);
  const [speechSupported, setSpeechSupported] = useState(false);
  const { t } = useTranslation();

  useEffect(() => {
    if (isSpeechRecognitionSupported()) {
      setSpeechSupported(true);
      const SpeechRecognition =
        window.SpeechRecognition || window.webkitSpeechRecognition;

      const mic = new SpeechRecognition();

      mic.continuous = true;
      mic.interimResults = false;
      mic.lang = t("lang");

      mic.onstart = () => {
        console.log("Mics on");
      };

      mic.onend = () => {
        console.log("Mics off");
      };

      mic.onerror = (event: SpeechRecognitionErrorEvent) => {
        console.log(event.error);
        setIsListening(false);
      };

      mic.onresult = (event: SpeechRecognitionEvent) => {
        const interimTranscript =
          event.results[event.results.length - 1][0].transcript;
        setMessage((prevMessage) => prevMessage + interimTranscript);
      };

      if (isListening) {
        mic.start();
      }

      return () => {
        if (mic) {
          mic.stop();
        }
      };
    }
  }, [isListening, setMessage]);

  const startListening = () => {
    setIsListening((prevIsListening) => !prevIsListening);
  };

  return { startListening, speechSupported, isListening };
};
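The hook gates everything on isSpeechRecognitionSupported(), which it imports from @/lib/helpers/isSpeechRecognitionSupported; that helper's body is not part of this diff. A minimal sketch of what such a check could look like, assuming it only needs to probe for the standard constructor or Chrome's webkit-prefixed one:

// Hedged sketch only; the real @/lib/helpers/isSpeechRecognitionSupported
// is not shown in this commit. String-key checks avoid needing ambient
// typings for the prefixed constructor.
export const isSpeechRecognitionSupported = (): boolean =>
  typeof window !== "undefined" &&
  ("SpeechRecognition" in window || "webkitSpeechRecognition" in window);

Under that assumption, Chromium-based browsers pass via the webkit prefix, while browsers without the Web Speech API leave speechSupported false, which keeps the button disabled.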
ChatInput component (MicButton import removed)
@@ -5,7 +5,6 @@ import Button from "@/lib/components/ui/Button";

import { ChatBar } from "./components/ChatBar/ChatBar";
import { ConfigModal } from "./components/ConfigModal";
-import { MicButton } from "./components/MicButton/MicButton";
import { useChatInput } from "./hooks/useChatInput";

export const ChatInput = (): JSX.Element => {
ChatInput component (MicButton usage removed)
@@ -42,7 +41,6 @@ export const ChatInput = (): JSX.Element => {
            : t("chat", { ns: "chat" })}
        </Button>
        <div className="flex items-center">
-          <MicButton setMessage={setMessage} />
          <ConfigModal chatId={chatId} />
        </div>
      </div>