mirror of
https://github.com/SamyRai/turash.git
synced 2025-12-26 23:01:33 +00:00
Monorepo migration:
- Remove nested git repository from bugulma/frontend/.git.
- Add all frontend files to main repository tracking.
- Convert from separate frontend/backend repos to a unified monorepo.
- Preserve all frontend code and development history as tracked files.
- Eliminate nested-repository complexity for a simpler development workflow.

This creates a proper monorepo structure, with frontend and backend coexisting in the same repository for easier development and deployment.
111 lines
3.0 KiB
TypeScript
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
|
import { fileToDataUrl } from '@/lib/utils.ts';
|
|
import { useChat } from '@/hooks/useChat.ts';
|
|
import { useSpeechRecognition } from '@/hooks/useSpeechRecognition.ts';
|
|
|
|
export const useChatbot = () => {
|
|
const [isOpen, setIsOpen] = useState(false);
|
|
const [inputValue, setInputValue] = useState('');
|
|
const [attachedImage, setAttachedImage] = useState<{ file: File; previewUrl: string } | null>(
|
|
null
|
|
);
|
|
|
|
const { messages, isLoading, sendMessage, clearChat } = useChat();
|
|
const messagesEndRef = useRef<HTMLDivElement>(null);
|
|
const inputRef = useRef<HTMLInputElement>(null);
|
|
const fileInputRef = useRef<HTMLInputElement>(null);
|
|
const { isListening, transcript, startListening, stopListening, isSupported } =
|
|
useSpeechRecognition();
|
|
|
|
// Update input value when speech recognition provides transcript
|
|
const handleTranscriptUpdate = useCallback((newTranscript: string) => {
|
|
if (newTranscript) {
|
|
setInputValue(newTranscript);
|
|
}
|
|
}, []);
|
|
|
|
useEffect(() => {
|
|
handleTranscriptUpdate(transcript);
|
|
}, [transcript, handleTranscriptUpdate]);
|
|
|
|
const toggleChat = useCallback(() => {
|
|
setIsOpen((prev) => !prev);
|
|
}, []);
|
|
|
|
const handleSendMessage = useCallback(() => {
|
|
if (inputValue.trim() || attachedImage) {
|
|
sendMessage({ text: inputValue, imageUrl: attachedImage?.previewUrl });
|
|
setInputValue('');
|
|
setAttachedImage(null);
|
|
}
|
|
}, [sendMessage, inputValue, attachedImage]);
|
|
|
|
const handleClearChat = useCallback(() => {
|
|
// A confirmation could be added here in a real app
|
|
clearChat();
|
|
}, [clearChat]);
|
|
|
|
const handleFileChange = useCallback(async (e: React.ChangeEvent<HTMLInputElement>) => {
|
|
const file = e.target.files?.[0];
|
|
if (file && file.type.startsWith('image/')) {
|
|
try {
|
|
const previewUrl = await fileToDataUrl(file);
|
|
setAttachedImage({ file, previewUrl });
|
|
} catch (error) {
|
|
console.error('Error creating image preview:', error);
|
|
}
|
|
}
|
|
}, []);
|
|
|
|
useEffect(() => {
|
|
if (isOpen) {
|
|
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
|
|
inputRef.current?.focus();
|
|
}
|
|
}, [messages, isOpen]);
|
|
|
|
const showSuggestions = messages.length === 1 && messages[0].id === 'init';
|
|
|
|
return useMemo(
|
|
() => ({
|
|
isOpen,
|
|
toggleChat,
|
|
messages,
|
|
isLoading,
|
|
handleSendMessage,
|
|
handleClearChat,
|
|
messagesEndRef,
|
|
inputRef,
|
|
showSuggestions,
|
|
isListening,
|
|
transcript,
|
|
startListening,
|
|
stopListening,
|
|
isSpeechSupported: isSupported,
|
|
inputValue,
|
|
setInputValue,
|
|
attachedImage,
|
|
setAttachedImage,
|
|
fileInputRef,
|
|
handleFileChange,
|
|
}),
|
|
[
|
|
isOpen,
|
|
messages,
|
|
isLoading,
|
|
handleSendMessage,
|
|
handleClearChat,
|
|
showSuggestions,
|
|
isListening,
|
|
transcript,
|
|
startListening,
|
|
stopListening,
|
|
isSupported,
|
|
inputValue,
|
|
attachedImage,
|
|
handleFileChange,
|
|
toggleChat,
|
|
]
|
|
);
|
|
};
|