import { Button, Card, Slider } from "@theme";
import { useChatSettings } from "@utils/context/chatSettings/useChatSettings.ts";
import { formatBytes } from "@utils/format.ts";
import { CONVERSATION_STARTERS, MODELS } from "@utils/models.ts";
import { TOOLS } from "@utils/tools.ts";
import { RotateCcw, Settings } from "lucide-react";
import {
  useEffect,
  useMemo,
  useRef,
  useState,
  useSyncExternalStore,
} from "react";
import TextGeneration from "../textGeneration/TextGeneration.ts";
import type {
  ChatMessageAssistant,
  ChatMessageUser,
} from "../textGeneration/types.ts";
import cn from "../utils/classnames.ts";
import ChatForm from "./ChatForm.tsx";
import Message from "./Message.tsx";

// Lifecycle of the chat: idle until a model is loaded, then ready/generating.
enum State {
  IDLE,
  INITIALIZING,
  READY,
  GENERATING,
}

export default function Chat({ className = "" }: { className?: string }) {
  const { openSettingsModal, settings, downloadedModels } = useChatSettings();
  const initializedModelKey = useRef<string | null>(null);
  const [downloadProgress, setDownloadProgress] = useState(0);
  const [state, setState] = useState(State.IDLE);
  const settingsKey = useRef<string | null>(null);
  const [starterCategory, setStarterCategory] = useState(0);
  const messagesEndRef = useRef<HTMLDivElement>(null);

  // One TextGeneration instance per mounted Chat; it owns the model and message state.
  const generator = useMemo(() => new TextGeneration(), []);

  // Subscribe to the generator's message list so the component re-renders
  // whenever the externally managed chat history changes.
  const getSnapshot = () => generator.chatMessages;
  const messages = useSyncExternalStore(
    generator.onChatMessageUpdate,
    getSnapshot
  );

  // Re-initialize the conversation only when the settings actually change.
  useEffect(() => {
    const newSettingsKey = JSON.stringify(settings);
    if (!settings || settingsKey.current === newSettingsKey) return;
    settingsKey.current = newSettingsKey;
    initializeConversation();
  }, [settings]);

  // Keep the latest message scrolled into view.
  useEffect(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [messages]);

  if (!settings) return null;

  const modelDownloaded = downloadedModels.includes(settings.modelKey);

  const initializeConversation = () =>
    generator.initializeConversation(
      TOOLS.filter((tool) => settings.tools.includes(tool.name)),
      settings.temperature,
      settings.enableThinking,
      settings.systemPrompt
    );

  const initializeModel = async () => {
    setState(State.INITIALIZING);
    setDownloadProgress(0);
    await generator.initializeModel(settings.modelKey, (percentage) =>
      setDownloadProgress(percentage)
    );
    initializedModelKey.current = settings.modelKey;
    setState(State.READY);
  };

  const generate = async (prompt: string) => {
    // Lazily (re)load the model if the selected model changed since the last run.
    if (initializedModelKey.current !== settings.modelKey) {
      await initializeModel();
    }
    setState(State.GENERATING);
    await generator.runAgent(prompt);
    setState(State.READY);
  };

  const model = MODELS[settings.modelKey];
  const ready: boolean = state === State.READY || modelDownloaded;
  const conversationMessages = messages.filter(({ role }) => role !== "system");

  return (
    <>
      {/* NOTE: the original element markup was lost in extraction. The JSX below is a
          minimal reconstruction from the surviving text and handlers; element choices and
          prop/field names (onSubmit, starter.title, Message's `message` prop) are assumptions,
          and the Settings / RotateCcw controls and `ready` flag are not reconstructed. */}
      {state === State.IDLE && !modelDownloaded ? (
        <Card>
          <p>
            You are about to load {model.title}. Once downloaded, the model (
            {formatBytes(model.size)}) will be cached and reused when you revisit
            the page.
          </p>
          <p>
            Everything runs directly in your browser using 🤗 Transformers.js and
            ONNX Runtime Web, meaning your conversations aren't sent to a server.
            You can even disconnect from the internet after the model has loaded!
          </p>
        </Card>
      ) : state === State.INITIALIZING ? (
        <p>
          {modelDownloaded
            ? "Initializing the model..."
            : "Downloading the model..."}{" "}
          {downloadProgress.toFixed(2)}%
        </p>
      ) : conversationMessages.length === 0 ? (
        <div>
          <h2>How can I help you?</h2>
          {CONVERSATION_STARTERS.map((starter, index) => (
            <Button key={index} onClick={() => setStarterCategory(index)}>
              {starter.title}
            </Button>
          ))}
          {CONVERSATION_STARTERS[starterCategory].prompts.map(
            (prompt, index) => (
              <Button key={index} onClick={() => generate(prompt)}>
                {prompt}
              </Button>
            )
          )}
        </div>
      ) : (
        <div className={cn(className)}>
          {conversationMessages.map((message, index) => (
            <div key={index}>
              {message.role === "user" ? (
                <p>{(message as ChatMessageUser).content}</p>
              ) : message.role === "assistant" ? (
                <Message message={message as ChatMessageAssistant} />
              ) : null}
            </div>
          ))}
          <div ref={messagesEndRef} />
        </div>
      )}
      <ChatForm
        onSubmit={(prompt) => generate(prompt)}
        isGenerating={state === State.GENERATING}
        onAbort={() => generator.abort()}
      />
    </>
  );
}
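
// Illustrative sketch only (an assumption, not the actual ../textGeneration/TextGeneration.ts):
// the minimal external-store contract the component above relies on. useSyncExternalStore
// expects `onChatMessageUpdate` to register a listener and return an unsubscribe function,
// and `chatMessages` to be a snapshot that is replaced (not mutated) on every update so
// React can detect changes by reference.
type AnyChatMessage = ChatMessageUser | ChatMessageAssistant;

class TextGenerationStoreSketch {
  chatMessages: AnyChatMessage[] = [];
  private listeners = new Set<() => void>();

  // Subscribe function compatible with useSyncExternalStore's first argument.
  onChatMessageUpdate = (listener: () => void): (() => void) => {
    this.listeners.add(listener);
    return () => {
      this.listeners.delete(listener);
    };
  };

  // Called internally whenever the generator produces or updates a message.
  protected appendMessage(message: AnyChatMessage) {
    this.chatMessages = [...this.chatMessages, message]; // new array => new snapshot
    this.listeners.forEach((notify) => notify());
  }
}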