// NOTE(review): the original imported `ArrowDown` and `SquarePen` twice from
// "lucide-react" (two separate import statements) — a TS2300 duplicate-identifier
// error. Merged into the single superset import below; no names were removed.
// The two "react" imports were also merged for consistency.
import { v4 as uuidv4 } from "uuid";
import {
  ReactNode,
  useEffect,
  useRef,
  useState,
  FormEvent,
} from "react";
import { cn } from "@/lib/utils";
import { useStreamContext } from "@/providers/Stream";
import { Input } from "../ui/input";
import { Button } from "../ui/button";
import { Checkpoint, Message } from "@langchain/langgraph-sdk";
import { AssistantMessage, AssistantMessageLoading } from "./messages/ai";
import { HumanMessage } from "./messages/human";
import {
  DO_NOT_RENDER_ID_PREFIX,
  ensureToolCallsHaveResponses,
} from "@/lib/ensure-tool-responses";
import { LangGraphLogoSVG } from "../icons/langgraph";
import { TooltipIconButton } from "./tooltip-icon-button";
import { ArrowDown, LoaderCircle, SquarePen } from "lucide-react";
import { StringParam, useQueryParam } from "use-query-params";
import { StickToBottom, useStickToBottomContext } from "use-stick-to-bottom";

// Layout helper rendered inside <StickToBottom>: scrollable `content` plus a
// `footer` pinned below it. Must be a descendant of StickToBottom, otherwise
// useStickToBottomContext has no provider.
function StickyToBottomContent(props: {
  content: ReactNode;
  footer?: ReactNode;
  className?: string;
  contentClassName?: string;
}) {
  const context = useStickToBottomContext();
  // NOTE(review): the wrapper JSX elements of this return appear to have been
  // stripped from this source (presumably divs wired to context.scrollRef /
  // context.contentRef — `context` is otherwise unused). TODO: confirm against
  // the original file before editing further.
  return (
    {props.content}
    {props.footer}
  );
}

// Floating control that scrolls the chat viewport back to the newest message;
// renders nothing while the user is already at the bottom.
function ScrollToBottom(props: { className?: string }) {
  const { isAtBottom, scrollToBottom } = useStickToBottomContext();
  if (isAtBottom) return null;
  // NOTE(review): the returned JSX element appears to have been stripped from
  // this source — presumably a button invoking scrollToBottom. TODO confirm.
  return (
  );
}

// Main chat view: binds the input form, rendered message list, and streaming
// state from useStreamContext together.
export function Thread() {
  // Thread id lives in the URL query string so a conversation survives reloads.
  const [threadId, setThreadId] = useQueryParam("threadId", StringParam);
  const [input, setInput] = useState("");
  // True once the trailing message of the current turn is an AI message — used
  // to swap the "loading" placeholder for the streamed reply.
  const [firstTokenReceived, setFirstTokenReceived] = useState(false);

  const stream = useStreamContext();
  const messages = stream.messages;
  const isLoading = stream.isLoading;

  // TODO: this should be part of the useStream hook
  const prevMessageLength = useRef(0);
  useEffect(() => {
    // Flip firstTokenReceived as soon as a new trailing AI message appears.
    if (
      messages.length !== prevMessageLength.current &&
      messages?.length &&
      messages[messages.length - 1].type === "ai"
    ) {
      setFirstTokenReceived(true);
    }
    prevMessageLength.current = messages.length;
  }, [messages]);

  // Submit the typed message: append it (plus synthetic responses for any
  // dangling tool calls, so the history stays well-formed) and start streaming.
  const handleSubmit = (e: FormEvent) => {
    e.preventDefault();
    if (!input.trim() || isLoading) return;
    setFirstTokenReceived(false);

    const newHumanMessage: Message = {
      id: uuidv4(),
      type: "human",
      content: input,
    };

    const toolMessages = ensureToolCallsHaveResponses(stream.messages);
    stream.submit(
      { messages: [...toolMessages, newHumanMessage] },
      {
        streamMode: ["values"],
        // Show the user's message immediately, before the server echoes
        // updated state back.
        optimisticValues: (prev) => ({
          ...prev,
          messages: [
            ...(prev.messages ?? []),
            ...toolMessages,
            newHumanMessage,
          ],
        }),
      },
    );

    setInput("");
  };

  // Re-run generation from the checkpoint that precedes an AI message.
  const handleRegenerate = (
    parentCheckpoint: Checkpoint | null | undefined,
  ) => {
    // Do this so the loading state is correct
    prevMessageLength.current = prevMessageLength.current - 1;
    setFirstTokenReceived(false);
    stream.submit(undefined, {
      checkpoint: parentCheckpoint,
      streamMode: ["values"],
    });
  };

  return (
    // NOTE(review): most JSX elements and attributes in this render tree appear
    // to have been stripped by whatever extracted this source — only the
    // embedded expressions survive. Reconstruct against the original file
    // before making behavioral edits here.
    {threadId && (
      // Presumably a "new thread" control — clears the threadId query param.
      setThreadId(null)} >
    )}
    {messages
      .filter((m) => !m.id?.startsWith(DO_NOT_RENDER_ID_PREFIX))
      .map((message, index) =>
        message.type === "human" ? (
          // HumanMessage element stripped — TODO confirm against original.
        ) : (
          // AssistantMessage element stripped — TODO confirm against original.
        ),
      )}
    {isLoading && !firstTokenReceived && (
      // Loading placeholder (presumably AssistantMessageLoading) shown until
      // the first AI token of the turn arrives — TODO confirm.
    )}
    }
    footer={
      {!threadId && (

        LangGraph Chat

      )}
      // Controlled message input; submission handled by handleSubmit above.
      setInput(e.target.value)} placeholder="Type your message..." className="px-4 py-6 border-none bg-transparent shadow-none ring-0 outline-none focus:outline-none focus:ring-0" />
      {stream.isLoading ? (
        // Presumably a cancel/stop button (LoaderCircle) while streaming — TODO confirm.
      ) : (
        // Presumably the send button when idle — TODO confirm.
      )}
    }
  />
  );
}