diff --git a/backend/chat/chat.py b/backend/chat/chat.py
index a1be8e936..49c0cf739 100644
--- a/backend/chat/chat.py
+++ b/backend/chat/chat.py
@@ -28,20 +28,36 @@ def __init__(
 
     def create_agent(self):
         """Create React Agent Graph"""
-        #If not vector store, split and talk to the report
-        llm_provider_name = getattr(self.config, "llm_provider")
-        fast_llm_model = getattr(self.config, "fast_llm_model")
-        temperature = getattr(self.config, "temperature")
-        fast_token_limit = getattr(self.config, "fast_token_limit")
+        cfg = Config()
 
-        provider = get_llm(llm_provider_name, model=fast_llm_model, temperature=temperature, max_tokens=fast_token_limit, **self.config.llm_kwargs).llm
+        # Retrieve LLM using get_llm with settings from config
+        provider = get_llm(
+            llm_provider=cfg.smart_llm_provider,
+            model=cfg.smart_llm_model,
+            temperature=0.35,
+            max_tokens=cfg.smart_token_limit,
+            **self.config.llm_kwargs
+        ).llm
+
+        # If vector_store is not initialized, process documents and add to vector_store
         if not self.vector_store:
             documents = self._process_document(self.report)
             self.chat_config = {"configurable": {"thread_id": str(uuid.uuid4())}}
-            self.embedding = Memory(getattr(self.config, 'embedding_provider', None), self.headers).get_embeddings()
+            self.embedding = Memory(
+                cfg.embedding_provider,
+                cfg.embedding_model,
+                **cfg.embedding_kwargs
+            ).get_embeddings()
             self.vector_store = InMemoryVectorStore(self.embedding)
             self.vector_store.add_texts(documents)
-        graph = create_react_agent(provider, tools=[self.vector_store_tool(self.vector_store)], checkpointer=MemorySaver())
+
+        # Create the React Agent Graph with the configured provider
+        graph = create_react_agent(
+            provider,
+            tools=[self.vector_store_tool(self.vector_store)],
+            checkpointer=MemorySaver()
+        )
+
         return graph
 
     def vector_store_tool(self, vector_store) -> Tool:
diff --git a/frontend/nextjs/app/globals.css b/frontend/nextjs/app/globals.css
index b8cec3521..7a0b0b158 100644
--- a/frontend/nextjs/app/globals.css
+++ b/frontend/nextjs/app/globals.css
@@ -12,6 +12,12 @@ html {
   scroll-behavior: smooth;
 }
 
+textarea {
+  max-height: 300px; /* Set an appropriate max height */
+  overflow-y: auto; /* Enable internal scrolling */
+  /* transition: height 0.2s ease-in-out; */
+}
+
 .log-message {
   word-wrap: break-word; /* For handling long URLs or text */
   overflow-wrap: break-word; /* For handling overflow in modern browsers */
diff --git a/frontend/nextjs/components/InputArea.tsx b/frontend/nextjs/components/InputArea.tsx
index 4168132f4..783f6ba0a 100644
--- a/frontend/nextjs/components/InputArea.tsx
+++ b/frontend/nextjs/components/InputArea.tsx
@@ -1,5 +1,5 @@
 import Image from "next/image";
-import { FC } from "react";
+import { FC, useRef } from "react";
 import TypeAnimation from "./TypeAnimation";
 
 type TInputAreaProps = {
@@ -11,38 +11,92 @@ type TInputAreaProps = {
   reset?: () => void;
 };
 
+// Debounce function to limit the rate at which a function can fire
+function debounce(func, wait) {
+  let timeout;
+  return function executedFunction(...args) {
+    const later = () => {
+      clearTimeout(timeout);
+      func(...args);
+    };
+    clearTimeout(timeout);
+    timeout = setTimeout(later, wait);
+  };
+}
+
 const InputArea: FC<TInputAreaProps> = ({
   promptValue,
   setPromptValue,
-  handleSubmit: handleSubmit,
-  handleSecondary: handleSecondary,
+  handleSubmit,
+  handleSecondary,
   disabled,
   reset,
 }) => {
-  const placeholder = handleSecondary ? "Follow up questions..." : "What would you like to research next?"
+  const placeholder = handleSecondary
+    ? "Follow up questions..."
+ : "What would you like to research next?"; + + const textareaRef = useRef(null); + + const resetHeight = () => { + if (textareaRef.current) { + textareaRef.current.style.height = '3em'; // Reset to base height + } + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + if (e.shiftKey) { + return; // Allow new line on Shift+Enter + } else { + e.preventDefault(); + if (!disabled) { + if (reset) reset(); + handleSubmit(promptValue); + setPromptValue(''); // Clear prompt value + resetHeight(); // Reset height after submit + } + } + } + }; + + // Debounced version of the height adjustment function + const adjustHeight = debounce((target) => { + target.style.height = 'auto'; // Reset height to auto to allow shrinking + target.style.height = `${target.scrollHeight}px`; // Adjust height + }, 100); // Adjust the delay as needed + + const handleTextareaChange = (e: React.ChangeEvent) => { + const target = e.target; + adjustHeight(target); // Use debounced function + setPromptValue(target.value); + }; + return (
     <form
       onSubmit={(e) => {
         e.preventDefault();
         if (reset) reset();
         handleSubmit(promptValue);
+        setPromptValue(''); // Clear prompt value
+        resetHeight();
       }}
     >
-      {
-        handleSecondary &&
+      {handleSecondary && (
           {
-            if (!disabled){
+          onClick={(e) => {
+            if (!disabled) {
               e.preventDefault();
               if (reset) reset();
               handleSecondary(promptValue);
-            }
+              setPromptValue(''); // Clear prompt value
+              resetHeight();
             }
-          }
+          }}
         >
           {disabled && (
@@ -59,15 +113,20 @@ const InputArea: FC<TInputAreaProps> = ({
           className={disabled ? "invisible" : ""}
         />
-        }
-        onChange={(e) => setPromptValue(e.target.value)}
+        rows={2}
+        onKeyDown={handleKeyDown}
+        onChange={handleTextareaChange}
       />
-
"{question}"
+
"{question}"
   );
 };
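
Note on frontend/nextjs/components/InputArea.tsx: the debounce helper added above is written without type annotations, so its parameters compile only when TypeScript's noImplicitAny is disabled. Below is a minimal typed sketch of the same idea for reference; the debounceTyped name, the generic constraint, and the ReturnType<typeof setTimeout> timer type are illustrative assumptions, not part of this diff.

// Hypothetical typed variant of the debounce helper introduced in InputArea.tsx.
function debounceTyped<T extends (...args: any[]) => void>(func: T, wait: number) {
  let timeout: ReturnType<typeof setTimeout> | undefined;
  return (...args: Parameters<T>) => {
    if (timeout !== undefined) clearTimeout(timeout); // cancel any pending call
    timeout = setTimeout(() => func(...args), wait);  // reschedule with the latest args
  };
}

// Usage mirroring the diff's adjustHeight call site: debounce the resize by ~100 ms.
const adjustHeight = debounceTyped((target: HTMLTextAreaElement) => {
  target.style.height = 'auto';                      // let the textarea shrink first
  target.style.height = `${target.scrollHeight}px`;  // then grow to fit its content
}, 100);

Typing the timer as ReturnType<typeof setTimeout> keeps the helper portable between DOM and Node typings, and the max-height added in globals.css still caps the visible height at 300px, with overflow-y: auto taking over beyond that.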