diff --git a/src/ketr-chat/src/App.css b/src/ketr-chat/src/App.css
index 25088e1..e3632b4 100644
--- a/src/ketr-chat/src/App.css
+++ b/src/ketr-chat/src/App.css
@@ -2,14 +2,6 @@ div {
box-sizing: border-box
}
-.App {
- display: flex;
- text-align: center;
- max-height: 100%;
- height: 100%;
- flex-direction: column;
-}
-
.SystemInfo {
display: flex;
flex-direction: column;
diff --git a/src/ketr-chat/src/App.tsx b/src/ketr-chat/src/App.tsx
index 2ebe87e..21fd551 100644
--- a/src/ketr-chat/src/App.tsx
+++ b/src/ketr-chat/src/App.tsx
@@ -18,6 +18,7 @@ import AppBar from '@mui/material/AppBar';
import Drawer from '@mui/material/Drawer';
import Toolbar from '@mui/material/Toolbar';
import SettingsIcon from '@mui/icons-material/Settings';
+import CloseIcon from '@mui/icons-material/Close';
import IconButton, { IconButtonProps } from '@mui/material/IconButton';
import Box from '@mui/material/Box';
import CssBaseline from '@mui/material/CssBaseline';
@@ -53,13 +54,10 @@ import '@fontsource/roboto/700.css';
//const use_mui_markdown = true
const use_mui_markdown = true
-
const welcomeMarkdown = `
# Welcome to Ketr-Chat
-This LLM agent was built by James Ketrenos in order to provide answers to any questions you may have about his work history.
-
-In addition to being a RAG enabled expert system, the LLM is configured with real-time access to weather, stocks, the current time, and can answer questions about the contents of a website.
+This LLM agent was built by James Ketrenos in order to provide answers to any questions you may have about his work history. In addition to being a RAG enabled expert system, the LLM is configured with real-time access to weather, stocks, the current time, and can answer questions about the contents of a website.
You can ask things like:
*
@@ -559,13 +557,26 @@ const App = () => {
}
};
- // Scroll to bottom of conversation when conversation updates
- useEffect(() => {
- const queryElement = document.getElementById('QueryInput');
- if (queryElement) {
- queryElement.scrollIntoView();
- }
- }, [conversation]);
+ const isScrolledToBottom = useCallback(()=> {
+ // Current vertical scroll position
+ const scrollTop = window.scrollY || document.documentElement.scrollTop;
+
+ // Total height of the page content
+ const scrollHeight = document.documentElement.scrollHeight;
+
+ // Height of the visible window
+ const clientHeight = document.documentElement.clientHeight;
+
+ // If we're at the bottom (allowing a small buffer of 5px)
+ return scrollTop + clientHeight >= scrollHeight - 5;
+ }, []);
+
+ const scrollToBottom = useCallback(() => {
+ console.log("Scroll to bottom");
+ window.scrollTo({
+ top: document.documentElement.scrollHeight,
+ });
+ }, []);
// Set the snack pop-up and open it
const setSnack = useCallback((message: string, severity: SeverityType = "success") => {
@@ -960,22 +971,25 @@ const App = () => {
const userMessage = [{ role: 'user', content: query }];
+ let scrolledToBottom = isScrolledToBottom();
+
// Add user message to conversation
const newConversation: MessageList = [
...conversation,
...userMessage
];
setConversation(newConversation);
-
+ scrollToBottom();
+
// Clear input
setQuery('');
- setTimeout(() => {
- document.getElementById("QueryIput")?.focus();
- }, 1000);
+ // setTimeout(() => {
+ // document.getElementById("QueryInput")?.focus();
+ // }, 1000);
try {
+ scrolledToBottom = isScrolledToBottom();
setProcessing(true);
-
// Create a unique ID for the processing message
const processingId = Date.now().toString();
@@ -984,6 +998,9 @@ const App = () => {
...prev,
{ role: 'assistant', content: 'Processing request...', id: processingId, isProcessing: true }
]);
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
// Make the fetch request with proper headers
const response = await fetch(getConnectionBase(loc) + `/api/chat/${sessionId}`, {
@@ -998,8 +1015,13 @@ const App = () => {
// We'll guess that the response will be around 500 tokens...
const token_guess = 500;
const estimate = Math.round(token_guess / lastEvalTPS + contextStatus.context_used / lastPromptTPS);
+
+ scrolledToBottom = isScrolledToBottom();
setSnack(`Query sent. Response estimated in ${estimate}s.`, "info");
startCountdown(Math.round(estimate));
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
if (!response.ok) {
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
@@ -1034,18 +1056,23 @@ const App = () => {
// Force an immediate state update based on the message type
if (update.status === 'processing') {
+ scrolledToBottom = isScrolledToBottom();
// Update processing message with immediate re-render
setConversation(prev => prev.map(msg =>
msg.id === processingId
? { ...msg, content: update.message }
: msg
));
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
} else if (update.status === 'done') {
// Replace processing message with final result
+ scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => msg.id !== processingId),
update.message
@@ -1056,12 +1083,19 @@ const App = () => {
setLastEvalTPS(evalTPS ? evalTPS : 35);
setLastPromptTPS(promptTPS ? promptTPS : 35);
updateContextStatus();
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
} else if (update.status === 'error') {
// Show error
+ scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => msg.id !== processingId),
{ role: 'assistant', type: 'error', content: update.message }
]);
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
}
} catch (e) {
setSnack("Error processing query", "error")
@@ -1076,10 +1110,14 @@ const App = () => {
const update = JSON.parse(buffer);
if (update.status === 'done') {
+ scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => msg.id !== processingId),
update.message
]);
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
}
} catch (e) {
setSnack("Error processing query", "error")
@@ -1091,12 +1129,16 @@ const App = () => {
} catch (error) {
console.error('Fetch error:', error);
setSnack("Unable to process query", "error");
+ scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => !msg.isProcessing),
{ role: 'assistant', type: 'error', content: `Error: ${error}` }
]);
setProcessing(false);
stopCountdown();
+ if (scrolledToBottom) {
+ setTimeout(() => { scrollToBottom() }, 0);
+ }
}
};
@@ -1114,7 +1156,7 @@ const App = () => {
const Offset = styled('div')(({ theme }) => theme.mixins.toolbar);
return (
-
+
{
Ketr-Chat
+
+ {
+ (mobileOpen === true || isScrolledToBottom()) &&
+
+
+
+
+
+ }
+
@@ -1219,7 +1277,6 @@ const App = () => {