import React, { useState, useImperativeHandle, forwardRef, useEffect, useRef, useCallback } from 'react';
import Typography from '@mui/material/Typography';
import Tooltip from '@mui/material/Tooltip';
import IconButton from '@mui/material/IconButton';
import Button from '@mui/material/Button';
import Box from '@mui/material/Box';
import SendIcon from '@mui/icons-material/Send';
import CancelIcon from '@mui/icons-material/Cancel';
import { SxProps, Theme } from '@mui/material';
import PropagateLoader from "react-spinners/PropagateLoader";

import { Message, MessageList, BackstoryMessage } from './Message';
import { ContextStatus } from './ContextStatus';
import { Scrollable } from './Scrollable';
import { DeleteConfirmation } from './DeleteConfirmation';
import { Query } from './ChatQuery';
import './Conversation.css';
import { BackstoryTextField, BackstoryTextFieldRef } from './BackstoryTextField';
import { BackstoryElementProps } from './BackstoryTab';
import { connectionBase } from '../Global';

const loadingMessage: BackstoryMessage = { "role": "status", "content": "Establishing connection with server..." };

type ConversationMode = 'chat' | 'job_description' | 'resume' | 'fact_check';

interface ConversationHandle {
    submitQuery: (query: Query) => void;
    fetchHistory: () => void;
}

interface ConversationProps extends BackstoryElementProps {
    className?: string,           // Override default className
    type: ConversationMode,       // Type of Conversation chat
    placeholder?: string,         // Prompt to display in TextField input
    actionLabel?: string,         // Label to put on the primary button
    resetAction?: () => void,     // Callback when Reset is pressed
    resetLabel?: string,          // Label to put on Reset button
    defaultPrompts?: React.ReactElement[], // Set of Elements to display after the TextField
    defaultQuery?: string,        // Default text to populate the TextField input
    preamble?: MessageList,       // Messages to display at start of Conversation until Action has been invoked
    hidePreamble?: boolean,       // Whether to hide the preamble after an Action has been invoked
    hideDefaultPrompts?: boolean, // Whether to hide the defaultPrompts after an Action has been invoked
    messageFilter?: ((messages: MessageList) => MessageList) | undefined, // Filter callback to determine which Messages to display in Conversation
    messages?: MessageList,       // Messages always shown ahead of the (filtered) conversation history
    sx?: SxProps<Theme>,
    onResponse?: ((message: BackstoryMessage) => void) | undefined, // Event called when a query completes (provides messages)
};
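
/*
 * Usage sketch (illustrative only, not taken from the application): a parent
 * component can drive a Conversation imperatively through its ref. The props
 * shown here that are not declared in ConversationProps (sessionId, setSnack,
 * submitQuery) are presumably supplied via BackstoryElementProps; their exact
 * shapes are assumptions.
 *
 *   const chatRef = useRef<ConversationHandle>(null);
 *
 *   <Conversation
 *       ref={chatRef}
 *       type="chat"
 *       placeholder="Ask Backstory a question..."
 *       sessionId={sessionId}
 *       setSnack={setSnack}
 *       submitQuery={submitQuery}
 *   />
 *
 *   // Later, e.g. from a default prompt button:
 *   chatRef.current?.submitQuery({ prompt: "Summarize the work history." });
 */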

const Conversation = forwardRef<ConversationHandle, ConversationProps>((props: ConversationProps, ref) => {
    const {
        sessionId,
        actionLabel,
        className,
        defaultPrompts,
        defaultQuery,
        hideDefaultPrompts,
        hidePreamble,
        messageFilter,
        messages,
        onResponse,
        placeholder,
        preamble,
        resetAction,
        resetLabel,
        setSnack,
        submitQuery,
        sx,
        type,
    } = props;
    const [contextUsedPercentage, setContextUsedPercentage] = useState<number>(0);
    const [processing, setProcessing] = useState<boolean>(false);
    const [countdown, setCountdown] = useState<number>(0);
    const [conversation, setConversation] = useState<MessageList>([]);
    const [filteredConversation, setFilteredConversation] = useState<MessageList>([]);
    const [processingMessage, setProcessingMessage] = useState<BackstoryMessage | undefined>(undefined);
    const [streamingMessage, setStreamingMessage] = useState<BackstoryMessage | undefined>(undefined);
    const timerRef = useRef<any>(null);
    const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
    const [contextWarningShown, setContextWarningShown] = useState<boolean>(false);
    const [noInteractions, setNoInteractions] = useState<boolean>(true);
    const conversationRef = useRef<MessageList>([]);
    const viewableElementRef = useRef<HTMLDivElement>(null);
    const backstoryTextRef = useRef<BackstoryTextFieldRef>(null);
    const stopRef = useRef(false);

    // Keep the ref updated whenever the conversation changes
    useEffect(() => {
        conversationRef.current = conversation;
    }, [conversation]);

    // Update the context status
    const updateContextStatus = useCallback(() => {
        const fetchContextStatus = async () => {
            try {
                const response = await fetch(connectionBase + `/api/context-status/${sessionId}/${type}`, {
                    method: 'GET',
                    headers: {
                        'Content-Type': 'application/json',
                    },
                });

                if (!response.ok) {
                    throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
                }

                const data = await response.json();
                setContextStatus(data);
            } catch (error) {
                console.error('Error getting context status:', error);
                setSnack("Unable to obtain context status.", "error");
            }
        };
        fetchContextStatus();
    }, [setContextStatus, setSnack, sessionId, type]);
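
    // Assumed shape of the /api/context-status response (inferred from the
    // ContextStatus usage above, not verified against the server):
    //   { "context_used": <tokens used>, "max_context": <token limit> }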

    /* Transform the 'Conversation' by filtering via callback, then adding
     * preamble and messages based on whether the conversation
     * has any elements yet */
    useEffect(() => {
        let filtered = [];
        if (messageFilter === undefined) {
            filtered = conversation;
            // console.log('No message filter provided. Using all messages.', filtered);
        } else {
            //console.log('Filtering conversation...')
            filtered = messageFilter(conversation); /* Do not copy conversation or useEffect will loop forever */
            //console.log(`${conversation.length - filtered.length} messages filtered out.`);
        }
        if (filtered.length === 0) {
            setFilteredConversation([
                ...(preamble || []),
                ...(messages || []),
            ]);
        } else {
            setFilteredConversation([
                ...(hidePreamble ? [] : (preamble || [])),
                ...(messages || []),
                ...filtered,
            ]);
        }
    }, [conversation, setFilteredConversation, messageFilter, preamble, messages, hidePreamble]);

    const fetchHistory = useCallback(async () => {
        let retries = 5;
        while (--retries > 0) {
            try {
                const response = await fetch(connectionBase + `/api/history/${sessionId}/${type}`, {
                    method: 'GET',
                    headers: {
                        'Content-Type': 'application/json',
                    },
                });

                if (!response.ok) {
                    throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
                }

                const { messages } = await response.json();

                if (messages === undefined || messages.length === 0) {
                    console.log(`History returned for ${type} from server with 0 entries`);
                    setConversation([]);
                    setNoInteractions(true);
                } else {
                    console.log(`History returned for ${type} from server with ${messages.length} entries:`, messages);

                    const backstoryMessages: BackstoryMessage[] = messages;

                    setConversation(backstoryMessages.flatMap((backstoryMessage: BackstoryMessage) => {
                        if (backstoryMessage.status === "partial") {
                            return [{
                                ...backstoryMessage,
                                role: "assistant",
                                content: backstoryMessage.response || "",
                                expanded: false,
                                expandable: true,
                            }];
                        }
                        return [{
                            role: 'user',
                            content: backstoryMessage.prompt || "",
                        }, {
                            ...backstoryMessage,
                            role: ['done'].includes(backstoryMessage.status || "") ? "assistant" : backstoryMessage.status,
                            content: backstoryMessage.response || "",
                        }] as MessageList;
                    }));
                    setNoInteractions(false);
                }
                setProcessingMessage(undefined);
                setStreamingMessage(undefined);
                updateContextStatus();
                return;

            } catch (error) {
                console.error('Error fetching chat history:', error);
                setProcessingMessage({ role: "error", content: `Unable to obtain history from server. Retrying in 3 seconds (${retries} remain.)` });
                setTimeout(() => {
                    setProcessingMessage(undefined);
                }, 3000);
                await new Promise(resolve => setTimeout(resolve, 3000));
                setSnack("Unable to obtain chat history.", "error");
            }
        }
    }, [setConversation, updateContextStatus, setSnack, type, sessionId]);
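
    // Assumed /api/history response shape (inferred from the parsing above, not
    // verified against the backend): { "messages": BackstoryMessage[] }, where
    // each entry carries at least { prompt, response, status } and status is
    // "done", "partial", or another status/error string.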

    // Set the initial chat history to "loading" or the welcome message if loaded.
    useEffect(() => {
        if (sessionId === undefined) {
            setProcessingMessage(loadingMessage);
            return;
        }

        fetchHistory();
    }, [fetchHistory, sessionId, setProcessingMessage]);

    const startCountdown = (seconds: number) => {
        if (timerRef.current) clearInterval(timerRef.current);
        setCountdown(seconds);
        timerRef.current = setInterval(() => {
            setCountdown((prev) => {
                if (prev <= 1) {
                    clearInterval(timerRef.current);
                    timerRef.current = null;
                    return 0;
                }
                return prev - 1;
            });
        }, 1000);
    };

    const stopCountdown = () => {
        if (timerRef.current) {
            clearInterval(timerRef.current);
            timerRef.current = null;
            setCountdown(0);
        }
    };

    const handleEnter = (value: string) => {
        const query: Query = {
            prompt: value
        };
        sendQuery(query);
    };

    useImperativeHandle(ref, () => ({
        submitQuery: (query: Query) => {
            sendQuery(query);
        },
        fetchHistory: () => { return fetchHistory(); }
    }));

    // If context status changes, show a warning if necessary. If it drops
    // back below the threshold, clear the warning trigger
    useEffect(() => {
        // Guard against max_context === 0 (the initial state) to avoid a NaN percentage
        const context_used_percentage = contextStatus.max_context > 0
            ? Math.round(100 * contextStatus.context_used / contextStatus.max_context)
            : 0;
        if (context_used_percentage >= 90 && !contextWarningShown) {
            setSnack(`${context_used_percentage}% of context used. You may wish to start a new chat.`, "warning");
            setContextWarningShown(true);
        }
        if (context_used_percentage < 90 && contextWarningShown) {
            setContextWarningShown(false);
        }
        setContextUsedPercentage(context_used_percentage);
    }, [contextStatus, setContextWarningShown, contextWarningShown, setContextUsedPercentage, setSnack]);

    const reset = async () => {
        try {
            const response = await fetch(connectionBase + `/api/reset/${sessionId}/${type}`, {
                method: 'PUT',
                headers: {
                    'Content-Type': 'application/json',
                    'Accept': 'application/json',
                },
                body: JSON.stringify({ reset: ['history'] })
            });

            if (!response.ok) {
                throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
            }

            if (!response.body) {
                throw new Error('Response body is null');
            }

            setProcessingMessage(undefined);
            setStreamingMessage(undefined);
            setConversation([]);
            setNoInteractions(true);

        } catch (e) {
            setSnack("Error resetting history", "error");
            console.error('Error resetting history:', e);
        }
    };

    const cancelQuery = () => {
        console.log("Stop query");
        stopRef.current = true;
    };

    const sendQuery = async (query: Query) => {
        query.prompt = query.prompt.trim();

        // If the request was empty, a default request was provided, and there is
        // no prompt for the user (no placeholder, so no input field is rendered),
        // send the default request.
        if (!query.prompt && defaultQuery && !placeholder) {
            query.prompt = defaultQuery.trim();
        }

        // Do not send an empty request.
        if (!query.prompt) {
            return;
        }

        stopRef.current = false;

        setNoInteractions(false);

        setConversation([
            ...conversationRef.current,
            {
                role: 'user',
                origin: type,
                content: query.prompt,
                disableCopy: true
            }
        ]);

        // Add a small delay to ensure React has time to update the UI
        await new Promise(resolve => setTimeout(resolve, 0));

        try {
            setProcessing(true);

            // Add initial processing message
            setProcessingMessage(
                { role: 'status', content: 'Submitting request...', disableCopy: true }
            );

            // Add a small delay to ensure React has time to update the UI
            await new Promise(resolve => setTimeout(resolve, 0));

            let data: any = query;
            if (type === "job_description") {
                data = {
                    prompt: "",
                    agent_options: {
                        job_description: query.prompt,
                    }
                };
            }

            const response = await fetch(connectionBase + `/api/${type}/${sessionId}`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'Accept': 'application/json',
                },
                body: JSON.stringify(data)
            });

            setSnack(`Query sent.`, "info");

            if (!response.ok) {
                throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
            }

            if (!response.body) {
                throw new Error('Response body is null');
            }

            let streaming_response = "";
            // Set up stream processing with explicit chunking
            const reader = response.body.getReader();
            const decoder = new TextDecoder();
            let buffer = '';

            const process_line = async (line: string) => {
                let update = JSON.parse(line);

                switch (update.status) {
                    case 'done':
                    case 'partial': {
                        if (update.status === 'done') stopCountdown();
                        if (update.status === 'done') setStreamingMessage(undefined);
                        if (update.status === 'done') setProcessingMessage(undefined);
                        const backstoryMessage: BackstoryMessage = update;
                        setConversation([
                            ...conversationRef.current, {
                                ...backstoryMessage,
                                role: 'assistant',
                                origin: type,
                                prompt: ['done', 'partial'].includes(update.status) ? update.prompt : '',
                                content: backstoryMessage.response || "",
                                expanded: update.status === "done",
                                expandable: update.status !== "done",
                            }] as MessageList);
                        // Add a small delay to ensure React has time to update the UI
                        await new Promise(resolve => setTimeout(resolve, 0));

                        const metadata = update.metadata;
                        if (metadata) {
                            updateContextStatus();
                        }

                        if (onResponse) {
                            onResponse(update);
                        }
                        break;
                    }
                    case 'error':
                        // Show error
                        setConversation([
                            ...conversationRef.current, {
                                ...update,
                                role: 'error',
                                origin: type,
                                content: update.response || "",
                            }] as MessageList);

                        setProcessing(false);
                        stopCountdown();

                        // Add a small delay to ensure React has time to update the UI
                        await new Promise(resolve => setTimeout(resolve, 0));
                        break;
                    default:
                        // Force an immediate state update based on the message type
                        // Update processing message with immediate re-render
                        if (update.status === "streaming") {
                            streaming_response += update.chunk;
                            setStreamingMessage({ role: update.status, content: streaming_response, disableCopy: true });
                        } else {
                            setProcessingMessage({ role: update.status, content: update.response, disableCopy: true });
                            /* Reset stream on non streaming message */
                            streaming_response = "";
                        }
                        startCountdown(Math.ceil(update.remaining_time));
                        // Add a small delay to ensure React has time to update the UI
                        await new Promise(resolve => setTimeout(resolve, 0));
                        break;
                }
            };
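
            // Assumed shapes of the newline-delimited JSON updates handled above
            // (inferred from this handler, not from the server code):
            //   { status: "streaming", chunk: string }
            //   { status: <other progress status>, response: string, remaining_time: number }
            //   { status: "partial" | "done", prompt: string, response: string, metadata?: object }
            //   { status: "error", response: string }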

            while (!stopRef.current) {
                const { done, value } = await reader.read();
                if (done) {
                    break;
                }
                const chunk = decoder.decode(value, { stream: true });

                // Process each complete line immediately
                buffer += chunk;
                let lines = buffer.split('\n');
                buffer = lines.pop() || ''; // Keep incomplete line in buffer
                for (const line of lines) {
                    if (!line.trim()) continue;
                    try {
                        await process_line(line);
                    } catch (e) {
                        setSnack("Error processing query", "error");
                        console.error(e);
                    }
                }
            }

            // Process any remaining buffer content
            if (buffer.trim()) {
                try {
                    await process_line(buffer);
                } catch (e) {
                    setSnack("Error processing query", "error");
                    console.error(e);
                }
            }

            if (stopRef.current) {
                await reader.cancel();
                setProcessingMessage(undefined);
                setStreamingMessage(undefined);
                setSnack("Processing cancelled", "warning");
            }
            stopCountdown();
            setProcessing(false);
            stopRef.current = false;
        } catch (error) {
            console.error('Fetch error:', error);
            setSnack("Unable to process query", "error");
            setProcessingMessage({ role: 'error', content: "Unable to process query", disableCopy: true });
            setTimeout(() => {
                setProcessingMessage(undefined);
            }, 5000);
            stopRef.current = false;
            setProcessing(false);
            stopCountdown();
            return;
        }
    };

    return (
        <Scrollable
            className={`${className || ""} Conversation`}
            autoscroll
            textFieldRef={viewableElementRef}
            fallbackThreshold={0.5}
            sx={{
                p: 1,
                mt: 0,
                ...sx
            }}
        >
            {
                filteredConversation.map((message, index) =>
                    <Message key={index} expanded={message.expanded === undefined ? true : message.expanded} {...{ sendQuery, message, connectionBase, sessionId, setSnack, submitQuery }} />
                )
            }
            {
                processingMessage !== undefined &&
                <Message {...{ sendQuery, connectionBase, sessionId, setSnack, message: processingMessage, submitQuery }} />
            }
            {
                streamingMessage !== undefined &&
                <Message {...{ sendQuery, connectionBase, sessionId, setSnack, message: streamingMessage, submitQuery }} />
            }
            <Box sx={{
                display: "flex",
                flexDirection: "column",
                alignItems: "center",
                justifyContent: "center",
                m: 1,
            }}>
                <PropagateLoader
                    size="10px"
                    loading={processing}
                    aria-label="Loading Spinner"
                    data-testid="loader"
                />
                {processing === true && countdown > 0 && (
                    <Box
                        sx={{
                            pt: 1,
                            fontSize: "0.7rem",
                            color: "darkgrey"
                        }}
                    >Response will be stopped in: {countdown}s</Box>
                )}
            </Box>
            <Box className="Query" sx={{ display: "flex", flexDirection: "column", p: 1, flexGrow: 1 }}>
                {placeholder &&
                    <Box sx={{ display: "flex", flexGrow: 1, p: 0, m: 0, flexDirection: "column" }}
                        ref={viewableElementRef}>
                        <BackstoryTextField
                            ref={backstoryTextRef}
                            disabled={processing}
                            onEnter={handleEnter}
                            placeholder={placeholder}
                        />
                    </Box>
                }

                <Box key="jobActions" sx={{ display: "flex", justifyContent: "center", flexDirection: "row" }}>
                    <DeleteConfirmation
                        label={resetLabel || "all data"}
                        disabled={sessionId === undefined || processingMessage !== undefined || noInteractions}
                        onDelete={() => { reset(); resetAction && resetAction(); }} />
                    <Tooltip title={actionLabel || "Send"}>
                        <span style={{ display: "flex", flexGrow: 1 }}>
                            <Button
                                sx={{ m: 1, gap: 1, flexGrow: 1 }}
                                variant="contained"
                                disabled={sessionId === undefined || processingMessage !== undefined}
                                onClick={() => { sendQuery({ prompt: (backstoryTextRef.current && backstoryTextRef.current.getAndResetValue()) || "" }); }}>
                                {actionLabel}<SendIcon />
                            </Button>
                        </span>
                    </Tooltip>
                    <Tooltip title="Cancel">
                        <span style={{ display: "flex" }}> { /* This span is used to wrap the IconButton to ensure Tooltip works even when disabled */}
                            <IconButton
                                aria-label="cancel"
                                onClick={() => { cancelQuery(); }}
                                sx={{ display: "flex", margin: 'auto 0px' }}
                                size="large"
                                edge="start"
                                disabled={stopRef.current || sessionId === undefined || processing === false}
                            >
                                <CancelIcon />
                            </IconButton>
                        </span>
                    </Tooltip>
                </Box>
            </Box>
            {(noInteractions || !hideDefaultPrompts) && defaultPrompts !== undefined && defaultPrompts.length > 0 &&
                <Box sx={{ display: "flex", flexDirection: "column" }}>
                    {
                        defaultPrompts.map((element, index) => {
                            return (<Box key={index}>{element}</Box>);
                        })
                    }
                </Box>
            }
            <Box sx={{ ml: "0.25rem", fontSize: "0.6rem", color: "darkgrey", display: "flex", flexShrink: 1, flexDirection: "row", gap: 1, mb: "auto", mt: 1 }}>
                Context used: {contextUsedPercentage}% {contextStatus.context_used}/{contextStatus.max_context}
                {
                    contextUsedPercentage >= 90 ? <Typography sx={{ fontSize: "0.6rem", color: "red" }}>WARNING: Context almost exhausted. You should start a new chat.</Typography>
                        : (contextUsedPercentage >= 50 ? <Typography sx={{ fontSize: "0.6rem", color: "orange" }}>NOTE: Context is getting long. Queries will be slower, and the LLM may stop issuing tool calls.</Typography>
                            : <></>)
                }
            </Box>
            <Box sx={{ display: "flex", flexGrow: 1 }}></Box>
        </Scrollable>
    );
});

export type {
    ConversationProps,
    ConversationHandle,
};

export {
    Conversation
};