Compare commits


No commits in common. "e6f6aad86a5a12a1c040b47a520b976ff79dd510" and "ca9dd950b3eeedf7e116f83211259b4d98c0bf45" have entirely different histories.

13 changed files with 634 additions and 912 deletions


@@ -62,6 +62,14 @@ button {
   justify-self: end; /* Align the first column content to the right */
 }
+.ChatBox {
+  display: flex;
+  flex-direction: column;
+  flex-grow: 1;
+  max-width: 1024px;
+  margin: 0 auto;
+}
 .DocBox {
   display: flex;
   flex-direction: column;
@@ -104,27 +112,17 @@ button {
   padding-right: 16px !important;
 }
-.ChatBox {
-  display: flex;
-  flex-direction: column;
-  flex-grow: 1;
-  max-width: 1024px;
-  width: 100%;
-  margin: 0 auto;
-  background-color: #D3CDBF;
-}
 .Conversation {
   display: flex;
   background-color: #F5F5F5;
   border: 1px solid #E0E0E0;
   flex-grow: 1;
-  overflow-y: auto;
   padding: 10px;
   flex-direction: column;
-  height: 100%;
-  max-height: 100%;
   font-size: 0.9rem;
-  width: 100%;
-  /* max-width: 1024px; */
-  margin: 0 auto;
 }
 .user-message.MuiCard-root {


@@ -20,7 +20,7 @@ import MenuIcon from '@mui/icons-material/Menu';
 import { ResumeBuilder } from './ResumeBuilder';
 import { Message, ChatQuery, MessageList, MessageData } from './Message';
-import { SetSnackType, SeverityType } from './Snack';
+import { SeverityType } from './Snack';
 import { VectorVisualizer } from './VectorVisualizer';
 import { Controls } from './Controls';
 import { Conversation, ConversationHandle } from './Conversation';
@@ -68,6 +68,7 @@ function CustomTabPanel(props: TabPanelProps) {
 }
 const App = () => {
+  const conversationRef = useRef<any>(null);
   const [processing, setProcessing] = useState(false);
   const [sessionId, setSessionId] = useState<string | undefined>(undefined);
   const [connectionBase,] = useState<string>(getConnectionBase(window.location))
@@ -85,13 +86,11 @@ const App = () => {
   const chatRef = useRef<ConversationHandle>(null);

   // Set the snack pop-up and open it
-  const setSnack: SetSnackType = useCallback<SetSnackType>((message: string, severity: SeverityType = "success") => {
-    setTimeout(() => {
+  const setSnack = useCallback((message: string, severity: SeverityType = "success") => {
     setSnackMessage(message);
     setSnackSeverity(severity);
     setSnackOpen(true);
-    });
-  }, [setSnackMessage, setSnackSeverity, setSnackOpen]);
+  }, []);

   useEffect(() => {
     if (prevIsDesktopRef.current === isDesktop)
@@ -173,34 +172,27 @@ What would you like to know about James?
     const url = new URL(window.location.href);
     const pathParts = url.pathname.split('/').filter(Boolean);
-    const fetchSession = async () => {
-      try {
-        const response = await fetch(connectionBase + `/api/context`, {
-          method: 'POST',
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        });
-        if (!response.ok) {
-          throw Error("Server is temporarily down.");
-        }
-        const data = await response.json();
-        setSessionId(data.id);
-        window.history.replaceState({}, '', `/${data.id}`);
-      } catch (error: any) {
-        setSnack("Server is temporarily down", "error");
-      };
-    };
     if (!pathParts.length) {
       console.log("No session id -- creating a new session")
-      fetchSession();
+      fetch(connectionBase + `/api/context`, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      })
+        .then(response => response.json())
+        .then(data => {
+          console.log(`Session id: ${data.id} -- returned from server`)
+          setSessionId(data.id);
+          window.history.replaceState({}, '', `/${data.id}`);
+        })
+        .catch(error => console.error('Error generating session ID:', error));
     } else {
       console.log(`Session id: ${pathParts[0]} -- existing session`)
       setSessionId(pathParts[0]);
     }
-  }, [setSessionId, connectionBase, setSnack]);
+  }, [setSessionId, connectionBase]);

   const handleMenuClose = () => {
     setIsMenuClosing(true);
@@ -403,7 +395,7 @@ What would you like to know about James?
             </Box>
             <CustomTabPanel tab={tab} index={0}>
-              <Box component="main" sx={{ flexGrow: 1, overflow: 'auto' }} className="ChatBox">
+              <Box component="main" sx={{ flexGrow: 1, overflow: 'auto' }} className="ChatBox" ref={conversationRef}>
                 <Conversation
                   ref={chatRef}
                   {...{
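
An aside on the two session-bootstrap styles above: the async/await variant on the left rejects on non-2xx responses before reading the body, while the .then() chain on the right only surfaces network-level failures. A minimal sketch of that distinction, assuming the same /api/context endpoint shown in the diff (the helper name is illustrative, not part of either commit):

    // Sketch only: createSession is a hypothetical helper.
    async function createSession(connectionBase: string): Promise<string> {
      const response = await fetch(`${connectionBase}/api/context`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
      });
      // fetch() resolves on HTTP errors too; only an explicit ok-check turns
      // a 500 into a rejection the caller can report to the user.
      if (!response.ok) {
        throw new Error(`Server responded with ${response.status}`);
      }
      const data = await response.json();
      return data.id as string;
    }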


@@ -61,32 +61,7 @@ function ChatBubble({ role, isFullWidth, children, sx, className }: ChatBubblePr
       borderRadius: defaultRadius,
       color: theme.palette.text.primary, // Charcoal Black (#2E2E2E) — much better contrast
       opacity: 0.95,
-    },
-    'status': {
-      ...defaultStyle,
-      backgroundColor: 'rgba(74, 122, 125, 0.15)', // Translucent dusty teal
-      border: `1px solid ${theme.palette.secondary.light}`, // Lighter dusty teal
-      borderRadius: defaultRadius,
-      maxWidth: isFullWidth ? '100%' : '75%',
-      alignSelf: 'center',
-      color: theme.palette.secondary.dark, // Darker dusty teal for text
-      fontWeight: 500, // Slightly bolder than normal
-      fontSize: '0.95rem', // Slightly smaller
-      padding: '8px 12px',
-      opacity: 0.9,
-      transition: 'opacity 0.3s ease-in-out', // Smooth fade effect for appearing/disappearing
-    },
-    'error': {
-      ...defaultStyle,
-      backgroundColor: '#F8E7E7', // Soft light red background
-      border: `1px solid #D83A3A`, // Prominent red border
-      borderRadius: defaultRadius,
-      maxWidth: isFullWidth ? '100%' : '90%',
-      alignSelf: 'center',
-      color: '#8B2525', // Deep red text for good contrast
-      padding: '10px 16px',
-      boxShadow: '0 1px 3px rgba(216, 58, 58, 0.15)', // Subtle shadow with red tint
-    },
+    }
   };
   return (


@@ -14,7 +14,7 @@ import Box from '@mui/material/Box';
 import ResetIcon from '@mui/icons-material/History';
 import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
-import { SetSnackType } from './Snack';
+import { SeverityType } from './Snack';

 type Tool = {
   type: string,
@@ -32,7 +32,7 @@ type Tool = {
 interface ControlsParams {
   connectionBase: string,
   sessionId: string | undefined,
-  setSnack: SetSnackType,
+  setSnack: (message: string, severity?: SeverityType) => void,
 };

 type GPUInfo = {


@@ -11,10 +11,10 @@ import { SxProps, Theme } from '@mui/material';
 import PropagateLoader from "react-spinners/PropagateLoader";
 import { Message, MessageList, MessageData } from './Message';
-import { SetSnackType } from './Snack';
+import { SeverityType } from './Snack';
 import { ContextStatus } from './ContextStatus';

-const loadingMessage: MessageData = { "role": "status", "content": "Establishing connection with server..." };
+const loadingMessage: MessageData = { "role": "assistant", "content": "Establishing connection with server..." };

 type ConversationMode = 'chat' | 'job_description' | 'resume' | 'fact_check';
@@ -23,73 +23,42 @@ interface ConversationHandle {
 }

 interface ConversationProps {
-  className?: string, // Override default className
-  type: ConversationMode, // Type of Conversation chat
-  prompt?: string, // Prompt to display in TextField input
-  actionLabel?: string, // Label to put on the primary button
-  resetAction?: () => void, // Callback when Reset is pressed
-  multiline?: boolean, // Render TextField as multiline or not
-  resetLabel?: string, // Label to put on Reset button
-  connectionBase: string, // Base URL for fetch() calls
-  sessionId?: string, // Session ID for fetch() calls
-  setSnack: SetSnackType, // Callback to display snack popups
-  defaultPrompts?: React.ReactElement[], // Set of Elements to display after the TextField
-  defaultQuery?: string, // Default text to populate the TextField input
-  preamble?: MessageList, // Messages to display at start of Conversation until Action has been invoked
-  hidePreamble?: boolean, // Whether to hide the preamble after an Action has been invoked
-  hideDefaultPrompts?: boolean, // Whether to hide the defaultPrompts after an Action has been invoked
-  messageFilter?: ((messages: MessageList) => MessageList) | undefined, // Filter callback to determine which Messages to display in Conversation
-  messages?: MessageList, //
+  className?: string,
+  type: ConversationMode
+  prompt: string,
+  actionLabel?: string,
+  resetAction?: () => void,
+  resetLabel?: string,
+  connectionBase: string,
+  sessionId?: string,
+  setSnack: (message: string, severity: SeverityType) => void,
+  defaultPrompts?: React.ReactElement[],
+  preamble?: MessageList,
+  hideDefaultPrompts?: boolean,
+  messageFilter?: (messages: MessageList) => MessageList,
+  messages?: MessageList,
   sx?: SxProps<Theme>,
-  onResponse?: ((message: MessageData) => MessageData) | undefined, // Event called when a query completes (provides messages)
 };

-const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
-  className,
-  type,
-  prompt,
-  actionLabel,
-  resetAction,
-  multiline,
-  resetLabel,
-  connectionBase,
-  sessionId,
-  setSnack,
-  defaultPrompts,
-  hideDefaultPrompts,
-  defaultQuery,
-  preamble,
-  hidePreamble,
-  messageFilter,
-  messages,
-  sx,
-  onResponse
-}: ConversationProps, ref) => {
+const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...props }: ConversationProps, ref) => {
   const [query, setQuery] = useState<string>("");
   const [contextUsedPercentage, setContextUsedPercentage] = useState<number>(0);
   const [processing, setProcessing] = useState<boolean>(false);
   const [countdown, setCountdown] = useState<number>(0);
   const [conversation, setConversation] = useState<MessageList>([]);
-  const [filteredConversation, setFilteredConversation] = useState<MessageList>([]);
-  const [processingMessage, setProcessingMessage] = useState<MessageData | undefined>(undefined);
   const timerRef = useRef<any>(null);
   const [lastEvalTPS, setLastEvalTPS] = useState<number>(35);
   const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
   const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
   const [contextWarningShown, setContextWarningShown] = useState<boolean>(false);
   const [noInteractions, setNoInteractions] = useState<boolean>(true);
-  const conversationRef = useRef<MessageList>([]);
-
-  // Keep the ref updated whenever items changes
-  useEffect(() => {
-    conversationRef.current = conversation;
-  }, [conversation]);
+  const setSnack = props.setSnack;

   // Update the context status
   const updateContextStatus = useCallback(() => {
     const fetchContextStatus = async () => {
       try {
-        const response = await fetch(connectionBase + `/api/context-status/${sessionId}/${type}`, {
+        const response = await fetch(props.connectionBase + `/api/context-status/${props.sessionId}/${props.type}`, {
           method: 'GET',
           headers: {
             'Content-Type': 'application/json',
@@ -109,75 +78,51 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
       }
     };
     fetchContextStatus();
-  }, [setContextStatus, connectionBase, setSnack, sessionId, type]);
-
-  /* Transform the 'Conversation' by filtering via callback, then adding
-   * preamble and messages based on whether the conversation
-   * has any elements yet */
-  useEffect(() => {
-    let filtered = [];
-    if (messageFilter === undefined) {
-      filtered = conversation;
-    } else {
-      //console.log('Filtering conversation...')
-      filtered = messageFilter(conversation); /* Do not copy conversation or useEffect will loop forever */
-      //console.log(`${conversation.length - filtered.length} messages filtered out.`);
-    }
-    if (filtered.length === 0) {
-      setFilteredConversation([
-        ...(preamble || []),
-        ...(messages || []),
-      ]);
-    } else {
-      setFilteredConversation([
-        ...(hidePreamble ? [] : (preamble || [])),
-        ...(messages || []),
-        ...filtered,
-      ]);
-    };
-  }, [conversation, setFilteredConversation, messageFilter, preamble, messages, hidePreamble]);
+  }, [setContextStatus, props.connectionBase, setSnack, props.sessionId, props.type]);

   // Set the initial chat history to "loading" or the welcome message if loaded.
   useEffect(() => {
-    if (sessionId === undefined) {
-      setProcessingMessage(loadingMessage);
+    if (props.sessionId === undefined) {
+      setConversation([loadingMessage]);
       return;
     }
     const fetchHistory = async () => {
       try {
-        const response = await fetch(connectionBase + `/api/history/${sessionId}/${type}`, {
+        const response = await fetch(props.connectionBase + `/api/history/${props.sessionId}/${props.type}`, {
           method: 'GET',
           headers: {
             'Content-Type': 'application/json',
           },
         });
         if (!response.ok) {
           throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
         }
         const data = await response.json();
-        console.log(`History returned for ${type} from server with ${data.length} entries`)
+        console.log(`History returned from server with ${data.length} entries`)
         if (data.length === 0) {
-          setConversation([])
+          setConversation([
+            ...(props.preamble || []),
+            ...(props.messages || []),
+          ]);
           setNoInteractions(true);
         } else {
-          setConversation(data);
+          setConversation([
+            ...(props.messages || []),
+            ...(props.messageFilter ? props.messageFilter(data) : data)
+          ]);
           setNoInteractions(false);
         }
-        setProcessingMessage(undefined);
         updateContextStatus();
       } catch (error) {
         console.error('Error generating session ID:', error);
-        setProcessingMessage({ role: "error", content: "Unable to obtain history from server." });
         setSnack("Unable to obtain chat history.", "error");
       }
     };
+    if (props.sessionId !== undefined) {
       fetchHistory();
+    }
-  }, [setConversation, setFilteredConversation, updateContextStatus, connectionBase, setSnack, type, sessionId]);
+  }, [props.sessionId, setConversation, updateContextStatus, props.connectionBase, setSnack, props.preamble, props.type]);

   const isScrolledToBottom = useCallback(()=> {
     // Current vertical scroll position
@@ -229,8 +174,12 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
   };

   const handleKeyPress = (event: any) => {
-    if (event.key === 'Enter' && !event.shiftKey) {
+    if (event.key === 'Enter') {
+      switch (event.target.id) {
+        case 'QueryInput':
           sendQuery(query);
+          break;
+      }
     }
   };

@@ -240,6 +189,10 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
     }
   }));

+  const submitQuery = (query: string) => {
+    sendQuery(query);
+  }
+
   // If context status changes, show a warning if necessary. If it drops
   // back below the threshold, clear the warning trigger
   useEffect(() => {
@@ -256,7 +209,7 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
   const reset = async () => {
     try {
-      const response = await fetch(connectionBase + `/api/reset/${sessionId}/${type}`, {
+      const response = await fetch(props.connectionBase + `/api/reset/${props.sessionId}/${props.type}`, {
         method: 'PUT',
         headers: {
           'Content-Type': 'application/json',
@@ -273,7 +226,13 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
         throw new Error('Response body is null');
       }
-      setConversation([])
+      props.messageFilter && props.messageFilter([]);
+      setConversation([
+        ...(props.preamble || []),
+        ...(props.messages || []),
+      ]);
       setNoInteractions(true);
     } catch (e) {
@@ -283,40 +242,24 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
   };

   const sendQuery = async (query: string) => {
-    query = query.trim();
-    // If the query was empty, a default query was provided,
-    // and there is no prompt for the user, send the default query.
-    if (!query && defaultQuery && !prompt) {
-      query = defaultQuery.trim();
-    }
-    // If the query is empty, and a prompt was provided, do not
-    // send an empty query.
-    if (!query && prompt) {
-      return;
-    }
     setNoInteractions(false);
-    if (query) {
-      setConversation([
-        ...conversationRef.current,
-        {
-          role: 'user',
-          origin: type,
-          content: query
-        }
-      ]);
-    }
-    // Add a small delay to ensure React has time to update the UI
-    await new Promise(resolve => setTimeout(resolve, 0));
-    console.log(conversation);
+    if (!query.trim()) return;
+
+    //setTab(0);
+
+    const userMessage: MessageData[] = [{ role: 'user', content: query }];
     let scrolledToBottom;
+
+    // Add user message to conversation
+    const newConversation: MessageList = [
+      ...conversation,
+      ...userMessage
+    ];
+    setConversation(newConversation);
     scrollToBottom();

     // Clear input
     setQuery('');
@@ -327,19 +270,16 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
     const processingId = Date.now().toString();

     // Add initial processing message
-    setProcessingMessage(
-      { role: 'status', content: 'Submitting request...', id: processingId, isProcessing: true }
-    );
-    // Add a small delay to ensure React has time to update the UI
-    await new Promise(resolve => setTimeout(resolve, 0));
+    setConversation(prev => [
+      ...prev,
+      { role: 'assistant', content: 'Processing request...', id: processingId, isProcessing: true }
+    ]);

     if (scrolledToBottom) {
       setTimeout(() => { scrollToBottom() }, 50);
     }

     // Make the fetch request with proper headers
-    const response = await fetch(connectionBase + `/api/chat/${sessionId}/${type}`, {
+    const response = await fetch(props.connectionBase + `/api/chat/${props.sessionId}/${props.type}`, {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',
@@ -394,43 +334,41 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
             if (update.status === 'processing') {
               scrolledToBottom = isScrolledToBottom();
               // Update processing message with immediate re-render
-              setProcessingMessage({ role: 'status', content: update.message });
-              // Add a small delay to ensure React has time to update the UI
-              await new Promise(resolve => setTimeout(resolve, 0));
+              setConversation(prev => prev.map(msg =>
+                msg.id === processingId
+                  ? { ...msg, content: update.message }
+                  : msg
+              ));
               if (scrolledToBottom) {
                 setTimeout(() => { scrollToBottom() }, 50);
               }
-            } else if (update.status === 'done') {
-              // Replace processing message with final result
-              scrolledToBottom = isScrolledToBottom();
-              if (onResponse) {
-                update.message = onResponse(update.message);
-              }
-              setProcessingMessage(undefined);
-              setConversation([
-                ...conversationRef.current,
-                update.message
-              ])
               // Add a small delay to ensure React has time to update the UI
               await new Promise(resolve => setTimeout(resolve, 0));
+            } else if (update.status === 'done') {
+              // Replace processing message with final result
+              scrolledToBottom = isScrolledToBottom();
+              setConversation(prev => [
+                ...prev.filter(msg => msg.id !== processingId),
+                update.message
+              ]);
               const metadata = update.message.metadata;
-              if (metadata) {
               const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
               const promptTPS = metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration;
               setLastEvalTPS(evalTPS ? evalTPS : 35);
               setLastPromptTPS(promptTPS ? promptTPS : 35);
               updateContextStatus();
-              }
               if (scrolledToBottom) {
                 setTimeout(() => { scrollToBottom() }, 50);
               }
             } else if (update.status === 'error') {
               // Show error
               scrolledToBottom = isScrolledToBottom();
-              setProcessingMessage({ role: 'error', content: update.message });
-              // Add a small delay to ensure React has time to update the UI
-              await new Promise(resolve => setTimeout(resolve, 0));
+              setConversation(prev => [
+                ...prev.filter(msg => msg.id !== processingId),
+                { role: 'assistant', type: 'error', content: update.message }
+              ]);
               if (scrolledToBottom) {
                 setTimeout(() => { scrollToBottom() }, 50);
               }
@@ -449,12 +387,8 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
           if (update.status === 'done') {
             scrolledToBottom = isScrolledToBottom();
-            if (onResponse) {
-              update.message = onResponse(update.message);
-            }
-            setProcessingMessage(undefined);
-            setConversation([
-              ...conversationRef.current,
+            setConversation(prev => [
+              ...prev.filter(msg => msg.id !== processingId),
               update.message
             ]);
             if (scrolledToBottom) {
@@ -476,37 +410,31 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
       console.error('Fetch error:', error);
       setSnack("Unable to process query", "error");
       scrolledToBottom = isScrolledToBottom();
-      setProcessingMessage({ role: 'error', content: "Unable to process query" });
+      setConversation(prev => [
+        ...prev.filter(msg => !msg.isProcessing),
+        { role: 'assistant', type: 'error', content: `Error: ${error}` }
+      ]);
       setProcessing(false);
       stopCountdown();
       if (scrolledToBottom) {
         setTimeout(() => { scrollToBottom() }, 50);
       }
-      // Add a small delay to ensure React has time to update the UI
-      await new Promise(resolve => setTimeout(resolve, 0));
     }
   };

   return (
-    <Box className={className || "Conversation"} sx={{
-      display: "flex", flexDirection: "column", flexGrow: 1, p: 1,
-      ...sx
-    }}>
+    <Box className={props.className || "Conversation"} sx={{ ...props.sx, display: "flex", flexDirection: "column" }}>
       {
-        filteredConversation.map((message, index) =>
-          <Message key={index} {...{ sendQuery, message, connectionBase, sessionId, setSnack }} />
+        conversation.map((message, index) =>
+          <Message key={index} {...{ submitQuery, message, connectionBase: props.connectionBase, sessionId: props.sessionId, setSnack }} />
         )
       }
-      {
-        processingMessage !== undefined &&
-        <Message {...{ sendQuery, connectionBase, sessionId, setSnack, message: processingMessage }} />
-      }
       <Box sx={{
         display: "flex",
         flexDirection: "column",
         alignItems: "center",
         justifyContent: "center",
-        mb: 1,
+        mb: 1
       }}>
         <PropagateLoader
           size="10px"
@@ -524,52 +452,45 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
         >Estimated response time: {countdown}s</Box>
       )}
       </Box>
-      <Box className="Query" sx={{ display: "flex", flexDirection: "column", p: 1 }}>
-        {prompt &&
+      <Box className="Query" sx={{ display: "flex", flexDirection: props.type === "job_description" ? "column" : "row", p: 1 }}>
         <TextField
           variant="outlined"
           disabled={processing}
-          fullWidth={true}
-          multiline={multiline ? true : false}
+          fullWidth
+          multiline={props.type === "job_description"}
           type="text"
           value={query}
           onChange={(e) => setQuery(e.target.value)}
           onKeyDown={handleKeyPress}
-          placeholder={prompt}
+          placeholder={props.prompt}
           id="QueryInput"
         />
-        }
         <Box key="jobActions" sx={{ display: "flex", justifyContent: "center", flexDirection: "row" }}>
           <IconButton
             sx={{ display: "flex", margin: 'auto 0px' }}
             size="large"
             edge="start"
             color="inherit"
-            disabled={sessionId === undefined || processingMessage !== undefined}
             onClick={() => { reset(); }}
           >
-            <Tooltip title={resetLabel || "Reset"} >
+            <Tooltip title={props.resetLabel || "Reset"} >
               <ResetIcon />
             </Tooltip>
           </IconButton>
-          <Tooltip title={actionLabel || "Send"}>
-            <span style={{ display: "flex", flexGrow: 1 }}>
+          <Tooltip title={props.actionLabel || "Send"}>
             <Button
               sx={{ m: 1, gap: 1, flexGrow: 1 }}
               variant="contained"
-              disabled={sessionId === undefined || processingMessage !== undefined}
               onClick={() => { sendQuery(query); }}>
-              {actionLabel}<SendIcon />
+              {props.actionLabel}<SendIcon />
             </Button>
-            </span>
           </Tooltip>
         </Box>
       </Box>
-      {(noInteractions || !hideDefaultPrompts) && defaultPrompts !== undefined && defaultPrompts.length &&
+      {(noInteractions || !props.hideDefaultPrompts) && props.defaultPrompts !== undefined && props.defaultPrompts.length &&
        <Box sx={{ display: "flex", flexDirection: "column" }}>
          {
-           defaultPrompts.map((element, index) => {
+           props.defaultPrompts.map((element, index) => {
             return (<Box key={index}>{element}</Box>);
           })
          }
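
The core of this file's change is where the in-flight status bubble lives: the left side keeps it in separate processingMessage state, while the right side threads it through the conversation array keyed by id. A sketch of the id-keyed variant, assuming the MessageData shape from the diff; the helper names are illustrative:

    // Sketch: placeholder lifecycle for a streamed chat response.
    type StreamMsg = { role: string; content: string; id?: string; isProcessing?: boolean };

    // While streaming, rewrite the placeholder in place so only one bubble re-renders.
    function applyStatusUpdate(msgs: StreamMsg[], processingId: string, content: string): StreamMsg[] {
      return msgs.map(m => (m.id === processingId ? { ...m, content } : m));
    }

    // On completion, drop the placeholder and append the final message.
    function finalize(msgs: StreamMsg[], processingId: string, done: StreamMsg): StreamMsg[] {
      return [...msgs.filter(m => m.id !== processingId), done];
    }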


@@ -1,5 +1,8 @@
-import React, { useState, useCallback, useRef } from 'react';
+import React, { useEffect, useState, useCallback } from 'react';
 import {
+  Typography,
+  Card,
+  Button,
   Tabs,
   Tab,
   Paper,
@@ -9,18 +12,26 @@ import {
   Divider,
   Slider,
   Stack,
+  TextField,
+  Tooltip
 } from '@mui/material';
 import { useTheme } from '@mui/material/styles';
+import SendIcon from '@mui/icons-material/Send';
 import {
   ChevronLeft,
   ChevronRight,
   SwapHoriz,
+  RestartAlt as ResetIcon,
 } from '@mui/icons-material';
+import PropagateLoader from "react-spinners/PropagateLoader";
 import { SxProps, Theme } from '@mui/material';
-import { ChatQuery } from './Message';
-import { MessageList, MessageData } from './Message';
-import { SetSnackType } from './Snack';
+import MuiMarkdown from 'mui-markdown';
+import { Message, ChatQuery } from './Message';
+import { Document } from './Document';
+import { MessageData, MessageList } from './Message';
+import { SeverityType } from './Snack';
 import { Conversation } from './Conversation';

 /**
@@ -29,13 +40,13 @@ import { Conversation } from './Conversation';
  * @property {SxProps<Theme>} [sx] - Optional styling properties
  * @property {string} [connectionBase] - Base URL for fetch calls
  * @property {string} [sessionId] - Session ID
- * @property {SetSnackType} - setSnack UI callback
+ * @property {(message: string, severity: SeverityType) => void} - setSnack UI callback
  */
 export interface DocumentViewerProps {
   sx?: SxProps<Theme>;
   connectionBase: string;
   sessionId: string;
-  setSnack: SetSnackType;
+  setSnack: (message: string, severity: SeverityType) => void,
 }

 /**
  * DocumentViewer component
@@ -50,17 +61,92 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
   setSnack
 }) => {
   // State for editing job description
-  const [hasJobDescription, setHasJobDescription] = useState<boolean>(false);
-  const [hasResume, setHasResume] = useState<boolean>(false);
-  const [hasFacts, setHasFacts] = useState<boolean>(false);
+  const [jobDescription, setJobDescription] = useState<string | undefined>(undefined);
+  const [facts, setFacts] = useState<MessageData | undefined>(undefined);
+  const [resume, setResume] = useState<MessageData | undefined>(undefined);
+  const [editJobDescription, setEditJobDescription] = useState<string | undefined>(jobDescription);
+  // Processing state to show loading indicators
+  const [processing, setProcessing] = useState<string | undefined>(undefined);
+  // Theme and responsive design setup
   const theme = useTheme();
   const isMobile = useMediaQuery(theme.breakpoints.down('md'));
-  const jobConversationRef = useRef<any>(null);
-  const resumeConversationRef = useRef<any>(null);
-  const factsConversationRef = useRef<any>(null);
+  // State for controlling which document is active on mobile
   const [activeTab, setActiveTab] = useState<number>(0);
-  const [splitRatio, setSplitRatio] = useState<number>(100);
+  // State for controlling split ratio on desktop
+  const [splitRatio, setSplitRatio] = useState<number>(0);
+
+  /**
+   * Reset processing state when resume is generated
+   */
+  useEffect(() => {
+    if (resume !== undefined && processing === "resume") {
+      setProcessing(undefined);
+    }
+  }, [processing, resume]);
+
+  /**
+   * Reset processing state when facts is generated
+   */
+  useEffect(() => {
+    if (facts !== undefined && processing === "facts") {
+      setProcessing(undefined);
+    }
+  }, [processing, facts]);
+
+  /**
+   * Trigger resume generation and update UI state
+   */
+  const triggerGeneration = useCallback((description: string | undefined) => {
+    if (description === undefined) {
+      setProcessing(undefined);
+      setResume(undefined);
+      setActiveTab(0);
+      return;
+    }
+    setProcessing("resume");
+    setTimeout(() => { setActiveTab(1); }, 250); // Switch to resume view on mobile
+    console.log('generateResume(description);');
+  }, [/*generateResume*/, setProcessing, setActiveTab, setResume]);
+
+  /**
+   * Trigger fact check and update UI state
+   */
+  const triggerFactCheck = useCallback((resume: string | undefined) => {
+    if (resume === undefined) {
+      setProcessing(undefined);
+      setResume(undefined);
+      setFacts(undefined);
+      setActiveTab(1);
+      return;
+    }
+    setProcessing("facts");
+    console.log('factCheck(resume)');
+    setTimeout(() => { setActiveTab(2); }, 250); // Switch to resume view on mobile
+  }, [/*factCheck,*/ setResume, setProcessing, setActiveTab, setFacts]);
+
+  useEffect(() => {
+    setEditJobDescription(jobDescription);
+  }, [jobDescription, setEditJobDescription]);
+
+  /**
+   * Switch to resume tab when resume become available
+   */
+  useEffect(() => {
+    if (resume !== undefined) {
+      setTimeout(() => { setActiveTab(1); }, 250); // Switch to resume view on mobile
+    }
+  }, [resume]);
+
+  /**
+   * Switch to fact check tab when facts become available
+   */
+  useEffect(() => {
+    if (facts !== undefined) {
+      setTimeout(() => { setActiveTab(2); }, 250); // Switch to resume view on mobile
+    }
+  }, [facts]);

   /**
    * Handle tab change for mobile view
@@ -83,146 +169,18 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
     setSplitRatio(50);
   };

+  /**
+   * Handle keyboard shortcuts
+   */
+  const handleKeyPress = (event: React.KeyboardEvent): void => {
+    if (event.key === 'Enter' && event.ctrlKey) {
+      triggerGeneration(editJobDescription || "");
+    }
+  };
+
   const handleJobQuery = (query: string) => {
-    console.log(`handleJobQuery: ${query} -- `, jobConversationRef.current ? ' sending' : 'no handler');
-    jobConversationRef.current?.submitQuery(query);
+    triggerGeneration(query);
   };
-  const handleResumeQuery = (query: string) => {
-    console.log(`handleResumeQuery: ${query} -- `, resumeConversationRef.current ? ' sending' : 'no handler');
-    resumeConversationRef.current?.submitQuery(query);
-  };
-  const handleFactsQuery = (query: string) => {
-    console.log(`handleFactsQuery: ${query} -- `, factsConversationRef.current ? ' sending' : 'no handler');
-    factsConversationRef.current?.submitQuery(query);
-  };
-
-  const filterJobDescriptionMessages = useCallback((messages: MessageList): MessageList => {
-    if (messages === undefined || messages.length === 0) {
-      return [];
-    }
-    let reduced = messages.filter((m, i) => {
-      const keep = (m.metadata?.origin || m.origin || "no origin") === 'job_description';
-      if ((m.metadata?.origin || m.origin || "no origin") === 'resume') {
-        setHasResume(true);
-      }
-      // if (!keep) {
-      //   console.log(`filterJobDescriptionMessages: ${i + 1} filtered:`, m);
-      // } else {
-      //   console.log(`filterJobDescriptionMessages: ${i + 1}:`, m);
-      // }
-      return keep;
-    });
-    if (reduced.length > 0) {
-      // First message is always 'info'
-      reduced[0].role = 'info';
-      setHasJobDescription(true);
-    }
-    /* If there is more than one message, it is user: "...JOB_DESCRIPTION...", assistant: "...stored..."
-     * which means a resume has been generated. */
-    if (reduced.length > 1) {
-      setHasResume(true);
-    }
-    /* Filter out any messages which the server injected for state management */
-    reduced = reduced.filter(m => m.display !== "hide");
-    return reduced;
-  }, [setHasJobDescription, setHasResume]);
-
-  const filterResumeMessages = useCallback((messages: MessageList): MessageList => {
-    if (messages === undefined || messages.length === 0) {
-      return [];
-    }
-    let reduced = messages.filter((m, i) => {
-      const keep = (m.metadata?.origin || m.origin || "no origin") === 'resume';
-      if ((m.metadata?.origin || m.origin || "no origin") === 'fact_check') {
-        setHasFacts(true);
-      }
-      // if (!keep) {
-      //   console.log(`filterResumeMessages: ${i + 1} filtered:`, m);
-      // } else {
-      //   console.log(`filterResumeMessages: ${i + 1}:`, m);
-      // }
-      return keep;
-    });
-    /* If there is more than one message, it is user: "...JOB_DESCRIPTION...", assistant: "...RESUME..."
-     * which means a resume has been generated. */
-    if (reduced.length > 1) {
-      /* Remove the assistant message from the UI */
-      if (reduced[0].role === "user") {
-        reduced.splice(0, 1);
-      }
-    }
-    /* If Fact Check hasn't occurred yet and there is still more than one message,
-     * facts have have been generated. */
-    if (!hasFacts && reduced.length > 1) {
-      setHasFacts(true);
-    }
-    /* Filter out any messages which the server injected for state management */
-    reduced = reduced.filter(m => m.display !== "hide");
-    /* If there are any messages, there is a resume */
-    if (reduced.length > 0) {
-      // First message is always 'info'
-      reduced[0].role = 'info';
-      setHasResume(true);
-    }
-    return reduced;
-  }, [setHasResume, hasFacts, setHasFacts]);
-
-  const filterFactsMessages = useCallback((messages: MessageList): MessageList => {
-    if (messages === undefined || messages.length === 0) {
-      return [];
-    }
-    // messages.forEach((m, i) => console.log(`filterFactsMessages: ${i + 1}:`, m))
-    const reduced = messages.filter(m => {
-      return (m.metadata?.origin || m.origin || "no origin") === 'fact_check';
-    });
-    /* If there is more than one message, it is user: "Fact check this resume...", assistant: "...FACT CHECK..."
-     * which means facts have been generated. */
-    if (reduced.length > 1) {
-      /* Remove the user message from the UI */
-      if (reduced[0].role === "user") {
-        reduced.splice(0, 1);
-      }
-      reduced[0].role = 'info';
-      setHasFacts(true);
-    }
-    return reduced;
-  }, [setHasFacts]);
-
-  const jobResponse = useCallback((message: MessageData): MessageData => {
-    console.log('onJobResponse', message);
-    setHasResume(true);
-    return message;
-  }, []);
-
-  const resumeResponse = useCallback((message: MessageData): MessageData => {
-    console.log('onResumeResponse', message);
-    setHasFacts(true);
-    return message;
-  }, [setHasFacts]);
-
-  const factsResponse = useCallback((message: MessageData): MessageData => {
-    console.log('onFactsResponse', message);
-    return message;
-  }, []);
-
-  const renderJobDescriptionView = useCallback(() => {
   const jobDescriptionQuestions = [
     <Box sx={{ display: "flex", flexDirection: "row" }}>
       <ChatQuery text="What are the key skills necessary for this position?" submitQuery={handleJobQuery} />
@@ -230,184 +188,126 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
     </Box>,
   ];

+  const filterJobDescriptionMessages = (messages: MessageList): MessageList => {
+    /* The second messages is the RESUME (the LLM response to the JOB-DESCRIPTION) */
+    if (messages.length > 1) {
+      setResume(messages[1]);
+    } else if (resume !== undefined) {
+      setResume(undefined);
+    }
+    /* Filter out the RESUME */
+    const reduced = messages.filter((message, index) => index != 1);
+    /* Set the first message as coming from the assistant (rendered as markdown) */
+    if (reduced.length > 0) {
+      reduced[0].role = 'assistant';
+    }
+    return reduced;
+  };
+
+  const jobDescriptionMessages: MessageList = [];
+
+  const renderJobDescriptionView = () => {
-    if (!hasJobDescription) {
+    if (resume === undefined) {
       return <Conversation
-        ref={jobConversationRef}
         {...{
-          type: "job_description",
+          sx: { display: "flex", flexGrow: 1 },
           actionLabel: "Generate Resume",
-          prompt: "Paste a job description, then click Generate...",
           multiline: true,
+          type: "job_description",
+          prompt: "Paste a job description, then click Generate...",
           messageFilter: filterJobDescriptionMessages,
-          onResponse: jobResponse,
+          messages: jobDescriptionMessages,
           sessionId,
           connectionBase,
           setSnack,
+          defaultPrompts: jobDescriptionQuestions
         }}
       />
     } else {
       return <Conversation
-        ref={jobConversationRef}
         {...{
+          className: "ChatBox",
+          sx: { display: "flex", flexGrow: 1 },
           type: "job_description",
           actionLabel: "Send",
           prompt: "Ask a question about this job description...",
           messageFilter: filterJobDescriptionMessages,
-          defaultPrompts: jobDescriptionQuestions,
-          onResponse: jobResponse,
+          messages: jobDescriptionMessages,
           sessionId,
           connectionBase,
           setSnack,
+          defaultPrompts: jobDescriptionQuestions
        }}
      />
    }
+  }
-  }, [connectionBase, filterJobDescriptionMessages, hasJobDescription, sessionId, setSnack, jobResponse]);

   /**
    * Renders the resume view with loading indicator
    */
-  const renderResumeView = useCallback(() => {
-    const resumeQuestions = [
-      <Box sx={{ display: "flex", flexDirection: "row" }}>
-        <ChatQuery text="Is this resume a good fit for the provided job description?" submitQuery={handleResumeQuery} />
-        <ChatQuery text="Provide a more concise resume." submitQuery={handleResumeQuery} />
-      </Box>,
-    ];
-
-    if (!hasFacts) {
-      return <Conversation
-        ref={resumeConversationRef}
-        {...{
-          actionLabel: "Fact Check",
-          multiline: true,
-          type: "resume",
-          messageFilter: filterResumeMessages,
-          onResponse: resumeResponse,
-          sessionId,
-          connectionBase,
-          setSnack,
-        }}
-      />
-    } else {
-      return <Conversation
-        ref={resumeConversationRef}
-        {...{
-          type: "resume",
-          actionLabel: "Send",
-          prompt: "Ask a question about this job resume...",
-          messageFilter: filterResumeMessages,
-          defaultPrompts: resumeQuestions,
-          onResponse: resumeResponse,
-          sessionId,
-          connectionBase,
-          setSnack,
-        }}
-      />
-    }
-  }, [connectionBase, filterResumeMessages, hasFacts, sessionId, setSnack, resumeResponse]);
+  const renderResumeView = () => (
+    <Box key="ResumeView" sx={{ display: "flex", flexDirection: "column", overflow: "auto", flexGrow: 1, flexBasis: 0 }}>
+      <Document sx={{ display: "flex", flexGrow: 1 }} title="">
+        {resume !== undefined && <Message {...{ message: resume, connectionBase, sessionId, setSnack }} />}
+      </Document>
+      {processing === "resume" && (
+        <Box sx={{
+          display: "flex",
+          flexDirection: "column",
+          alignItems: "center",
+          justifyContent: "center",
+          mb: 1,
+          height: "10px"
+        }}>
+          <PropagateLoader
+            size="10px"
+            loading={true}
+            aria-label="Loading Spinner"
+            data-testid="loader"
+          />
+          <Typography>Generating resume...</Typography>
+        </Box>
+      )}
+      <ResumeActionCard
+        resume={resume}
+        processing={processing}
+        triggerFactCheck={triggerFactCheck}
+      />
+    </Box>
+  );

   /**
    * Renders the fact check view
    */
-  const renderFactCheckView = useCallback(() => {
-    const factsQuestions = [
-      <Box sx={{ display: "flex", flexDirection: "row" }}>
-        <ChatQuery text="Rewrite the resume to address any discrepancies." submitQuery={handleFactsQuery} />
-      </Box>,
-    ];
-
-    return <Conversation
-      ref={factsConversationRef}
-      {...{
-        type: "fact_check",
-        actionLabel: "Send",
-        prompt: "Ask a question about any discrepencies...",
-        messageFilter: filterFactsMessages,
-        defaultPrompts: factsQuestions,
-        onResponse: factsResponse,
-        sessionId,
-        connectionBase,
-        setSnack,
-      }}
-    />
-  }, [connectionBase, sessionId, setSnack, factsResponse, filterFactsMessages]);
-
-  /**
-   * Gets the appropriate content based on active state for Desktop
-   */
-  const getActiveDesktopContent = useCallback(() => {
-    /* Left panel - Job Description */
-    const showResume = hasResume
-    const showFactCheck = hasFacts
-    const ratio = 75 + 25 * splitRatio / 100;
-    const otherRatio = showResume ? ratio / (hasFacts ? 3 : 2) : 100;
-    const resumeRatio = 100 - otherRatio * (hasFacts ? 2 : 1);
-    const children = [];
-    children.push(
-      <Box key="JobDescription" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${otherRatio}%`, width: `${otherRatio}%`, maxWidth: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
-        {renderJobDescriptionView()}
-      </Box>);
-    /* Resume panel - conditionally rendered if resume defined, or processing is in progress */
-    if (showResume) {
-      children.push(
-        <Box key="ResumeView" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${resumeRatio}%`, width: `${resumeRatio}%`, maxWidth: `${resumeRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
-          <Divider orientation="vertical" flexItem />
-          {renderResumeView()}
-        </Box>
-      );
-    }
-    /* Fact Check panel - conditionally rendered if facts defined, or processing is in progress */
-    if (showFactCheck) {
-      children.push(
-        <Box key="FactCheckView" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${otherRatio}%`, width: `${otherRatio}%`, maxWidth: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
-          <Divider orientation="vertical" flexItem />
-          {renderFactCheckView()}
-        </Box>
-      );
-    }
-    /* Split control panel - conditionally rendered if either facts or resume is set */
-    let slider = <Box key="slider"></Box>;
-    if (showResume || showFactCheck) {
-      slider = (
-        <Paper key="slider" sx={{ p: 2, display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
-          <Stack direction="row" spacing={2} alignItems="center" sx={{ width: '60%' }}>
-            <IconButton onClick={() => setSplitRatio(s => Math.max(0, s - 10))}>
-              <ChevronLeft />
-            </IconButton>
-            <Slider
-              value={splitRatio}
-              onChange={handleSliderChange}
-              aria-label="Split ratio"
-              min={0}
-              max={100}
-            />
-            <IconButton onClick={() => setSplitRatio(s => Math.min(100, s + 10))}>
-              <ChevronRight />
-            </IconButton>
-            <IconButton onClick={resetSplit}>
-              <SwapHoriz />
-            </IconButton>
-          </Stack>
-        </Paper>
-      );
-    }
-    return (
-      <Box sx={{ ...sx, display: 'flex', flexGrow: 1, flexDirection: 'column', p: 0 }}>
-        <Box sx={{ display: 'flex', flexGrow: 1, flexDirection: 'row', overflow: 'hidden', p: 0 }}>
-          {children}
-        </Box>
-        {slider}
-      </Box>
-    )
-  }, [renderFactCheckView, renderJobDescriptionView, renderResumeView, splitRatio, sx, hasFacts, hasResume]);
+  const renderFactCheckView = () => (
+    <Box key="FactView" sx={{ display: "flex", flexDirection: "column", overflow: "auto", flexGrow: 1, flexBasis: 0, p: 0 }}>
+      <Document sx={{ display: "flex", flexGrow: 1 }} title="">
+        {facts !== undefined && <Message {...{ message: facts, connectionBase, sessionId, setSnack }} />}
+      </Document>
+      {processing === "facts" && (
+        <Box sx={{
+          display: "flex",
+          flexDirection: "column",
+          alignItems: "center",
+          justifyContent: "center",
+          mb: 1,
+          height: "10px"
+        }}>
+          <PropagateLoader
+            size="10px"
+            loading={true}
+            aria-label="Loading Spinner"
+            data-testid="loader"
+          />
+          <Typography>Fact Checking resume...</Typography>
+        </Box>
+      )}
+    </Box>
+  );

   // Render mobile view
   if (isMobile) {
@@ -437,25 +337,154 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
           sx={{ bgcolor: 'background.paper' }}
         >
           <Tab value={0} label="Job Description" />
-          {hasResume && <Tab value={1} label="Resume" />}
-          {hasFacts && <Tab value={2} label="Fact Check" />}
+          {(resume !== undefined || processing === "resume") && <Tab value={1} label="Resume" />}
+          {(facts !== undefined || processing === "facts") && <Tab value={2} label="Fact Check" />}
         </Tabs>
         {/* Document display area */}
-        <Box sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, p: 0, width: "100%", ...sx }}>
+        <Box sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, overflow: 'hidden', p: 0 }}>
           {getActiveMobileContent()}
         </Box>
       </Box>
     );
   }

+  /**
+   * Gets the appropriate content based on active state for Desktop
+   */
+  const getActiveDesktopContent = () => {
+    /* Left panel - Job Description */
+    const showResume = resume !== undefined || processing === "resume"
+    const showFactCheck = facts !== undefined || processing === "facts"
+    const otherRatio = showResume ? (100 - splitRatio / 2) : 100;
+    const children = [];
+    children.push(
+      <Box key="JobDescription" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', width: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
+        {renderJobDescriptionView()}
+      </Box>);
+    /* Resume panel - conditionally rendered if resume defined, or processing is in progress */
+    if (showResume) {
+      children.push(
+        <Box key="ResumeView" sx={{ display: 'flex', width: '100%', p: 0, flexGrow: 1, flexDirection: 'row' }}>
+          <Divider orientation="vertical" flexItem />
+          {renderResumeView()}
+        </Box>
+      );
+    }
+    /* Fact Check panel - conditionally rendered if facts defined, or processing is in progress */
+    if (showFactCheck) {
+      children.push(
+        <Box key="FactCheckView" sx={{ display: 'flex', width: `${otherRatio}%`, p: 0, flexGrow: 1, flexDirection: 'row' }}>
+          <Divider orientation="vertical" flexItem />
+          {renderFactCheckView()}
+        </Box>
+      );
+    }
+    /* Split control panel - conditionally rendered if either facts or resume is set */
+    let slider = <Box key="slider"></Box>;
+    if (showResume || showFactCheck) {
+      slider = (
+        <Paper key="slider" sx={{ p: 2, display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
+          <Stack direction="row" spacing={2} alignItems="center" sx={{ width: '60%' }}>
+            <IconButton onClick={() => setSplitRatio(Math.max(0, splitRatio - 10))}>
+              <ChevronLeft />
+            </IconButton>
+            <Slider
+              value={splitRatio}
+              onChange={handleSliderChange}
+              aria-label="Split ratio"
+              min={0}
+              max={100}
+            />
+            <IconButton onClick={() => setSplitRatio(Math.min(100, splitRatio + 10))}>
+              <ChevronRight />
+            </IconButton>
+            <IconButton onClick={resetSplit}>
+              <SwapHoriz />
+            </IconButton>
+          </Stack>
+        </Paper>
+      );
+    }
     return (
-      <Box sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, width: "100%", ...sx }}>
+      <Box sx={{ ...sx, display: 'flex', flexGrow: 1, flexDirection: 'column', p: 0 }}>
+        <Box sx={{ display: 'flex', flexGrow: 1, flexDirection: 'row', overflow: 'hidden', p: 0 }}>
+          {children}
+        </Box>
+        {slider}
+      </Box>
+    )
+  }
+
+  return (
+    <Box sx={{ ...sx, display: 'flex', flexDirection: 'column', flexGrow: 1 }}>
       {getActiveDesktopContent()}
     </Box>
   );
 };

+/**
+ * Props for the ResumeActionCard component
+ */
+interface ResumeActionCardProps {
+  resume: any;
+  processing: string | undefined;
+  triggerFactCheck: (resume: string | undefined) => void;
+}
+
+/**
+ * Action card displayed underneath the resume with notes and fact check button
+ */
+const ResumeActionCard: React.FC<ResumeActionCardProps> = ({ resume, processing, triggerFactCheck }) => (
+  <Box sx={{ display: "flex", justifyContent: "center", flexDirection: "column" }}>
+    <Card sx={{ display: "flex", overflow: "auto", minHeight: "fit-content", p: 1, flexDirection: "column" }}>
+      {resume !== undefined || processing === "resume" ? (
+        <Typography>
+          <b>NOTE:</b> As with all LLMs, hallucination is always a possibility. Click <b>Fact Check</b> to have the LLM analyze the generated resume vs. the actual resume.
+        </Typography>
+      ) : (
+        <Typography>
+          Once you click <b>Generate</b> under the <b>Job Description</b>, a resume will be generated based on the user's RAG content and the job description.
+        </Typography>
+      )}
+    </Card>
+    <Box sx={{ display: "flex", justifyContent: "center", flexDirection: "row", flexGrow: 1 }}>
+      <IconButton
+        sx={{ display: "flex", margin: 'auto 0px' }}
+        size="large"
+        edge="start"
+        color="inherit"
+        disabled={processing !== undefined}
+        onClick={() => { triggerFactCheck(undefined); }}
+      >
+        <Tooltip title="Reset Resume">
+          <ResetIcon />
+        </Tooltip>
+      </IconButton>
+      <Tooltip title="Fact Check">
+        <span style={{ display: "flex", flexGrow: 1 }}>
+          <Button
+            sx={{ m: 1, gap: 1, flexGrow: 1 }}
+            variant="contained"
+            disabled={processing !== undefined}
+            onClick={() => { resume && triggerFactCheck(resume.content); }}
+          >
+            Fact Check<SendIcon />
+          </Button>
+        </span>
+      </Tooltip>
+    </Box>
+  </Box>
+);

 export {
   DocumentViewer
 };
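
Both versions of this component lean on a messageFilter callback to carve one shared session history into per-pane views. A sketch of the origin-tag approach used on the left side, assuming messages carry an origin either at the top level or in metadata; the helper name is illustrative:

    // Sketch: select the messages belonging to one pane, hiding server bookkeeping.
    type Msg = { role: string; content: string; origin?: string; display?: string; metadata?: { origin?: string } };

    function filterByOrigin(messages: Msg[], origin: string): Msg[] {
      return messages
        .filter(m => (m.metadata?.origin || m.origin) === origin)
        // Messages the server injected purely for state management are flagged display: "hide".
        .filter(m => m.display !== "hide");
    }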


@@ -27,16 +27,15 @@ import { StyledMarkdown } from './StyledMarkdown';
 import { Tooltip } from '@mui/material';
 import { VectorVisualizer } from './VectorVisualizer';
-import { SetSnackType } from './Snack';
+import { SeverityType } from './Snack';

-type MessageRoles = 'info' | 'user' | 'assistant' | 'system' | 'status' | 'error';
+type MessageRoles = 'info' | 'user' | 'assistant' | 'system';

 type MessageData = {
   role: MessageRoles,
   content: string,
   user?: string,
-  origin?: string,
-  display?: string, /* Messages generated on the server for filler should not be shown */
+  type?: string,
   id?: string,
   isProcessing?: boolean,
   metadata?: MessageMetaProps
@@ -47,8 +46,6 @@ interface MessageMetaProps {
     query_embedding: number[];
     vector_embedding: number[];
   },
-  origin: string,
-  full_query?: string,
   rag: any,
   tools: any[],
   eval_count: number,
@@ -57,7 +54,7 @@ interface MessageMetaProps {
   prompt_eval_duration: number,
   sessionId?: string,
   connectionBase: string,
-  setSnack: SetSnackType,
+  setSnack: (message: string, severity: SeverityType) => void,
 }

 type MessageList = MessageData[];
@@ -68,7 +65,7 @@ interface MessageProps {
   submitQuery?: (text: string) => void,
   sessionId?: string,
   connectionBase: string,
-  setSnack: SetSnackType,
+  setSnack: (message: string, severity: SeverityType) => void,
 };

 interface ChatQueryInterface {
@@ -116,20 +113,6 @@ const MessageMeta = ({ ...props }: MessageMetaProps) => {
         </TableBody>
       </Table>
     </TableContainer>
-    {
-      props?.full_query !== undefined &&
-      <Accordion>
-        <AccordionSummary expandIcon={<ExpandMoreIcon />}>
-          <Box sx={{ fontSize: "0.8rem" }}>
-            Full Query
-          </Box>
-        </AccordionSummary>
-        <AccordionDetails>
-          <pre>{props.full_query}</pre>
-        </AccordionDetails>
-      </Accordion>
-    }
     {
       props.tools !== undefined && props.tools.length !== 0 &&
       <Accordion sx={{ boxSizing: "border-box" }}>
@@ -212,7 +195,7 @@ const ChatQuery = ({ text, submitQuery }: ChatQueryInterface) => {
         borderColor: theme => theme.palette.custom.highlight,
         m: 1
       }}
-      size="small" onClick={(e: any) => { submitQuery(text); }}>
+      size="small" onClick={(e: any) => { console.log(text); submitQuery(text); }}>
       {text}
     </Button>
   );
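
The role union is the pivot of this file's change: the left side models status and error as first-class roles, while the right folds them into assistant messages carrying a type field. A sketch of how the six-role union can key rendering, assuming the left-side MessageRoles; the style names are illustrative:

    // Sketch: exhaustive role-to-style mapping; the Record type makes the
    // compiler flag any role added to or removed from the union.
    type MessageRoles = 'info' | 'user' | 'assistant' | 'system' | 'status' | 'error';

    const bubbleClass: Record<MessageRoles, string> = {
      info: 'bubble-info',
      user: 'bubble-user',
      assistant: 'bubble-assistant',
      system: 'bubble-system',
      status: 'bubble-status', // transient, centered, faded
      error: 'bubble-error',   // red border, high-contrast text
    };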


@@ -1,6 +1,7 @@
+import { useState, useCallback, useEffect } from 'react';
 import Box from '@mui/material/Box';
 import { SeverityType } from './Snack';
-import { MessageData } from './Message';
+import { MessageData, MessageMetaProps } from './Message';
 import { DocumentViewer } from './DocumentViewer';

 interface ResumeBuilderProps {
@@ -15,14 +16,15 @@ interface ResumeBuilderProps {
   setFacts: (facts: MessageData | undefined) => void,
 };

-// type Resume = {
-//   resume: MessageData | undefined,
-//   fact_check: MessageData | undefined,
-//   job_description: string,
-//   metadata: MessageMetaProps
-// };
+type Resume = {
+  resume: MessageData | undefined,
+  fact_check: MessageData | undefined,
+  job_description: string,
+  metadata: MessageMetaProps
+};

 const ResumeBuilder = ({ facts, setFacts, resume, setResume, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
+  const [jobDescription, setJobDescription] = useState<string | undefined>(undefined);
   if (sessionId === undefined) {
     return (<></>);
   }

View File

@ -1,7 +1,5 @@
type SeverityType = 'error' | 'info' | 'success' | 'warning' | undefined; type SeverityType = 'error' | 'info' | 'success' | 'warning' | undefined;
type SetSnackType = (message: string, severity?: SeverityType) => void;
export type { export type {
SeverityType, SeverityType
SetSnackType
}; };

View File

@ -10,7 +10,7 @@ import SendIcon from '@mui/icons-material/Send';
import FormControlLabel from '@mui/material/FormControlLabel'; import FormControlLabel from '@mui/material/FormControlLabel';
import Switch from '@mui/material/Switch'; import Switch from '@mui/material/Switch';
import { SetSnackType } from './Snack'; import { SeverityType } from './Snack';
interface Metadata { interface Metadata {
doc_type?: string; doc_type?: string;
@ -18,7 +18,7 @@ interface Metadata {
} }
interface ResultData { interface ResultData {
embeddings: (number[])[]; embeddings: number[][] | number[][][];
documents: string[]; documents: string[];
metadatas: Metadata[]; metadatas: Metadata[];
ids: string[]; ids: string[];
@ -42,7 +42,7 @@ interface PlotData {
interface VectorVisualizerProps { interface VectorVisualizerProps {
connectionBase: string; connectionBase: string;
sessionId?: string; sessionId?: string;
setSnack: SetSnackType; setSnack: (message: string, severity: SeverityType) => void;
inline?: boolean; inline?: boolean;
rag?: any; rag?: any;
} }
@ -141,18 +141,11 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, rag, inli
if (!result || !result.embeddings) return; if (!result || !result.embeddings) return;
if (result.embeddings.length === 0) return; if (result.embeddings.length === 0) return;
const vectors: (number[])[] = [...result.embeddings]; const vectors: number[][] = [...result.embeddings as number[][]];
const documents = [...result.documents || []]; const documents = [...result.documents || []];
const metadatas = [...result.metadatas || []]; const metadatas = [...result.metadatas || []];
const ids = [...result.ids || []]; const ids = [...result.ids || []];
let is2D = vectors.every((v: number[]) => v.length === 2);
let is3D = vectors.every((v: number[]) => v.length === 3);
if ((view2D && !is2D) || (!view2D && !is3D)) {
return;
}
if (view2D && rag && rag.umap_embedding_2d) { if (view2D && rag && rag.umap_embedding_2d) {
metadatas.unshift({ doc_type: 'query' }); metadatas.unshift({ doc_type: 'query' });
documents.unshift('Query'); documents.unshift('Query');
@ -176,11 +169,10 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, rag, inli
} }
} }
is2D = vectors.every((v: number[]) => v.length === 2); const is2D = vectors.every((v: number[]) => v.length === 2);
is3D = vectors.every((v: number[]) => v.length === 3); const is3D = vectors.every((v: number[]) => v.length === 3);
if (!is2D && !is3D) { if (!is2D && !is3D) {
console.warn('Modified vectors are neither 2D nor 3D'); console.error('Vectors are neither 2D nor 3D');
return; return;
} }
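
Reviewer note: the left-hand revision adds an early dimensionality guard before the vectors array is mutated, then rechecks after the query point is unshifted, so mixed 2D/3D data never reaches the plot. A minimal sketch of that guard, written in Python for consistency with the rest of the examples here (names hypothetical):

```python
def matches_view(vectors, view_2d: bool) -> bool:
    """True only when every vector has the dimensionality the current view expects."""
    expected = 2 if view_2d else 3
    return all(len(v) == expected for v in vectors)

# Bail out early instead of plotting mixed 2D/3D points.
vectors = [[0.1, 0.2], [0.3, 0.4]]
if not matches_view(vectors, view_2d=True):
    raise ValueError("embeddings do not match the requested view")
```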

src/.gitignore vendored
View File

@ -1,2 +0,0 @@
cert.pem
key.pem

View File

@ -11,6 +11,7 @@ import uuid
import subprocess import subprocess
import re import re
import math import math
import copy
def try_import(module_name, pip_name=None): def try_import(module_name, pip_name=None):
try: try:
@ -168,28 +169,27 @@ Always use tools and [{context_tag}] when possible. Be concise, and never make u
system_generate_resume = f""" system_generate_resume = f"""
Launched on {DateTime()}. Launched on {DateTime()}.
You are a professional resume writer. Your task is to write a concise, polished, and tailored resume for a specific job based only on the individual's [WORK HISTORY]. You are a professional resume writer. Your task is to write a polished, tailored resume for a specific job based only on the individual's [WORK HISTORY].
When answering queries, follow these steps: When answering queries, follow these steps:
1. You must not invent or assume any information not explicitly present in the [WORK HISTORY]. 1. You must not invent or assume any information not explicitly present in the [WORK HISTORY].
2. Analyze the [JOB DESCRIPTION] to identify skills required for the job. 2. Analyze the [JOB DESCRIPTION] to identify skills required for the job.
3. Use the [JOB DESCRIPTION] provided to guide the focus, tone, and relevant skills or experience to highlight from the [WORK HISTORY]. 3. Use the [JOB DESCRIPTION] provided to guide the focus, tone, and relevant skills or experience to highlight from the [WORK HISTORY].
4. Identify and emphasize the experiences, achievements, and responsibilities from the [WORK HISTORY] that best align with the [JOB DESCRIPTION]. 4. Identify and emphasize the experiences, achievements, and responsibilities from the [WORK HISTORY] that best align with the [JOB DESCRIPTION].
5. Only provide information from the [WORK HISTORY] if it is relevant to the [JOB DESCRIPTION]. 5. Do not use the [JOB DESCRIPTION] skills unless listed in [WORK HISTORY].
6. Do not use the [JOB DESCRIPTION] skills unless listed in [WORK HISTORY]. 6. Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
7. Do not include any information unless it is provided in [WORK HISTORY] or [INTRO]. 7. Use the [INTRO] to highlight the use of AI in generating this resume.
8. Use the [INTRO] to highlight the use of AI in generating this resume. 8. Use the [WORK HISTORY] to create a polished, professional resume.
9. Use the [WORK HISTORY] to create a polished, professional resume. 9. Do not list any locations in the resume.
10. Do not list any locations or mailing addresses in the resume.
Structure the resume professionally with the following sections where applicable: Structure the resume professionally with the following sections where applicable:
* "Name: Use full name." * "Name: Use full name."
* "Professional Summary: A 2-4 sentence overview tailored to the job, using [INTRO] to highlight the use of AI in generating this resume." * "Professional Summary: A 2-4 sentence overview tailored to the job, using [INTRO] to highlight the use of AI in generating this resume."
* "Skills: A bullet list of key skills derived from the work history and relevant to the job." * "Skills: A bullet list of key skills derived from the work history and relevant to the job."
* Professional Experience: A detailed list of roles, achievements, and responsibilities from [WORK HISTORY] that relate to the [JOB DESCRIPTION]. * Professional Experience: A detailed list of roles, achievements, and responsibilities from the work history that relate to the job.
* Education: Include only if available in the work history. * Education: Include only if available in the work history.
Do not include any information unless it is provided in [WORK HISTORY] or [INTRO]. Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
Ensure the language is clear, concise, and aligned with industry standards for professional resumes. Ensure the language is clear, concise, and aligned with industry standards for professional resumes.
@ -504,27 +504,20 @@ class WebServer:
except: except:
query = "" query = ""
if not query: if not query:
return JSONResponse({"error": "No query provided for similarity search"}, status_code=400) return JSONResponse({"error": "No query provided"}, status_code=400)
try: try:
chroma_results = self.file_watcher.find_similar(query=query, top_k=10) chroma_results = self.file_watcher.find_similar(query=query, top_k=10)
if not chroma_results: if not chroma_results:
return JSONResponse({"error": "No results found"}, status_code=404) return JSONResponse({"error": "No results found"}, status_code=404)
chroma_embedding = np.array(chroma_results["query_embedding"]).flatten() # Ensure correct shape chroma_embedding = chroma_results["query_embedding"]
print(f"Chroma embedding shape: {chroma_embedding.shape}")
umap_2d = self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 2D output: {umap_2d}, length: {len(umap_2d)}") # Debug output
umap_3d = self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 3D output: {umap_3d}, length: {len(umap_3d)}") # Debug output
return JSONResponse({ return JSONResponse({
**chroma_results, **chroma_results,
"query": query, "query": query,
"umap_embedding_2d": umap_2d, "umap_embedding_2d": self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist(),
"umap_embedding_3d": umap_3d "umap_embedding_3d": self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
}) })
except Exception as e: except Exception as e:
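
Reviewer note: the flatten step on the left matters because Chroma can return the query embedding as a nested list, while umap-learn's `transform` expects a 2-D array of shape (n_samples, n_features). A hedged sketch of the reshape-then-project step, assuming already-fitted `umap.UMAP` model objects:

```python
import numpy as np

# As returned by Chroma, the query embedding may be [[...]] rather than [...]
query_embedding = [[0.12, 0.48, 0.33, 0.91]]
flat = np.array(query_embedding).flatten()  # shape: (n_features,)

# transform() expects (n_samples, n_features), so wrap the vector in a list:
# umap_2d = umap_model_2d.transform([flat])[0].tolist()  # -> [x, y]
# umap_3d = umap_model_3d.transform([flat])[0].tolist()  # -> [x, y, z]
print(flat.shape)
```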
@ -792,28 +785,24 @@ class WebServer:
context["sessions"] = { context["sessions"] = {
"chat": { "chat": {
"system_prompt": system_message, "system_prompt": system_message,
"content_seed": None,
"llm_history": context["llm_history"], "llm_history": context["llm_history"],
"user_history": context["user_history"], "user_history": context["user_history"],
"context_tokens": round(len(str(create_system_message(system_message)))) "context_tokens": round(len(str(create_system_message(system_message))))
}, },
"job_description": { "job_description": {
"system_prompt": system_job_description, "system_prompt": system_job_description,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(create_system_message(system_job_description)))) "context_tokens": round(len(str(create_system_message(system_job_description))))
}, },
"resume": { "resume": {
"system_prompt": system_generate_resume, "system_prompt": system_generate_resume,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(create_system_message(system_generate_resume)))) "context_tokens": round(len(str(create_system_message(system_generate_resume))))
}, },
"fact_check": { "fact_check": {
"system_prompt": system_fact_check, "system_prompt": system_fact_check,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(create_system_message(system_fact_check)))) "context_tokens": round(len(str(create_system_message(system_fact_check))))
@ -858,28 +847,24 @@ class WebServer:
"sessions": { "sessions": {
"chat": { "chat": {
"system_prompt": system_message, "system_prompt": system_message,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(system_message)) * 3 / 4), # Estimate context usage "context_tokens": round(len(str(system_message)) * 3 / 4), # Estimate context usage
}, },
"job_description": { "job_description": {
"system_prompt": system_job_description, "system_prompt": system_job_description,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(system_job_description)) * 3 / 4), # Estimate context usage "context_tokens": round(len(str(system_job_description)) * 3 / 4), # Estimate context usage
}, },
"resume": { "resume": {
"system_prompt": system_generate_resume, "system_prompt": system_generate_resume,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(system_generate_resume)) * 3 / 4), # Estimate context usage "context_tokens": round(len(str(system_generate_resume)) * 3 / 4), # Estimate context usage
}, },
"fact_check": { "fact_check": {
"system_prompt": system_fact_check, "system_prompt": system_fact_check,
"content_seed": None,
"llm_history": [], "llm_history": [],
"user_history": [], "user_history": [],
"context_tokens": round(len(str(system_fact_check)) * 3 / 4), # Estimate context usage "context_tokens": round(len(str(system_fact_check)) * 3 / 4), # Estimate context usage
@ -906,67 +891,14 @@ class WebServer:
logging.info(f"Context {context_id} not found. Creating new context.") logging.info(f"Context {context_id} not found. Creating new context.")
return self.load_context(context_id) return self.load_context(context_id)
def generate_rag_results(self, context, content):
results_found = False
if self.file_watcher:
for rag in context["rags"]:
if rag["enabled"] and rag["name"] == "JPK": # Only support JPK rag right now...
yield {"status": "processing", "message": f"Checking RAG context {rag['name']}..."}
chroma_results = self.file_watcher.find_similar(query=content, top_k=10)
if chroma_results:
results_found = True
chroma_embedding = np.array(chroma_results["query_embedding"]).flatten() # Ensure correct shape
print(f"Chroma embedding shape: {chroma_embedding.shape}")
umap_2d = self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 2D output: {umap_2d}, length: {len(umap_2d)}") # Debug output
umap_3d = self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 3D output: {umap_3d}, length: {len(umap_3d)}") # Debug output
yield {
**chroma_results,
"name": rag["name"],
"umap_embedding_2d": umap_2d,
"umap_embedding_3d": umap_3d
}
if not results_found:
yield {"status": "complete", "message": "No RAG context found"}
yield {
"rag": None,
"documents": [],
"embeddings": [],
"umap_embedding_2d": [],
"umap_embedding_3d": []
}
else:
yield {"status": "complete", "message": "RAG processing complete"}
# type: chat
# * Q&A
#
# type: job_description
# * First message sets Job Description and generates Resume
# * Has content (Job Description)
# * Then Q&A of Job Description
#
# type: resume
# * First message sets Resume and generates Fact Check
# * Has no content
# * Then Q&A of Resume
#
# Fact Check:
# * First message sets Fact Check and is Q&A
# * Has content
# * Then Q&A of Fact Check
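
Reviewer note: the comment block above describes a small state machine: each session type gets special handling on its first message, then degrades to plain chat once enough history has accumulated, with the threshold varying by type. A hedged sketch of that dispatch, with thresholds mirroring the `match process_type` block in the diff (job_description flips after USER+ASSISTANT, resume after USER+ASSISTANT+FACT_CHECK, fact_check is always chat):

```python
def resolve_process_type(session_type: str, user_history: list) -> str:
    """First-message handling per session type; afterwards everything is 'chat'."""
    thresholds = {"job_description": 2, "resume": 3, "fact_check": 0}
    if session_type == "chat" or len(user_history) >= thresholds.get(session_type, 1):
        return "chat"
    return session_type

print(resolve_process_type("job_description", [{"role": "user"}]))  # job_description
```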
async def chat(self, context, type, content): async def chat(self, context, type, content):
if not self.file_watcher: if not self.file_watcher:
return return
content = content.strip() content = content.strip()
if not content:
yield {"status": "error", "message": "Invalid request"}
return
if self.processing: if self.processing:
yield {"status": "error", "message": "Busy"} yield {"status": "error", "message": "Busy"}
@ -975,11 +907,10 @@ class WebServer:
self.processing = True self.processing = True
try: try:
session = context["sessions"][type] llm_history = context["sessions"][type]["llm_history"]
llm_history = session["llm_history"] user_history = context["sessions"][type]["user_history"]
user_history = session["user_history"]
metadata = { metadata = {
"origin": type, "type": type,
"rag": { "documents": [] }, "rag": { "documents": [] },
"tools": [], "tools": [],
"eval_count": 0, "eval_count": 0,
@ -991,99 +922,58 @@ class WebServer:
# Default to not using tools # Default to not using tools
enable_tools = False enable_tools = False
# Default to using RAG if there is content to check # Default to using RAG
if content:
enable_rag = True enable_rag = True
else:
# The first time a particular session type is used, it is handled differently. After the initial pass (once the
# llm_history has more than one entry), the standard 'chat' is used.
if len(user_history) >= 1:
process_type = "chat"
# Do not enable RAG when limiting context to the job description chat
if type == "job_description":
enable_rag = False enable_rag = False
# RAG is disabled when asking questions about the resume
if type == "resume":
enable_rag = False
# The first time through each session type a content_seed may be set for
# future chat sessions; use it once, then clear it
if session["content_seed"]:
preamble = f"{session['content_seed']}"
session["content_seed"] = None
else: else:
preamble = ""
# After the first time a particular session type is used, it is handled as a chat.
# The number of messages indicating the session is ready for chat varies based on
# the type of session
process_type = type process_type = type
match process_type:
case "job_description": if enable_rag:
logging.info(f"job_description user_history len: {len(user_history)}") for rag in context["rags"]:
if len(user_history) >= 2: # USER, ASSISTANT if rag["enabled"] and rag["name"] == "JPK": # Only support JPK rag right now...
process_type = "chat" yield {"status": "processing", "message": f"Checking RAG context {rag['name']}..."}
case "resume": chroma_results = self.file_watcher.find_similar(query=content, top_k=10)
logging.info(f"resume user_history len: {len(user_history)}") if chroma_results:
if len(user_history) >= 3: # USER, ASSISTANT, FACT_CHECK chroma_embedding = chroma_results["query_embedding"]
process_type = "chat" metadata["rag"] = {
case "fact_check": **chroma_results,
process_type = "chat" # Fact Check is always a chat session "name": rag["name"],
"umap_embedding_2d": self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist(),
"umap_embedding_3d": self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
}
match process_type: match process_type:
# Normal chat interactions with context history # Normal chat interactions with context history
case "chat": case "chat":
if not content:
yield {"status": "error", "message": "No query provided for chat."}
logging.info(f"user_history len: {len(user_history)}")
return
enable_tools = True enable_tools = True
preamble = ""
# Generate RAG content if enabled, based on the content
rag_context = "" rag_context = ""
if enable_rag:
# Initialize metadata["rag"] to None or a default value
metadata["rag"] = None
for value in self.generate_rag_results(context, content):
if "status" in value:
yield value
else:
if value.get("documents") or value.get("rag") is not None:
metadata["rag"] = value
if metadata["rag"]:
for doc in metadata["rag"]["documents"]: for doc in metadata["rag"]["documents"]:
rag_context += f"{doc}\n" rag_context += doc
if rag_context: if rag_context:
preamble = f""" preamble = f"""
1. Respond to this query: {content} 1. Respond to this query: {content}
2. If there is information in the [{context_tag}] to enhance the answer, do so: 2. If there is information in this context to enhance the answer, do so:
[{context_tag}] [{context_tag}]
{rag_context} {rag_context}
[/{context_tag}] [/{context_tag}]
Use that information to respond to: """ Use that information to respond to: """
system_prompt = context["sessions"]["chat"]["system_prompt"] # Single job_description is provided; generate a resume
# On first entry, a single job_description is provided ("user")
# Generate a resume to append to RESUME history
case "job_description": case "job_description":
# Generate RAG content if enabled, based on the content
# Always force the full resume to be in context # Always force the full resume to be in context
resume_doc = open(defines.resume_doc, "r").read() resume_doc = open(defines.resume_doc, "r").read()
rag_context = f"{resume_doc}\n" work_history = f"{resume_doc}\n"
if enable_rag:
# Initialize metadata["rag"] to None or a default value
metadata["rag"] = None
for value in self.generate_rag_results(context, content):
if "status" in value:
yield value
else:
if value.get("documents") or value.get("rag") is not None:
metadata["rag"] = value
if metadata["rag"]:
for doc in metadata["rag"]["documents"]: for doc in metadata["rag"]["documents"]:
rag_context += f"{doc}\n" work_history += f"{doc}\n"
preamble = f""" preamble = f"""
[INTRO] [INTRO]
@ -1091,130 +981,77 @@ Use that information to respond to:"""
[/INTRO] [/INTRO]
[WORK HISTORY] [WORK HISTORY]
{rag_context} {work_history}
[/WORK HISTORY] [/WORK HISTORY]
[JOB DESCRIPTION] [JOB DESCRIPTION]
{content} {content}
[/JOB DESCRIPTION] [/JOB DESCRIPTION]
"""
context["sessions"]["job_description"]["content_seed"] = preamble + "Use the above information to answer this query: "
preamble += f"""
1. Use the above [INTRO] and [WORK HISTORY] to create the resume for the [JOB DESCRIPTION]. 1. Use the above [INTRO] and [WORK HISTORY] to create the resume for the [JOB DESCRIPTION].
2. Do not use content from the [JOB DESCRIPTION] in the response unless the [WORK HISTORY] mentions them. 2. Do not use content from the [JOB DESCRIPTION] in the response unless the [WORK HISTORY] mentions them.
""" """
# Seed the history for job_description # Seed the first context messages with the resume from the 'job_description' session
messages = [ {
"role": "user", "content": content
}, {
"role": "assistant", "content": "Job description stored to use in future queries.", "display": "hide"
} ]
# Strip out the 'display' key when adding to llm_history
llm_history.extend([{k: v for k, v in m.items() if k != 'display'} for m in messages])
user_history.extend([{**m, "origin": "job_description"} for m in messages])
# Switch to resume session for LLM responses
metadata["origin"] = "resume"
session = context["sessions"]["resume"]
system_prompt = session["system_prompt"]
llm_history = session["llm_history"] = []
user_history = session["user_history"] = []
# Ignore the passed in content and invoke Fact Check
case "resume": case "resume":
if len(context["sessions"]["resume"]["user_history"]) < 2: # USER, **ASSISTANT** raise Exception(f"Invalid chat type: {type}")
raise Exception(f"No resume found in user history.")
resume = context["sessions"]["resume"]["user_history"][1] # Fact check the resume created by the 'job_description' using only the RAG and resume
case "fact_check":
if len(context["sessions"]["resume"]["llm_history"]) < 3: # SYSTEM, USER, **ASSISTANT**
yield {"status": "done", "message": "No resume history found." }
return
resume = context["sessions"]["resume"]["llm_history"][2]
metadata = copy.deepcopy(resume["metadata"])
metadata["eval_count"] = 0
metadata["eval_duration"] = 0
metadata["prompt_eval_count"] = 0
metadata["prompt_eval_duration"] = 0
# Generate RAG content if enabled, based on the content
# Always force the full resume to be in context
resume_doc = open(defines.resume_doc, "r").read() resume_doc = open(defines.resume_doc, "r").read()
rag_context = f"{resume_doc}\n" work_history = f"{resume_doc}\n"
if enable_rag:
# Initialize metadata["rag"] to None or a default value
metadata["rag"] = None
for value in self.generate_rag_results(context, resume["content"]):
if "status" in value:
yield value
else:
if value.get("documents") or value.get("rag") is not None:
metadata["rag"] = value
if metadata["rag"]:
for doc in metadata["rag"]["documents"]: for doc in metadata["rag"]["documents"]:
rag_context += f"{doc}\n" work_history += f"{doc}\n"
preamble = f""" preamble = f"""
[WORK HISTORY] [WORK HISTORY]
{rag_context} {work_history}
[/WORK HISTORY] [/WORK HISTORY]
[RESUME] [RESUME]
{resume['content']} {resume['content']}
[/RESUME] [/RESUME]
Perform the following:
1. Do not invent or assume any information not explicitly present in the [WORK HISTORY].
2. Analyze the [RESUME] to identify any discrepancies or inaccuracies based on the [WORK HISTORY].
""" """
content = resume['content']
context["sessions"]["resume"]["content_seed"] = f""" raise Exception(f"Invalid chat type: {type}")
[RESUME]
{resume["content"]}
[/RESUME]
Use the above [RESUME] to answer this query:
"""
content = "Fact check the resume and report discrepancies."
# Seed the history for resume
messages = [ {
"role": "user", "content": "Fact check resume", "origin": "resume", "display": "hide"
}, {
"role": "assistant", "content": "Resume fact checked.", "origin": "resume", "display": "hide"
} ]
# Do not add this to the LLM history; it is only used for UI presentation
user_history.extend(messages)
# Switch to fact_check session for LLM responses
metadata["origin"] = "fact_check"
session = context["sessions"]["fact_check"]
system_prompt = session["system_prompt"]
llm_history = session["llm_history"] = []
user_history = session["user_history"] = []
case _: case _:
raise Exception(f"Invalid chat type: {type}") raise Exception(f"Invalid chat type: {type}")
llm_history.append({"role": "user", "content": preamble + content}) llm_history.append({"role": "user", "content": preamble + content})
user_history.append({"role": "user", "content": content, "origin": metadata["origin"]}) user_history.append({"role": "user", "content": content})
metadata["full_query"] = llm_history[-1]["content"]
if context["message_history_length"]: if context["message_history_length"]:
messages = create_system_message(system_prompt) + llm_history[-context["message_history_length"]:] messages = create_system_message(context["sessions"][type]["system_prompt"]) + llm_history[-context["message_history_length"]:]
else: else:
messages = create_system_message(system_prompt) + llm_history messages = create_system_message(context["sessions"][type]["system_prompt"]) + llm_history
# Estimate token length of new messages # Estimate token length of new messages
ctx_size = self.get_optimal_ctx_size(context["sessions"][process_type]["context_tokens"], messages=llm_history[-1]["content"]) ctx_size = self.get_optimal_ctx_size(context["sessions"][type]["context_tokens"], messages=llm_history[-1]["content"])
if len(user_history) > 2: processing_type = "Processing query..."
processing_message = f"Processing {'RAG augmented ' if enable_rag else ''}query..."
else:
match type: match type:
case "job_description": case "job_description":
processing_message = f"Generating {'RAG augmented ' if enable_rag else ''}resume..." processing_type = "Generating resume..."
case "resume": case "fact_check":
processing_message = f"Fact Checking {'RAG augmented ' if enable_rag else ''}resume..." processing_type = "Fact Checking resume..."
case _: if len(llm_history) > 1:
processing_message = f"Processing {'RAG augmented ' if enable_rag else ''}query..." processing_type = "Processing query..."
yield {"status": "processing", "message": processing_message, "num_ctx": ctx_size} yield {"status": "processing", "message": processing_type, "num_ctx": ctx_size}
# Use the async generator in an async for loop # Use the async generator in an async for loop
try: try:
@ -1223,6 +1060,9 @@ Use the above [RESUME] to answer this query:
else: else:
response = self.client.chat(model=self.model, messages=messages, options={ "num_ctx": ctx_size }) response = self.client.chat(model=self.model, messages=messages, options={ "num_ctx": ctx_size })
except Exception as e: except Exception as e:
logging.info(f"1. {messages[0]}")
logging.info(f"[LAST]. {messages[-1]}")
logging.exception({ "model": self.model, "error": str(e) }) logging.exception({ "model": self.model, "error": str(e) })
yield {"status": "error", "message": f"An error occurred communicating with LLM"} yield {"status": "error", "message": f"An error occurred communicating with LLM"}
return return
@ -1231,7 +1071,7 @@ Use the above [RESUME] to answer this query:
metadata["eval_duration"] += response["eval_duration"] metadata["eval_duration"] += response["eval_duration"]
metadata["prompt_eval_count"] += response["prompt_eval_count"] metadata["prompt_eval_count"] += response["prompt_eval_count"]
metadata["prompt_eval_duration"] += response["prompt_eval_duration"] metadata["prompt_eval_duration"] += response["prompt_eval_duration"]
session["context_tokens"] = response["prompt_eval_count"] + response["eval_count"] context["sessions"][type]["context_tokens"] = response["prompt_eval_count"] + response["eval_count"]
tools_used = [] tools_used = []
@ -1269,13 +1109,12 @@ Use the above [RESUME] to answer this query:
if isinstance(tool_result, list): if isinstance(tool_result, list):
messages.extend(tool_result) messages.extend(tool_result)
else: else:
if tool_result:
messages.append(tool_result) messages.append(tool_result)
metadata["tools"] = tools_used metadata["tools"] = tools_used
# Estimate token length of new messages # Estimate token length of new messages
ctx_size = self.get_optimal_ctx_size(session["context_tokens"], messages=messages[pre_add_index:]) ctx_size = self.get_optimal_ctx_size(context["sessions"][type]["context_tokens"], messages=messages[pre_add_index:])
yield {"status": "processing", "message": "Generating final response...", "num_ctx": ctx_size } yield {"status": "processing", "message": "Generating final response...", "num_ctx": ctx_size }
# Decrease creativity when processing tool call requests # Decrease creativity when processing tool call requests
response = self.client.chat(model=self.model, messages=messages, stream=False, options={ "num_ctx": ctx_size }) #, "temperature": 0.5 }) response = self.client.chat(model=self.model, messages=messages, stream=False, options={ "num_ctx": ctx_size }) #, "temperature": 0.5 })
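
Reviewer note: after tool output is appended, the context size is re-estimated from only the newly added messages (`messages[pre_add_index:]`) before the final generation pass; the commented-out `temperature` hints that creativity was meant to drop when summarizing tool results. `get_optimal_ctx_size` itself is not shown in this diff, so the body below is an assumption, not the author's implementation:

```python
def get_optimal_ctx_size(current_tokens: int, messages) -> int:
    """Hypothetical sketch: grow the context by a rough estimate of the
    new messages, kept above a sane floor."""
    added = round(len(str(messages)) * 3 / 4)  # same chars->tokens heuristic as above
    return max(2048, current_tokens + added)
```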
@ -1283,24 +1122,23 @@ Use the above [RESUME] to answer this query:
metadata["eval_duration"] += response["eval_duration"] metadata["eval_duration"] += response["eval_duration"]
metadata["prompt_eval_count"] += response["prompt_eval_count"] metadata["prompt_eval_count"] += response["prompt_eval_count"]
metadata["prompt_eval_duration"] += response["prompt_eval_duration"] metadata["prompt_eval_duration"] += response["prompt_eval_duration"]
session["context_tokens"] = response["prompt_eval_count"] + response["eval_count"] context["sessions"][type]["context_tokens"] = response["prompt_eval_count"] + response["eval_count"]
reply = response["message"]["content"] reply = response["message"]["content"]
final_message = {"role": "assistant", "content": reply } final_message = {"role": "assistant", "content": reply }
# history is provided to the LLM and should not have additional metadata # history is provided to the LLM and should not have additional metadata
llm_history.append(final_message) llm_history.append(final_message)
# user_history is provided to the REST API and does not include CONTEXT
# It does include metadata
final_message["metadata"] = metadata final_message["metadata"] = metadata
user_history.append({**final_message, "origin": metadata["origin"]})
# user_history is provided to the REST API and does not include CONTEXT or metadata
user_history.append(final_message)
# Return the REST API with metadata # Return the REST API with metadata
yield {"status": "done", "message": final_message } yield {"status": "done", "message": final_message }
except Exception as e: except Exception as e:
logging.exception({ "model": self.model, "origin": type, "content": content, "error": str(e) }) logging.exception({ "model": self.model, "messages": messages, "error": str(e) })
yield {"status": "error", "message": f"An error occurred: {str(e)}"} yield {"status": "error", "message": f"An error occurred: {str(e)}"}
finally: finally:
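
Reviewer note: the diff keeps two parallel transcripts per session: `llm_history` (what the model sees, no metadata) and `user_history` (what the REST API returns, carrying metadata and, on the left side, an `origin` tag). A minimal sketch of that dual bookkeeping; note it spreads the message into a new dict so the model-facing copy is never mutated:

```python
llm_history, user_history = [], []

def record_reply(reply: str, metadata: dict, origin: str) -> None:
    """Model-facing history stays clean; API-facing history carries metadata."""
    message = {"role": "assistant", "content": reply}
    llm_history.append(message)  # no metadata
    user_history.append({**message, "metadata": metadata, "origin": origin})

record_reply("Here is the resume...", {"eval_count": 120}, "resume")
print(user_history[-1]["origin"])  # resume
```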

View File

@ -244,17 +244,15 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
logging.warning("No embeddings found in the collection.") logging.warning("No embeddings found in the collection.")
return return
# During initialization
logging.info(f"Updating 2D UMAP for {len(self._umap_collection['embeddings'])} vectors") logging.info(f"Updating 2D UMAP for {len(self._umap_collection['embeddings'])} vectors")
vectors = np.array(self._umap_collection["embeddings"]) vectors = np.array(self._umap_collection["embeddings"])
self._umap_model_2d = umap.UMAP(n_components=2, random_state=8911, metric="cosine", n_neighbors=15, min_dist=0.1) self._umap_model_2d = umap.UMAP(n_components=2, random_state=8911, metric="cosine") #, n_neighbors=15, min_dist=0.1)
self._umap_embedding_2d = self._umap_model_2d.fit_transform(vectors) self._umap_embedding_2d = self._umap_model_2d.fit_transform(vectors)
logging.info(f"2D UMAP model n_components: {self._umap_model_2d.n_components}") # Should be 2
logging.info(f"Updating 3D UMAP for {len(self._umap_collection['embeddings'])} vectors") logging.info(f"Updating 3D UMAP for {len(self._umap_collection['embeddings'])} vectors")
self._umap_model_3d = umap.UMAP(n_components=3, random_state=8911, metric="cosine", n_neighbors=15, min_dist=0.1) vectors = np.array(self._umap_collection["embeddings"])
self._umap_model_3d = umap.UMAP(n_components=3, random_state=8911, metric="cosine") #, n_neighbors=15, min_dist=0.1)
self._umap_embedding_3d = self._umap_model_3d.fit_transform(vectors) self._umap_embedding_3d = self._umap_model_3d.fit_transform(vectors)
logging.info(f"3D UMAP model n_components: {self._umap_model_3d.n_components}") # Should be 3
def _get_vector_collection(self, recreate=False): def _get_vector_collection(self, recreate=False):
"""Get or create a ChromaDB collection.""" """Get or create a ChromaDB collection."""
@ -457,8 +455,6 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
logging.info(f"Vectorstore initialized with {self.collection.count()} documents") logging.info(f"Vectorstore initialized with {self.collection.count()} documents")
self._update_umaps()
# Show stats # Show stats
try: try:
all_metadata = self.collection.get()['metadatas'] all_metadata = self.collection.get()['metadatas']