Working with refactored code

James Ketr 2025-04-24 16:08:26 -07:00
parent 7059d5ef24
commit e6f6aad86a
13 changed files with 896 additions and 627 deletions

View File

@ -62,14 +62,6 @@ button {
justify-self: end; /* Align the first column content to the right */
}
.ChatBox {
display: flex;
flex-direction: column;
flex-grow: 1;
max-width: 1024px;
margin: 0 auto;
}
.DocBox {
display: flex;
flex-direction: column;
@ -112,17 +104,27 @@ button {
padding-right: 16px !important;
}
.ChatBox {
display: flex;
flex-direction: column;
flex-grow: 1;
max-width: 1024px;
width: 100%;
margin: 0 auto;
background-color: #D3CDBF;
}
.Conversation {
display: flex;
background-color: #F5F5F5;
border: 1px solid #E0E0E0;
flex-grow: 1;
overflow-y: auto;
padding: 10px;
flex-direction: column;
height: 100%;
max-height: 100%;
font-size: 0.9rem;
width: 100%;
/* max-width: 1024px; */
margin: 0 auto;
}
.user-message.MuiCard-root {

View File

@ -20,7 +20,7 @@ import MenuIcon from '@mui/icons-material/Menu';
import { ResumeBuilder } from './ResumeBuilder';
import { Message, ChatQuery, MessageList, MessageData } from './Message';
import { SeverityType } from './Snack';
import { SetSnackType, SeverityType } from './Snack';
import { VectorVisualizer } from './VectorVisualizer';
import { Controls } from './Controls';
import { Conversation, ConversationHandle } from './Conversation';
@ -68,7 +68,6 @@ function CustomTabPanel(props: TabPanelProps) {
}
const App = () => {
const conversationRef = useRef<any>(null);
const [processing, setProcessing] = useState(false);
const [sessionId, setSessionId] = useState<string | undefined>(undefined);
const [connectionBase,] = useState<string>(getConnectionBase(window.location))
@ -86,11 +85,13 @@ const App = () => {
const chatRef = useRef<ConversationHandle>(null);
// Set the snack pop-up and open it
const setSnack = useCallback((message: string, severity: SeverityType = "success") => {
setSnackMessage(message);
setSnackSeverity(severity);
setSnackOpen(true);
}, []);
const setSnack: SetSnackType = useCallback<SetSnackType>((message: string, severity: SeverityType = "success") => {
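// Defer the updates via setTimeout (presumably so setSnack is safe to call from another component's render or effect phase without triggering React update warnings)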
setTimeout(() => {
setSnackMessage(message);
setSnackSeverity(severity);
setSnackOpen(true);
});
}, [setSnackMessage, setSnackSeverity, setSnackOpen]);
useEffect(() => {
if (prevIsDesktopRef.current === isDesktop)
@ -172,27 +173,34 @@ What would you like to know about James?
const url = new URL(window.location.href);
const pathParts = url.pathname.split('/').filter(Boolean);
const fetchSession = async () => {
try {
const response = await fetch(connectionBase + `/api/context`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
});
if (!response.ok) {
throw Error("Server is temporarily down.");
}
const data = await response.json();
setSessionId(data.id);
window.history.replaceState({}, '', `/${data.id}`);
} catch (error: any) {
setSnack("Server is temporarily down", "error");
};
};
if (!pathParts.length) {
console.log("No session id -- creating a new session")
fetch(connectionBase + `/api/context`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
})
.then(response => response.json())
.then(data => {
console.log(`Session id: ${data.id} -- returned from server`)
setSessionId(data.id);
window.history.replaceState({}, '', `/${data.id}`);
})
.catch(error => console.error('Error generating session ID:', error));
fetchSession();
} else {
console.log(`Session id: ${pathParts[0]} -- existing session`)
setSessionId(pathParts[0]);
}
}, [setSessionId, connectionBase]);
}, [setSessionId, connectionBase, setSnack]);
const handleMenuClose = () => {
setIsMenuClosing(true);
@ -395,7 +403,7 @@ What would you like to know about James?
</Box>
<CustomTabPanel tab={tab} index={0}>
<Box component="main" sx={{ flexGrow: 1, overflow: 'auto' }} className="ChatBox" ref={conversationRef}>
<Box component="main" sx={{ flexGrow: 1, overflow: 'auto' }} className="ChatBox">
<Conversation
ref={chatRef}
{...{

View File

@ -61,7 +61,32 @@ function ChatBubble({ role, isFullWidth, children, sx, className }: ChatBubblePr
borderRadius: defaultRadius,
color: theme.palette.text.primary, // Charcoal Black (#2E2E2E) — much better contrast
opacity: 0.95,
}
},
'status': {
...defaultStyle,
backgroundColor: 'rgba(74, 122, 125, 0.15)', // Translucent dusty teal
border: `1px solid ${theme.palette.secondary.light}`, // Lighter dusty teal
borderRadius: defaultRadius,
maxWidth: isFullWidth ? '100%' : '75%',
alignSelf: 'center',
color: theme.palette.secondary.dark, // Darker dusty teal for text
fontWeight: 500, // Slightly bolder than normal
fontSize: '0.95rem', // Slightly smaller
padding: '8px 12px',
opacity: 0.9,
transition: 'opacity 0.3s ease-in-out', // Smooth fade effect for appearing/disappearing
},
'error': {
...defaultStyle,
backgroundColor: '#F8E7E7', // Soft light red background
border: `1px solid #D83A3A`, // Prominent red border
borderRadius: defaultRadius,
maxWidth: isFullWidth ? '100%' : '90%',
alignSelf: 'center',
color: '#8B2525', // Deep red text for good contrast
padding: '10px 16px',
boxShadow: '0 1px 3px rgba(216, 58, 58, 0.15)', // Subtle shadow with red tint
},
};
return (

View File

@ -14,7 +14,7 @@ import Box from '@mui/material/Box';
import ResetIcon from '@mui/icons-material/History';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import { SeverityType } from './Snack';
import { SetSnackType } from './Snack';
type Tool = {
type: string,
@ -32,7 +32,7 @@ type Tool = {
interface ControlsParams {
connectionBase: string,
sessionId: string | undefined,
setSnack: (message: string, severity?: SeverityType) => void,
setSnack: SetSnackType,
};
type GPUInfo = {

View File

@ -11,10 +11,10 @@ import { SxProps, Theme } from '@mui/material';
import PropagateLoader from "react-spinners/PropagateLoader";
import { Message, MessageList, MessageData } from './Message';
import { SeverityType } from './Snack';
import { SetSnackType } from './Snack';
import { ContextStatus } from './ContextStatus';
const loadingMessage: MessageData = { "role": "assistant", "content": "Establishing connection with server..." };
const loadingMessage: MessageData = { "role": "status", "content": "Establishing connection with server..." };
type ConversationMode = 'chat' | 'job_description' | 'resume' | 'fact_check';
@ -23,42 +23,73 @@ interface ConversationHandle {
}
interface ConversationProps {
className?: string,
type: ConversationMode
prompt: string,
actionLabel?: string,
resetAction?: () => void,
resetLabel?: string,
connectionBase: string,
sessionId?: string,
setSnack: (message: string, severity: SeverityType) => void,
defaultPrompts?: React.ReactElement[],
preamble?: MessageList,
hideDefaultPrompts?: boolean,
messageFilter?: (messages: MessageList) => MessageList,
messages?: MessageList,
className?: string, // Override default className
type: ConversationMode, // Type of Conversation chat
prompt?: string, // Prompt to display in TextField input
actionLabel?: string, // Label to put on the primary button
resetAction?: () => void, // Callback when Reset is pressed
multiline?: boolean, // Render TextField as multiline or not
resetLabel?: string, // Label to put on Reset button
connectionBase: string, // Base URL for fetch() calls
sessionId?: string, // Session ID for fetch() calls
setSnack: SetSnackType, // Callback to display snack popups
defaultPrompts?: React.ReactElement[], // Set of Elements to display after the TextField
defaultQuery?: string, // Default text to populate the TextField input
preamble?: MessageList, // Messages to display at start of Conversation until Action has been invoked
hidePreamble?: boolean, // Whether to hide the preamble after an Action has been invoked
hideDefaultPrompts?: boolean, // Whether to hide the defaultPrompts after an Action has been invoked
messageFilter?: ((messages: MessageList) => MessageList) | undefined, // Filter callback to determine which Messages to display in Conversation
messages?: MessageList, //
sx?: SxProps<Theme>,
onResponse?: ((message: MessageData) => MessageData) | undefined, // Event called when a query completes (provides messages)
};
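Given those props, a minimal usage sketch (the prompt and label values here are illustrative, not from this commit):

<Conversation
  type="chat"
  prompt="Type a question..."
  actionLabel="Send"
  connectionBase={connectionBase}
  sessionId={sessionId}
  setSnack={setSnack}
/>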
const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...props }: ConversationProps, ref) => {
const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
className,
type,
prompt,
actionLabel,
resetAction,
multiline,
resetLabel,
connectionBase,
sessionId,
setSnack,
defaultPrompts,
hideDefaultPrompts,
defaultQuery,
preamble,
hidePreamble,
messageFilter,
messages,
sx,
onResponse
}: ConversationProps, ref) => {
const [query, setQuery] = useState<string>("");
const [contextUsedPercentage, setContextUsedPercentage] = useState<number>(0);
const [processing, setProcessing] = useState<boolean>(false);
const [countdown, setCountdown] = useState<number>(0);
const [conversation, setConversation] = useState<MessageList>([]);
const [filteredConversation, setFilteredConversation] = useState<MessageList>([]);
const [processingMessage, setProcessingMessage] = useState<MessageData | undefined>(undefined);
const timerRef = useRef<any>(null);
const [lastEvalTPS, setLastEvalTPS] = useState<number>(35);
const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
const [contextWarningShown, setContextWarningShown] = useState<boolean>(false);
const [noInteractions, setNoInteractions] = useState<boolean>(true);
const setSnack = props.setSnack;
const conversationRef = useRef<MessageList>([]);
// Keep the ref updated whenever items changes
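// (the async stream handlers below append to conversationRef.current so they never read a stale conversation closure)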
useEffect(() => {
conversationRef.current = conversation;
}, [conversation]);
// Update the context status
const updateContextStatus = useCallback(() => {
const fetchContextStatus = async () => {
try {
const response = await fetch(props.connectionBase + `/api/context-status/${props.sessionId}/${props.type}`, {
const response = await fetch(connectionBase + `/api/context-status/${sessionId}/${type}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@ -78,51 +109,75 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
}
};
fetchContextStatus();
}, [setContextStatus, props.connectionBase, setSnack, props.sessionId, props.type]);
}, [setContextStatus, connectionBase, setSnack, sessionId, type]);
/* Transform the 'Conversation' by filtering via callback, then adding
* preamble and messages based on whether the conversation
* has any elements yet */
useEffect(() => {
let filtered = [];
if (messageFilter === undefined) {
filtered = conversation;
} else {
//console.log('Filtering conversation...')
filtered = messageFilter(conversation); /* Do not copy conversation or useEffect will loop forever */
//console.log(`${conversation.length - filtered.length} messages filtered out.`);
}
if (filtered.length === 0) {
setFilteredConversation([
...(preamble || []),
...(messages || []),
]);
} else {
setFilteredConversation([
...(hidePreamble ? [] : (preamble || [])),
...(messages || []),
...filtered,
]);
};
}, [conversation, setFilteredConversation, messageFilter, preamble, messages, hidePreamble]);
// Set the initial chat history to "loading" or the welcome message if loaded.
useEffect(() => {
if (props.sessionId === undefined) {
setConversation([loadingMessage]);
if (sessionId === undefined) {
setProcessingMessage(loadingMessage);
return;
}
const fetchHistory = async () => {
try {
const response = await fetch(props.connectionBase + `/api/history/${props.sessionId}/${props.type}`, {
const response = await fetch(connectionBase + `/api/history/${sessionId}/${type}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (!response.ok) {
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
}
const data = await response.json();
console.log(`History returned from server with ${data.length} entries`)
console.log(`History returned for ${type} from server with ${data.length} entries`)
if (data.length === 0) {
setConversation([
...(props.preamble || []),
...(props.messages || []),
]);
setConversation([])
setNoInteractions(true);
} else {
setConversation([
...(props.messages || []),
...(props.messageFilter ? props.messageFilter(data) : data)
]);
setConversation(data);
setNoInteractions(false);
}
setProcessingMessage(undefined);
updateContextStatus();
} catch (error) {
console.error('Error generating session ID:', error);
setProcessingMessage({ role: "error", content: "Unable to obtain history from server." });
setSnack("Unable to obtain chat history.", "error");
}
};
if (props.sessionId !== undefined) {
fetchHistory();
}
}, [props.sessionId, setConversation, updateContextStatus, props.connectionBase, setSnack, props.preamble, props.type]);
fetchHistory();
}, [setConversation, setFilteredConversation, updateContextStatus, connectionBase, setSnack, type, sessionId]);
const isScrolledToBottom = useCallback(()=> {
// Current vertical scroll position
@ -174,12 +229,8 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
};
const handleKeyPress = (event: any) => {
if (event.key === 'Enter') {
switch (event.target.id) {
case 'QueryInput':
sendQuery(query);
break;
}
if (event.key === 'Enter' && !event.shiftKey) {
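// Shift+Enter falls through, which lets multiline inputs insert a newline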
sendQuery(query);
}
};
@ -189,10 +240,6 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
}
}));
const submitQuery = (query: string) => {
sendQuery(query);
}
// If context status changes, show a warning if necessary. If it drops
// back below the threshold, clear the warning trigger
useEffect(() => {
@ -209,7 +256,7 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
const reset = async () => {
try {
const response = await fetch(props.connectionBase + `/api/reset/${props.sessionId}/${props.type}`, {
const response = await fetch(connectionBase + `/api/reset/${sessionId}/${type}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
@ -226,13 +273,7 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
throw new Error('Response body is null');
}
props.messageFilter && props.messageFilter([]);
setConversation([
...(props.preamble || []),
...(props.messages || []),
]);
setConversation([])
setNoInteractions(true);
} catch (e) {
@ -242,24 +283,40 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
};
const sendQuery = async (query: string) => {
query = query.trim();
// If the query was empty, a default query was provided,
// and there is no prompt for the user, send the default query.
if (!query && defaultQuery && !prompt) {
query = defaultQuery.trim();
}
// If the query is empty, and a prompt was provided, do not
// send an empty query.
if (!query && prompt) {
return;
}
setNoInteractions(false);
if (!query.trim()) return;
if (query) {
setConversation([
...conversationRef.current,
{
role: 'user',
origin: type,
content: query
}
]);
}
//setTab(0);
const userMessage: MessageData[] = [{ role: 'user', content: query }];
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
console.log(conversation);
let scrolledToBottom;
// Add user message to conversation
const newConversation: MessageList = [
...conversation,
...userMessage
];
setConversation(newConversation);
scrollToBottom();
// Clear input
setQuery('');
@ -270,16 +327,19 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
const processingId = Date.now().toString();
// Add initial processing message
setConversation(prev => [
...prev,
{ role: 'assistant', content: 'Processing request...', id: processingId, isProcessing: true }
]);
setProcessingMessage(
{ role: 'status', content: 'Submitting request...', id: processingId, isProcessing: true }
);
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
if (scrolledToBottom) {
setTimeout(() => { scrollToBottom() }, 50);
}
// Make the fetch request with proper headers
const response = await fetch(props.connectionBase + `/api/chat/${props.sessionId}/${props.type}`, {
const response = await fetch(connectionBase + `/api/chat/${sessionId}/${type}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -334,41 +394,43 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
if (update.status === 'processing') {
scrolledToBottom = isScrolledToBottom();
// Update processing message with immediate re-render
setConversation(prev => prev.map(msg =>
msg.id === processingId
? { ...msg, content: update.message }
: msg
));
setProcessingMessage({ role: 'status', content: update.message });
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
if (scrolledToBottom) {
setTimeout(() => { scrollToBottom() }, 50);
}
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
} else if (update.status === 'done') {
// Replace processing message with final result
scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => msg.id !== processingId),
if (onResponse) {
update.message = onResponse(update.message);
}
setProcessingMessage(undefined);
setConversation([
...conversationRef.current,
update.message
]);
])
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
const metadata = update.message.metadata;
const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
const promptTPS = metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration;
setLastEvalTPS(evalTPS ? evalTPS : 35);
setLastPromptTPS(promptTPS ? promptTPS : 35);
updateContextStatus();
if (metadata) {
const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
const promptTPS = metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration;
setLastEvalTPS(evalTPS ? evalTPS : 35);
setLastPromptTPS(promptTPS ? promptTPS : 35);
updateContextStatus();
}
if (scrolledToBottom) {
setTimeout(() => { scrollToBottom() }, 50);
}
} else if (update.status === 'error') {
// Show error
scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => msg.id !== processingId),
{ role: 'assistant', type: 'error', content: update.message }
]);
setProcessingMessage({ role: 'error', content: update.message });
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
if (scrolledToBottom) {
setTimeout(() => { scrollToBottom() }, 50);
}
@ -387,8 +449,12 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
if (update.status === 'done') {
scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => msg.id !== processingId),
if (onResponse) {
update.message = onResponse(update.message);
}
setProcessingMessage(undefined);
setConversation([
...conversationRef.current,
update.message
]);
if (scrolledToBottom) {
@ -410,31 +476,37 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
console.error('Fetch error:', error);
setSnack("Unable to process query", "error");
scrolledToBottom = isScrolledToBottom();
setConversation(prev => [
...prev.filter(msg => !msg.isProcessing),
{ role: 'assistant', type: 'error', content: `Error: ${error}` }
]);
setProcessingMessage({ role: 'error', content: "Unable to process query" });
setProcessing(false);
stopCountdown();
if (scrolledToBottom) {
setTimeout(() => { scrollToBottom() }, 50);
}
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
}
};
return (
<Box className={props.className || "Conversation"} sx={{ ...props.sx, display: "flex", flexDirection: "column" }}>
<Box className={className || "Conversation"} sx={{
display: "flex", flexDirection: "column", flexGrow: 1, p: 1,
...sx
}}>
{
conversation.map((message, index) =>
<Message key={index} {...{ submitQuery, message, connectionBase: props.connectionBase, sessionId: props.sessionId, setSnack }} />
filteredConversation.map((message, index) =>
<Message key={index} {...{ sendQuery, message, connectionBase, sessionId, setSnack }} />
)
}
{
processingMessage !== undefined &&
<Message {...{ sendQuery, connectionBase, sessionId, setSnack, message: processingMessage }} />
}
<Box sx={{
display: "flex",
flexDirection: "column",
alignItems: "center",
justifyContent: "center",
mb: 1
mb: 1,
}}>
<PropagateLoader
size="10px"
@ -452,45 +524,52 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ ...pro
>Estimated response time: {countdown}s</Box>
)}
</Box>
<Box className="Query" sx={{ display: "flex", flexDirection: props.type === "job_description" ? "column" : "row", p: 1 }}>
<TextField
<Box className="Query" sx={{ display: "flex", flexDirection: "column", p: 1 }}>
{prompt &&
<TextField
variant="outlined"
disabled={processing}
fullWidth
multiline={props.type === "job_description"}
fullWidth={true}
multiline={multiline ? true : false}
type="text"
value={query}
onChange={(e) => setQuery(e.target.value)}
onKeyDown={handleKeyPress}
placeholder={props.prompt}
placeholder={prompt}
id="QueryInput"
/>
}
<Box key="jobActions" sx={{ display: "flex", justifyContent: "center", flexDirection: "row" }}>
<IconButton
sx={{ display: "flex", margin: 'auto 0px' }}
size="large"
edge="start"
color="inherit"
disabled={sessionId === undefined || processingMessage !== undefined}
onClick={() => { reset(); }}
>
<Tooltip title={props.resetLabel || "Reset"} >
<Tooltip title={resetLabel || "Reset"} >
<ResetIcon />
</Tooltip>
</IconButton>
<Tooltip title={props.actionLabel || "Send"}>
<Button
sx={{ m: 1, gap: 1, flexGrow: 1 }}
variant="contained"
onClick={() => { sendQuery(query); }}>
{props.actionLabel}<SendIcon />
</Button>
<Tooltip title={actionLabel || "Send"}>
<span style={{ display: "flex", flexGrow: 1 }}>
<Button
sx={{ m: 1, gap: 1, flexGrow: 1 }}
variant="contained"
disabled={sessionId === undefined || processingMessage !== undefined}
onClick={() => { sendQuery(query); }}>
{actionLabel}<SendIcon />
</Button>
</span>
</Tooltip>
</Box>
</Box>
{(noInteractions || !props.hideDefaultPrompts) && props.defaultPrompts !== undefined && props.defaultPrompts.length &&
{(noInteractions || !hideDefaultPrompts) && defaultPrompts !== undefined && defaultPrompts.length &&
<Box sx={{ display: "flex", flexDirection: "column" }}>
{
props.defaultPrompts.map((element, index) => {
defaultPrompts.map((element, index) => {
return (<Box key={index}>{element}</Box>);
})
}

View File

@ -1,8 +1,5 @@
import React, { useEffect, useState, useCallback } from 'react';
import React, { useState, useCallback, useRef } from 'react';
import {
Typography,
Card,
Button,
Tabs,
Tab,
Paper,
@ -12,26 +9,18 @@ import {
Divider,
Slider,
Stack,
TextField,
Tooltip
} from '@mui/material';
import { useTheme } from '@mui/material/styles';
import SendIcon from '@mui/icons-material/Send';
import {
ChevronLeft,
ChevronRight,
SwapHoriz,
RestartAlt as ResetIcon,
} from '@mui/icons-material';
import PropagateLoader from "react-spinners/PropagateLoader";
import { SxProps, Theme } from '@mui/material';
import MuiMarkdown from 'mui-markdown';
import { Message, ChatQuery } from './Message';
import { Document } from './Document';
import { MessageData, MessageList } from './Message';
import { SeverityType } from './Snack';
import { ChatQuery } from './Message';
import { MessageList, MessageData } from './Message';
import { SetSnackType } from './Snack';
import { Conversation } from './Conversation';
/**
@ -40,13 +29,13 @@ import { Conversation } from './Conversation';
* @property {SxProps<Theme>} [sx] - Optional styling properties
* @property {string} [connectionBase] - Base URL for fetch calls
* @property {string} [sessionId] - Session ID
* @property {(message: string, severity: SeverityType) => void} - setSnack UI callback
* @property {SetSnackType} - setSnack UI callback
*/
export interface DocumentViewerProps {
sx?: SxProps<Theme>;
connectionBase: string;
sessionId: string;
setSnack: (message: string, severity: SeverityType) => void,
setSnack: SetSnackType;
}
/**
* DocumentViewer component
@ -61,92 +50,17 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
setSnack
}) => {
// State for editing job description
const [jobDescription, setJobDescription] = useState<string | undefined>(undefined);
const [facts, setFacts] = useState<MessageData | undefined>(undefined);
const [resume, setResume] = useState<MessageData | undefined>(undefined);
const [editJobDescription, setEditJobDescription] = useState<string | undefined>(jobDescription);
// Processing state to show loading indicators
const [processing, setProcessing] = useState<string | undefined>(undefined);
// Theme and responsive design setup
const [hasJobDescription, setHasJobDescription] = useState<boolean>(false);
const [hasResume, setHasResume] = useState<boolean>(false);
const [hasFacts, setHasFacts] = useState<boolean>(false);
const theme = useTheme();
const isMobile = useMediaQuery(theme.breakpoints.down('md'));
// State for controlling which document is active on mobile
const jobConversationRef = useRef<any>(null);
const resumeConversationRef = useRef<any>(null);
const factsConversationRef = useRef<any>(null);
const [activeTab, setActiveTab] = useState<number>(0);
// State for controlling split ratio on desktop
const [splitRatio, setSplitRatio] = useState<number>(0);
/**
* Reset processing state when resume is generated
*/
useEffect(() => {
if (resume !== undefined && processing === "resume") {
setProcessing(undefined);
}
}, [processing, resume]);
/**
* Reset processing state when facts is generated
*/
useEffect(() => {
if (facts !== undefined && processing === "facts") {
setProcessing(undefined);
}
}, [processing, facts]);
/**
* Trigger resume generation and update UI state
*/
const triggerGeneration = useCallback((description: string | undefined) => {
if (description === undefined) {
setProcessing(undefined);
setResume(undefined);
setActiveTab(0);
return;
}
setProcessing("resume");
setTimeout(() => { setActiveTab(1); }, 250); // Switch to resume view on mobile
console.log('generateResume(description);');
}, [/*generateResume*/, setProcessing, setActiveTab, setResume]);
/**
* Trigger fact check and update UI state
*/
const triggerFactCheck = useCallback((resume: string | undefined) => {
if (resume === undefined) {
setProcessing(undefined);
setResume(undefined);
setFacts(undefined);
setActiveTab(1);
return;
}
setProcessing("facts");
console.log('factCheck(resume)');
setTimeout(() => { setActiveTab(2); }, 250); // Switch to resume view on mobile
}, [/*factCheck,*/ setResume, setProcessing, setActiveTab, setFacts]);
useEffect(() => {
setEditJobDescription(jobDescription);
}, [jobDescription, setEditJobDescription]);
/**
* Switch to resume tab when resume become available
*/
useEffect(() => {
if (resume !== undefined) {
setTimeout(() => { setActiveTab(1); }, 250); // Switch to resume view on mobile
}
}, [resume]);
/**
* Switch to fact check tab when facts become available
*/
useEffect(() => {
if (facts !== undefined) {
setTimeout(() => { setActiveTab(2); }, 250); // Switch to resume view on mobile
}
}, [facts]);
const [splitRatio, setSplitRatio] = useState<number>(100);
/**
* Handle tab change for mobile view
@ -168,146 +82,332 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
const resetSplit = (): void => {
setSplitRatio(50);
};
/**
* Handle keyboard shortcuts
*/
const handleKeyPress = (event: React.KeyboardEvent): void => {
if (event.key === 'Enter' && event.ctrlKey) {
triggerGeneration(editJobDescription || "");
}
};
const handleJobQuery = (query: string) => {
triggerGeneration(query);
console.log(`handleJobQuery: ${query} -- `, jobConversationRef.current ? ' sending' : 'no handler');
jobConversationRef.current?.submitQuery(query);
};
const jobDescriptionQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<ChatQuery text="What are the key skills necessary for this position?" submitQuery={handleJobQuery} />
<ChatQuery text="How much should this position pay (accounting for inflation)?" submitQuery={handleJobQuery} />
</Box>,
];
const handleResumeQuery = (query: string) => {
console.log(`handleResumeQuery: ${query} -- `, resumeConversationRef.current ? ' sending' : 'no handler');
resumeConversationRef.current?.submitQuery(query);
};
const filterJobDescriptionMessages = (messages: MessageList): MessageList => {
/* The second message is the RESUME (the LLM response to the JOB-DESCRIPTION) */
if (messages.length > 1) {
setResume(messages[1]);
} else if (resume !== undefined) {
setResume(undefined);
const handleFactsQuery = (query: string) => {
console.log(`handleFactsQuery: ${query} -- `, factsConversationRef.current ? ' sending' : 'no handler');
factsConversationRef.current?.submitQuery(query);
};
const filterJobDescriptionMessages = useCallback((messages: MessageList): MessageList => {
if (messages === undefined || messages.length === 0) {
return [];
}
/* Filter out the RESUME */
const reduced = messages.filter((message, index) => index != 1);
let reduced = messages.filter((m, i) => {
const keep = (m.metadata?.origin || m.origin || "no origin") === 'job_description';
if ((m.metadata?.origin || m.origin || "no origin") === 'resume') {
setHasResume(true);
}
// if (!keep) {
// console.log(`filterJobDescriptionMessages: ${i + 1} filtered:`, m);
// } else {
// console.log(`filterJobDescriptionMessages: ${i + 1}:`, m);
// }
return keep;
});
/* Set the first message as coming from the assistant (rendered as markdown) */
if (reduced.length > 0) {
reduced[0].role = 'assistant';
// First message is always 'info'
reduced[0].role = 'info';
setHasJobDescription(true);
}
/* If there is more than one message, it is user: "...JOB_DESCRIPTION...", assistant: "...stored..."
* which means a resume has been generated. */
if (reduced.length > 1) {
setHasResume(true);
}
/* Filter out any messages which the server injected for state management */
reduced = reduced.filter(m => m.display !== "hide");
return reduced;
};
}, [setHasJobDescription, setHasResume]);
const jobDescriptionMessages: MessageList = [];
const filterResumeMessages = useCallback((messages: MessageList): MessageList => {
if (messages === undefined || messages.length === 0) {
return [];
}
const renderJobDescriptionView = () => {
if (resume === undefined) {
let reduced = messages.filter((m, i) => {
const keep = (m.metadata?.origin || m.origin || "no origin") === 'resume';
if ((m.metadata?.origin || m.origin || "no origin") === 'fact_check') {
setHasFacts(true);
}
// if (!keep) {
// console.log(`filterResumeMessages: ${i + 1} filtered:`, m);
// } else {
// console.log(`filterResumeMessages: ${i + 1}:`, m);
// }
return keep;
});
/* If there is more than one message, it is user: "...JOB_DESCRIPTION...", assistant: "...RESUME..."
* which means a resume has been generated. */
if (reduced.length > 1) {
/* Remove the assistant message from the UI */
if (reduced[0].role === "user") {
reduced.splice(0, 1);
}
}
/* If Fact Check hasn't occurred yet and there is still more than one message,
* facts have been generated. */
if (!hasFacts && reduced.length > 1) {
setHasFacts(true);
}
/* Filter out any messages which the server injected for state management */
reduced = reduced.filter(m => m.display !== "hide");
/* If there are any messages, there is a resume */
if (reduced.length > 0) {
// First message is always 'info'
reduced[0].role = 'info';
setHasResume(true);
}
return reduced;
}, [setHasResume, hasFacts, setHasFacts]);
const filterFactsMessages = useCallback((messages: MessageList): MessageList => {
if (messages === undefined || messages.length === 0) {
return [];
}
// messages.forEach((m, i) => console.log(`filterFactsMessages: ${i + 1}:`, m))
const reduced = messages.filter(m => {
return (m.metadata?.origin || m.origin || "no origin") === 'fact_check';
});
/* If there is more than one message, it is user: "Fact check this resume...", assistant: "...FACT CHECK..."
* which means facts have been generated. */
if (reduced.length > 1) {
/* Remove the user message from the UI */
if (reduced[0].role === "user") {
reduced.splice(0, 1);
}
reduced[0].role = 'info';
setHasFacts(true);
}
return reduced;
}, [setHasFacts]);
const jobResponse = useCallback((message: MessageData): MessageData => {
console.log('onJobResponse', message);
setHasResume(true);
return message;
}, []);
const resumeResponse = useCallback((message: MessageData): MessageData => {
console.log('onResumeResponse', message);
setHasFacts(true);
return message;
}, [setHasFacts]);
const factsResponse = useCallback((message: MessageData): MessageData => {
console.log('onFactsResponse', message);
return message;
}, []);
const renderJobDescriptionView = useCallback(() => {
const jobDescriptionQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<ChatQuery text="What are the key skills necessary for this position?" submitQuery={handleJobQuery} />
<ChatQuery text="How much should this position pay (accounting for inflation)?" submitQuery={handleJobQuery} />
</Box>,
];
if (!hasJobDescription) {
return <Conversation
ref={jobConversationRef}
{...{
sx: { display: "flex", flexGrow: 1 },
actionLabel: "Generate Resume",
multiline: true,
type: "job_description",
actionLabel: "Generate Resume",
prompt: "Paste a job description, then click Generate...",
multiline: true,
messageFilter: filterJobDescriptionMessages,
messages: jobDescriptionMessages,
onResponse: jobResponse,
sessionId,
connectionBase,
setSnack,
defaultPrompts: jobDescriptionQuestions
}}
/>
} else {
return <Conversation
ref={jobConversationRef}
{...{
className: "ChatBox",
sx: { display: "flex", flexGrow: 1 },
type: "job_description",
actionLabel: "Send",
prompt: "Ask a question about this job description...",
messageFilter: filterJobDescriptionMessages,
messages: jobDescriptionMessages,
defaultPrompts: jobDescriptionQuestions,
onResponse: jobResponse,
sessionId,
connectionBase,
setSnack,
defaultPrompts: jobDescriptionQuestions
}}
/>
}
}
}, [connectionBase, filterJobDescriptionMessages, hasJobDescription, sessionId, setSnack, jobResponse]);
/**
* Renders the resume view with loading indicator
*/
const renderResumeView = () => (
<Box key="ResumeView" sx={{ display: "flex", flexDirection: "column", overflow: "auto", flexGrow: 1, flexBasis: 0 }}>
<Document sx={{ display: "flex", flexGrow: 1 }} title="">
{resume !== undefined && <Message {...{ message: resume, connectionBase, sessionId, setSnack }} />}
</Document>
{processing === "resume" && (
<Box sx={{
display: "flex",
flexDirection: "column",
alignItems: "center",
justifyContent: "center",
mb: 1,
height: "10px"
}}>
<PropagateLoader
size="10px"
loading={true}
aria-label="Loading Spinner"
data-testid="loader"
/>
<Typography>Generating resume...</Typography>
</Box>
)}
<ResumeActionCard
resume={resume}
processing={processing}
triggerFactCheck={triggerFactCheck}
const renderResumeView = useCallback(() => {
const resumeQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<ChatQuery text="Is this resume a good fit for the provided job description?" submitQuery={handleResumeQuery} />
<ChatQuery text="Provide a more concise resume." submitQuery={handleResumeQuery} />
</Box>,
];
if (!hasFacts) {
return <Conversation
ref={resumeConversationRef}
{...{
actionLabel: "Fact Check",
multiline: true,
type: "resume",
messageFilter: filterResumeMessages,
onResponse: resumeResponse,
sessionId,
connectionBase,
setSnack,
}}
/>
</Box>
);
} else {
return <Conversation
ref={resumeConversationRef}
{...{
type: "resume",
actionLabel: "Send",
prompt: "Ask a question about this job resume...",
messageFilter: filterResumeMessages,
defaultPrompts: resumeQuestions,
onResponse: resumeResponse,
sessionId,
connectionBase,
setSnack,
}}
/>
}
}, [connectionBase, filterResumeMessages, hasFacts, sessionId, setSnack, resumeResponse]);
/**
* Renders the fact check view
*/
const renderFactCheckView = () => (
<Box key="FactView" sx={{ display: "flex", flexDirection: "column", overflow: "auto", flexGrow: 1, flexBasis: 0, p: 0 }}>
<Document sx={{ display: "flex", flexGrow: 1 }} title="">
{facts !== undefined && <Message {...{ message: facts, connectionBase, sessionId, setSnack }} />}
</Document>
{processing === "facts" && (
<Box sx={{
display: "flex",
flexDirection: "column",
alignItems: "center",
justifyContent: "center",
mb: 1,
height: "10px"
}}>
<PropagateLoader
size="10px"
loading={true}
aria-label="Loading Spinner"
data-testid="loader"
/>
<Typography>Fact Checking resume...</Typography>
const renderFactCheckView = useCallback(() => {
const factsQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<ChatQuery text="Rewrite the resume to address any discrepancies." submitQuery={handleFactsQuery} />
</Box>,
];
return <Conversation
ref={factsConversationRef}
{...{
type: "fact_check",
actionLabel: "Send",
prompt: "Ask a question about any discrepencies...",
messageFilter: filterFactsMessages,
defaultPrompts: factsQuestions,
onResponse: factsResponse,
sessionId,
connectionBase,
setSnack,
}}
/>
}, [connectionBase, sessionId, setSnack, factsResponse, filterFactsMessages]);
/**
* Gets the appropriate content based on active state for Desktop
*/
const getActiveDesktopContent = useCallback(() => {
/* Left panel - Job Description */
const showResume = hasResume
const showFactCheck = hasFacts
const ratio = 75 + 25 * splitRatio / 100;
const otherRatio = showResume ? ratio / (hasFacts ? 3 : 2) : 100;
const resumeRatio = 100 - otherRatio * (hasFacts ? 2 : 1);
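// e.g. splitRatio = 0 with resume and facts visible: ratio = 75, otherRatio = 25%, resumeRatio = 50%; at splitRatio = 100 all three panels get ~33.3%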
const children = [];
children.push(
<Box key="JobDescription" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${otherRatio}%`, width: `${otherRatio}%`, maxWidth: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
{renderJobDescriptionView()}
</Box>);
/* Resume panel - conditionally rendered if resume defined, or processing is in progress */
if (showResume) {
children.push(
<Box key="ResumeView" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${resumeRatio}%`, width: `${resumeRatio}%`, maxWidth: `${resumeRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
<Divider orientation="vertical" flexItem />
{renderResumeView()}
</Box>
)}
</Box>
);
);
}
/* Fact Check panel - conditionally rendered if facts defined, or processing is in progress */
if (showFactCheck) {
children.push(
<Box key="FactCheckView" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${otherRatio}%`, width: `${otherRatio}%`, maxWidth: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
<Divider orientation="vertical" flexItem />
{renderFactCheckView()}
</Box>
);
}
/* Split control panel - conditionally rendered if either facts or resume is set */
let slider = <Box key="slider"></Box>;
if (showResume || showFactCheck) {
slider = (
<Paper key="slider" sx={{ p: 2, display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
<Stack direction="row" spacing={2} alignItems="center" sx={{ width: '60%' }}>
<IconButton onClick={() => setSplitRatio(s => Math.max(0, s - 10))}>
<ChevronLeft />
</IconButton>
<Slider
value={splitRatio}
onChange={handleSliderChange}
aria-label="Split ratio"
min={0}
max={100}
/>
<IconButton onClick={() => setSplitRatio(s => Math.min(100, s + 10))}>
<ChevronRight />
</IconButton>
<IconButton onClick={resetSplit}>
<SwapHoriz />
</IconButton>
</Stack>
</Paper>
);
}
return (
<Box sx={{ ...sx, display: 'flex', flexGrow: 1, flexDirection: 'column', p: 0 }}>
<Box sx={{ display: 'flex', flexGrow: 1, flexDirection: 'row', overflow: 'hidden', p: 0 }}>
{children}
</Box>
{slider}
</Box>
)
}, [renderFactCheckView, renderJobDescriptionView, renderResumeView, splitRatio, sx, hasFacts, hasResume]);
// Render mobile view
if (isMobile) {
@ -337,154 +437,25 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
sx={{ bgcolor: 'background.paper' }}
>
<Tab value={0} label="Job Description" />
{(resume !== undefined || processing === "resume") && <Tab value={1} label="Resume" />}
{(facts !== undefined || processing === "facts") && <Tab value={2} label="Fact Check" />}
{hasResume && <Tab value={1} label="Resume" />}
{hasFacts && <Tab value={2} label="Fact Check" />}
</Tabs>
{/* Document display area */}
<Box sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, overflow: 'hidden', p: 0 }}>
<Box sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, p: 0, width: "100%", ...sx }}>
{getActiveMobileContent()}
</Box>
</Box>
);
}
/**
* Gets the appropriate content based on active state for Desktop
*/
const getActiveDesktopContent = () => {
/* Left panel - Job Description */
const showResume = resume !== undefined || processing === "resume"
const showFactCheck = facts !== undefined || processing === "facts"
const otherRatio = showResume ? (100 - splitRatio / 2) : 100;
const children = [];
children.push(
<Box key="JobDescription" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', width: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
{renderJobDescriptionView()}
</Box>);
/* Resume panel - conditionally rendered if resume defined, or processing is in progress */
if (showResume) {
children.push(
<Box key="ResumeView" sx={{ display: 'flex', width: '100%', p: 0, flexGrow: 1, flexDirection: 'row' }}>
<Divider orientation="vertical" flexItem />
{renderResumeView()}
</Box>
);
}
/* Fact Check panel - conditionally rendered if facts defined, or processing is in progress */
if (showFactCheck) {
children.push(
<Box key="FactCheckView" sx={{ display: 'flex', width: `${otherRatio}%`, p: 0, flexGrow: 1, flexDirection: 'row' }}>
<Divider orientation="vertical" flexItem />
{renderFactCheckView()}
</Box>
);
}
/* Split control panel - conditionally rendered if either facts or resume is set */
let slider = <Box key="slider"></Box>;
if (showResume || showFactCheck) {
slider = (
<Paper key="slider" sx={{ p: 2, display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
<Stack direction="row" spacing={2} alignItems="center" sx={{ width: '60%' }}>
<IconButton onClick={() => setSplitRatio(Math.max(0, splitRatio - 10))}>
<ChevronLeft />
</IconButton>
<Slider
value={splitRatio}
onChange={handleSliderChange}
aria-label="Split ratio"
min={0}
max={100}
/>
<IconButton onClick={() => setSplitRatio(Math.min(100, splitRatio + 10))}>
<ChevronRight />
</IconButton>
<IconButton onClick={resetSplit}>
<SwapHoriz />
</IconButton>
</Stack>
</Paper>
);
}
return (
<Box sx={{ ...sx, display: 'flex', flexGrow: 1, flexDirection: 'column', p: 0 }}>
<Box sx={{ display: 'flex', flexGrow: 1, flexDirection: 'row', overflow: 'hidden', p: 0 }}>
{children}
</Box>
{slider}
</Box>
)
}
return (
<Box sx={{ ...sx, display: 'flex', flexDirection: 'column', flexGrow: 1 }}>
<Box sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, width: "100%", ...sx }}>
{getActiveDesktopContent()}
</Box>
);
};
/**
* Props for the ResumeActionCard component
*/
interface ResumeActionCardProps {
resume: any;
processing: string | undefined;
triggerFactCheck: (resume: string | undefined) => void;
}
/**
* Action card displayed underneath the resume with notes and fact check button
*/
const ResumeActionCard: React.FC<ResumeActionCardProps> = ({ resume, processing, triggerFactCheck }) => (
<Box sx={{ display: "flex", justifyContent: "center", flexDirection: "column" }}>
<Card sx={{ display: "flex", overflow: "auto", minHeight: "fit-content", p: 1, flexDirection: "column" }}>
{resume !== undefined || processing === "resume" ? (
<Typography>
<b>NOTE:</b> As with all LLMs, hallucination is always a possibility. Click <b>Fact Check</b> to have the LLM analyze the generated resume vs. the actual resume.
</Typography>
) : (
<Typography>
Once you click <b>Generate</b> under the <b>Job Description</b>, a resume will be generated based on the user's RAG content and the job description.
</Typography>
)}
</Card>
<Box sx={{ display: "flex", justifyContent: "center", flexDirection: "row", flexGrow: 1 }}>
<IconButton
sx={{ display: "flex", margin: 'auto 0px' }}
size="large"
edge="start"
color="inherit"
disabled={processing !== undefined}
onClick={() => { triggerFactCheck(undefined); }}
>
<Tooltip title="Reset Resume">
<ResetIcon />
</Tooltip>
</IconButton>
<Tooltip title="Fact Check">
<span style={{ display: "flex", flexGrow: 1 }}>
<Button
sx={{ m: 1, gap: 1, flexGrow: 1 }}
variant="contained"
disabled={processing !== undefined}
onClick={() => { resume && triggerFactCheck(resume.content); }}
>
Fact Check<SendIcon />
</Button>
</span>
</Tooltip>
</Box>
</Box>
);
export {
DocumentViewer
};

View File

@ -27,15 +27,16 @@ import { StyledMarkdown } from './StyledMarkdown';
import { Tooltip } from '@mui/material';
import { VectorVisualizer } from './VectorVisualizer';
import { SeverityType } from './Snack';
import { SetSnackType } from './Snack';
type MessageRoles = 'info' | 'user' | 'assistant' | 'system';
type MessageRoles = 'info' | 'user' | 'assistant' | 'system' | 'status' | 'error';
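// 'status' and 'error' render with the new ChatBubble styles added in this commit (translucent teal status bubble, red-bordered error bubble)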
type MessageData = {
role: MessageRoles,
content: string,
user?: string,
type?: string,
origin?: string,
display?: string, /* Messages generated on the server for filler should not be shown */
id?: string,
isProcessing?: boolean,
metadata?: MessageMetaProps
@ -46,6 +47,8 @@ interface MessageMetaProps {
query_embedding: number[];
vector_embedding: number[];
},
origin: string,
full_query?: string,
rag: any,
tools: any[],
eval_count: number,
@ -54,7 +57,7 @@ interface MessageMetaProps {
prompt_eval_duration: number,
sessionId?: string,
connectionBase: string,
setSnack: (message: string, severity: SeverityType) => void,
setSnack: SetSnackType,
}
type MessageList = MessageData[];
@ -65,7 +68,7 @@ interface MessageProps {
submitQuery?: (text: string) => void,
sessionId?: string,
connectionBase: string,
setSnack: (message: string, severity: SeverityType) => void,
setSnack: SetSnackType,
};
interface ChatQueryInterface {
@ -113,6 +116,20 @@ const MessageMeta = ({ ...props }: MessageMetaProps) => {
</TableBody>
</Table>
</TableContainer>
{
props?.full_query !== undefined &&
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>
Full Query
</Box>
</AccordionSummary>
<AccordionDetails>
<pre>{props.full_query}</pre>
</AccordionDetails>
</Accordion>
}
{
props.tools !== undefined && props.tools.length !== 0 &&
<Accordion sx={{ boxSizing: "border-box" }}>
@ -195,7 +212,7 @@ const ChatQuery = ({ text, submitQuery }: ChatQueryInterface) => {
borderColor: theme => theme.palette.custom.highlight,
m: 1
}}
size="small" onClick={(e: any) => { console.log(text); submitQuery(text); }}>
size="small" onClick={(e: any) => { submitQuery(text); }}>
{text}
</Button>
);

View File

@ -1,7 +1,6 @@
import { useState, useCallback, useEffect } from 'react';
import Box from '@mui/material/Box';
import { SeverityType } from './Snack';
import { MessageData, MessageMetaProps } from './Message';
import { MessageData } from './Message';
import { DocumentViewer } from './DocumentViewer';
interface ResumeBuilderProps {
@ -16,15 +15,14 @@ interface ResumeBuilderProps {
setFacts: (facts: MessageData | undefined) => void,
};
type Resume = {
resume: MessageData | undefined,
fact_check: MessageData | undefined,
job_description: string,
metadata: MessageMetaProps
};
// type Resume = {
// resume: MessageData | undefined,
// fact_check: MessageData | undefined,
// job_description: string,
// metadata: MessageMetaProps
// };
const ResumeBuilder = ({ facts, setFacts, resume, setResume, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
const [jobDescription, setJobDescription] = useState<string | undefined>(undefined);
if (sessionId === undefined) {
return (<></>);
}

View File

@ -1,5 +1,7 @@
type SeverityType = 'error' | 'info' | 'success' | 'warning' | undefined;
type SetSnackType = (message: string, severity?: SeverityType) => void;
export type {
SeverityType
SeverityType,
SetSnackType
};
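For reference, a minimal sketch of a consumer of the shared alias (the component name and messages are illustrative):

import { SetSnackType } from './Snack';

// Hypothetical consumer: any component that reports status can take the
// shared callback type instead of redeclaring the signature inline.
interface PanelProps {
  setSnack: SetSnackType;
}

const Panel = ({ setSnack }: PanelProps) => {
  setSnack('Saved');                       // severity defaults to 'success'
  setSnack('Server unreachable', 'error'); // explicit severity
  return null;
};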

View File

@ -149,7 +149,6 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, rag, inli
let is2D = vectors.every((v: number[]) => v.length === 2);
let is3D = vectors.every((v: number[]) => v.length === 3);
console.log(`Embeddings are ${is2D ? '2D' : is3D ? '3D' : 'invalid'} and view2D is ${view2D}`);
if ((view2D && !is2D) || (!view2D && !is3D)) {
return;
}

2 src/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
cert.pem
key.pem

View File

@ -11,7 +11,6 @@ import uuid
import subprocess
import re
import math
import copy
def try_import(module_name, pip_name=None):
try:
@ -169,27 +168,28 @@ Always use tools and [{context_tag}] when possible. Be concise, and never make u
system_generate_resume = f"""
Launched on {DateTime()}.
You are a professional resume writer. Your task is to write a polished, tailored resume for a specific job based only on the individual's [WORK HISTORY].
You are a professional resume writer. Your task is to write a concise, polished, and tailored resume for a specific job based only on the individual's [WORK HISTORY].
When answering queries, follow these steps:
1. You must not invent or assume any information not explicitly present in the [WORK HISTORY].
2. Analyze the [JOB DESCRIPTION] to identify skills required for the job.
3. Use the [JOB DESCRIPTION] provided to guide the focus, tone, and relevant skills or experience to highlight from the [WORK HISTORY].
4. Identify and emphasisze the experiences, achievements, and responsibilities from the [WORK HISTORY] that best align with the [JOB DESCRIPTION].
5. Do not use the [JOB DESCRIPTION] skills unless listed in [WORK HISTORY].
6. Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
7. Use the [INTRO] to highlight the use of AI in generating this resume.
8. Use the [WORK HISTORY] to create a polished, professional resume.
9. Do not list any locations in the resume.
4. Identify and emphasize the experiences, achievements, and responsibilities from the [WORK HISTORY] that best align with the [JOB DESCRIPTION].
5. Only provide information from [WORK HISTORY] items if it is relevant to the [JOB DESCRIPTION].
6. Do not use the [JOB DESCRIPTION] skills unless listed in [WORK HISTORY].
7. Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
8. Use the [INTRO] to highlight the use of AI in generating this resume.
9. Use the [WORK HISTORY] to create a polished, professional resume.
10. Do not list any locations or mailing addresses in the resume.
Structure the resume professionally with the following sections where applicable:
* "Name: Use full name."
* "Professional Summary: A 2-4 sentence overview tailored to the job, using [INTRO] to highlight the use of AI in generating this resume."
* "Skills: A bullet list of key skills derived from the work history and relevant to the job."
* Professional Experience: A detailed list of roles, achievements, and responsibilities from the work history that relate to the job."
* Education: Include only if available in the work history."
* Professional Experience: A detailed list of roles, achievements, and responsibilities from [WORK HISTORY] that relate to the [JOB DESCRIPTION]."
* Education: Include only if available in the work history.
Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
Ensure the language is clear, concise, and aligned with industry standards for professional resumes.
@ -504,22 +504,29 @@ class WebServer:
except:
query = ""
if not query:
return JSONResponse({"error": "No query provided"}, status_code=400)
return JSONResponse({"error": "No query provided for similarity search"}, status_code=400)
try:
chroma_results = self.file_watcher.find_similar(query=query, top_k=10)
if not chroma_results:
return JSONResponse({"error": "No results found"}, status_code=404)
chroma_embedding = chroma_results["query_embedding"]
chroma_embedding = np.array(chroma_results["query_embedding"]).flatten() # Ensure correct shape
print(f"Chroma embedding shape: {chroma_embedding.shape}")
umap_2d = self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 2D output: {umap_2d}, length: {len(umap_2d)}") # Debug output
umap_3d = self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 3D output: {umap_3d}, length: {len(umap_3d)}") # Debug output
return JSONResponse({
**chroma_results,
"query": query,
"umap_embedding_2d": self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist(),
"umap_embedding_3d": self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
"umap_embedding_2d": umap_2d,
"umap_embedding_3d": umap_3d
})
except Exception as e:
logging.error(e)
#return JSONResponse({"error": str(e)}, 500)
@ -785,24 +792,28 @@ class WebServer:
context["sessions"] = {
"chat": {
"system_prompt": system_message,
"content_seed": None,
"llm_history": context["llm_history"],
"user_history": context["user_history"],
"context_tokens": round(len(str(create_system_message(system_message))))
},
"job_description": {
"system_prompt": system_job_description,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(create_system_message(system_job_description))))
},
"resume": {
"system_prompt": system_generate_resume,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(create_system_message(system_generate_resume))))
},
"fact_check": {
"system_prompt": system_fact_check,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(create_system_message(system_fact_check))))
@ -847,24 +858,28 @@ class WebServer:
"sessions": {
"chat": {
"system_prompt": system_message,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(system_message)) * 3 / 4), # Estimate context usage
},
"job_description": {
"system_prompt": system_job_description,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(system_job_description)) * 3 / 4), # Estimate context usage
},
"resume": {
"system_prompt": system_generate_resume,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(system_generate_resume)) * 3 / 4), # Estimate context usage
},
"fact_check": {
"system_prompt": system_fact_check,
"content_seed": None,
"llm_history": [],
"user_history": [],
"context_tokens": round(len(str(system_fact_check)) * 3 / 4), # Estimate context usage
@ -891,14 +906,67 @@ class WebServer:
logging.info(f"Context {context_id} not found. Creating new context.")
return self.load_context(context_id)
def generate_rag_results(self, context, content):
results_found = False
if self.file_watcher:
for rag in context["rags"]:
if rag["enabled"] and rag["name"] == "JPK": # Only support JPK rag right now...
yield {"status": "processing", "message": f"Checking RAG context {rag['name']}..."}
chroma_results = self.file_watcher.find_similar(query=content, top_k=10)
if chroma_results:
results_found = True
chroma_embedding = np.array(chroma_results["query_embedding"]).flatten() # Ensure correct shape
print(f"Chroma embedding shape: {chroma_embedding.shape}")
umap_2d = self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 2D output: {umap_2d}, length: {len(umap_2d)}") # Debug output
umap_3d = self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
print(f"UMAP 3D output: {umap_3d}, length: {len(umap_3d)}") # Debug output
yield {
**chroma_results,
"name": rag["name"],
"umap_embedding_2d": umap_2d,
"umap_embedding_3d": umap_3d
}
if not results_found:
yield {"status": "complete", "message": "No RAG context found"}
yield {
"rag": None,
"documents": [],
"embeddings": [],
"umap_embedding_2d": [],
"umap_embedding_3d": []
}
else:
yield {"status": "complete", "message": "RAG processing complete"}
# type: chat
# * Q&A
#
# type: job_description
# * First message sets Job Description and generates Resume
# * Has content (Job Description)
# * Then Q&A of Job Description
#
# type: resume
# * First message sets Resume and generates Fact Check
# * Has no content
# * Then Q&A of Resume
#
# type: fact_check
# * First message sets Fact Check and is Q&A
# * Has content
# * Then Q&A of Fact Check
async def chat(self, context, type, content):
if not self.file_watcher:
return
content = content.strip()
if not content:
yield {"status": "error", "message": "Invalid request"}
return
if self.processing:
yield {"status": "error", "message": "Busy"}
@ -907,10 +975,11 @@ class WebServer:
self.processing = True
try:
llm_history = context["sessions"][type]["llm_history"]
user_history = context["sessions"][type]["user_history"]
session = context["sessions"][type]
llm_history = session["llm_history"]
user_history = session["user_history"]
metadata = {
"type": type,
"origin": type,
"rag": { "documents": [] },
"tools": [],
"eval_count": 0,
@ -922,136 +991,230 @@ class WebServer:
# Default to not using tools
enable_tools = False
# Default to using RAG
enable_rag = True
# The first time a particular session type is used, it is handled differently. After the initial pass (once the
# llm_history has more than one entry), the standard 'chat' is used.
if len(user_history) >= 1:
process_type = "chat"
# Do not enable RAG when limiting context to the job description chat
if type == "job_description":
enable_rag = False
# Default to using RAG if there is content to check
if content:
enable_rag = True
else:
process_type = type
enable_rag = False
if enable_rag:
for rag in context["rags"]:
if rag["enabled"] and rag["name"] == "JPK": # Only support JPK rag right now...
yield {"status": "processing", "message": f"Checking RAG context {rag['name']}..."}
chroma_results = self.file_watcher.find_similar(query=content, top_k=10)
if chroma_results:
chroma_embedding = chroma_results["query_embedding"]
metadata["rag"] = {
**chroma_results,
"name": rag["name"],
"umap_embedding_2d": self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist(),
"umap_embedding_3d": self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
}
# RAG is disabled when asking questions about the resume
if type == "resume":
enable_rag = False
# The first time through each session type, a content_seed may be set for
# future chat sessions; use it once, then clear it.
if session["content_seed"]:
preamble = f"{session['content_seed']}"
session["content_seed"] = None
else:
preamble = ""
# After the first time a particular session type is used, it is handled as a chat.
# The number of messages indicating the session is ready for chat varies by
# session type.
process_type = type
match process_type:
case "job_description":
logging.info(f"job_description user_history len: {len(user_history)}")
if len(user_history) >= 2: # USER, ASSISTANT
process_type = "chat"
case "resume":
logging.info(f"resume user_history len: {len(user_history)}")
if len(user_history) >= 3: # USER, ASSISTANT, FACT_CHECK
process_type = "chat"
case "fact_check":
process_type = "chat" # Fact Check is always a chat session
match process_type:
# Normal chat interactions with context history
case "chat":
if not content:
yield {"status": "error", "message": "No query provided for chat."}
logging.info(f"user_history len: {len(user_history)}")
return
enable_tools = True
preamble = ""
# Generate RAG content if enabled, based on the content
rag_context = ""
for doc in metadata["rag"]["documents"]:
rag_context += doc
if enable_rag:
# Initialize metadata["rag"] to None or a default value
metadata["rag"] = None
for value in self.generate_rag_results(context, content):
if "status" in value:
yield value
else:
if value.get("documents") or value.get("rag") is not None:
metadata["rag"] = value
if metadata["rag"]:
for doc in metadata["rag"]["documents"]:
rag_context += f"{doc}\n"
if rag_context:
preamble = f"""
1. Respond to this query: {content}
2. If there is information in this context to enhance the answer, do so:
[{context_tag}]
{rag_context}
[/{context_tag}]
Use that information to respond to: """
1. Respond to this query: {content}
2. If there is information in the [{context_tag}] to enhance the answer, do so:
[{context_tag}]
{rag_context}
[/{context_tag}]
Use that information to respond to:"""
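# Example of the rendered preamble (illustrative only; assumes
# context_tag == "CONTEXT" and a single retrieved document):
#
#   1. Respond to this query: What did James work on in 2023?
#   2. If there is information in the [CONTEXT] to enhance the answer, do so:
#   [CONTEXT]
#   ...retrieved document text...
#   [/CONTEXT]
#   Use that information to respond to: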
# Single job_description is provided; generate a resume
system_prompt = context["sessions"]["chat"]["system_prompt"]
# On first entry, a single job_description is provided ("user")
# Generate a resume to append to RESUME history
case "job_description":
# Generate RAG content if enabled, based on the content
# Always force the full resume to be in context
resume_doc = open(defines.resume_doc, "r").read()
work_history = f"{resume_doc}\n"
for doc in metadata["rag"]["documents"]:
work_history += f"{doc}\n"
rag_context = f"{resume_doc}\n"
if enable_rag:
# Initialize metadata["rag"] to None or a default value
metadata["rag"] = None
for value in self.generate_rag_results(context, content):
if "status" in value:
yield value
else:
if value.get("documents") or value.get("rag") is not None:
metadata["rag"] = value
if metadata["rag"]:
for doc in metadata["rag"]["documents"]:
rag_context += f"{doc}\n"
preamble = f"""
[INTRO]
{resume_intro}
[/INTRO]
[INTRO]
{resume_intro}
[/INTRO]
[WORK HISTORY]
{work_history}
[/WORK HISTORY]
[WORK HISTORY]
{rag_context}
[/WORK HISTORY]
[JOB DESCRIPTION]
{content}
[/JOB DESCRIPTION]
[JOB DESCRIPTION]
{content}
[/JOB DESCRIPTION]
"""
context["sessions"]["job_description"]["content_seed"] = preamble + "Use the above information to answer this query: "
1. Use the above [INTRO] and [WORK HISTORY] to create the resume for the [JOB DESCRIPTION].
2. Do not use content from the [JOB DESCRIPTION] in the response unless the [WORK HISTORY] mentions them.
"""
preamble += f"""
1. Use the above [INTRO] and [WORK HISTORY] to create the resume for the [JOB DESCRIPTION].
2. Do not use content from the [JOB DESCRIPTION] in the response unless the [WORK HISTORY] mentions them.
"""
# Seed the first context messages with the resume from the 'job_description' session
# Seed the history for job_description
messages = [ {
"role": "user", "content": content
}, {
"role": "assistant", "content": "Job description stored to use in future queries.", "display": "hide"
} ]
# Strip out the 'display' key when adding to llm_history
llm_history.extend([{k: v for k, v in m.items() if k != 'display'} for m in messages])
user_history.extend([{**m, "origin": "job_description"} for m in messages])
# Switch to resume session for LLM responses
metadata["origin"] = "resume"
session = context["sessions"]["resume"]
system_prompt = session["system_prompt"]
llm_history = session["llm_history"] = []
user_history = session["user_history"] = []
# Ignore the passed-in content and invoke Fact Check
case "resume":
raise Exception(f"Invalid chat type: {type}")
# Fact check the resume created by the 'job_description' using only the RAG and resume
case "fact_check":
if len(context["sessions"]["resume"]["llm_history"]) < 3: # SYSTEM, USER, **ASSISTANT**
yield {"status": "done", "message": "No resume history found." }
return
resume = context["sessions"]["resume"]["llm_history"][2]
metadata = copy.deepcopy(resume["metadata"])
metadata["eval_count"] = 0
metadata["eval_duration"] = 0
metadata["prompt_eval_count"] = 0
metadata["prompt_eval_duration"] = 0
if len(context["sessions"]["resume"]["user_history"]) < 2: # USER, **ASSISTANT**
raise Exception(f"No resume found in user history.")
resume = context["sessions"]["resume"]["user_history"][1]
# Generate RAG content if enabled, based on the content
# Always force the full resume to be in context
resume_doc = open(defines.resume_doc, "r").read()
work_history = f"{resume_doc}\n"
for doc in metadata["rag"]["documents"]:
work_history += f"{doc}\n"
rag_context = f"{resume_doc}\n"
if enable_rag:
# Initialize metadata["rag"] to None or a default value
metadata["rag"] = None
for value in self.generate_rag_results(context, resume["content"]):
if "status" in value:
yield value
else:
if value.get("documents") or value.get("rag") is not None:
metadata["rag"] = value
if metadata["rag"]:
for doc in metadata["rag"]["documents"]:
rag_context += f"{doc}\n"
preamble = f"""
[WORK HISTORY]
{work_history}
[/WORK HISTORY]
[WORK HISTORY]
{rag_context}
[/WORK HISTORY]
[RESUME]
{resume['content']}
[/RESUME]
"""
content = resume['content']
[RESUME]
{resume['content']}
[/RESUME]
raise Exception(f"Invalid chat type: {type}")
Perform the following:
1. Do not invent or assume any information not explicitly present in the [WORK HISTORY].
2. Analyze the [RESUME] to identify any discrepancies or inaccuracies based on the [WORK HISTORY].
"""
context["sessions"]["resume"]["content_seed"] = f"""
[RESUME]
{resume["content"]}
[/RESUME]
Use the above [RESUME] to answer this query:
"""
content = "Fact check the resume and report discrepancies."
# Seed the history for resume
messages = [ {
"role": "user", "content": "Fact check resume", "origin": "resume", "display": "hide"
}, {
"role": "assistant", "content": "Resume fact checked.", "origin": "resume", "display": "hide"
} ]
# Do not add this to the LLM history; it is only used for UI presentation
user_history.extend(messages)
# Switch to fact_check session for LLM responses
metadata["origin"] = "fact_check"
session = context["sessions"]["fact_check"]
system_prompt = session["system_prompt"]
llm_history = session["llm_history"] = []
user_history = session["user_history"] = []
case _:
raise Exception(f"Invalid chat type: {type}")
llm_history.append({"role": "user", "content": preamble + content})
user_history.append({"role": "user", "content": content})
user_history.append({"role": "user", "content": content, "origin": metadata["origin"]})
metadata["full_query"] = llm_history[-1]["content"]
if context["message_history_length"]:
messages = create_system_message(context["sessions"][type]["system_prompt"]) + llm_history[-context["message_history_length"]:]
messages = create_system_message(system_prompt) + llm_history[-context["message_history_length"]:]
else:
messages = create_system_message(context["sessions"][type]["system_prompt"]) + llm_history
messages = create_system_message(system_prompt) + llm_history
# Estimate token length of new messages
ctx_size = self.get_optimal_ctx_size(context["sessions"][type]["context_tokens"], messages=llm_history[-1]["content"])
ctx_size = self.get_optimal_ctx_size(context["sessions"][process_type]["context_tokens"], messages=llm_history[-1]["content"])
processing_type = "Processing query..."
match type:
case "job_description":
processing_type = "Generating resume..."
case "fact_check":
processing_type = "Fact Checking resume..."
if len(llm_history) > 1:
processing_type = "Processing query..."
yield {"status": "processing", "message": processing_type, "num_ctx": ctx_size}
if len(user_history) > 2:
processing_message = f"Processing {'RAG augmented ' if enable_rag else ''}query..."
else:
match type:
case "job_description":
processing_message = f"Generating {'RAG augmented ' if enable_rag else ''}resume..."
case "resume":
processing_message = f"Fact Checking {'RAG augmented ' if enable_rag else ''}resume..."
case _:
processing_message = f"Processing {'RAG augmented ' if enable_rag else ''}query..."
yield {"status": "processing", "message": processing_message, "num_ctx": ctx_size}
# Use the async generator in an async for loop
try:
@ -1060,9 +1223,6 @@ class WebServer:
else:
response = self.client.chat(model=self.model, messages=messages, options={ "num_ctx": ctx_size })
except Exception as e:
logging.info(f"1. {messages[0]}")
logging.info(f"[LAST]. {messages[-1]}")
logging.exception({ "model": self.model, "error": str(e) })
yield {"status": "error", "message": f"An error occurred communicating with LLM"}
return
@ -1071,7 +1231,7 @@ class WebServer:
metadata["eval_duration"] += response["eval_duration"]
metadata["prompt_eval_count"] += response["prompt_eval_count"]
metadata["prompt_eval_duration"] += response["prompt_eval_duration"]
context["sessions"][type]["context_tokens"] = response["prompt_eval_count"] + response["eval_count"]
session["context_tokens"] = response["prompt_eval_count"] + response["eval_count"]
tools_used = []
@ -1109,12 +1269,13 @@ class WebServer:
if isinstance(tool_result, list):
messages.extend(tool_result)
else:
messages.append(tool_result)
if tool_result:
messages.append(tool_result)
metadata["tools"] = tools_used
# Estimate token length of new messages
ctx_size = self.get_optimal_ctx_size(context["sessions"][type]["context_tokens"], messages=messages[pre_add_index:])
ctx_size = self.get_optimal_ctx_size(session["context_tokens"], messages=messages[pre_add_index:])
yield {"status": "processing", "message": "Generating final response...", "num_ctx": ctx_size }
# Decrease creativity when processing tool call requests
response = self.client.chat(model=self.model, messages=messages, stream=False, options={ "num_ctx": ctx_size }) #, "temperature": 0.5 })
@ -1122,23 +1283,24 @@ class WebServer:
metadata["eval_duration"] += response["eval_duration"]
metadata["prompt_eval_count"] += response["prompt_eval_count"]
metadata["prompt_eval_duration"] += response["prompt_eval_duration"]
context["sessions"][type]["context_tokens"] = response["prompt_eval_count"] + response["eval_count"]
session["context_tokens"] = response["prompt_eval_count"] + response["eval_count"]
reply = response["message"]["content"]
final_message = {"role": "assistant", "content": reply }
# llm_history is provided to the LLM and should not carry extra metadata;
# append a copy so attaching metadata below does not mutate this entry
llm_history.append(dict(final_message))
final_message["metadata"] = metadata
# user_history is provided to the REST API and does not include CONTEXT or metadata
user_history.append(final_message)
# user_history is provided to the REST API and does not include CONTEXT,
# but it does include metadata
final_message["metadata"] = metadata
user_history.append({**final_message, "origin": metadata["origin"]})
# Return the REST API with metadata
yield {"status": "done", "message": final_message }
except Exception as e:
logging.exception({ "model": self.model, "messages": messages, "error": str(e) })
logging.exception({ "model": self.model, "origin": type, "content": content, "error": str(e) })
yield {"status": "error", "message": f"An error occurred: {str(e)}"}
finally:

View File

@ -243,16 +243,18 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
if not self._umap_collection or not len(self._umap_collection["embeddings"]):
logging.warning("No embeddings found in the collection.")
return
# Fit the UMAP projections over all stored embeddings (runs during initialization)
logging.info(f"Updating 2D UMAP for {len(self._umap_collection['embeddings'])} vectors")
vectors = np.array(self._umap_collection["embeddings"])
self._umap_model_2d = umap.UMAP(n_components=2, random_state=8911, metric="cosine") #, n_neighbors=15, min_dist=0.1)
self._umap_model_2d = umap.UMAP(n_components=2, random_state=8911, metric="cosine", n_neighbors=15, min_dist=0.1)
self._umap_embedding_2d = self._umap_model_2d.fit_transform(vectors)
logging.info(f"2D UMAP model n_components: {self._umap_model_2d.n_components}") # Should be 2
logging.info(f"Updating 3D UMAP for {len(self._umap_collection['embeddings'])} vectors")
vectors = np.array(self._umap_collection["embeddings"])
self._umap_model_3d = umap.UMAP(n_components=3, random_state=8911, metric="cosine") #, n_neighbors=15, min_dist=0.1)
self._umap_model_3d = umap.UMAP(n_components=3, random_state=8911, metric="cosine", n_neighbors=15, min_dist=0.1)
self._umap_embedding_3d = self._umap_model_3d.fit_transform(vectors)
logging.info(f"3D UMAP model n_components: {self._umap_model_3d.n_components}") # Should be 3
def _get_vector_collection(self, recreate=False):
"""Get or create a ChromaDB collection."""
@ -455,6 +457,8 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
logging.info(f"Vectorstore initialized with {self.collection.count()} documents")
self._update_umaps()
# Show stats
try:
all_metadata = self.collection.get()['metadatas']