Refactoring to a single Conversation element

James Ketrenos 2025-04-22 13:05:54 -07:00
parent 02915b9a23
commit 4ce616b64b
12 changed files with 592 additions and 437 deletions

View File

@ -4,6 +4,11 @@ div {
word-break: break-word;
}
.gl-container #scene {
top: 0px !important;
left: 0px !important;
}
pre {
max-width: 100%;
max-height: 100%;

View File

@ -19,8 +19,7 @@ import MenuIcon from '@mui/icons-material/Menu';
import { ResumeBuilder } from './ResumeBuilder';
import { Message } from './Message';
import { MessageData } from './MessageMeta';
import { Message, ChatQuery, MessageList, MessageData } from './Message';
import { SeverityType } from './Snack';
import { VectorVisualizer } from './VectorVisualizer';
import { Controls } from './Controls';
@ -33,6 +32,8 @@ import '@fontsource/roboto/400.css';
import '@fontsource/roboto/500.css';
import '@fontsource/roboto/700.css';
import MuiMarkdown from 'mui-markdown';
const getConnectionBase = (loc: any): string => {
if (!loc.host.match(/.*battle-linux.*/)) {
@ -130,10 +131,41 @@ const App = () => {
}, [about, setAbout])
const handleSubmitChatQuery = () => {
chatRef.current?.submitQuery();
const handleSubmitChatQuery = (query: string) => {
console.log(`handleSubmitChatQuery: ${query} -- `, chatRef.current ? ' sending' : 'no handler');
chatRef.current?.submitQuery(query);
};
const chatPreamble: MessageList = [
{
role: 'info',
content: `
# Welcome to Backstory
Backstory is a RAG-enabled expert system with access to real-time data, running self-hosted
(no cloud) versions of industry-leading Large and Small Language Models (LLMs/SLMs).
It was written by James Ketrenos in order to provide answers to
questions potential employers may have about his work history.
What would you like to know about James?
`
}
];
const chatQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<ChatQuery text="What is James Ketrenos' work history?" submitQuery={handleSubmitChatQuery} />
<ChatQuery text="What programming languages has James used?" submitQuery={handleSubmitChatQuery} />
<ChatQuery text="What are James' professional strengths?" submitQuery={handleSubmitChatQuery} />
<ChatQuery text="What are today's headlines on CNBC.com?" submitQuery={handleSubmitChatQuery} />
</Box>,
<MuiMarkdown>
As with all LLM interactions, the results may not be 100% accurate. If you have questions about my career,
I'd love to hear from you. You can send me an email at **james_backstory@ketrenos.com**.
</MuiMarkdown>
];
// Extract the sessionId from the URL if present, otherwise
// request a sessionId from the server.
useEffect(() => {
@ -368,11 +400,13 @@ const App = () => {
ref={chatRef}
{...{
type: "chat",
prompt: "Enter your question...",
prompt: "What would you like to know about James?",
sessionId,
connectionBase,
setSnack
}}
setSnack,
preamble: chatPreamble,
defaultPrompts: chatQuestions
}}
/>
</Box>
</CustomTabPanel>
@ -392,7 +426,7 @@ const App = () => {
<CustomTabPanel tab={tab} index={3}>
<Box className="ChatBox">
<Box className="Conversation">
<Message {...{ message: { role: 'assistant', content: about }, submitQuery: handleSubmitChatQuery }} />
<Message {...{ message: { role: 'assistant', content: about }, submitQuery: handleSubmitChatQuery, connectionBase, sessionId, setSnack }} />
</Box>
</Box>
</CustomTabPanel>
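
For reference, a minimal sketch of the parent-side wiring these App.tsx hunks move to, assuming Conversation/ConversationHandle and ChatQuery/MessageList are exported as used here; the connectionBase, sessionId, and setSnack values below are placeholders rather than the app's real ones:

// Parent-side sketch: one Conversation instance, driven through its ref handle.
import React, { useRef } from 'react';
import { Conversation, ConversationHandle } from './Conversation'; // assumed export names
import { ChatQuery, MessageList } from './Message';

const ChatTabSketch = () => {
  const chatRef = useRef<ConversationHandle>(null);
  const connectionBase = 'http://localhost:8900'; // placeholder
  const sessionId = 'demo-session';               // placeholder

  // Canned ChatQuery prompts and free-form input both funnel through the same handle.
  const handleSubmitChatQuery = (query: string) => {
    chatRef.current?.submitQuery(query);
  };

  const preamble: MessageList = [
    { role: 'info', content: '# Welcome to Backstory' },
  ];

  return (
    <Conversation
      ref={chatRef}
      type="chat"
      prompt="What would you like to know about James?"
      connectionBase={connectionBase}
      sessionId={sessionId}
      setSnack={(message, severity) => console.log(severity, message)}
      preamble={preamble}
      defaultPrompts={[
        <ChatQuery key="history" text="What is James Ketrenos' work history?" submitQuery={handleSubmitChatQuery} />,
      ]}
    />
  );
};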

View File

@ -2,7 +2,7 @@ import { Box } from '@mui/material';
import { useTheme } from '@mui/material/styles';
import { SxProps, Theme } from '@mui/material';
import React from 'react';
import { MessageRoles } from './MessageMeta';
import { MessageRoles } from './Message';
interface ChatBubbleProps {
role: MessageRoles,
@ -16,64 +16,52 @@ interface ChatBubbleProps {
function ChatBubble({ role, isFullWidth, children, sx, className }: ChatBubbleProps) {
const theme = useTheme();
const defaultRadius = '16px';
const defaultStyle = {
padding: theme.spacing(1, 1),
fontSize: '0.875rem',
alignSelf: 'flex-start', // Left-aligned is used by default
maxWidth: '100%',
minWidth: '80%',
'& > *': {
color: 'inherit', // Children inherit 'color' from parent
},
}
const styles = {
'user': {
...defaultStyle,
backgroundColor: theme.palette.background.default, // Warm Gray (#D3CDBF)
border: `1px solid ${theme.palette.custom.highlight}`, // Golden Ochre (#D4A017)
borderRadius: '16px 16px 0 16px', // Rounded, flat bottom-right for user
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '80%',
borderRadius: `${defaultRadius} ${defaultRadius} 0 ${defaultRadius}`, // Rounded, flat bottom-right for user
alignSelf: 'flex-end', // Right-aligned for user
color: theme.palette.primary.main, // Midnight Blue (#1A2536) for text
'& > *': {
color: 'inherit', // Children inherit Midnight Blue unless overridden
},
},
'assistant': {
...defaultStyle,
backgroundColor: theme.palette.primary.main, // Midnight Blue (#1A2536)
border: `1px solid ${theme.palette.secondary.main}`, // Dusty Teal (#4A7A7D)
borderRadius: '16px 16px 16px 0', // Rounded, flat bottom-left for assistant
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '80%',
alignSelf: 'flex-start', // Left-aligned for assistant
borderRadius: `${defaultRadius} ${defaultRadius} ${defaultRadius} 0`, // Rounded, flat bottom-left for assistant
color: theme.palette.primary.contrastText, // Warm Gray (#D3CDBF) for text
'& > *': {
color: 'inherit', // Children inherit Warm Gray unless overridden
},
},
'system': {
...defaultStyle,
backgroundColor: '#EDEAE0', // Soft warm gray that plays nice with #D3CDBF
border: `1px dashed ${theme.palette.custom.highlight}`, // Golden Ochre
borderRadius: '12px',
padding: theme.spacing(1, 2),
borderRadius: defaultRadius,
maxWidth: isFullWidth ? '100%' : '90%',
minWidth: '60%',
alignSelf: 'center',
color: theme.palette.text.primary, // Charcoal Black
fontStyle: 'italic',
fontSize: '0.95rem',
'& > *': {
color: 'inherit',
},
},
'info': {
...defaultStyle,
backgroundColor: '#BFD8D8', // Softened Dusty Teal
border: `1px solid ${theme.palette.secondary.main}`, // Dusty Teal
borderRadius: '16px',
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '70%',
alignSelf: 'flex-start',
borderRadius: defaultRadius,
color: theme.palette.text.primary, // Charcoal Black (#2E2E2E) — much better contrast
opacity: 0.95,
fontSize: '0.875rem',
'& > *': {
color: 'inherit',
},
}
};
return (

View File

@ -8,50 +8,30 @@ import SendIcon from '@mui/icons-material/Send';
import PropagateLoader from "react-spinners/PropagateLoader";
import { Message, MessageList } from './Message';
import { Message, MessageList, MessageData } from './Message';
import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
import { MessageData } from './MessageMeta';
const welcomeMarkdown = `
# Welcome to Backstory
Backstory was written by James Ketrenos in order to provide answers to
questions potential employers may have about his work history.
You can ask things like:
<ChatQuery text="What is James Ketrenos' work history?"/>
<ChatQuery text="What programming languages has James used?"/>
<ChatQuery text="What are James' professional strengths?"/>
<ChatQuery text="What are today's headlines on CNBC.com?"/>
You can click the text above to submit that query, or type it in yourself (or whatever questions you may have.)
Backstory is a RAG enabled expert system with access to real-time data running self-hosted
(no cloud) versions of industry leading Large and Small Language Models (LLM/SLMs).
As with all LLM interactions, the results may not be 100% accurate. If you have questions about my career, I'd love to hear from you. You can send me an email at **james_backstory@ketrenos.com**.`;
const welcomeMessage: MessageData = {
"role": "assistant", "content": welcomeMarkdown
};
const loadingMessage: MessageData = { "role": "assistant", "content": "Instancing chat session..." };
const loadingMessage: MessageData = { "role": "assistant", "content": "Establishing connection with server..." };
type ConversationMode = 'chat' | 'fact-check' | 'system';
interface ConversationHandle {
submitQuery: () => void;
submitQuery: (query: string) => void;
}
interface ConversationProps {
type: ConversationMode
prompt: string,
connectionBase: string,
sessionId: string | undefined,
sessionId?: string,
setSnack: (message: string, severity: SeverityType) => void,
defaultPrompts?: React.ReactElement[],
preamble?: MessageList,
hideDefaultPrompts?: boolean,
};
const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt, type, sessionId, setSnack, connectionBase} : ConversationProps, ref) => {
const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ prompt, type, preamble, hideDefaultPrompts, defaultPrompts, sessionId, setSnack, connectionBase }: ConversationProps, ref) => {
const [query, setQuery] = useState<string>("");
const [contextUsedPercentage, setContextUsedPercentage] = useState<number>(0);
const [processing, setProcessing] = useState<boolean>(false);
@ -62,6 +42,7 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
const [contextWarningShown, setContextWarningShown] = useState<boolean>(false);
const [noInteractions, setNoInteractions] = useState<boolean>(true);
// Update the context status
const updateContextStatus = useCallback(() => {
@ -89,34 +70,44 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
fetchContextStatus();
}, [setContextStatus, connectionBase, setSnack, sessionId]);
// Set the initial chat history to "loading" or the welcome message if loaded.
useEffect(() => {
if (sessionId === undefined) {
setConversation([loadingMessage]);
} else {
fetch(connectionBase + `/api/history/${sessionId}`, {
// Set the initial chat history to "loading" or the welcome message if loaded.
useEffect(() => {
if (sessionId === undefined) {
setConversation([loadingMessage]);
return;
}
const fetchHistory = async () => {
try {
const response = await fetch(connectionBase + `/api/history/${sessionId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
})
.then(response => response.json())
.then(data => {
console.log(`Session id: ${sessionId} -- history returned from server with ${data.length} entries`)
setConversation([
welcomeMessage,
...data
]);
})
.catch(error => {
console.error('Error generating session ID:', error);
setSnack("Unable to obtain chat history.", "error");
});
});
if (!response.ok) {
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
}
const data = await response.json();
console.log(`Session id: ${sessionId} -- history returned from server with ${data.length} entries`)
if (data.length === 0) {
setConversation(preamble || []);
setNoInteractions(true);
} else {
setConversation(data);
setNoInteractions(false);
}
updateContextStatus();
} catch (error) {
console.error('Error fetching chat history:', error);
setSnack("Unable to obtain chat history.", "error");
}
}, [sessionId, setConversation, updateContextStatus, connectionBase, setSnack]);
};
if (sessionId !== undefined) {
fetchHistory();
}
}, [sessionId, setConversation, updateContextStatus, connectionBase, setSnack, preamble]);
const isScrolledToBottom = useCallback(()=> {
// Current vertical scroll position
const scrollTop = window.scrollY || document.documentElement.scrollTop;
@ -138,7 +129,6 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
});
}, []);
const startCountdown = (seconds: number) => {
if (timerRef.current) clearInterval(timerRef.current);
setCountdown(seconds);
@ -159,10 +149,6 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
}, 1000);
};
const submitQuery = (text: string) => {
sendQuery(text);
}
const stopCountdown = () => {
if (timerRef.current) {
clearInterval(timerRef.current);
@ -182,12 +168,16 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
};
useImperativeHandle(ref, () => ({
submitQuery: () => {
submitQuery: (query: string) => {
sendQuery(query);
}
}));
// If context status changes, show a warning if necessary. If it drops
const submitQuery = (query: string) => {
sendQuery(query);
}
// If context status changes, show a warning if necessary. If it drops
// back below the threshold, clear the warning trigger
useEffect(() => {
const context_used_percentage = Math.round(100 * contextStatus.context_used / contextStatus.max_context);
@ -202,6 +192,8 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
}, [contextStatus, setContextWarningShown, contextWarningShown, setContextUsedPercentage, setSnack]);
const sendQuery = async (query: string) => {
setNoInteractions(false);
if (!query.trim()) return;
//setTab(0);
@ -381,9 +373,8 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
};
return (
<Box className="ConversationContainer" sx={{ display: "flex", flexDirection: "column", height: "100%", overflowY: "auto" }}>
<Box className="Conversation" sx={{ flexGrow: 2, p: 1 }}>
{conversation.map((message, index) => <Message key={index} submitQuery={submitQuery} message={message} />)}
<Box className="Conversation" sx={{ display: "flex", flexDirection: "column", overflowY: "auto" }}>
{conversation.map((message, index) => <Message key={index} {...{ submitQuery, message, connectionBase, sessionId, setSnack }} />)}
<Box sx={{
display: "flex",
flexDirection: "column",
@ -407,7 +398,32 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
>Estimated response time: {countdown}s</Box>
)}
</Box>
<Box sx={{ ml: "0.25rem", fontSize: "0.6rem", color: "darkgrey", display: "flex", flexDirection: "row", gap: 1, mt: "auto" }}>
<Box className="Query" sx={{ display: "flex", flexDirection: "row", p: 1 }}>
<TextField
variant="outlined"
disabled={processing}
fullWidth
type="text"
value={query}
onChange={(e) => setQuery(e.target.value)}
onKeyDown={handleKeyPress}
placeholder={prompt}
id="QueryInput"
/>
<Tooltip title="Send">
<Button sx={{ m: 1 }} variant="contained" onClick={() => { sendQuery(query); }}><SendIcon /></Button>
</Tooltip>
</Box>
{(noInteractions || !hideDefaultPrompts) && defaultPrompts !== undefined && defaultPrompts.length &&
<Box sx={{ display: "flex", flexDirection: "column" }}>
{
defaultPrompts.map((element, index) => {
return (<Box key={index}>{element}</Box>);
})
}
</Box>
}
<Box sx={{ ml: "0.25rem", fontSize: "0.6rem", color: "darkgrey", display: "flex", flexShrink: 1, flexDirection: "row", gap: 1, mb: "auto", mt: 1 }}>
Context used: {contextUsedPercentage}% {contextStatus.context_used}/{contextStatus.max_context}
{
contextUsedPercentage >= 90 ? <Typography sx={{ fontSize: "0.6rem", color: "red" }}>WARNING: Context almost exhausted. You should start a new chat.</Typography>
@ -415,23 +431,7 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({prompt,
: <></>)
}
</Box>
</Box>
<Box className="Query" sx={{ display: "flex", flexDirection: "row", p: 1 }}>
<TextField
variant="outlined"
disabled={processing}
fullWidth
type="text"
value={query}
onChange={(e) => setQuery(e.target.value)}
onKeyDown={handleKeyPress}
placeholder="Enter your question..."
id="QueryInput"
/>
<Tooltip title="Send">
<Button sx={{ m: 1 }} variant="contained" onClick={() => { sendQuery(query); }}><SendIcon /></Button>
</Tooltip>
</Box>
<Box sx={{ display: "flex", flexGrow: 1 }}></Box>
</Box>
);
});

View File

@ -1,27 +0,0 @@
import { SxProps, Theme } from '@mui/material';
import { MessageData } from './MessageMeta';
/**
* Props for the DocumentViewer component
* @interface DocumentViewerProps
* @property {function} generateResume - Function to generate a resume based on job description
* @property {MessageData | undefined} resume - The generated resume data
* @property {function} setResume - Function to set the generated resume
* @property {function} factCheck - Function to fact check the generated resume
* @property {MessageData | undefined} facts - The fact check results
* @property {function} setFacts - Function to set the fact check results
* @property {string} jobDescription - The initial job description
* @property {function} setJobDescription - Function to set the job description
* @property {SxProps<Theme>} [sx] - Optional styling properties
*/
export interface DocumentViewerProps {
generateResume: (jobDescription: string) => void;
resume: MessageData | undefined;
setResume: (resume: MessageData | undefined) => void;
factCheck: (resume: string) => void;
facts: MessageData | undefined;
setFacts: (facts: MessageData | undefined) => void;
jobDescription: string | undefined;
setJobDescription: (jobDescription: string | undefined) => void;
sx?: SxProps<Theme>;
}

View File

@ -24,12 +24,42 @@ import {
RestartAlt as ResetIcon,
} from '@mui/icons-material';
import PropagateLoader from "react-spinners/PropagateLoader";
import { SxProps, Theme } from '@mui/material';
import MuiMarkdown from 'mui-markdown';
import { Message } from './Message';
import { Document } from './Document';
import { DocumentViewerProps } from './DocumentTypes';
import MuiMarkdown from 'mui-markdown';
import { MessageData } from './Message';
import { SeverityType } from './Snack';
/**
* Props for the DocumentViewer component
* @interface DocumentViewerProps
* @property {function} generateResume - Function to generate a resume based on job description
* @property {MessageData | undefined} resume - The generated resume data
* @property {function} setResume - Function to set the generated resume
* @property {function} factCheck - Function to fact check the generated resume
* @property {MessageData | undefined} facts - The fact check results
* @property {function} setFacts - Function to set the fact check results
* @property {string} jobDescription - The initial job description
* @property {function} setJobDescription - Function to set the job description
* @property {SxProps<Theme>} [sx] - Optional styling properties
*/
export interface DocumentViewerProps {
generateResume: (jobDescription: string) => void;
resume: MessageData | undefined;
setResume: (resume: MessageData | undefined) => void;
factCheck: (resume: string) => void;
facts: MessageData | undefined;
setFacts: (facts: MessageData | undefined) => void;
jobDescription: string | undefined;
setJobDescription: (jobDescription: string | undefined) => void;
sx?: SxProps<Theme>;
connectionBase: string;
sessionId: string;
setSnack: (message: string, severity: SeverityType) => void,
}
/**
* DocumentViewer component
*
@ -44,7 +74,10 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
setResume,
facts,
setFacts,
sx
sx,
connectionBase,
sessionId,
setSnack
}) => {
// State for editing job description
const [editJobDescription, setEditJobDescription] = useState<string | undefined>(jobDescription);
@ -223,7 +256,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
const renderResumeView = () => (
<Box key="ResumeView" sx={{ display: "flex", flexDirection: "column", overflow: "auto", flexGrow: 1, flexBasis: 0 }}>
<Document sx={{ display: "flex", flexGrow: 1 }} title="">
{resume !== undefined && <Message message={resume} />}
{resume !== undefined && <Message {...{ message: resume, connectionBase, sessionId, setSnack }} />}
</Document>
{processing === "resume" && (
<Box sx={{
@ -257,13 +290,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
const renderFactCheckView = () => (
<Box key="FactView" sx={{ display: "flex", flexDirection: "column", overflow: "auto", flexGrow: 1, flexBasis: 0, p: 0 }}>
<Document sx={{ display: "flex", flexGrow: 1 }} title="">
{facts !== undefined && <Message message={facts} />}
{/* <pre>
With over 20 years of experience as a software architect, team lead, and developer, James Ketrenos brings a unique blend of technical expertise and leadership to the table. Focused on advancing energy-efficient AI solutions, he excels in designing, building, and deploying scalable systems that enable rapid product development. His extensive background in Linux software architecture, DevOps, and open-source technologies makes him an ideal candidate for leading roles at technology-driven companies.
---
</pre> */}
{facts !== undefined && <Message {...{ message: facts, connectionBase, sessionId, setSnack }} />}
</Document>
{processing === "facts" && (
<Box sx={{

View File

@ -1,4 +1,15 @@
import { useState, useRef } from 'react';
import Divider from '@mui/material/Divider';
import Accordion from '@mui/material/Accordion';
import AccordionSummary from '@mui/material/AccordionSummary';
import AccordionDetails from '@mui/material/AccordionDetails';
import Card from '@mui/material/Card';
import Table from '@mui/material/Table';
import TableBody from '@mui/material/TableBody';
import TableCell from '@mui/material/TableCell';
import TableContainer from '@mui/material/TableContainer';
import TableHead from '@mui/material/TableHead';
import TableRow from '@mui/material/TableRow';
import Box from '@mui/material/Box';
import Button from '@mui/material/Button';
import IconButton from '@mui/material/IconButton';
@ -11,17 +22,50 @@ import { ExpandMore } from './ExpandMore';
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
import CheckIcon from '@mui/icons-material/Check';
import { MessageData, MessageMeta } from './MessageMeta';
import { ChatBubble } from './ChatBubble';
import { StyledMarkdown } from './StyledMarkdown';
import { Tooltip } from '@mui/material';
import { VectorVisualizer } from './VectorVisualizer';
import { SeverityType } from './Snack';
type MessageRoles = 'info' | 'user' | 'assistant' | 'system';
type MessageData = {
role: MessageRoles,
content: string,
user?: string,
type?: string,
id?: string,
isProcessing?: boolean,
metadata?: MessageMetaProps
};
interface MessageMetaProps {
query?: {
query_embedding: number[];
vector_embedding: number[];
},
rag: any,
tools: any[],
eval_count: number,
eval_duration: number,
prompt_eval_count: number,
prompt_eval_duration: number,
sessionId?: string,
connectionBase: string,
setSnack: (message: string, severity: SeverityType) => void,
}
type MessageList = MessageData[];
interface MessageInterface {
interface MessageProps {
message?: MessageData,
isFullWidth?: boolean,
submitQuery?: (text: string) => void
submitQuery?: (text: string) => void,
sessionId?: string,
connectionBase: string,
setSnack: (message: string, severity: SeverityType) => void,
};
interface ChatQueryInterface {
@ -29,18 +73,135 @@ interface ChatQueryInterface {
submitQuery?: (text: string) => void
}
const MessageMeta = ({ ...props }: MessageMetaProps) => {
return (<>
<Box sx={{ fontSize: "0.8rem", mb: 1 }}>
Below are the LLM performance statistics for this query. Note that if tools are called, the
entire context is processed for each separate tool request by the LLM. This
can dramatically increase the total response time.
</Box>
<TableContainer component={Card} className="PromptStats" sx={{ mb: 1 }}>
<Table aria-label="prompt stats" size="small">
<TableHead>
<TableRow>
<TableCell></TableCell>
<TableCell align="right" >Tokens</TableCell>
<TableCell align="right">Time (s)</TableCell>
<TableCell align="right">TPS</TableCell>
</TableRow>
</TableHead>
<TableBody>
<TableRow key="prompt" sx={{ '&:last-child td, &:last-child th': { border: 0 } }}>
<TableCell component="th" scope="row">Prompt</TableCell>
<TableCell align="right">{props.prompt_eval_count}</TableCell>
<TableCell align="right">{Math.round(props.prompt_eval_duration / 10 ** 7) / 100}</TableCell>
<TableCell align="right">{Math.round(props.prompt_eval_count * 10 ** 9 / props.prompt_eval_duration)}</TableCell>
</TableRow>
<TableRow key="response" sx={{ '&:last-child td, &:last-child th': { border: 0 } }}>
<TableCell component="th" scope="row">Response</TableCell>
<TableCell align="right">{props.eval_count}</TableCell>
<TableCell align="right">{Math.round(props.eval_duration / 10 ** 7) / 100}</TableCell>
<TableCell align="right">{Math.round(props.eval_count * 10 ** 9 / props.eval_duration)}</TableCell>
</TableRow>
<TableRow key="total" sx={{ '&:last-child td, &:last-child th': { border: 0 } }}>
<TableCell component="th" scope="row">Total</TableCell>
<TableCell align="right">{props.prompt_eval_count + props.eval_count}</TableCell>
<TableCell align="right">{Math.round((props.prompt_eval_duration + props.eval_duration) / 10 ** 7) / 100}</TableCell>
<TableCell align="right">{Math.round((props.prompt_eval_count + props.eval_count) * 10 ** 9 / (props.prompt_eval_duration + props.eval_duration))}</TableCell>
</TableRow>
</TableBody>
</Table>
</TableContainer>
{
props.tools !== undefined && props.tools.length !== 0 &&
<Accordion sx={{ boxSizing: "border-box" }}>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>
Tools queried
</Box>
</AccordionSummary>
<AccordionDetails>
{props.tools.map((tool: any, index: number) => <Box key={index}>
{index !== 0 && <Divider />}
<Box sx={{ fontSize: "0.75rem", display: "flex", flexDirection: "column", mt: 0.5 }}>
<div style={{ display: "flex", paddingRight: "1rem", whiteSpace: "nowrap" }}>
{tool.tool}
</div>
<div style={{
display: "flex",
padding: "3px",
whiteSpace: "pre-wrap",
flexGrow: 1,
border: "1px solid #E0E0E0",
wordBreak: "break-all",
maxHeight: "5rem",
overflow: "auto"
}}>
{JSON.stringify(tool.result, null, 2)}
</div>
</Box>
</Box>)}
</AccordionDetails>
</Accordion>
}
{
props?.rag?.name !== undefined && <>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>
Top RAG {props.rag.ids.length} matches from '{props.rag.name}' collection against embedding vector of {props.rag.query_embedding.length} dimensions
</Box>
</AccordionSummary>
<AccordionDetails>
{props.rag.ids.map((id: number, index: number) => <Box key={index}>
{index !== 0 && <Divider />}
<Box sx={{ fontSize: "0.75rem", display: "flex", flexDirection: "row", mb: 0.5, mt: 0.5 }}>
<div style={{ display: "flex", flexDirection: "column", paddingRight: "1rem", minWidth: "10rem" }}>
<div style={{ whiteSpace: "nowrap" }}>Doc ID: {props.rag.ids[index].slice(-10)}</div>
<div style={{ whiteSpace: "nowrap" }}>Similarity: {Math.round(props.rag.distances[index] * 100) / 100}</div>
<div style={{ whiteSpace: "nowrap" }}>Type: {props.rag.metadatas[index].doc_type}</div>
<div style={{ whiteSpace: "nowrap" }}>Chunk Len: {props.rag.documents[index].length}</div>
</div>
<div style={{ display: "flex", padding: "3px", flexGrow: 1, border: "1px solid #E0E0E0", maxHeight: "5rem", overflow: "auto" }}>{props.rag.documents[index]}</div>
</Box>
</Box>
)}
</AccordionDetails>
</Accordion>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>
UMAP Vector Visualization of RAG
</Box>
</AccordionSummary>
<AccordionDetails>
<VectorVisualizer inline {...props} rag={props?.rag} />
</AccordionDetails>
</Accordion>
</>
}
</>
);
};
const ChatQuery = ({ text, submitQuery }: ChatQueryInterface) => {
return (submitQuery
? <Button variant="outlined" sx={{
if (submitQuery === undefined) {
return (<Box>{text}</Box>);
}
return (
<Button variant="outlined" sx={{
color: theme => theme.palette.custom.highlight, // Golden Ochre (#D4A017)
borderColor: theme => theme.palette.custom.highlight,
m: 1
}}
size="small" onClick={(e: any) => { console.log(text); submitQuery(text); }}>{text}</Button>
: <Box>{text}</Box>);
size="small" onClick={(e: any) => { console.log(text); submitQuery(text); }}>
{text}
</Button>
);
}
const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
const Message = ({ message, submitQuery, isFullWidth, sessionId, setSnack, connectionBase }: MessageProps) => {
const [expanded, setExpanded] = useState<boolean>(false);
const [copied, setCopied] = useState(false);
const textFieldRef = useRef(null);
@ -72,32 +233,31 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
const formattedContent = message.content.trim();
return (
<ChatBubble className="Message" isFullWidth={isFullWidth} role={message.role} sx={{ flexGrow: 1, pb: message.metadata ? 0 : "8px", m: 0, mb: 1, mt: 1, overflowX: "auto" }}>
<ChatBubble className="Message" isFullWidth={isFullWidth} role={message.role} sx={{ pb: message.metadata ? 0 : "8px", m: 0, mb: 1, mt: 1, overflowX: "auto" }}>
<CardContent ref={textFieldRef} sx={{ position: "relative", display: "flex", flexDirection: "column", overflowX: "auto" }}>
<Tooltip title="Copy to clipboard" placement="top" arrow>
<IconButton
onClick={handleCopy}
sx={{
position: 'absolute',
top: 8,
right: 8,
width: 24,
height: 24,
bgcolor: 'background.paper',
'&:hover': { bgcolor: 'action.hover' },
}}
size="small"
color={copied ? "success" : "default"}
>
<IconButton
onClick={handleCopy}
sx={{
position: 'absolute',
top: 0,
right: 0,
width: 24,
height: 24,
bgcolor: 'background.paper',
'&:hover': { bgcolor: 'action.hover' },
}}
size="small"
color={copied ? "success" : "default"}
>
{copied ? <CheckIcon sx={{ width: 16, height: 16 }} /> : <ContentCopyIcon sx={{ width: 16, height: 16 }} />}
</IconButton>
</IconButton>
</Tooltip>
{message.role !== 'user' ?
<StyledMarkdown
className="MessageContent"
sx={{ display: "flex", color: 'text.secondary' }}
{...{ content: formattedContent, submitQuery }} />
:
<Typography
@ -110,8 +270,8 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
}
</CardContent>
{message.metadata && <>
<CardActions disableSpacing>
<Typography sx={{ color: "darkgrey", p: 1, textAlign: "end", flexGrow: 1 }}>LLM information for this query</Typography>
<CardActions disableSpacing sx={{ justifySelf: "flex-end" }}>
<Button variant="text" onClick={handleExpandClick} sx={{ color: "darkgrey", p: 1, flexGrow: 0 }}>LLM information for this query</Button>
<ExpandMore
expand={expanded}
onClick={handleExpandClick}
@ -123,7 +283,7 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
</CardActions>
<Collapse in={expanded} timeout="auto" unmountOnExit>
<CardContent>
<MessageMeta metadata={message.metadata} />
<MessageMeta {...{ ...message.metadata, sessionId, connectionBase, setSnack }} />
</CardContent>
</Collapse>
</>}
@ -132,12 +292,17 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
};
export type {
MessageInterface,
MessageProps,
MessageList,
ChatQueryInterface,
MessageMetaProps,
MessageData,
MessageRoles
};
export {
Message,
ChatQuery,
MessageMeta
};
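
One note on the MessageMeta table above: the divisors imply the *_duration fields arrive in nanoseconds (as Ollama-style eval_duration values do). A small sketch of the same conversions under that assumption:

// Sketch of the unit conversions used in the MessageMeta table, assuming the
// *_duration fields are nanoseconds (Ollama-style eval_duration values).
const toSeconds = (durationNs: number): number =>
  Math.round(durationNs / 10 ** 7) / 100;       // ns -> seconds, two decimal places

const tokensPerSecond = (tokens: number, durationNs: number): number =>
  Math.round((tokens * 10 ** 9) / durationNs);  // tokens per second, durations in ns

// Example: 512 prompt tokens evaluated in 1.28e9 ns (1.28 s) -> 400 tokens/s
console.log(toSeconds(1.28e9), tokensPerSecond(512, 1.28e9));  // 1.28 400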

View File

@ -1,138 +0,0 @@
//import React, { useState, useEffect, useRef, useCallback, ReactElement } from 'react';
import Divider from '@mui/material/Divider';
import Accordion from '@mui/material/Accordion';
import AccordionSummary from '@mui/material/AccordionSummary';
import AccordionDetails from '@mui/material/AccordionDetails';
import Box from '@mui/material/Box';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import Card from '@mui/material/Card';
import Table from '@mui/material/Table';
import TableBody from '@mui/material/TableBody';
import TableCell from '@mui/material/TableCell';
import TableContainer from '@mui/material/TableContainer';
import TableHead from '@mui/material/TableHead';
import TableRow from '@mui/material/TableRow';
type MessageMetadata = {
rag: any,
tools: any[],
eval_count: number,
eval_duration: number,
prompt_eval_count: number,
prompt_eval_duration: number
};
type MessageRoles = 'info' | 'user' | 'assistant' | 'system';
type MessageData = {
role: MessageRoles,
content: string,
user?: string,
type?: string,
id?: string,
isProcessing?: boolean,
metadata?: MessageMetadata
};
interface MessageMetaInterface {
metadata: MessageMetadata
}
const MessageMeta = ({ metadata }: MessageMetaInterface) => {
if (metadata === undefined) {
return <></>
}
return (<>
<Box sx={{ fontSize: "0.8rem", mb: 1 }}>
Below is the LLM performance of this query. Note that if tools are called, the entire context is processed for each separate tool request by the LLM. This can dramatically increase the total time for a response.
</Box>
<TableContainer component={Card} className="PromptStats" sx={{ mb: 1 }}>
<Table aria-label="prompt stats" size="small">
<TableHead>
<TableRow>
<TableCell></TableCell>
<TableCell align="right" >Tokens</TableCell>
<TableCell align="right">Time (s)</TableCell>
<TableCell align="right">TPS</TableCell>
</TableRow>
</TableHead>
<TableBody>
<TableRow key="prompt" sx={{ '&:last-child td, &:last-child th': { border: 0 } }}>
<TableCell component="th" scope="row">Prompt</TableCell>
<TableCell align="right">{metadata.prompt_eval_count}</TableCell>
<TableCell align="right">{Math.round(metadata.prompt_eval_duration / 10 ** 7) / 100}</TableCell>
<TableCell align="right">{Math.round(metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration)}</TableCell>
</TableRow>
<TableRow key="response" sx={{ '&:last-child td, &:last-child th': { border: 0 } }}>
<TableCell component="th" scope="row">Response</TableCell>
<TableCell align="right">{metadata.eval_count}</TableCell>
<TableCell align="right">{Math.round(metadata.eval_duration / 10 ** 7) / 100}</TableCell>
<TableCell align="right">{Math.round(metadata.eval_count * 10 ** 9 / metadata.eval_duration)}</TableCell>
</TableRow>
<TableRow key="total" sx={{ '&:last-child td, &:last-child th': { border: 0 } }}>
<TableCell component="th" scope="row">Total</TableCell>
<TableCell align="right">{metadata.prompt_eval_count + metadata.eval_count}</TableCell>
<TableCell align="right">{Math.round((metadata.prompt_eval_duration + metadata.eval_duration) / 10 ** 7) / 100}</TableCell>
<TableCell align="right">{Math.round((metadata.prompt_eval_count + metadata.eval_count) * 10 ** 9 / (metadata.prompt_eval_duration + metadata.eval_duration))}</TableCell>
</TableRow>
</TableBody>
</Table>
</TableContainer>
{
metadata.tools !== undefined && metadata.tools.length !== 0 &&
<Accordion sx={{ boxSizing: "border-box" }}>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>
Tools queried
</Box>
</AccordionSummary>
<AccordionDetails>
{metadata.tools.map((tool: any, index: number) => <Box key={index}>
{index !== 0 && <Divider />}
<Box sx={{ fontSize: "0.75rem", display: "flex", flexDirection: "column", mt: 0.5 }}>
<div style={{ display: "flex", paddingRight: "1rem", whiteSpace: "nowrap" }}>
{tool.tool}
</div>
<div style={{ display: "flex", padding: "3px", whiteSpace: "pre-wrap", flexGrow: 1, border: "1px solid #E0E0E0", wordBreak: "break-all", maxHeight: "5rem", overflow: "auto" }}>{JSON.stringify(tool.result, null, 2)}</div>
</Box>
</Box>)}
</AccordionDetails>
</Accordion>
}
{
metadata?.rag?.name !== undefined &&
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>
Top RAG {metadata.rag.ids.length} matches from '{metadata.rag.name}' collection against embedding vector of {metadata.rag.query_embedding.length} dimensions
</Box>
</AccordionSummary>
<AccordionDetails>
{metadata.rag.ids.map((id: number, index: number) => <Box key={index}>
{index !== 0 && <Divider />}
<Box sx={{ fontSize: "0.75rem", display: "flex", flexDirection: "row", mb: 0.5, mt: 0.5 }}>
<div style={{ display: "flex", flexDirection: "column", paddingRight: "1rem", minWidth: "10rem" }}>
<div style={{ whiteSpace: "nowrap" }}>Doc ID: {metadata.rag.ids[index].slice(-10)}</div>
<div style={{ whiteSpace: "nowrap" }}>Similarity: {Math.round(metadata.rag.distances[index] * 100) / 100}</div>
<div style={{ whiteSpace: "nowrap" }}>Type: {metadata.rag.metadatas[index].doc_type}</div>
<div style={{ whiteSpace: "nowrap" }}>Chunk Len: {metadata.rag.documents[index].length}</div>
</div>
<div style={{ display: "flex", padding: "3px", flexGrow: 1, border: "1px solid #E0E0E0", maxHeight: "5rem", overflow: "auto" }}>{metadata.rag.documents[index]}</div>
</Box>
</Box>
)}
</AccordionDetails>
</Accordion>
}
</>
);
};
export type {
MessageMetadata,
MessageMetaInterface,
MessageData,
MessageRoles,
};
export { MessageMeta };

View File

@ -2,7 +2,7 @@ import { useState, useCallback, useEffect } from 'react';
import Box from '@mui/material/Box';
import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
import { MessageData, MessageMetadata } from './MessageMeta';
import { MessageData, MessageMetaProps } from './Message';
import { DocumentViewer } from './DocumentViewer';
interface ResumeBuilderProps {
@ -21,7 +21,7 @@ type Resume = {
resume: MessageData | undefined,
fact_check: MessageData | undefined,
job_description: string,
metadata: MessageMetadata
metadata: MessageMetaProps
};
const ResumeBuilder = ({ facts, setFacts, resume, setResume, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
@ -350,7 +350,7 @@ const ResumeBuilder = ({ facts, setFacts, resume, setResume, setProcessing, proc
overflowY: "auto",
flexDirection: "column",
height: "calc(0vh - 0px)", // Hack to make the height work
}} {...{ factCheck, facts, jobDescription, generateResume, resume, setFacts, setResume, setJobDescription }} />
}} {...{ factCheck, facts, jobDescription, generateResume, resume, setFacts, setResume, setSnack, setJobDescription, connectionBase, sessionId }} />
</Box>
</Box>
);

View File

@ -7,6 +7,8 @@ import TextField from '@mui/material/TextField';
import Tooltip from '@mui/material/Tooltip';
import Button from '@mui/material/Button';
import SendIcon from '@mui/icons-material/Send';
import FormControlLabel from '@mui/material/FormControlLabel';
import Switch from '@mui/material/Switch';
import { SeverityType } from './Snack';
@ -19,6 +21,8 @@ interface ResultData {
embeddings: number[][] | number[][][];
documents: string[];
metadatas: Metadata[];
ids: string[];
dimensions: number;
}
interface PlotData {
@ -39,6 +43,8 @@ interface VectorVisualizerProps {
connectionBase: string;
sessionId?: string;
setSnack: (message: string, severity: SeverityType) => void;
inline?: boolean;
rag?: any;
}
interface ChromaResult {
@ -48,7 +54,8 @@ interface ChromaResult {
metadatas: Metadata[];
query_embedding: number[];
query?: string;
vector_embedding?: number[];
umap_embedding_2d?: number[];
umap_embedding_3d?: number[];
}
const normalizeDimension = (arr: number[]): number[] => {
@ -89,11 +96,12 @@ const symbolMap: Record<string, string> = {
'query': 'circle',
};
const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectionBase, sessionId }) => {
const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, rag, inline, connectionBase, sessionId }) => {
const [plotData, setPlotData] = useState<PlotData | null>(null);
const [query, setQuery] = useState<string>('');
const [queryEmbedding, setQueryEmbedding] = useState<ChromaResult | undefined>(undefined);
const [newQuery, setNewQuery] = useState<string>('');
const [newQueryEmbedding, setNewQueryEmbedding] = useState<ChromaResult | undefined>(undefined);
const [result, setResult] = useState<ResultData | undefined>(undefined);
const [view2D, setView2D] = useState<boolean>(true);
const [tooltip, setTooltip] = useState<{
visible: boolean,
// x: number,
@ -105,7 +113,7 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
// Get the collection to visualize
useEffect(() => {
if (result !== undefined || sessionId === undefined) {
if ((result !== undefined && result.dimensions !== (view2D ? 3 : 2)) || sessionId === undefined) {
return;
}
const fetchCollection = async () => {
@ -115,9 +123,10 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ dimensions: 3 }),
body: JSON.stringify({ dimensions: view2D ? 2 : 3 }),
});
const data = await response.json();
const data: ResultData = await response.json();
data.dimensions = view2D ? 2 : 3;
setResult(data);
} catch (error) {
console.error('Error obtaining collection information:', error);
@ -126,7 +135,7 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
};
fetchCollection();
}, [result, setResult, connectionBase, setSnack, sessionId])
}, [result, setResult, connectionBase, setSnack, sessionId, view2D])
useEffect(() => {
if (!result || !result.embeddings) return;
@ -135,12 +144,31 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
const vectors: number[][] = [...result.embeddings as number[][]];
const documents = [...result.documents || []];
const metadatas = [...result.metadatas || []];
const ids = [...result.ids || []];
if (queryEmbedding !== undefined && queryEmbedding.vector_embedding !== undefined) {
if (view2D && rag && rag.umap_embedding_2d) {
metadatas.unshift({ doc_type: 'query' });
documents.unshift(queryEmbedding.query || '');
vectors.unshift(queryEmbedding.vector_embedding);
documents.unshift('Query');
vectors.unshift(rag.umap_embedding_2d);
}
if (!view2D && rag && rag.umap_embedding_3d) {
metadatas.unshift({ doc_type: 'query' });
documents.unshift('Query');
vectors.unshift(rag.umap_embedding_3d);
}
if (newQueryEmbedding !== undefined) {
metadatas.unshift({ doc_type: 'query' });
documents.unshift(newQueryEmbedding.query || '');
if (view2D && newQueryEmbedding.umap_embedding_2d) {
vectors.unshift(newQueryEmbedding.umap_embedding_2d);
}
if (!view2D && newQueryEmbedding.umap_embedding_3d) {
vectors.unshift(newQueryEmbedding.umap_embedding_3d);
}
}
const is2D = vectors.every((v: number[]) => v.length === 2);
const is3D = vectors.every((v: number[]) => v.length === 3);
if (!is2D && !is3D) {
@ -148,11 +176,19 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
return;
}
const doc_types = metadatas.map(m => m.doc_type || 'unknown');
const sizes = doc_types.map(type => {
const doc_types = metadatas.map(m => m.doc_type || 'unknown')
const sizes = doc_types.map((type, index) => {
if (!sizeMap[type]) {
sizeMap[type] = 5;
}
/* If this is a match, increase the size */
if (rag && rag.ids.includes(ids[index])) {
return sizeMap[type] + 5;
}
if (newQueryEmbedding && newQueryEmbedding.ids && newQueryEmbedding.ids.includes(ids[index])) {
return sizeMap[type] + 5;
}
return sizeMap[type];
});
const symbols = doc_types.map(type => {
@ -189,7 +225,7 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
},
xaxis: { title: 'X', gridcolor: '#cccccc', zerolinecolor: '#aaaaaa' },
yaxis: { title: 'Y', gridcolor: '#cccccc', zerolinecolor: '#aaaaaa' },
margin: { r: 20, b: 10, l: 10, t: 40 },
margin: { r: 0, b: 0, l: 0, t: 0 },
};
const data: any = {
@ -212,17 +248,23 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
setPlotData({ data, layout });
}, [result, queryEmbedding]);
}, [result, newQueryEmbedding, rag, view2D, setPlotData, setSnack]);
if (setSnack === undefined) {
console.error('setSnack function is undefined');
return null;
}
const handleKeyPress = (event: any) => {
if (event.key === 'Enter') {
sendQuery(query);
sendQuery(newQuery);
}
};
const sendQuery = async (query: string) => {
if (!query.trim()) return;
setQuery('');
setNewQuery('');
try {
const response = await fetch(`${connectionBase}/api/similarity/${sessionId}`, {
method: 'PUT',
@ -231,11 +273,11 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
},
body: JSON.stringify({
query: query,
dimensions: view2D ? 2 : 3,
})
});
const chroma: ChromaResult = await response.json();
console.log('Chroma:', chroma);
setQueryEmbedding(chroma);
setNewQueryEmbedding(chroma);
} catch (error) {
console.error('Error obtaining query similarity information:', error);
setSnack("Unable to obtain query similarity information.", "error");
@ -249,86 +291,93 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
);
return (
<>
<Card sx={{ display: 'flex', flexDirection: 'column', justifyContent: 'center', alignItems: 'center', mb: 1, pt: 0 }}>
<Typography variant="h6" sx={{ p: 1, pt: 0 }}>
Similarity Visualization via Uniform Manifold Approximation and Projection (UMAP)
</Typography>
</Card>
<Box sx={{ display: 'flex', flexGrow: 1, justifyContent: 'center', alignItems: 'center' }}>
<Plot
onClick={(event: any) => {
const point = event.points[0];
console.log('Point:', point);
const type = point.customdata.type;
const text = point.customdata.doc;
const emoji = emojiMap[type] || '❓';
setTooltip({
visible: true,
background: point['marker.color'],
color: getTextColorForBackground(point['marker.color']),
content: `${emoji} ${type.toUpperCase()}\n${text}`,
});
}}
<Box className="VectorVisualizer" sx={{ display: 'flex', flexDirection: 'column', flexGrow: 1, overflow: 'hidden' }}>
{
!inline &&
<Card sx={{ display: 'flex', flexDirection: 'column', justifyContent: 'center', alignItems: 'center', mb: 1, pt: 0 }}>
<Typography variant="h6" sx={{ p: 1, pt: 0 }}>
Similarity Visualization via Uniform Manifold Approximation and Projection (UMAP)
</Typography>
</Card>
}
<FormControlLabel sx={{ display: "inline-flex", width: "fit-content", mb: '-2.5rem', zIndex: 100, ml: 1, flexBasis: 0, flexGrow: 0 }} control={<Switch checked={!view2D} />} onChange={() => setView2D(!view2D)} label="3D" />
<Plot
onClick={(event: any) => {
const point = event.points[0];
console.log('Point:', point);
const type = point.customdata.type;
const text = point.customdata.doc;
const emoji = emojiMap[type] || '❓';
setTooltip({
visible: true,
background: point['marker.color'],
color: getTextColorForBackground(point['marker.color']),
content: `${emoji} ${type.toUpperCase()}\n${text}`,
});
}}
data={[plotData.data]}
useResizeHandler={true}
config={{
responsive: true,
displayModeBar: false,
displaylogo: false,
showSendToCloud: false,
staticPlot: false,
}}
style={{ width: '100%', height: '100%' }}
layout={plotData.layout}
data={[plotData.data]}
useResizeHandler={true}
config={{
responsive: true,
// displayModeBar: false,
displaylogo: false,
showSendToCloud: false,
staticPlot: false,
}}
style={{ display: "flex", flexGrow: 1, justifyContent: 'center', alignItems: 'center', minHeight: '30vh', height: '30vh', padding: 0, margin: 0 }}
layout={plotData.layout}
/>
</Box>
<Card sx={{
display: 'flex',
flexDirection: 'column',
flexGrow: 1,
mt: 1,
p: 0.5,
color: tooltip?.color || '#2E2E2E',
background: tooltip?.background || '#FFFFFF',
whiteSpace: 'pre-line',
zIndex: 1000,
overflow: 'auto',
maxHeight: '20vh',
minHeight: '20vh',
overflowWrap: 'break-all',
wordBreak: 'break-all',
}}
>
<Typography variant="body2" sx={{ p: 1, pt: 0 }}>
{tooltip?.content}
</Typography>
</Card>
{ queryEmbedding !== undefined &&
{!inline &&
<Card sx={{
display: 'flex',
flexDirection: 'column',
flexGrow: 1,
mt: 1,
p: 0.5,
color: tooltip?.color || '#2E2E2E',
background: tooltip?.background || '#FFFFFF',
whiteSpace: 'pre-line',
zIndex: 1000,
overflow: 'auto',
maxHeight: '20vh',
minHeight: '20vh',
overflowWrap: 'break-all',
wordBreak: 'break-all',
}}
>
<Typography variant="body2" sx={{ p: 1, pt: 0 }}>
{tooltip?.content}
</Typography>
</Card>
}
{!inline && newQueryEmbedding !== undefined &&
<Card sx={{ display: 'flex', flexDirection: 'column', justifyContent: 'center', alignItems: 'center', mt: 1, pb: 0 }}>
<Typography variant="h6" sx={{ p: 1, pt: 0, maxHeight: '5rem', overflow: 'auto' }}>
Query: {queryEmbedding.query}
Query: {newQueryEmbedding.query}
</Typography>
</Card>
}
<Box className="Query" sx={{ display: "flex", flexDirection: "row", p: 1 }}>
<TextField
variant="outlined"
fullWidth
type="text"
value={query}
onChange={(e) => setQuery(e.target.value)}
onKeyDown={handleKeyPress}
placeholder="Enter query to find related documents..."
id="QueryInput"
/>
<Tooltip title="Send">
<Button sx={{ m: 1 }} variant="contained" onClick={() => { sendQuery(query); }}><SendIcon /></Button>
</Tooltip>
</Box>
</>
{
!inline &&
<Box className="Query" sx={{ display: "flex", flexDirection: "row", p: 1 }}>
<TextField
variant="outlined"
fullWidth
type="text"
value={newQuery}
onChange={(e) => setNewQuery(e.target.value)}
onKeyDown={handleKeyPress}
placeholder="Enter query to find related documents..."
id="QueryInput"
/>
<Tooltip title="Send">
<Button sx={{ m: 1 }} variant="contained" onClick={() => { sendQuery(newQuery); }}><SendIcon /></Button>
</Tooltip>
</Box>
}
</Box>
);
};

View File

@ -515,12 +515,16 @@ class WebServer:
dimensions = 2
try:
result = self.file_watcher.collection.get(include=["embeddings", "documents", "metadatas"])
vectors = np.array(result["embeddings"])
umap_model = umap.UMAP(n_components=dimensions, random_state=42) #, n_neighbors=15, min_dist=0.1)
embedding = umap_model.fit_transform(vectors)
context["umap_model"] = umap_model
result["embeddings"] = embedding.tolist()
result = self.file_watcher.umap_collection
if dimensions == 2:
logging.info("Returning 2D UMAP")
umap_embedding = self.file_watcher.umap_embedding_2d
else:
logging.info("Returning 3D UMAP")
umap_embedding = self.file_watcher.umap_embedding_3d
result["embeddings"] = umap_embedding.tolist()
return JSONResponse(result)
except Exception as e:
@ -536,10 +540,6 @@ class WebServer:
logging.warning(f"Invalid context_id: {context_id}")
return JSONResponse({"error": "Invalid context_id"}, status_code=400)
context = self.upsert_context(context_id)
if not context.get("umap_model"):
return JSONResponse({"error": "No umap_model found in context"}, status_code=404)
try:
data = await request.json()
query = data.get("query", "")
@ -552,9 +552,15 @@ class WebServer:
chroma_results = self.file_watcher.find_similar(query=query, top_k=10)
if not chroma_results:
return JSONResponse({"error": "No results found"}, status_code=404)
chroma_embedding = chroma_results["query_embedding"]
umap_embedding = context["umap_model"].transform([chroma_embedding])[0].tolist()
return JSONResponse({ **chroma_results, "query": query, "vector_embedding": umap_embedding })
chroma_embedding = chroma_results["query_embedding"]
return JSONResponse({
**chroma_results,
"query": query,
"umap_embedding_2d": self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist(),
"umap_embedding_3d": self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
})
except Exception as e:
logging.error(e)
@ -839,8 +845,7 @@ class WebServer:
# Serialize the data to JSON and write to file
with open(file_path, "w") as f:
json.dump(context, f)
if umap_model:
context["umap_model"] = umap_model
return session_id
def load_context(self, session_id):
@ -894,7 +899,6 @@ class WebServer:
logging.warning("No context ID provided. Creating a new context.")
return self.create_context()
if context_id in self.contexts:
logging.info(f"Context {context_id} found.")
return self.contexts[context_id]
logging.info(f"Context {context_id} not found. Creating new context.")
return self.load_context(context_id)
@ -931,7 +935,13 @@ class WebServer:
chroma_results = self.file_watcher.find_similar(query=content, top_k=10)
if chroma_results:
rag_docs.extend(chroma_results["documents"])
metadata["rag"] = { "name": rag["name"], **chroma_results }
chroma_embedding = chroma_results["query_embedding"]
metadata["rag"] = {
**chroma_results,
"name": rag["name"],
"umap_embedding_2d": self.file_watcher.umap_model_2d.transform([chroma_embedding])[0].tolist(),
"umap_embedding_3d": self.file_watcher.umap_model_3d.transform([chroma_embedding])[0].tolist()
}
preamble = ""
if len(rag_docs):
preamble = f"""

View File

@ -19,6 +19,7 @@ from langchain.text_splitter import CharacterTextSplitter
from langchain.schema import Document
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import umap
# Import your existing modules
if __name__ == "__main__":
@ -52,7 +53,8 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
# Initialize ChromaDB collection
self._collection = self._get_vector_collection(recreate=recreate)
self._update_umaps()
# Setup text splitter
self.text_splitter = CharacterTextSplitter(
chunk_size=chunk_size,
@ -67,7 +69,27 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
@property
def collection(self):
return self._collection
@property
def umap_collection(self):
return self._umap_collection
@property
def umap_embedding_2d(self):
return self._umap_embedding_2d
@property
def umap_embedding_3d(self):
return self._umap_embedding_3d
@property
def umap_model_2d(self):
return self._umap_model_2d
@property
def umap_model_3d(self):
return self._umap_model_3d
def _save_hash_state(self):
"""Save the current file hash state to disk."""
try:
@ -184,7 +206,10 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
# Save the hash state after successful update
self._save_hash_state()
# Re-fit the UMAP for the new content
self._update_umaps()
except Exception as e:
logging.error(f"Error processing update for {file_path}: {e}")
finally:
@ -212,6 +237,23 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
except Exception as e:
logging.error(f"Error removing file from collection: {e}")
def _update_umaps(self):
# Update the UMAP embeddings
self._umap_collection = self._collection.get(include=["embeddings", "documents", "metadatas"])
if not self._umap_collection or not len(self._umap_collection["embeddings"]):
logging.warning("No embeddings found in the collection.")
return
logging.info(f"Updating 2D UMAP for {len(self._umap_collection['embeddings'])} vectors")
vectors = np.array(self._umap_collection["embeddings"])
self._umap_model_2d = umap.UMAP(n_components=2, random_state=8911, metric="cosine") #, n_neighbors=15, min_dist=0.1)
self._umap_embedding_2d = self._umap_model_2d.fit_transform(vectors)
logging.info(f"Updating 3D UMAP for {len(self._umap_collection['embeddings'])} vectors")
vectors = np.array(self._umap_collection["embeddings"])
self._umap_model_3d = umap.UMAP(n_components=3, random_state=8911, metric="cosine") #, n_neighbors=15, min_dist=0.1)
self._umap_embedding_3d = self._umap_model_3d.fit_transform(vectors)
def _get_vector_collection(self, recreate=False):
"""Get or create a ChromaDB collection."""
# Initialize ChromaDB client
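
For reference, a standalone sketch of the fit-once / transform-per-query UMAP pattern that _update_umaps and the /api/similarity handler above rely on; random vectors stand in for the ChromaDB embeddings, and only the umap-learn calls mirror the code above:

# Standalone sketch (not part of the commit) of the fit-once / transform-per-query
# UMAP pattern used above; random vectors stand in for the ChromaDB embeddings.
import numpy as np
import umap

corpus = np.random.rand(200, 384)   # stand-in for the collection's embedding vectors
query = np.random.rand(1, 384)      # stand-in for a single query embedding

# Fit the reducers once whenever the collection changes (as _update_umaps does).
umap_model_2d = umap.UMAP(n_components=2, random_state=8911, metric="cosine")
umap_model_3d = umap.UMAP(n_components=3, random_state=8911, metric="cosine")
umap_embedding_2d = umap_model_2d.fit_transform(corpus)   # shape (200, 2)
umap_embedding_3d = umap_model_3d.fit_transform(corpus)   # shape (200, 3)

# Project new query embeddings into the already-fitted spaces per request.
print(umap_model_2d.transform(query)[0].tolist())   # [x, y]
print(umap_model_3d.transform(query)[0].tolist())   # [x, y, z]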