Added Fact Check

This commit is contained in:
James Ketr 2025-04-10 14:48:26 -07:00
parent 7e9effa18c
commit c00f3068fa
13 changed files with 308 additions and 71 deletions

View File

@ -258,6 +258,8 @@ SHELL [ "/bin/bash", "-c" ]
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y \
libncurses6 \
rsync \
jq \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

2
frontend/.gitignore vendored
View File

@ -1,4 +1,6 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
deployed
build
# dependencies
/node_modules

9
frontend/build.sh Executable file
View File

@ -0,0 +1,9 @@
#!/bin/bash
# Build the frontend bundle and mirror the result into deployed/.

# Print the given message on stderr and abort the script.
fail() {
    echo "$*" >&2
    exit 1
}

if ! npm run build; then
    fail "Unable to build frontend"
fi

# Copy the built files to the deployed directory
if ! rsync -avprl --delete build/ deployed/; then
    fail "Unable to copy built files to deployed directory"
fi

0
frontend/deployed/.keep Normal file
View File

View File

@ -1,11 +1,11 @@
{
"name": "airc",
"name": "backstory",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "airc",
"name": "backstory",
"version": "0.1.0",
"dependencies": {
"@emotion/react": "^11.14.0",

View File

@ -1,5 +1,5 @@
{
"name": "airc",
"name": "backstory",
"version": "0.1.0",
"private": true,
"dependencies": {

View File

@ -33,6 +33,7 @@ import PropagateLoader from "react-spinners/PropagateLoader";
import { ResumeBuilder } from './ResumeBuilder';
import { Message, MessageList } from './Message';
import { MessageData } from './MessageMeta';
import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
@ -43,11 +44,12 @@ import '@fontsource/roboto/300.css';
import '@fontsource/roboto/400.css';
import '@fontsource/roboto/500.css';
import '@fontsource/roboto/700.css';
import { MessageMetadata } from './MessageMeta';
const welcomeMarkdown = `
# Welcome to Backstory
Backstory was originally written by James Ketrenos in order to provide answers to questions potential employers may have about his work history. Now, you can deploy your own instance, host, and share your own Backstory. Backstory is a RAG enabled expert system with access to real-time data running self-hosted versions of industry leading Large and Small Language Models (LLM/SLMs). You can ask things like:
Backstory was originally written by James Ketrenos in order to provide answers to questions potential employers may have about his work history. Now, you can deploy your own instance, host, and share your own Backstory. Backstory is a RAG enabled expert system with access to real-time data running self-hosted (no cloud) versions of industry leading Large and Small Language Models (LLM/SLMs). You can ask things like:
<ChatQuery text="What is James Ketrenos' work history?"/>
<ChatQuery text="What programming languages has James used?"/>
@ -59,10 +61,10 @@ You can click the text above to submit that query, or type it in yourself (or wh
As with all LLM interactions, the results may not be 100% accurate. If you have questions about my career, I'd love to hear from you. You can send me an email at **james_backstory@ketrenos.com**.
`;
const welcomeMessage = {
const welcomeMessage: MessageData = {
"role": "assistant", "content": welcomeMarkdown
};
const loadingMessage = { "role": "assistant", "content": "Instancing chat session..." };
const loadingMessage: MessageData = { "role": "assistant", "content": "Instancing chat session..." };
type Tool = {
type: string,
@ -305,6 +307,13 @@ function CustomTabPanel(props: TabPanelProps) {
);
}
// One entry of the resume history returned by GET /api/resume/{sessionId}.
type Resume = {
  // Generated resume message; may be undefined/absent before generation completes.
  resume: MessageData | undefined,
  // Fact-check message for the resume; undefined when no fact check has run.
  fact_check: MessageData | undefined,
  // The job description the resume was generated against.
  job_description: string,
  // LLM run metadata (token counts, durations) for the generation.
  metadata: MessageMetadata
};
const App = () => {
const [query, setQuery] = useState('');
const [conversation, setConversation] = useState<MessageList>([]);
@ -331,6 +340,9 @@ const App = () => {
const [messageHistoryLength, setMessageHistoryLength] = useState<number>(5);
const [tab, setTab] = useState<number>(0);
const [about, setAbout] = useState<string>("");
const [jobDescription, setJobDescription] = useState<string>("");
const [resume, setResume] = useState<MessageData | undefined>(undefined);
const [facts, setFacts] = useState<MessageData | undefined>(undefined);
const timerRef = useRef<any>(null);
const startCountdown = (seconds: number) => {
@ -563,6 +575,48 @@ const App = () => {
fetchTools();
}, [sessionId, tools, setTools, setSnack, loc]);
// If the jobDescription and resume have not been set, fetch them from the server.
// Runs once per session: skipped while sessionId is unknown, and skipped again
// once either piece of local state has been populated.
useEffect(() => {
  if (sessionId === undefined) {
    return;
  }
  // Local state already populated; nothing to fetch.
  if (jobDescription !== "" || resume !== undefined) {
    return;
  }
  const fetchResume = async () => {
    try {
      // Make the fetch request with proper headers
      const response = await fetch(getConnectionBase(loc) + `/api/resume/${sessionId}`, {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
          'Accept': 'application/json',
        },
      });
      if (!response.ok) {
        throw Error();
      }
      // The server returns the full resume history; use the most recent entry.
      const data: Resume[] = await response.json();
      if (data.length) {
        const lastResume = data[data.length - 1];
        setJobDescription(lastResume['job_description']);
        setResume(lastResume.resume);
        if (lastResume['fact_check'] !== undefined) {
          // Fact-check messages are rendered with the 'info' bubble style.
          lastResume['fact_check'].role = 'info';
          setFacts(lastResume['fact_check'])
        } else {
          setFacts(undefined)
        }
      }
    } catch (error: any) {
      setSnack("Unable to fetch resume", "error");
      console.error(error);
    }
  }
  fetchResume();
}, [sessionId, resume, jobDescription, setResume, setJobDescription, setSnack, loc]);
// If the RAGs have not been set, fetch them from the server
useEffect(() => {
if (rags.length || sessionId === undefined) {
@ -798,7 +852,7 @@ const App = () => {
setTab(0);
const userMessage = [{ role: 'user', content: query }];
const userMessage: MessageData[] = [{ role: 'user', content: query }];
let scrolledToBottom;
@ -1129,7 +1183,7 @@ const App = () => {
</CustomTabPanel>
<CustomTabPanel tab={tab} index={1}>
<ResumeBuilder {...{isScrolledToBottom, scrollToBottom, processing, setProcessing, setSnack, connectionBase: getConnectionBase(loc), sessionId }}/>
<ResumeBuilder {...{ isScrolledToBottom, scrollToBottom, facts, setFacts, resume, setResume, jobDescription, processing, setProcessing, setSnack, connectionBase: getConnectionBase(loc), sessionId }} />
</CustomTabPanel>
<CustomTabPanel tab={tab} index={2}>

View File

@ -2,47 +2,81 @@ import { Box } from '@mui/material';
import { useTheme } from '@mui/material/styles';
import { SxProps, Theme } from '@mui/material';
import React from 'react';
import { MessageRoles } from './MessageMeta';
interface ChatBubbleProps {
isUser: boolean;
role: MessageRoles,
isInfo?: boolean;
isFullWidth?: boolean;
children: React.ReactNode;
sx?: SxProps<Theme>;
}
function ChatBubble({ isUser, isFullWidth, children, sx }: ChatBubbleProps) {
function ChatBubble({ role, isFullWidth, children, sx }: ChatBubbleProps) {
const theme = useTheme();
const userStyle = {
backgroundColor: theme.palette.background.default, // Warm Gray (#D3CDBF)
border: `1px solid ${theme.palette.custom.highlight}`, // Golden Ochre (#D4A017)
borderRadius: '16px 16px 0 16px', // Rounded, flat bottom-right for user
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '80%',
alignSelf: 'flex-end', // Right-aligned for user
color: theme.palette.primary.main, // Midnight Blue (#1A2536) for text
'& > *': {
color: 'inherit', // Children inherit Midnight Blue unless overridden
const styles = {
'user': {
backgroundColor: theme.palette.background.default, // Warm Gray (#D3CDBF)
border: `1px solid ${theme.palette.custom.highlight}`, // Golden Ochre (#D4A017)
borderRadius: '16px 16px 0 16px', // Rounded, flat bottom-right for user
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '80%',
alignSelf: 'flex-end', // Right-aligned for user
color: theme.palette.primary.main, // Midnight Blue (#1A2536) for text
'& > *': {
color: 'inherit', // Children inherit Midnight Blue unless overridden
},
},
};
'assistant': {
backgroundColor: theme.palette.primary.main, // Midnight Blue (#1A2536)
border: `1px solid ${theme.palette.secondary.main}`, // Dusty Teal (#4A7A7D)
borderRadius: '16px 16px 16px 0', // Rounded, flat bottom-left for assistant
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '80%',
alignSelf: 'flex-start', // Left-aligned for assistant
color: theme.palette.primary.contrastText, // Warm Gray (#D3CDBF) for text
'& > *': {
color: 'inherit', // Children inherit Warm Gray unless overridden
},
},
'system': {
backgroundColor: '#EDEAE0', // Soft warm gray that plays nice with #D3CDBF
border: `1px dashed ${theme.palette.custom.highlight}`, // Golden Ochre
borderRadius: '12px',
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '90%',
minWidth: '60%',
alignSelf: 'center',
color: theme.palette.text.primary, // Charcoal Black
fontStyle: 'italic',
fontSize: '0.95rem',
'& > *': {
color: 'inherit',
},
},
'info': {
backgroundColor: '#BFD8D8', // Softened Dusty Teal
border: `1px solid ${theme.palette.secondary.main}`, // Dusty Teal
borderRadius: '16px',
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '95%',
minWidth: '70%',
alignSelf: 'flex-start',
color: theme.palette.text.primary, // Charcoal Black (#2E2E2E) — much better contrast
opacity: 0.95,
fontSize: '0.875rem',
'& > *': {
color: 'inherit',
},
}
const assistantStyle = {
backgroundColor: theme.palette.primary.main, // Midnight Blue (#1A2536)
border: `1px solid ${theme.palette.secondary.main}`, // Dusty Teal (#4A7A7D)
borderRadius: '16px 16px 16px 0', // Rounded, flat bottom-left for assistant
padding: theme.spacing(1, 2),
maxWidth: isFullWidth ? '100%' : '100%',
minWidth: '80%',
alignSelf: 'flex-start', // Left-aligned for assistant
color: theme.palette.primary.contrastText, // Warm Gray (#D3CDBF) for text
'& > *': {
color: 'inherit', // Children inherit Warm Gray unless overridden
},
};
return (
<Box sx={{ ...(isUser ? userStyle : assistantStyle), ...sx }}>
<Box sx={{ ...styles[role], ...sx }}>
{children}
</Box>
);

View File

@ -38,6 +38,7 @@ interface DocumentViewerProps {
factCheck: (resume: string) => void,
resume: MessageData | undefined,
facts: MessageData | undefined,
jobDescription: string,
sx?: SxProps<Theme>,
};
@ -61,8 +62,8 @@ const Document: React.FC<DocumentComponentProps> = ({ title, children }) => (
</Box>
);
const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factCheck, resume, facts, sx} : DocumentViewerProps) => {
const [jobDescription, setJobDescription] = useState<string>("");
const DocumentViewer: React.FC<DocumentViewerProps> = ({ generateResume, jobDescription, factCheck, resume, facts, sx }: DocumentViewerProps) => {
const [editJobDescription, setEditJobDescription] = useState<string>(jobDescription);
const [processing, setProcessing] = useState<boolean>(false);
const theme = useTheme();
const isMobile = useMediaQuery(theme.breakpoints.down('md'));
@ -84,6 +85,12 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
generateResume(jobDescription);
}, [setProcessing, generateResume]);
useEffect(() => {
if (facts !== undefined) {
setActiveDocMobile(2);
}
}, [facts, setActiveDocMobile]);
// Handle tab change for mobile
const handleTabChange = (_event: React.SyntheticEvent, newValue: number): void => {
setActiveDocMobile(newValue);
@ -101,7 +108,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
const handleKeyPress = (event: any) => {
if (event.key === 'Enter' && event.ctrlKey) {
triggerGeneration(jobDescription);
triggerGeneration(editJobDescription);
}
};
@ -118,6 +125,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
>
<Tab label="Job Description" />
<Tab label="Resume" />
{facts !== undefined && <Tab label="Fact Check" />}
</Tabs>
{/* Document display area */}
@ -135,16 +143,17 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
maxHeight: '100%', // Prevents it from growing larger than the parent height
overflow: 'auto', // Enables scrollbars if the content overflows
}}
value={jobDescription}
onChange={(e) => setJobDescription(e.target.value)}
value={editJobDescription}
onChange={(e) => setEditJobDescription(e.target.value)}
onKeyDown={handleKeyPress}
placeholder="Paste a job description (or URL that resolves to one), then click Generate..."
// placeholder="Paste a job description (or URL that resolves to one), then click Generate..."
placeholder="Paste a job description, then click Generate..."
/>
</Document>
<Tooltip title="Generate">
<Button sx={{ m: 1, gap: 1 }} variant="contained" onClick={() => { triggerGeneration(jobDescription); }}>Generate<SendIcon /></Button>
<Button sx={{ m: 1, gap: 1 }} variant="contained" onClick={() => { triggerGeneration(editJobDescription); }}>Generate<SendIcon /></Button>
</Tooltip>
</>) : (<Box sx={{ display: "flex", flexDirection: "column", overflow: "auto" }}>
</>) : (activeDocMobile === 1 ? (<Box sx={{ display: "flex", flexDirection: "column", overflow: "auto" }}>
<Document title="">{resume !== undefined && <Message message={resume} />}</Document>
{processing === true && <>
<Box sx={{
@ -165,7 +174,6 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
</Box>
</>}
<Card sx={{ display: "flex", overflow: "auto", minHeight: "fit-content", p: 1, flexDirection: "column" }}>
{facts !== undefined && <Message message={facts}/> }
{resume !== undefined || processing === true
? <>
<Typography><b>NOTE:</b> As with all LLMs, hallucination is always a possibility. If the generated resume seems too good to be true, <b>Fact Check</b> or, expand the <b>LLM information for this query</b> section (at the end of the resume) and click the links in the <b>Top RAG</b> matches to view the relavent RAG source document to read the details. Or go back to 'Backstory' and ask a question.</Typography> {processing === false && <Tooltip title="Fact Check">
@ -174,7 +182,10 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
: <Typography>Once you click <b>Generate</b> under the <b>Job Description</b>, a resume will be generated based on the user's RAG content and the job description.</Typography>
}
</Card>
</Box>)}
</Box>) :
(<Box sx={{ display: "flex", flexDirection: "column", overflow: "auto" }}>
<Document title="">{facts !== undefined && <Message message={facts} />}</Document>
</Box>))}
</Box>
</Box>
);
@ -198,14 +209,15 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
maxHeight: '100%', // Prevents it from growing larger than the parent height
overflow: 'auto', // Enables scrollbars if the content overflows
}}
value={jobDescription}
onChange={(e) => setJobDescription(e.target.value)}
onKeyDown={handleKeyPress}
placeholder="Paste a job description (or URL that resolves to one), then click Generate..."
/>
value={editJobDescription}
onChange={(e) => setEditJobDescription(e.target.value)}
onKeyDown={handleKeyPress}
// placeholder="Paste a job description (or URL that resolves to one), then click Generate..."
placeholder="Paste a job description, then click Generate..."
/>
</Document>
<Tooltip title="Generate">
<Button sx={{ m: 1, gap: 1 }} variant="contained" onClick={() => { triggerGeneration(jobDescription); }}>Generate<SendIcon /></Button>
<Button sx={{ m: 1, gap: 1 }} variant="contained" onClick={() => { triggerGeneration(editJobDescription); }}>Generate<SendIcon /></Button>
</Tooltip>
</Box>
<Divider orientation="vertical" flexItem />
@ -226,7 +238,6 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
/>
</Box>
<Card sx={{ display: "flex", overflow: "auto", minHeight: "fit-content", p: 1, flexDirection: "column" }}>
{facts !== undefined && <Message message={facts} />}
{resume !== undefined || processing === true
? <>
<Typography><b>NOTE:</b> As with all LLMs, hallucination is always a possibility. If the generated resume seems too good to be true, <b>Fact Check</b> or, expand the <b>LLM information for this query</b> section (at the end of the resume) and click the links in the <b>Top RAG</b> matches to view the relavent RAG source document to read the details. Or go back to 'Backstory' and ask a question.</Typography> { processing === false && <Tooltip title="Fact Check">
@ -236,6 +247,13 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({generateResume, factChec
}
</Card>
</Box>
{
facts !== undefined && <>
<Box sx={{ display: 'flex', width: `${100 - splitRatio}%`, pl: 1, flexGrow: 1, flexDirection: 'column' }}>
<Divider orientation="vertical" flexItem />
<Document title=""><Message message={facts} /></Document>
</Box>
</>}
</Box>
{/* Split control panel */}

View File

@ -50,9 +50,9 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
const formattedContent = message.content.trim();
return (
<ChatBubble isFullWidth={isFullWidth} isUser={message.role === 'user'} sx={{ flexGrow: 1, pb: message.metadata ? 0 : "8px", mb: 1, mt: 1 }}>
<ChatBubble isFullWidth={isFullWidth} role={message.role} sx={{ flexGrow: 1, pb: message.metadata ? 0 : "8px", mb: 1, mt: 1 }}>
<CardContent>
{message.role === 'assistant' ?
{message.role !== 'user' ?
<StyledMarkdown {...{ content: formattedContent, submitQuery }} />
:
<Typography variant="body2" sx={{ color: 'text.secondary' }}>

View File

@ -22,8 +22,10 @@ type MessageMetadata = {
prompt_eval_duration: number
};
type MessageRoles = 'info' | 'user' | 'assistant' | 'system';
type MessageData = {
role: string,
role: MessageRoles,
content: string,
user?: string,
type?: string,
@ -129,7 +131,8 @@ const MessageMeta = ({ metadata }: MessageMetaInterface) => {
export type {
MessageMetadata,
MessageMetaInterface,
MessageData
MessageData,
MessageRoles,
};
export { MessageMeta };

View File

@ -1,4 +1,4 @@
import { useState, useCallback, } from 'react';
import { useState, useCallback, useEffect, } from 'react';
import Box from '@mui/material/Box';
import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
@ -13,17 +13,26 @@ interface ResumeBuilderProps {
connectionBase: string,
sessionId: string | undefined,
setSnack: (message: string, severity?: SeverityType) => void,
resume: MessageData | undefined,
setResume: (resume: MessageData | undefined) => void,
facts: MessageData | undefined,
setFacts: (facts: MessageData | undefined) => void,
jobDescription: string,
};
const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
const ResumeBuilder = ({ jobDescription, facts, setFacts, resume, setResume, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
const [lastEvalTPS, setLastEvalTPS] = useState<number>(35);
const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
// const [countdown, setCountdown] = useState<number>(0);
const [resume, setResume] = useState<MessageData | undefined>(undefined);
const [facts, setFacts] = useState<MessageData | undefined>(undefined);
// const timerRef = useRef<any>(null);
useEffect(() => {
if (facts) {
console.log(facts);
}
}, [facts]);
const updateContextStatus = useCallback(() => {
fetch(connectionBase + `/api/context-status/${sessionId}`, {
method: 'GET',
@ -76,6 +85,7 @@ const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, proc
const generateResume = async (jobDescription: string) => {
if (!jobDescription.trim()) return;
setResume(undefined);
setFacts(undefined);
try {
setProcessing(true);
@ -189,6 +199,7 @@ const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, proc
const factCheck = async (resume: string) => {
if (!resume.trim()) return;
setFacts(undefined);
setSnack('Fact Check is still under development', 'warning');
try {
setProcessing(true);
@ -247,6 +258,7 @@ const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, proc
} else if (update.status === 'done') {
// Replace processing message with final result
update.message.role = 'info';
setFacts(update.message);
const metadata = update.message.metadata;
const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
@ -269,7 +281,7 @@ const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, proc
const update = JSON.parse(buffer);
if (update.status === 'done') {
//setGenerateStatus(undefined);
update.message.role = 'info';
setFacts(update.message);
}
} catch (e) {
@ -297,7 +309,7 @@ const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, proc
overflowY: "auto",
flexDirection: "column",
height: "calc(0vh - 0px)", // Hack to make the height work
}} {...{ factCheck, facts, generateResume, resume }} />
}} {...{ factCheck, facts, jobDescription, generateResume, resume }} />
</Box>
</Box>
);

View File

@ -146,15 +146,15 @@ Always use tools and [{context_tag}] when possible. Be concise, and never make u
""".strip()
system_generate_resume = f"""
You are a professional resume writer. Your task is to write a poliched, tailored resume for a specific job based only on the individual's [WORK HISTORY].
You are a professional resume writer. Your task is to write a polished, tailored resume for a specific job based only on the individual's [WORK HISTORY].
When answering queries, follow these steps:
1. You must not invent or assume any information not explicitly present in the [WORK HISTORY].
2. Analyze the [JOB DESCRIPTION] to identify skills required for the job.
3. Use the [JOB DESCRIPTION] provided to guide the focus, tone, and relevant skills or experience to highlight.
3. Use the [JOB DESCRIPTION] provided to guide the focus, tone, and relevant skills or experience to highlight from the [WORK HISTORY].
4. Identify and emphasize the experiences, achievements, and responsibilities from the [WORK HISTORY] that best align with the [JOB DESCRIPTION].
5. Do not use the [JOB DESCRIPTION] skills as skills the user posseses unless listed in [WORK HISTORY].
5. Do not use the [JOB DESCRIPTION] skills unless listed in [WORK HISTORY].
Structure the resume professionally with the following sections where applicable:
@ -164,10 +164,20 @@ Structure the resume professionally with the following sections where applicable
* Professional Experience: A detailed list of roles, achievements, and responsibilities from the work history that relate to the job."
* Education: Include only if available in the work history."
Do not include any information unless it is supported by the provided [WORK HISTORY].
Do not include any information unless it is provided in [WORK HISTORY].
Ensure the language is clear, concise, and aligned with industry standards for professional resumes.
"""
# System prompt for the fact-check pass: the model compares a generated
# [RESUME] against the individual's [WORK HISTORY] and lists any
# inaccuracies in bullet-point form, without inventing information.
system_fact_check = f"""
You are a professional resume fact checker. Your task is to identify any inaccuracies in the [RESUME] based on the individual's [WORK HISTORY].
If there are inaccuracies, list them in a bullet point format.
When answering queries, follow these steps:
1. You must not invent or assume any information not explicitly present in the [WORK HISTORY].
2. Analyze the [RESUME] to identify any discrepancies or inaccuracies based on the [WORK HISTORY].
"""
tool_log = []
command_log = []
model = None
@ -445,6 +455,14 @@ class WebServer:
"message-history-length": context["message_history_length"]
})
@self.app.get('/api/resume/{context_id}')
async def get_resume(context_id: str):
    """Return the stored resume history for the given context.

    Responds 400 if context_id is not a valid UUID; otherwise returns
    the context's resume_history list (possibly empty) as JSON.
    """
    if not is_valid_uuid(context_id):
        logging.warning(f"Invalid context_id: {context_id}")
        return JSONResponse({"error": "Invalid context_id"}, status_code=400)
    # NOTE(review): upsert_context presumably creates the context when it
    # does not already exist — confirm against its definition.
    context = self.upsert_context(context_id)
    return JSONResponse(context["resume_history"])
@self.app.get('/api/system-info/{context_id}')
async def get_system_info(context_id: str):
return JSONResponse(system_info(self.model))
@ -507,6 +525,35 @@ class WebServer:
}
)
@self.app.post('/api/fact-check/{context_id}')
async def post_fact_check(context_id: str, request: Request):
    """Stream fact-check results for the given context's resume.

    Expects a JSON body containing a 'content' field; streams the
    newline-delimited JSON status/message objects produced by
    self.fact_check(), persisting context state after each message.
    Responds 400 if context_id is not a valid UUID.
    """
    if not is_valid_uuid(context_id):
        logging.warning(f"Invalid context_id: {context_id}")
        return JSONResponse({"error": "Invalid context_id"}, status_code=400)
    context = self.upsert_context(context_id)
    data = await request.json()

    # Create a custom generator that ensures flushing
    async def flush_generator():
        async for message in self.fact_check(context=context, content=data['content']):
            # Convert to JSON and add newline
            yield json.dumps(message) + "\n"
            # Save the history as it's generated
            self.save_context(context_id)
            # Explicitly flush after each yield
            await asyncio.sleep(0)  # Allow the event loop to process the write

    # Return StreamingResponse with appropriate headers
    return StreamingResponse(
        flush_generator(),
        media_type="application/json",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no"  # Prevents Nginx buffering if you're using it
        }
    )
@self.app.post('/api/context')
async def create_context():
context = self.create_context()
@ -816,6 +863,13 @@ class WebServer:
self.processing = True
resume_history = context["resume_history"]
resume = {
"job_description": content,
"resume": "",
"metadata": {},
"rag": "",
"fact_check": ""
}
metadata = {
"rag": {},
@ -838,8 +892,10 @@ class WebServer:
preamble = f"""[WORK HISTORY]:\n"""
for doc in rag_docs:
preamble += doc
resume["rag"] += f"{doc}\n"
preamble += f"\n[/WORK HISTORY]\n"
content = f"{preamble}\nUse the above WORK HISTORY to create the resume for this JOB DESCRIPTION. Do not use the JOB DESCRIPTION skills as skills the user posseses unless listed in WORK HISTORY:\n[JOB DESCRIPTION]\n{content}\n[/JOB DESCRIPTION]\n"
try:
@ -866,11 +922,58 @@ class WebServer:
reply = response['response']
final_message = {"role": "assistant", "content": reply, "metadata": metadata }
resume_history.append({
'job_description': content,
'resume': reply,
'metadata': metadata
})
resume['resume'] = final_message
resume_history.append(resume)
# Return the REST API with metadata
yield {"status": "done", "message": final_message }
except Exception as e:
logging.exception({ 'model': self.model, 'content': content, 'error': str(e) })
yield {"status": "error", "message": f"An error occurred: {str(e)}"}
finally:
self.processing = False
async def fact_check(self, context, content):
content = content.strip()
if not content:
yield {"status": "error", "message": "Invalid request"}
return
if self.processing:
yield {"status": "error", "message": "Busy"}
return
self.processing = True
resume_history = context["resume_history"]
if len(resume_history) == 0:
yield {"status": "done", "message": "No resume history found." }
return
resume = resume_history[-1]
metadata = resume["metadata"]
metadata["eval_count"] = 0
metadata["eval_duration"] = 0
metadata["prompt_eval_count"] = 0
metadata["prompt_eval_duration"] = 0
content = f"[WORK HISTORY]:{resume['rag']}[/WORK HISTORY]\n\n[RESUME]\n{resume['resume']['content']}\n[/RESUME]\n\n"
try:
# Estimate token length of new messages
ctx_size = self.get_optimal_ctx_size(context["context_tokens"], messages=[system_fact_check, content])
yield {"status": "processing", "message": "Processing request...", "num_ctx": ctx_size}
response = self.client.generate(model=self.model, system=system_fact_check, prompt=content, options={ 'num_ctx': ctx_size })
logging.info(f"Fact checking {ctx_size} tokens.")
metadata["eval_count"] += response['eval_count']
metadata["eval_duration"] += response['eval_duration']
metadata["prompt_eval_count"] += response['prompt_eval_count']
metadata["prompt_eval_duration"] += response['prompt_eval_duration']
context["context_tokens"] = response['prompt_eval_count'] + response['eval_count']
reply = response['response']
final_message = {"role": "assistant", "content": reply, "metadata": metadata }
resume['fact_check'] = final_message
# Return the REST API with metadata
yield {"status": "done", "message": final_message }