import { useState, useCallback } from 'react';

import Box from '@mui/material/Box';

import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
import { MessageData } from './MessageMeta';
import { DocumentViewer } from './DocumentViewer';

interface ResumeBuilderProps {
|
|
scrollToBottom: () => void,
|
|
isScrolledToBottom: () => boolean,
|
|
setProcessing: (processing: boolean) => void,
|
|
processing: boolean,
|
|
connectionBase: string,
|
|
sessionId: string | undefined,
|
|
setSnack: (message: string, severity?: SeverityType) => void,
|
|
};
|
|
|
|
const ResumeBuilder = ({ scrollToBottom, isScrolledToBottom, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
|
|
const [lastEvalTPS, setLastEvalTPS] = useState<number>(35);
|
|
const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
|
|
const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
|
|
// const [countdown, setCountdown] = useState<number>(0);
|
|
const [resume, setResume] = useState<MessageData | undefined>(undefined);
|
|
const [facts, setFacts] = useState<MessageData | undefined>(undefined);
|
|
// const timerRef = useRef<any>(null);
|
|
|
|
const updateContextStatus = useCallback(() => {
|
|
fetch(connectionBase + `/api/context-status/${sessionId}`, {
|
|
method: 'GET',
|
|
headers: {
|
|
'Content-Type': 'application/json',
|
|
},
|
|
})
|
|
.then(response => response.json())
|
|
.then(data => {
|
|
setContextStatus(data);
|
|
})
|
|
.catch(error => {
|
|
console.error('Error getting context status:', error);
|
|
setSnack("Unable to obtain context status.", "error");
|
|
});
|
|
}, [setContextStatus, connectionBase, setSnack, sessionId]);
|
|
|
|
// const startCountdown = (seconds: number) => {
|
|
// if (timerRef.current) clearInterval(timerRef.current);
|
|
// setCountdown(seconds);
|
|
// timerRef.current = setInterval(() => {
|
|
// setCountdown((prev) => {
|
|
// if (prev <= 1) {
|
|
// clearInterval(timerRef.current);
|
|
// timerRef.current = null;
|
|
// if (isScrolledToBottom()) {
|
|
// setTimeout(() => {
|
|
// scrollToBottom();
|
|
// }, 50)
|
|
// }
|
|
// return 0;
|
|
// }
|
|
// return prev - 1;
|
|
// });
|
|
// }, 1000);
|
|
// };
|
|
|
|
// const stopCountdown = () => {
|
|
// if (timerRef.current) {
|
|
// clearInterval(timerRef.current);
|
|
// timerRef.current = null;
|
|
// setCountdown(0);
|
|
// }
|
|
// };
|
|
|
|
if (sessionId === undefined) {
|
|
return (<></>);
|
|
}
|
|
|
|
const generateResume = async (jobDescription: string) => {
|
|
if (!jobDescription.trim()) return;
|
|
setResume(undefined);
|
|
|
|
try {
|
|
setProcessing(true);
|
|
|
|
// Add initial processing message
|
|
//setGenerateStatus({ role: 'assistant', content: 'Processing request...' });
|
|
|
|
// Make the fetch request with proper headers
|
|
const response = await fetch(connectionBase + `/api/generate-resume/${sessionId}`, {
|
|
method: 'POST',
|
|
headers: {
|
|
'Content-Type': 'application/json',
|
|
'Accept': 'application/json',
|
|
},
|
|
body: JSON.stringify({ content: jobDescription.trim() }),
|
|
});
|
|
|
|
// We'll guess that the response will be around 500 tokens...
|
|
const token_guess = 500;
|
|
const estimate = Math.round(token_guess / lastEvalTPS + contextStatus.context_used / lastPromptTPS);
|
|
|
|
setSnack(`Job description sent. Response estimated in ${estimate}s.`, "info");
|
|
//startCountdown(Math.round(estimate));
|
|
|
|
if (!response.ok) {
|
|
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
|
|
}
|
|
|
|
if (!response.body) {
|
|
throw new Error('Response body is null');
|
|
}
|
|
|
|
// Set up stream processing with explicit chunking
|
|
const reader = response.body.getReader();
|
|
const decoder = new TextDecoder();
|
|
let buffer = '';
|
|
|
|
while (true) {
|
|
const { done, value } = await reader.read();
|
|
if (done) {
|
|
break;
|
|
}
|
|
|
|
const chunk = decoder.decode(value, { stream: true });
|
|
|
|
// Process each complete line immediately
|
|
buffer += chunk;
|
|
let lines = buffer.split('\n');
|
|
buffer = lines.pop() || ''; // Keep incomplete line in buffer
|
|
for (const line of lines) {
|
|
if (!line.trim()) continue;
|
|
|
|
try {
|
|
const update = JSON.parse(line);
|
|
|
|
// Force an immediate state update based on the message type
|
|
if (update.status === 'processing') {
|
|
// Update processing message with immediate re-render
|
|
//setGenerateStatus({ role: 'info', content: update.message });
|
|
console.log(update.num_ctx);
|
|
|
|
// Add a small delay to ensure React has time to update the UI
|
|
await new Promise(resolve => setTimeout(resolve, 0));
|
|
|
|
} else if (update.status === 'done') {
|
|
// Replace processing message with final result
|
|
//setGenerateStatus(undefined);
|
|
setResume(update.message);
|
|
const metadata = update.message.metadata;
|
|
const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
|
|
const promptTPS = metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration;
|
|
setLastEvalTPS(evalTPS ? evalTPS : 35);
|
|
setLastPromptTPS(promptTPS ? promptTPS : 35);
|
|
updateContextStatus();
|
|
} else if (update.status === 'error') {
|
|
// Show error
|
|
//setGenerateStatus({ role: 'error', content: update.message });
|
|
}
|
|
} catch (e) {
|
|
setSnack("Error generating resume", "error")
|
|
console.error('Error parsing JSON:', e, line);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Process any remaining buffer content
|
|
if (buffer.trim()) {
|
|
try {
|
|
const update = JSON.parse(buffer);
|
|
|
|
if (update.status === 'done') {
|
|
//setGenerateStatus(undefined);
|
|
setResume(update.message);
|
|
}
|
|
} catch (e) {
|
|
setSnack("Error processing job description", "error")
|
|
}
|
|
}
|
|
|
|
//stopCountdown();
|
|
setProcessing(false);
|
|
} catch (error) {
|
|
console.error('Fetch error:', error);
|
|
setSnack("Unable to process job description", "error");
|
|
//setGenerateStatus({ role: 'error', content: `Error: ${error}` });
|
|
setProcessing(false);
|
|
//stopCountdown();
|
|
}
|
|
};
|
|
|
|
const factCheck = async (resume: string) => {
|
|
if (!resume.trim()) return;
|
|
setFacts(undefined);
|
|
|
|
try {
|
|
setProcessing(true);
|
|
|
|
const response = await fetch(connectionBase + `/api/fact-check/${sessionId}`, {
|
|
method: 'POST',
|
|
headers: {
|
|
'Content-Type': 'application/json',
|
|
'Accept': 'application/json',
|
|
},
|
|
body: JSON.stringify({ content: resume.trim() }),
|
|
});
|
|
|
|
// We'll guess that the response will be around 500 tokens...
|
|
const token_guess = 500;
|
|
const estimate = Math.round(token_guess / lastEvalTPS + contextStatus.context_used / lastPromptTPS);
|
|
|
|
setSnack(`Resume sent for Fact Check. Response estimated in ${estimate}s.`, "info");
|
|
//startCountdown(Math.round(estimate));
|
|
|
|
if (!response.ok) {
|
|
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
|
|
}
|
|
|
|
if (!response.body) {
|
|
throw new Error('Response body is null');
|
|
}
|
|
|
|
// Set up stream processing with explicit chunking
|
|
const reader = response.body.getReader();
|
|
const decoder = new TextDecoder();
|
|
let buffer = '';
|
|
|
|
while (true) {
|
|
const { done, value } = await reader.read();
|
|
if (done) {
|
|
break;
|
|
}
|
|
|
|
const chunk = decoder.decode(value, { stream: true });
|
|
|
|
// Process each complete line immediately
|
|
buffer += chunk;
|
|
let lines = buffer.split('\n');
|
|
buffer = lines.pop() || ''; // Keep incomplete line in buffer
|
|
for (const line of lines) {
|
|
if (!line.trim()) continue;
|
|
|
|
try {
|
|
const update = JSON.parse(line);
|
|
|
|
// Force an immediate state update based on the message type
|
|
if (update.status === 'processing') {
|
|
// Add a small delay to ensure React has time to update the UI
|
|
await new Promise(resolve => setTimeout(resolve, 0));
|
|
|
|
} else if (update.status === 'done') {
|
|
// Replace processing message with final result
|
|
setFacts(update.message);
|
|
const metadata = update.message.metadata;
|
|
const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
|
|
const promptTPS = metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration;
|
|
setLastEvalTPS(evalTPS ? evalTPS : 35);
|
|
setLastPromptTPS(promptTPS ? promptTPS : 35);
|
|
updateContextStatus();
|
|
} else if (update.status === 'error') {
|
|
}
|
|
} catch (e) {
|
|
setSnack("Error generating resume", "error")
|
|
console.error('Error parsing JSON:', e, line);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Process any remaining buffer content
|
|
if (buffer.trim()) {
|
|
try {
|
|
const update = JSON.parse(buffer);
|
|
|
|
if (update.status === 'done') {
|
|
//setGenerateStatus(undefined);
|
|
setFacts(update.message);
|
|
}
|
|
} catch (e) {
|
|
setSnack("Error processing resume", "error")
|
|
}
|
|
}
|
|
|
|
//stopCountdown();
|
|
setProcessing(false);
|
|
} catch (error) {
|
|
console.error('Fetch error:', error);
|
|
setSnack("Unable to process resume", "error");
|
|
//setGenerateStatus({ role: 'error', content: `Error: ${error}` });
|
|
setProcessing(false);
|
|
//stopCountdown();
|
|
}
|
|
};
|
|
|
|
return (
|
|
<Box className="DocBox">
|
|
<Box className="Conversation">
|
|
<DocumentViewer sx={{
|
|
display: "flex",
|
|
flexGrow: 1,
|
|
overflowY: "auto",
|
|
flexDirection: "column",
|
|
height: "calc(0vh - 0px)", // Hack to make the height work
|
|
}} {...{ factCheck, facts, generateResume, resume }} />
|
|
</Box>
|
|
</Box>
|
|
);
|
|
}
|
|
|
|
|
|
export type {
|
|
ResumeBuilderProps
|
|
};
|
|
|
|
export {
|
|
ResumeBuilder
|
|
};
|
|
|