diff --git a/frontend/public/docs/about.md b/frontend/public/docs/about.md
index 621ec16..c2a3885 100644
--- a/frontend/public/docs/about.md
+++ b/frontend/public/docs/about.md
@@ -1,7 +1,31 @@
# About Backstory
-This application was developed to achieve a few goals:
+The backstory about Backstory...
-1. See if it is realistic to self-host AI LLMs. Turns out, it is -- with constraints.
-2. Provide a recent example of my capabilities; many of my projects while working for Intel were internally facing. The source code to this project is available on [GitHub](https://github.com/jketreno/backstory).
-3. My career at Intel was diverse. Over the years, I have worked on many projects almost everywhere in the computer ecosystem. That results in a resume that is either too long, or too short. This application is intended to provide a quick way for employers to ask the LLM about me.
\ No newline at end of file
+## Backstory is two things
+
+1. Backstory serves as an interactive Q&A that lets potential employers ask questions about someone's work history (aka their "backstory"). Based on the content the job seeker has provided to the RAG system, it can surface details about that individual's resume and curriculum vitae that are often left out when trying to fit everything onto one page.
+2. A resume builder -- if you have an open position and think this person might be a candidate, paste in your job description and a resume is generated from their data. If it looks interesting, reach out to them. If not, hopefully you've gained some insight into what drives them.
+
+-or-
+
+2. As a potential job seeker, you can self-host this environment and generate resumes for yourself.
+
+   While this project was built primarily for self-hosting with open-source models, you can also use any of the frontier models. The API adapters in this project can be configured to use infrastructure hosted by Anthropic, Google, Grok, or OpenAI (listed alphabetically). For more information, see [https://github.com/jketreno/backstory/README.md](https://github.com/jketreno/backstory/README.md#Frontier_Models).
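+
+   For illustration only, a hosted frontier model is typically reached through an OpenAI-compatible HTTP call like the sketch below; the endpoint, model name, and environment variable are assumptions for the example, not Backstory's actual adapter configuration (see the README above for the real setup):
+
+   ```typescript
+   // Hypothetical sketch (run inside an async function) -- not Backstory's adapter code.
+   // Assumes an OpenAI-compatible endpoint and an API key in OPENAI_API_KEY.
+   const response = await fetch("https://api.openai.com/v1/chat/completions", {
+     method: "POST",
+     headers: {
+       "Content-Type": "application/json",
+       "Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
+     },
+     body: JSON.stringify({
+       model: "gpt-4o-mini", // assumed model name
+       messages: [{ role: "user", content: "Summarize this candidate's work history." }],
+     }),
+   });
+   const data = await response.json();
+   console.log(data.choices[0].message.content); // the assistant's reply
+   ```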
+
+
+## This application was developed to achieve a few goals:
+
+1. See if it is realistic to self-host LLMs. It turns out it is -- with constraints. I've been meaning to write a blog post about what to buy to build an AI PC that can run the latest "small" (7B-parameter) models.
+2. Provide a recent example of my capabilities; many of my projects while working for Intel were internally facing. The source code for this project is available on [GitHub](https://github.com/jketreno/backstory). It doesn't touch on much of my work history; however, it does represent the pace at which I can adapt and develop useful solutions to fill a gap.
+3. My career at Intel was diverse. Over the years, I have worked on many projects almost everywhere in the computer ecosystem. That results in a resume that is either too long or too short. This application is intended to provide a quick way for employers to ask the LLM about me. You can view my resume in its entirety, use the Resume Builder to paste in your job posting and see how I fit, or go to Backstory and ask questions about the projects mentioned in my resume.
+
+## Some questions
+
+Q. Why don't you host this as a service that others can use with their own work history?
+
+A. I could, but I don't want to store your data. I also don't want to be on the hook for supporting this service. I like it, it's fun, but it's not what I want as my day gig, you know? If it were, I wouldn't have built this app...
+
+Q.
+
+A. Try it. See what you find out :)
\ No newline at end of file
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 97e9918..e1387f5 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -28,13 +28,9 @@ import CssBaseline from '@mui/material/CssBaseline';
import ResetIcon from '@mui/icons-material/History';
import SendIcon from '@mui/icons-material/Send';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
-import Card from '@mui/material/Card';
-import CardContent from '@mui/material/CardContent';
import PropagateLoader from "react-spinners/PropagateLoader";
-import { MuiMarkdown } from "mui-markdown";
-
import { ResumeBuilder } from './ResumeBuilder';
import { Message, MessageList } from './Message';
import { SeverityType } from './Snack';
@@ -51,15 +47,14 @@ import '@fontsource/roboto/700.css';
const welcomeMarkdown = `
# Welcome to Backstory
-Backstory was written by James Ketrenos in order to provide answers to questions potential employers may have about his work history. In addition to being a RAG enabled expert system, the LLM is configured with real-time access to weather, stocks, the current time, and can answer questions about the contents of a website.
+Backstory was written by James Ketrenos to provide answers to questions potential employers may have about his work history. In addition to being a RAG-enabled expert system, the LLM has access to real-time data.
You can ask things like:
-
-
-
+
+
 You can click the text above to submit that query, or type it in yourself (or ask whatever questions you may have).
@@ -194,7 +189,6 @@ const Controls = ({ tools, rags, systemPrompt, toggleTool, toggleRag, messageHis
{
const sendQuery = async (query: string) => {
if (!query.trim()) return;
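+    // Switch to the first tab before submitting the query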
+ setTab(0);
+
const userMessage = [{ role: 'user', content: query }];
let scrolledToBottom;
@@ -1153,11 +1149,7 @@ const App = () => {
-
-
- {about}
-
-
+
diff --git a/frontend/src/BackstoryTheme.tsx b/frontend/src/BackstoryTheme.tsx
index 5c49e90..c44aeb2 100644
--- a/frontend/src/BackstoryTheme.tsx
+++ b/frontend/src/BackstoryTheme.tsx
@@ -40,6 +40,18 @@ const backstoryTheme = createTheme({
},
},
components: {
+ MuiLink: {
+ styleOverrides: {
+ root: {
+          color: '#4A7A7D', // Dusty Teal (theme secondary color)
+ textDecoration: 'none',
+ '&:hover': {
+ color: '#D4A017', // Golden Ochre on hover
+ textDecoration: 'underline',
+ },
+ },
+ },
+ },
MuiButton: {
styleOverrides: {
root: {
diff --git a/frontend/src/DocumentViewer.tsx b/frontend/src/DocumentViewer.tsx
index 5e9071a..4965a94 100644
--- a/frontend/src/DocumentViewer.tsx
+++ b/frontend/src/DocumentViewer.tsx
@@ -1,6 +1,7 @@
-import React, { useState } from 'react';
+import React, { useEffect, useState, useCallback } from 'react';
import {
Typography,
+ Card,
Button,
Tabs,
Tab,
@@ -22,9 +23,10 @@ import {
SwapHoriz,
} from '@mui/icons-material';
import { SxProps, Theme } from '@mui/material';
-import { MuiMarkdown } from "mui-markdown";
+import PropagateLoader from "react-spinners/PropagateLoader";
import { MessageData } from './MessageMeta';
+import { Message } from './Message';
interface DocumentComponentProps {
title: string;
@@ -37,8 +39,31 @@ interface DocumentViewerProps {
sx?: SxProps,
};
+// Document component
+const Document: React.FC<DocumentComponentProps> = ({ title, children }) => (
+
+ {
+ title !== "" &&
+
+ {title}
+
+ }
+
+ {children}
+
+
+);
+
const DocumentViewer: React.FC = ({generateResume, resume, sx} : DocumentViewerProps) => {
const [jobDescription, setJobDescription] = useState("");
+ const [processing, setProcessing] = useState(false);
const theme = useTheme();
const isMobile = useMediaQuery(theme.breakpoints.down('md'));
@@ -47,6 +72,17 @@ const DocumentViewer: React.FC = ({generateResume, resume,
// State for controlling split ratio on desktop
const [splitRatio, setSplitRatio] = useState(50);
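+  // When the generated resume arrives, clear the local processing flag so the loading indicator hides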
+ useEffect(() => {
+ if (processing && resume !== undefined) {
+ setProcessing(false);
+ }
+ }, [processing, resume, setProcessing]);
+
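+  // Flag the viewer as processing, then ask the parent component to generate a resume for this job description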
+ const triggerGeneration = useCallback((jobDescription: string) => {
+ setProcessing(true);
+ generateResume(jobDescription);
+ }, [setProcessing, generateResume]);
+
// Handle tab change for mobile
const handleTabChange = (_event: React.SyntheticEvent, newValue: number): void => {
setActiveDocMobile(newValue);
@@ -64,30 +100,10 @@ const DocumentViewer: React.FC = ({generateResume, resume,
const handleKeyPress = (event: any) => {
if (event.key === 'Enter' && event.ctrlKey) {
- generateResume(jobDescription);
+ triggerGeneration(jobDescription);
}
};
- // Document component
-  const Document: React.FC<DocumentComponentProps> = ({ title, children }) => (
-
- { title !== "" &&
-
- {title}
- }
-
- {children}
-
-
- );
-
// Mobile view
if (isMobile) {
return (
@@ -125,9 +141,24 @@ const DocumentViewer: React.FC = ({generateResume, resume,
/>
- >) : (
- { resume !== undefined && }
- )}
+ >) : (<>
+ {resume !== undefined && }
+
+
+
+          {resume !== undefined && NOTE: As with all LLMs, hallucination is always a possibility. If this resume seems too good to be true, expand the 'LLM information for this query' section and click the links to the relevant RAG source documents to read the details. Or go back to 'Backstory' and ask a question.}
+ >)}
);
@@ -162,11 +193,28 @@ const DocumentViewer: React.FC = ({generateResume, resume,
-
- { resume !== undefined && }
+
+ {resume !== undefined && }
+
+
+
+          {resume !== undefined && NOTE: As with all LLMs, hallucination is always a possibility. If this resume seems too good to be true, expand the 'LLM information for this query' section and click the links to the relevant RAG source documents to read the details. Or go back to 'Backstory' and ask a question.}
+
{/* Split control panel */}
+
setSplitRatio(Math.max(20, splitRatio - 10))}>
diff --git a/frontend/src/Message.tsx b/frontend/src/Message.tsx
index 1a86b76..9a7dbab 100644
--- a/frontend/src/Message.tsx
+++ b/frontend/src/Message.tsx
@@ -4,13 +4,13 @@ import Button from '@mui/material/Button';
import CardContent from '@mui/material/CardContent';
import CardActions from '@mui/material/CardActions';
import Collapse from '@mui/material/Collapse';
-import { MuiMarkdown } from "mui-markdown";
import Typography from '@mui/material/Typography';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import { ExpandMore } from './ExpandMore';
import { MessageData, MessageMeta } from './MessageMeta';
import { ChatBubble } from './ChatBubble';
+import { StyledMarkdown } from './StyledMarkdown';
type MessageList = MessageData[];
@@ -53,14 +53,7 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
{message.role === 'assistant' ?
-
+
:
{message.content}
@@ -90,10 +83,11 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
};
export type {
- MessageInterface,
- MessageList
+ MessageInterface,
+ MessageList,
};
export {
- Message
+ Message,
+ ChatQuery,
};
diff --git a/frontend/src/ResumeBuilder.tsx b/frontend/src/ResumeBuilder.tsx
index 33c4cb0..f2530d7 100644
--- a/frontend/src/ResumeBuilder.tsx
+++ b/frontend/src/ResumeBuilder.tsx
@@ -91,21 +91,13 @@ const ResumeBuilder = ({scrollToBottom, isScrolledToBottom, setProcessing, proce
const generateResume = async (jobDescription: string) => {
if (!jobDescription.trim()) return;
- // setResume(undefined);
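+    // Clear any previous resume so the viewer shows the processing state for the new request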
+ setResume(undefined);
- let scrolledToBottom;
-
- scrollToBottom();
-
try {
- scrolledToBottom = isScrolledToBottom();
setProcessing(true);
// Add initial processing message
setGenerateStatus({ role: 'assistant', content: 'Processing request...' });
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
// Make the fetch request with proper headers
const response = await fetch(connectionBase + `/api/generate-resume/${sessionId}`, {
@@ -121,12 +113,8 @@ const ResumeBuilder = ({scrollToBottom, isScrolledToBottom, setProcessing, proce
const token_guess = 500;
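+      // Rough time estimate: ~500 generated tokens at the last eval rate, plus prompt processing of the current context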
const estimate = Math.round(token_guess / lastEvalTPS + contextStatus.context_used / lastPromptTPS);
- scrolledToBottom = isScrolledToBottom();
setSnack(`Job description sent. Response estimated in ${estimate}s.`, "info");
startCountdown(Math.round(estimate));
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
if (!response.ok) {
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
@@ -161,20 +149,15 @@ const ResumeBuilder = ({scrollToBottom, isScrolledToBottom, setProcessing, proce
// Force an immediate state update based on the message type
if (update.status === 'processing') {
- scrolledToBottom = isScrolledToBottom();
// Update processing message with immediate re-render
setGenerateStatus({ role: 'info', content: update.message });
console.log(update.num_ctx);
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
} else if (update.status === 'done') {
// Replace processing message with final result
- scrolledToBottom = isScrolledToBottom();
setGenerateStatus(undefined);
setResume(update.message);
const metadata = update.message.metadata;
@@ -183,16 +166,9 @@ const ResumeBuilder = ({scrollToBottom, isScrolledToBottom, setProcessing, proce
setLastEvalTPS(evalTPS ? evalTPS : 35);
setLastPromptTPS(promptTPS ? promptTPS : 35);
updateContextStatus();
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
} else if (update.status === 'error') {
// Show error
- scrolledToBottom = isScrolledToBottom();
- setGenerateStatus({role: 'error', content: update.message });
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
+ setGenerateStatus({ role: 'error', content: update.message });
}
} catch (e) {
setSnack("Error generating resume", "error")
@@ -207,34 +183,22 @@ const ResumeBuilder = ({scrollToBottom, isScrolledToBottom, setProcessing, proce
const update = JSON.parse(buffer);
if (update.status === 'done') {
- scrolledToBottom = isScrolledToBottom();
setGenerateStatus(undefined);
setResume(update.message);
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 500);
- }
}
} catch (e) {
setSnack("Error processing job description", "error")
}
}
- scrolledToBottom = isScrolledToBottom();
stopCountdown();
setProcessing(false);
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
} catch (error) {
console.error('Fetch error:', error);
setSnack("Unable to process job description", "error");
- scrolledToBottom = isScrolledToBottom();
setGenerateStatus({ role: 'error', content: `Error: ${error}` });
setProcessing(false);
stopCountdown();
- if (scrolledToBottom) {
- setTimeout(() => { scrollToBottom() }, 50);
- }
}
};
@@ -255,7 +219,6 @@ const ResumeBuilder = ({scrollToBottom, isScrolledToBottom, setProcessing, proce