Improved resume generation by reordering context

This commit is contained in:
James Ketr 2025-06-20 11:14:07 -07:00
parent c643b0d8f8
commit 9c9578cc46
17 changed files with 429 additions and 303 deletions

View File

@ -123,6 +123,29 @@ services:
networks:
- internal
ollama-intel:
image: intelanalytics/ipex-llm-inference-cpp-xpu:latest
container_name: ollama
restart: unless-stopped
env_file:
- .env
devices:
- /dev/dri:/dev/dri
volumes:
- ./cache:/root/.cache # Cache hub models and neo_compiler_cache
- ./ollama:/root/.ollama # Cache the ollama models
ports:
- 11434:11434
environment:
- OLLAMA_HOST=0.0.0.0
- DEVICE=Arc
- OLLAMA_INTEL_GPU=true
- OLLAMA_NUM_GPU=999
- ZES_ENABLE_SYSMAN=1
- ONEAPI_DEVICE_SELECTOR=level_zero:0
- TZ=America/Los_Angeles
command: sh -c 'mkdir -p /llm/ollama && cd /llm/ollama && init-ollama && exec ./ollama serve'
ollama:
build:
context: .

View File

@ -12,6 +12,7 @@ import { StatusBox, StatusIcon } from './ui/StatusIcon';
import { CopyBubble } from './CopyBubble';
import { useAppState } from 'hooks/GlobalContext';
import { StreamingOptions } from 'services/api-client';
import { Navigate, useNavigate } from 'react-router-dom';
interface ResumeGeneratorProps {
job: Job;
@ -23,6 +24,7 @@ interface ResumeGeneratorProps {
const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorProps) => {
const { job, candidate, skills, onComplete } = props;
const { setSnack } = useAppState();
const navigate = useNavigate();
const { apiClient, user } = useAuth();
const [resume, setResume] = useState<string>('');
const [prompt, setPrompt] = useState<string>('');
@ -49,9 +51,10 @@ const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorP
const generateResumeHandlers: StreamingOptions<Types.ChatMessageResume> = {
onMessage: (message: Types.ChatMessageResume) => {
setSystemPrompt(message.systemPrompt || '');
setPrompt(message.prompt || '');
setResume(message.resume || '');
const resume: Types.Resume = message.resume;
setSystemPrompt(resume.systemPrompt || '');
setPrompt(resume.prompt || '');
setResume(resume.resume || '');
setStatus('');
},
onStreaming: (chunk: Types.ChatMessageStreaming) => {
@ -115,10 +118,18 @@ const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorP
setSnack('Candidate or job ID is missing.');
return;
}
const controller = apiClient.saveResume(candidate.id, job.id, resume);
const submission: Types.Resume = {
jobId: job.id,
candidateId: candidate.id,
resume,
systemPrompt,
prompt,
};
const controller = apiClient.saveResume(submission);
const result = await controller.promise;
if (result.resume.id) {
setSnack('Resume saved successfully!');
navigate(`/candidate/resumes/${result.resume.id}`);
}
} catch (error) {
console.error('Error saving resume:', error);
@ -177,7 +188,7 @@ const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorP
{resume && !status && !error && (
<Button onClick={handleSave} variant="contained" color="primary" sx={{ mt: 2 }}>
Save Resume
Save Resume and Edit
</Button>
)}
</Box>

View File

@ -44,9 +44,8 @@ interface JobInfoProps {
const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
const { setSnack } = useAppState();
const { job } = props;
const { user, apiClient } = useAuth();
const { sx, action = '', elevation = 1, variant = 'normal' } = props;
const { sx, action = '', elevation = 1, variant = 'normal', job } = props;
const theme = useTheme();
const isMobile = useMediaQuery(theme.breakpoints.down('md')) || variant === 'minimal';
const isAdmin = user?.isAdmin;
@ -236,6 +235,7 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
return (
<Box
className="JobInfo"
sx={{
display: 'flex',
borderColor: 'transparent',

View File

@ -1,4 +1,9 @@
/* A4 Portrait simulation for MuiMarkdown */
.a4-document .MuiTypography-root {
font-family: 'Roboto', 'Times New Roman', serif;
}
.a4-document {
/* A4 dimensions: 210mm x 297mm */
width: 210mm;
@ -10,12 +15,12 @@
/* Document styling */
background: white;
padding: 8mm; /* 1/4" margins all around */
padding: 12mm; /* ~1/2" margins all around */
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
border: 1px solid #e0e0e0;
/* Typography for document feel */
font-family: 'Times New Roman', serif;
font-family: 'Roboto', 'Times New Roman', serif;
font-size: 12pt;
line-height: 1.6;
color: #333;
@ -23,16 +28,12 @@
/* Page break lines - repeating dotted lines every A4 height */
background-image:
repeating-linear-gradient(
transparent,
transparent calc(8mm - 1px),
#00f calc(8mm),
transparent calc(8mm + 1px),
transparent calc(288mm - 1px), /* 297mm - 8mm top/bottom margins */
#00f calc(288mm),
transparent calc(288mm + 1px),
transparent calc(296mm - 1px),
#000 calc(296mm),
transparent calc(296mm + 1px)
#ddd,
#ddd 12mm,
transparent calc(12mm + 1px),
transparent calc(285mm - 1px), /* 297mm - 12mm top/bottom margins */
#ddd calc(285mm),
#ddd 297mm
);
background-size: 100% 297mm;
background-repeat: repeat-y;

View File

@ -26,6 +26,7 @@ import {
DialogActions,
Tabs,
Tab,
Paper,
} from '@mui/material';
import PrintIcon from '@mui/icons-material/Print';
import {
@ -39,7 +40,10 @@ import {
Schedule as ScheduleIcon,
Visibility as VisibilityIcon,
VisibilityOff as VisibilityOffIcon,
ModelTraining,
} from '@mui/icons-material';
import InputIcon from '@mui/icons-material/Input';
import TuneIcon from '@mui/icons-material/Tune';
import PreviewIcon from '@mui/icons-material/Preview';
import EditDocumentIcon from '@mui/icons-material/EditDocument';
@ -52,6 +56,10 @@ import { Resume } from 'types/types';
import { BackstoryTextField } from 'components/BackstoryTextField';
import { JobInfo } from './JobInfo';
import './ResumeInfo.css';
import { Scrollable } from 'components/Scrollable';
import * as Types from 'types/types';
import { StreamingOptions } from 'services/api-client';
import { StatusBox, StatusIcon } from './StatusIcon';
interface ResumeInfoProps {
resume: Resume;
@ -70,15 +78,18 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
const isMobile = useMediaQuery(theme.breakpoints.down('md')) || variant === 'minimal';
const isAdmin = user?.isAdmin;
const [activeResume, setActiveResume] = useState<Resume>({ ...resume });
const [isContentExpanded, setIsContentExpanded] = useState(false);
const [shouldShowMoreButton, setShouldShowMoreButton] = useState(false);
const [deleted, setDeleted] = useState<boolean>(false);
const [editDialogOpen, setEditDialogOpen] = useState<boolean>(false);
const [printDialogOpen, setPrintDialogOpen] = useState<boolean>(false);
const [editContent, setEditContent] = useState<string>('');
const [editSystemPrompt, setEditSystemPrompt] = useState<string>('');
const [editPrompt, setEditPrompt] = useState<string>('');
const [saving, setSaving] = useState<boolean>(false);
const contentRef = useRef<HTMLDivElement>(null);
const [tabValue, setTabValue] = useState('markdown');
const [status, setStatus] = useState<string>('');
const [statusType, setStatusType] = useState<Types.ApiActivityType | null>(null);
const [error, setError] = useState<Types.ChatMessageError | null>(null);
const printContentRef = useRef<HTMLDivElement>(null);
const reactToPrintFn = useReactToPrint({
contentRef: printContentRef,
@ -92,13 +103,6 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
}, [resume, activeResume]);
// Check if content needs truncation
useEffect(() => {
if (contentRef.current && resume.resume) {
const element = contentRef.current;
setShouldShowMoreButton(element.scrollHeight > element.clientHeight);
}
}, [resume.resume]);
const deleteResume = async (id: string | undefined) => {
if (id) {
try {
@ -118,11 +122,17 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
const handleSave = async () => {
setSaving(true);
try {
const result = await apiClient.updateResume(activeResume.id || '', editContent);
const updatedResume = {
const resumeUpdate = {
...activeResume,
resume: editContent,
updatedAt: new Date(),
systemPrompt: editSystemPrompt,
prompt: editPrompt,
};
const result = await apiClient.updateResume(resumeUpdate);
console.log('Resume updated:', result);
const updatedResume = {
...activeResume,
...result
};
setActiveResume(updatedResume);
setSnack('Resume updated successfully.');
@ -135,6 +145,8 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
const handleEditOpen = () => {
setEditContent(activeResume.resume);
setEditSystemPrompt(activeResume.systemPrompt || '');
setEditPrompt(activeResume.prompt || '');
setEditDialogOpen(true);
};
@ -144,6 +156,7 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
const formatDate = (date: Date | undefined) => {
if (!date) return 'N/A';
try {
return new Intl.DateTimeFormat('en-US', {
month: 'short',
day: 'numeric',
@ -151,6 +164,52 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
hour: '2-digit',
minute: '2-digit',
}).format(date);
} catch (error) {
console.error('Error formatting date:', error);
return 'Invalid date';
}
};
const generateResumeHandlers: StreamingOptions<Types.ChatMessageResume> = {
onMessage: (message: Types.ChatMessageResume) => {
const resume: Resume = message.resume;
setEditSystemPrompt(resume.systemPrompt || '');
setEditPrompt(resume.prompt || '');
setEditContent(resume.resume);
setActiveResume({ ...resume });
setStatus('');
setSnack('Resume generation completed successfully.');
},
onStreaming: (chunk: Types.ChatMessageStreaming) => {
if (status === '') {
setStatus('Generating resume...');
setStatusType('generating');
}
setEditContent(chunk.content);
},
onStatus: (status: Types.ChatMessageStatus) => {
console.log('status:', status.content);
setStatusType(status.activity);
setStatus(status.content);
},
onError: (error: Types.ChatMessageError) => {
console.log('error:', error);
setStatusType(null);
setStatus(error.content);
setError(error);
},
};
const generateResume = async (): Promise<void> => {
setStatusType('thinking');
setStatus('Starting resume generation...');
setActiveResume({ ...activeResume, resume: '' }); // Reset resume content
const request = await apiClient.generateResume(
activeResume.candidateId || '',
activeResume.jobId || '',
generateResumeHandlers
);
await request.promise;
};
const handleTabChange = (event: React.SyntheticEvent, newValue: string) => {
@ -158,6 +217,12 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
reactToPrintFn();
return;
}
if (newValue === 'regenerate') {
// Handle resume regeneration logic here
setSnack('Regenerating resume...');
generateResume();
return;
}
setTabValue(newValue);
};
@ -281,22 +346,10 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
/>
<CardContent sx={{ p: 0 }}>
<Box sx={{ position: 'relative' }}>
<Typography
ref={contentRef}
variant="body2"
component="div"
<Scrollable sx={{ maxHeight: '10rem', overflowY: 'auto' }}>
<Box
sx={{
display: '-webkit-box',
WebkitLineClamp: isContentExpanded
? 'unset'
: variant === 'small'
? 5
: variant === 'minimal'
? 3
: 10,
WebkitBoxOrient: 'vertical',
overflow: 'hidden',
textOverflow: 'ellipsis',
display: 'flex',
lineHeight: 1.6,
fontSize: '0.875rem !important',
whiteSpace: 'pre-wrap',
@ -308,21 +361,8 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
}}
>
{activeResume.resume}
</Typography>
{shouldShowMoreButton && variant !== 'all' && (
<Box sx={{ display: 'flex', justifyContent: 'center', mt: 1 }}>
<Button
variant="text"
size="small"
onClick={() => setIsContentExpanded(!isContentExpanded)}
startIcon={isContentExpanded ? <VisibilityOffIcon /> : <VisibilityIcon />}
sx={{ fontSize: '0.75rem' }}
>
{isContentExpanded ? 'Show Less' : 'Show More'}
</Button>
</Box>
)}
</Scrollable>
</Box>
</CardContent>
</Card>
@ -451,14 +491,28 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
height: '100%',
}}
>
<Box sx={{ display: 'flex', flexDirection: 'row', height: '100%', gap: 1, pt: 1, width: '100%', position: 'relative', overflow: 'hidden' }}>
<Paper sx={{ p: 1, flex: 1, display: 'flex', flexDirection: 'column', position: 'relative', maxWidth: "100%", height: '100%', overflow: 'hidden' }}>
<Tabs value={tabValue} onChange={handleTabChange} centered>
<Tab value="markdown" icon={<EditDocumentIcon />} label="Markdown" />
{activeResume.systemPrompt && <Tab value="systemPrompt" icon={<TuneIcon />} label="System Prompt" />}
{activeResume.systemPrompt && <Tab value="prompt" icon={<InputIcon />} label="Prompt" />}
<Tab value="preview" icon={<PreviewIcon />} label="Preview" />
<Tab value="job" icon={<WorkIcon />} label="Job" />
<Tab value="print" icon={<PrintIcon />} label="Print" />
<Tab value="regenerate" icon={<ModelTraining />} label="Regenerate" />
</Tabs>
<Box
ref={printContentRef}
{status && (
<Box sx={{ mt: 0, mb: 1 }}>
<StatusBox>
{statusType && <StatusIcon type={statusType} />}
<Typography variant="body2" sx={{ ml: 1 }}>
{status || 'Processing...'}
</Typography>
</StatusBox>
{status && !error && <LinearProgress sx={{ mt: 1 }} />}
</Box>
)}
<Scrollable
sx={{
display: 'flex',
flexDirection: 'column',
@ -478,7 +532,7 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
onChange={value => setEditContent(value)}
style={{
position: 'relative',
// maxHeight: "100%",
maxHeight: "100%",
height: '100%',
width: '100%',
display: 'flex',
@ -491,14 +545,50 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
placeholder="Enter resume content..."
/>
)}
{tabValue === 'systemPrompt' && (
<BackstoryTextField
value={editSystemPrompt}
onChange={value => setEditSystemPrompt(value)}
style={{
position: 'relative',
maxHeight: "100%",
// height: '100%',
width: '100%',
display: 'flex',
minHeight: '100%',
flexGrow: 1,
flex: 1 /* Take remaining space in some-container */,
overflowY: 'auto' /* Scroll if content overflows */,
}}
placeholder="Edit system prompt..."
/>
)}
{tabValue === 'prompt' && (
<BackstoryTextField
value={editPrompt}
onChange={value => setEditPrompt(value)}
style={{
position: 'relative',
maxHeight: "100%",
height: '100%',
width: '100%',
display: 'flex',
minHeight: '100%',
flexGrow: 1,
flex: 1 /* Take remaining space in some-container */,
overflowY: 'auto' /* Scroll if content overflows */,
}}
placeholder="Edit prompt..."
/>
)}
{tabValue === 'preview' && (
<Box className="document-container">
<Box className="document-container" ref={printContentRef}>
<Box className="a4-document">
<StyledMarkdown
sx={{
position: 'relative',
maxHeight: '100%',
width: '100%',
display: 'flex',
flexGrow: 1,
flex: 1 /* Take remaining space in some-container */,
@ -510,22 +600,15 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
<Box sx={{ p: 2 }}>&nbsp;</Box>
</Box>
)}
{tabValue === 'job' && activeResume.job && (
<JobInfo
variant="all"
job={activeResume.job}
sx={{
p: 2,
position: 'relative',
maxHeight: '100%',
width: '100%',
display: 'flex',
flexGrow: 1,
flex: 1 /* Take remaining space in some-container */,
overflowY: 'auto' /* Scroll if content overflows */,
}}
/>
)}
</Scrollable>
</Paper>
<Scrollable sx={{ flex: 1, display: 'flex', height: '100%', overflowY: 'auto', position: 'relative' }}>
<Paper sx={{ p: 1, flex: 1, display: 'flex', flexDirection: 'column', position: 'relative', backgroundColor: "#f8f0e0" }}>
{activeResume.job !== undefined && <JobInfo variant={"all"} job={activeResume.job} sx={{
mt: 2, backgroundColor: "#f8f0e0", //theme.palette.background.paper,
}} />}
</Paper>
</Scrollable>
</Box>
</DialogContent>
<DialogActions>

View File

@ -699,18 +699,11 @@ class ApiClient {
}
saveResume(
candidate_id: string,
job_id: string,
resume: string,
resume: Types.Resume,
streamingOptions?: StreamingOptions<Types.ResumeMessage>
): StreamingResponse<Types.ResumeMessage> {
const body = JSON.stringify(resume);
return this.streamify<Types.ResumeMessage>(
`/resumes/${candidate_id}/${job_id}`,
body,
streamingOptions,
'Resume'
);
const body = JSON.stringify(formatApiRequest(resume));
return this.streamify<Types.ResumeMessage>(`/resumes`, body, streamingOptions, 'Resume');
}
// Additional API methods for Resume management
@ -810,11 +803,12 @@ class ApiClient {
return handleApiResponse<{ success: boolean; statistics: any }>(response);
}
async updateResume(resumeId: string, content: string): Promise<Types.Resume> {
const response = await fetch(`${this.baseUrl}/resumes/${resumeId}`, {
method: 'PUT',
async updateResume(resume: Types.Resume): Promise<Types.Resume> {
const body = JSON.stringify(formatApiRequest(resume));
const response = await fetch(`${this.baseUrl}/resumes`, {
method: 'PATCH',
headers: this.defaultHeaders,
body: JSON.stringify(content),
body: body,
});
return this.handleApiResponseWithConversion<Types.Resume>(response, 'Resume');
@ -1524,7 +1518,9 @@ class ApiClient {
case 'done':
const message = (
modelType ? convertFromApi<T>(incoming, modelType) : incoming
modelType
? convertFromApi<T>(parseApiResponse<T>(incoming), modelType)
: incoming
) as T;
finalMessage = message;
try {

View File

@ -1,6 +1,6 @@
// Generated TypeScript types from Pydantic models
// Source: src/backend/models.py
// Generated on: 2025-06-18T22:54:34.823060
// Generated on: 2025-06-19T22:17:35.101284
// DO NOT EDIT MANUALLY - This file is auto-generated
// ============================
@ -354,9 +354,7 @@ export interface ChatMessageResume {
content: string;
tunables?: Tunables;
metadata: ChatMessageMetaData;
resume: string;
systemPrompt?: string;
prompt?: string;
resume: Resume;
}
export interface ChatMessageSkillAssessment {
@ -976,6 +974,8 @@ export interface Resume {
jobId: string;
candidateId: string;
resume: string;
systemPrompt?: string;
prompt?: string;
createdAt?: Date;
updatedAt?: Date;
job?: Job;
@ -1377,6 +1377,7 @@ export function convertChatMessageRagSearchFromApi(data: any): ChatMessageRagSea
/**
* Convert ChatMessageResume from API response
* Date fields: timestamp
* Nested models: resume (Resume)
*/
export function convertChatMessageResumeFromApi(data: any): ChatMessageResume {
if (!data) return data;
@ -1385,6 +1386,8 @@ export function convertChatMessageResumeFromApi(data: any): ChatMessageResume {
...data,
// Convert timestamp from ISO string to Date
timestamp: data.timestamp ? new Date(data.timestamp) : undefined,
// Convert nested Resume model
resume: convertResumeFromApi(data.resume),
};
}
/**

View File

@ -19,6 +19,7 @@ from models import (
ChatMessageError,
ChatMessageResume,
ChatMessageStatus,
Resume,
SkillAssessment,
SkillStrength,
)
@ -63,15 +64,12 @@ class GenerateResume(Agent):
if skill and strength in skills_by_strength:
skills_by_strength[strength].append(skill)
# Collect experience evidence
if skill not in experience_evidence:
experience_evidence[skill] = []
# Collect experience evidence, grouped by skill
for evidence in assessment.evidence_details:
source = evidence.source
if source:
if source not in experience_evidence:
experience_evidence[source] = []
experience_evidence[source].append(
{"skill": skill, "quote": evidence.quote, "context": evidence.context}
experience_evidence[skill].append(
{"source": evidence.source, "quote": evidence.quote, "context": evidence.context}
)
# Build the system prompt
@ -91,21 +89,21 @@ Phone: {self.user.phone or 'N/A'}
system_prompt += f"""\
### Strong Skills (prominent in resume):
{", ".join(skills_by_strength[SkillStrength.STRONG])}
* {".\n* ".join(skills_by_strength[SkillStrength.STRONG])}
"""
if len(skills_by_strength[SkillStrength.MODERATE]):
system_prompt += f"""\
### Moderate Skills (demonstrated in resume):
{", ".join(skills_by_strength[SkillStrength.MODERATE])}
* {".\n* ".join(skills_by_strength[SkillStrength.MODERATE])}
"""
if len(skills_by_strength[SkillStrength.WEAK]):
system_prompt += f"""\
### Weaker Skills (mentioned or implied):
{", ".join(skills_by_strength[SkillStrength.WEAK])}
* {".\n* ".join(skills_by_strength[SkillStrength.WEAK])}
"""
system_prompt += """\
@ -114,10 +112,19 @@ Phone: {self.user.phone or 'N/A'}
"""
# Add experience evidence by source/position
for source, evidences in experience_evidence.items():
system_prompt += f"\n### {source}:\n"
for skill, evidences in experience_evidence.items():
system_prompt += f"\n### {skill}:\n"
last_source = None
index = 0
sub_index = 1
for evidence in evidences:
system_prompt += f"- {evidence['skill']}: {evidence['context']}\n"
if last_source != evidence['source']:
index += 1
last_source = evidence['source']
system_prompt += f"{index}. Source: {last_source}:\n"
sub_index = 1
system_prompt += f" {index}.{sub_index}. Quote: \"{evidence['quote']}\"\n Evidence: {evidence['context']}\n"
sub_index += 1
# Add instructions for the resume creation
system_prompt += """\
@ -132,8 +139,8 @@ When sections lack data, output "Information not provided" or use placeholder te
2. Format the resume in a clean, concise, and modern style that will pass ATS systems.
3. Include these sections:
- Professional Summary (highlight strongest skills and experience level)
- Skills (organized by strength, under a single section). When listing skills, rephrase them so they are not identical to the original assessment.
- Professional Experience (focus on achievements and evidence of the skill)
- Skills (organized by strength, under a single section). When listing skills, rephrase them so they are not identical to the original assessment. Do not list the strengths explicitly, but rather integrate them into the skills section.
- Professional Experience (focus on achievements and evidence of the skill.) For the skills listed, identify content from the EXPERIENCE EVIDENCE and summarize experiences with specific details achievements where possible, ordering by date and job.
4. Optional sections, to include only if evidence is present:
- Education section
Certifications section
@ -169,6 +176,11 @@ Format it in clean, ATS-friendly markdown. Provide ONLY the resume with no comme
async def generate_resume(
self, llm: Any, model: str, session_id: str, skills: List[SkillAssessment]
) -> AsyncGenerator[ApiMessage, None]:
if not self.user:
error_message = ChatMessageError(session_id=session_id, content="User must be set before generating resume.")
logger.error(f"⚠️ {error_message.content}")
yield error_message
return
# Stage 1A: Analyze job requirements
status_message = ChatMessageStatus(
session_id=session_id, content="Analyzing job requirements", activity=ApiActivityType.THINKING
@ -208,9 +220,13 @@ Format it in clean, ATS-friendly markdown. Provide ONLY the resume with no comme
status=ApiStatusType.DONE,
content="Resume generation completed successfully.",
metadata=generated_message.metadata,
resume=Resume(
job_id="N/A",
candidate_id=self.user.id,
resume=generated_message.content,
prompt=prompt,
system_prompt=system_prompt,
system_prompt=system_prompt
)
)
yield resume_message
logger.info("✅ Resume generation completed successfully.")

View File

@ -39,24 +39,25 @@ class JobRequirementsAgent(Agent):
logger.info(f"{self.agent_type} - {inspect.stack()[0].function}")
system_prompt = """
You are an objective job requirements analyzer. Your task is to extract and categorize the specific skills,
experiences, and qualifications required in a job description WITHOUT any reference to any candidate.
experiences, and qualifications required in a job description.
## INSTRUCTIONS:
1. Analyze ONLY the job description provided.
1. Analyze ONLY the <|job_description|> provided, and provide only requirements from that description.
2. Extract company information, job title, and all requirements.
3. If a requirement is compound (e.g., "5+ years experience with React, Node.js and MongoDB" or "FastAPI/Django/React"), break it down into individual components.
4. Categorize requirements into:
3. If a requirement can be broken into multiple requirements, do so.
4. Categorize each requirement into one and only one of the following categories:
- Technical skills (required and preferred)
- Experience requirements (required and preferred)
- Education requirements
- Soft skills
- Industry knowledge
- Responsibilities
- Soft skills (e.g., "excellent communication skills")
- Experience (e.g., "5+ years in software development")
- Eduction
- Certifications (e.g., "AWS Certified Solutions Architect")
- Preferred attributes (e.g., "team player", "self-motivated")
- Company values
5. Extract and categorize all requirements and preferences.
6. DO NOT consider any candidate information - this is a pure job analysis task.
7. Provide the output in a structured JSON format as specified below.
6. Provide the output in a structured JSON format as specified below.
7. If there are no requirements in a category, leave it as an empty list.
## OUTPUT FORMAT:
@ -85,17 +86,10 @@ experiences, and qualifications required in a job description WITHOUT any refere
```
Be specific and detailed in your extraction.
If a requirement can be broken down into several separate requirements, split them.
For example, the technical_skill of "Python/Django/FastAPI" should be separated into different requirements: Python, Django, and FastAPI.
For example, if the job description mentions: "Python/Django/FastAPI", you should extract it as:
"technical_skills": { "required": [ "Python", "Django", "FastAPI" ] },
Avoid vague categorizations and be precise about whether skills are explicitly required or just preferred.
"""
prompt = f"Job Description:\n{job_description}"
prompt = f"<|job_description|>\n{job_description}\n</|job_description|>\n"
return system_prompt, prompt
async def analyze_job_requirements(

View File

@ -5,7 +5,7 @@ from redis.asyncio import Redis
if TYPE_CHECKING:
pass
from models import SkillAssessment
from models import Resume, SkillAssessment
class DatabaseProtocol(Protocol):
@ -172,9 +172,6 @@ class DatabaseProtocol(Protocol):
async def delete_all_candidate_documents(self, candidate_id: str) -> int:
...
async def delete_all_resumes_for_user(self, user_id: str) -> int:
...
async def delete_authentication(self, user_id: str) -> bool:
...
@ -190,8 +187,6 @@ class DatabaseProtocol(Protocol):
async def delete_job(self, job_id: str):
...
async def delete_resume(self, user_id: str, resume_id: str) -> bool:
...
async def delete_viewer(self, viewer_id: str):
...
@ -229,12 +224,6 @@ class DatabaseProtocol(Protocol):
async def get_all_jobs(self) -> Dict[str, Any]:
...
async def get_all_resumes_for_user(self, user_id: str) -> List[Dict]:
...
async def get_all_resumes(self) -> Dict[str, List[Dict]]:
...
async def get_authentication(self, user_id: str) -> Optional[Dict[str, Any]]:
...
@ -304,13 +293,34 @@ class DatabaseProtocol(Protocol):
async def get_refresh_token(self, token: str) -> Optional[Dict[str, Any]]:
...
async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Dict]:
async def search_resumes_for_user(self, user_id: str, query: str) -> List[Resume]:
...
async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Dict]:
async def set_resume(self, user_id: str, resume_data: Dict) -> bool:
...
async def get_resume(self, user_id: str, resume_id: str) -> Optional[Dict]:
async def delete_all_resumes_for_user(self, user_id: str) -> int:
...
async def get_all_resumes_for_user(self, user_id: str) -> List[Dict]:
...
async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Resume]:
...
async def delete_resume(self, user_id: str, resume_id: str) -> bool:
...
async def get_all_resumes(self) -> Dict[str, List[Dict]]:
...
async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Resume]:
...
async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Resume]:
...
async def get_resume(self, user_id: str, resume_id: str) -> Optional[Resume]:
...
async def get_resume_statistics(self, user_id: str) -> Dict[str, Any]:
@ -364,9 +374,6 @@ class DatabaseProtocol(Protocol):
async def search_chat_messages(self, session_id: str, query: str) -> List[Dict]:
...
async def search_resumes_for_user(self, user_id: str, query: str) -> List[Dict]:
...
async def set_ai_parameters(self, param_id: str, param_data: Dict):
...
@ -388,9 +395,6 @@ class DatabaseProtocol(Protocol):
async def set_job(self, job_id: str, job_data: Dict):
...
async def set_resume(self, user_id: str, resume_data: Dict) -> bool:
...
async def set_viewer(self, viewer_id: str, viewer_data: Dict):
...
@ -414,5 +418,3 @@ class DatabaseProtocol(Protocol):
async def update_document(self, document_id: str, updates: Dict) -> Dict[Any, Any] | None:
...
async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Dict]:
...

View File

@ -2,6 +2,8 @@ from datetime import UTC, datetime
import logging
from typing import Any, Dict, List, Optional
from models import Resume
from .protocols import DatabaseProtocol
from ..constants import KEY_PREFIXES
@ -14,27 +16,23 @@ class ResumeMixin(DatabaseProtocol):
async def set_resume(self, user_id: str, resume_data: Dict) -> bool:
"""Save a resume for a user"""
try:
# Generate resume_id if not present
if "id" not in resume_data:
raise ValueError("Resume data must include an 'id' field")
resume_id = resume_data["id"]
resume = Resume.model_validate(resume_data)
# Store the resume data
key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume_id}"
key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume.id}"
await self.redis.set(key, self._serialize(resume_data))
# Add resume_id to user's resume list
user_resumes_key = f"{KEY_PREFIXES['user_resumes']}{user_id}"
await self.redis.rpush(user_resumes_key, resume_id) # type: ignore
await self.redis.rpush(user_resumes_key, resume.id) # type: ignore
logger.info(f"📄 Saved resume {resume_id} for user {user_id}")
logger.info(f"📄 Saved resume {resume.id} for user {user_id}")
return True
except Exception as e:
logger.error(f"❌ Error saving resume for user {user_id}: {e}")
return False
async def get_resume(self, user_id: str, resume_id: str) -> Optional[Dict]:
async def get_resume(self, user_id: str, resume_id: str) -> Optional[Resume]:
"""Get a specific resume for a user"""
try:
key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume_id}"
@ -42,7 +40,7 @@ class ResumeMixin(DatabaseProtocol):
if data:
resume_data = self._deserialize(data)
logger.info(f"📄 Retrieved resume {resume_id} for user {user_id}")
return resume_data
return Resume.model_validate(resume_data)
logger.info(f"📄 Resume {resume_id} not found for user {user_id}")
return None
except Exception as e:
@ -178,7 +176,7 @@ class ResumeMixin(DatabaseProtocol):
logger.error(f"❌ Error retrieving all resumes: {e}")
return {}
async def search_resumes_for_user(self, user_id: str, query: str) -> List[Dict]:
async def search_resumes_for_user(self, user_id: str, query: str) -> List[Resume]:
"""Search resumes for a user by content, job title, or candidate name"""
try:
all_resumes = await self.get_all_resumes_for_user(user_id)
@ -200,16 +198,16 @@ class ResumeMixin(DatabaseProtocol):
matching_resumes.append(resume)
logger.info(f"📄 Found {len(matching_resumes)} matching resumes for user {user_id}")
return matching_resumes
return [Resume.model_validate(resume) for resume in matching_resumes]
except Exception as e:
logger.error(f"❌ Error searching resumes for user {user_id}: {e}")
return []
async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Dict]:
async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Resume]:
"""Get all resumes for a specific candidate created by a user"""
try:
all_resumes = await self.get_all_resumes_for_user(user_id)
candidate_resumes = [resume for resume in all_resumes if resume.get("candidate_id") == candidate_id]
candidate_resumes = [Resume.model_validate(resume) for resume in all_resumes if resume.get("candidate_id") == candidate_id]
logger.info(f"📄 Found {len(candidate_resumes)} resumes for candidate {candidate_id} by user {user_id}")
return candidate_resumes
@ -217,11 +215,11 @@ class ResumeMixin(DatabaseProtocol):
logger.error(f"❌ Error retrieving resumes for candidate {candidate_id} by user {user_id}: {e}")
return []
async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Dict]:
async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Resume]:
"""Get all resumes for a specific job created by a user"""
try:
all_resumes = await self.get_all_resumes_for_user(user_id)
job_resumes = [resume for resume in all_resumes if resume.get("job_id") == job_id]
job_resumes = [Resume.model_validate(resume) for resume in all_resumes if resume.get("job_id") == job_id]
logger.info(f"📄 Found {len(job_resumes)} resumes for job {job_id} by user {user_id}")
return job_resumes
@ -274,19 +272,20 @@ class ResumeMixin(DatabaseProtocol):
"recent_resumes": [],
}
async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Dict]:
async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Resume]:
"""Update specific fields of a resume"""
try:
resume_data = await self.get_resume(user_id, resume_id)
if resume_data:
resume_data.update(updates)
resume_data["updated_at"] = datetime.now(UTC).isoformat()
resume_dict = resume_data.model_dump()
resume_dict.update(updates)
resume_dict["updated_at"] = datetime.now(UTC).isoformat()
key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume_id}"
await self.redis.set(key, self._serialize(resume_data))
await self.redis.set(key, self._serialize(resume_dict))
logger.info(f"📄 Updated resume {resume_id} for user {user_id}")
return resume_data
return Resume.model_validate(resume_dict)
return None
except Exception as e:
logger.error(f"❌ Error updating resume {resume_id} for user {user_id}: {e}")

View File

@ -1090,26 +1090,24 @@ class ChatMessageSkillAssessment(ChatMessageUser):
skill_assessment: SkillAssessment = Field(..., alias=str("skillAssessment"))
class ChatMessageResume(ChatMessageUser):
role: ChatSenderType = ChatSenderType.ASSISTANT
metadata: ChatMessageMetaData = Field(default=ChatMessageMetaData())
resume: str = Field(..., alias=str("resume"))
system_prompt: Optional[str] = Field(default=None, alias=str("systemPrompt"))
prompt: Optional[str] = Field(default=None, alias=str("prompt"))
model_config = ConfigDict(populate_by_name=True)
class Resume(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
job_id: str = Field(..., alias=str("jobId"))
candidate_id: str = Field(..., alias=str("candidateId"))
resume: str = Field(..., alias=str("resume"))
resume: str
system_prompt: Optional[str] = Field(default=None)
prompt: Optional[str] = Field(default=None)
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias=str("createdAt"))
updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias=str("updatedAt"))
job: Optional[Job] = None
candidate: Optional[Candidate] = None
model_config = ConfigDict(populate_by_name=True)
class ChatMessageResume(ChatMessageUser):
role: ChatSenderType = ChatSenderType.ASSISTANT
metadata: ChatMessageMetaData = Field(default=ChatMessageMetaData())
resume: Resume
model_config = ConfigDict(populate_by_name=True)
class ResumeMessage(ChatMessageUser):
role: ChatSenderType = ChatSenderType.ASSISTANT

View File

@ -1850,6 +1850,7 @@ async def generate_resume(
return
resume: ChatMessageResume = final_message
resume.resume.job_id = job.id
yield resume
return

View File

@ -17,7 +17,7 @@ from markitdown import MarkItDown, StreamInfo
import backstory_traceback as backstory_traceback
import defines
from agents.base import CandidateEntity
from utils.helpers import create_job_from_content, filter_and_paginate, get_document_type_from_filename
from utils.helpers import filter_and_paginate, get_document_type_from_filename
from database.manager import RedisDatabase
from logger import logger
from models import (
@ -67,7 +67,8 @@ async def reformat_as_markdown(database: RedisDatabase, candidate_entity: Candid
prompt=content,
system_prompt="""
You are a document editor. Take the provided job description and reformat as legible markdown.
Return only the markdown content, no other text. Make sure all content is included.
Return only the markdown content, no other text. Make sure all content is included. If the
content is already in markdown format, return it as is.
""",
):
pass

View File

@ -2,8 +2,8 @@
Resume Routes
"""
import json
from datetime import datetime, UTC
from typing import List
import uuid
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query
from fastapi.responses import StreamingResponse
@ -18,37 +18,39 @@ from utils.responses import create_success_response
# Create router for authentication endpoints
router = APIRouter(prefix="/resumes", tags=["resumes"])
@router.post("/{candidate_id}/{job_id}")
@router.post("")
async def create_candidate_resume(
candidate_id: str = Path(..., description="ID of the candidate"),
job_id: str = Path(..., description="ID of the job"),
resume_content: str = Body(...),
resume: Resume = Body(...),
current_user=Depends(get_current_user),
database: RedisDatabase = Depends(get_database),
):
"""Create a new resume for a candidate/job combination"""
async def message_stream_generator():
logger.info(f"🔍 Looking up candidate and job details for {candidate_id}/{job_id}")
# Ignore the resume ID if provided, generate a new one
resume.id = str(uuid.uuid4())
candidate_data = await database.get_candidate(candidate_id)
logger.info(f"📝 Creating resume for candidate {resume.candidate_id} for job {resume.job_id}")
async def message_stream_generator():
logger.info(f"🔍 Looking up candidate and job details for {resume.candidate_id}/{resume.job_id}")
candidate_data = await database.get_candidate(resume.candidate_id)
if not candidate_data:
logger.error(f"❌ Candidate with ID '{candidate_id}' not found")
logger.error(f"❌ Candidate with ID '{resume.candidate_id}' not found")
error_message = ChatMessageError(
session_id=MOCK_UUID, # No session ID for document uploads
content=f"Candidate with ID '{candidate_id}' not found",
content=f"Candidate with ID '{resume.candidate_id}' not found",
)
yield error_message
return
candidate = Candidate.model_validate(candidate_data)
job_data = await database.get_job(job_id)
job_data = await database.get_job(resume.job_id)
if not job_data:
logger.error(f"❌ Job with ID '{job_id}' not found")
logger.error(f"❌ Job with ID '{resume.job_id}' not found")
error_message = ChatMessageError(
session_id=MOCK_UUID, # No session ID for document uploads
content=f"Job with ID '{job_id}' not found",
content=f"Job with ID '{resume.job_id}' not found",
)
yield error_message
return
@ -58,12 +60,6 @@ async def create_candidate_resume(
f"📄 Saving resume for candidate {candidate.first_name} {candidate.last_name} for job '{job.title}'"
)
# Job and Candidate are valid. Save the resume
resume = Resume(
job_id=job_id,
candidate_id=candidate_id,
resume=resume_content,
)
resume_message: ResumeMessage = ResumeMessage(
session_id=MOCK_UUID, # No session ID for document uploads
resume=resume,
@ -240,29 +236,27 @@ async def get_resume_statistics(
logger.error(f"❌ Error retrieving resume statistics for user {current_user.id}: {e}")
raise HTTPException(status_code=500, detail="Failed to retrieve resume statistics")
@router.put("/{resume_id}")
@router.patch("")
async def update_resume(
resume_id: str = Path(..., description="ID of the resume"),
resume: str = Body(..., description="Updated resume content"),
resume: Resume = Body(...),
current_user=Depends(get_current_user),
database: RedisDatabase = Depends(get_database),
):
"""Update the content of a specific resume"""
try:
updates = {"resume": resume, "updated_at": datetime.now(UTC).isoformat()}
updated_resume_data = await database.update_resume(current_user.id, resume_id, updates)
updates = resume.model_dump()
updated_resume_data = await database.update_resume(current_user.id, resume.id, updates)
if not updated_resume_data:
logger.warning(f"⚠️ Resume {resume_id} not found for user {current_user.id}")
logger.warning(f"⚠️ Resume {resume.id} not found for user {current_user.id}")
raise HTTPException(status_code=404, detail="Resume not found")
updated_resume = Resume.model_validate(updated_resume_data) if updated_resume_data else None
if not updated_resume:
logger.warning(f"⚠️ Resume {resume.id} could not be updated for user {current_user.id}")
raise HTTPException(status_code=400, detail="Failed to update resume")
return create_success_response(updated_resume.model_dump(by_alias=True))
return create_success_response(
{"success": True, "message": f"Resume {resume_id} updated successfully", "resume": updated_resume}
)
except HTTPException:
raise
except Exception as e:
logger.error(f"❌ Error updating resume {resume_id} for user {current_user.id}: {e}")
logger.error(f"❌ Error updating resume {resume.id} for user {current_user.id}: {e}")
raise HTTPException(status_code=500, detail="Failed to update resume")

View File

@ -13,10 +13,8 @@ from fastapi.responses import StreamingResponse
import defines
from logger import logger
from models import DocumentType
from models import Job, ChatMessage, DocumentType, ApiStatusType
from models import Job, ChatMessage, ApiStatusType
from typing import List, Dict
from models import Job
import utils.llm_proxy as llm_manager

View File

@ -230,7 +230,7 @@ class OllamaAdapter(BaseLLMAdapter):
else:
response = await self.client.chat(model=model, messages=ollama_messages, stream=False, **kwargs)
usage_stats = self._create_usage_stats(response)
usage_stats = self._create_usage_stats(response.model_dump())
return ChatResponse(
content=response["message"]["content"],
@ -267,7 +267,7 @@ class OllamaAdapter(BaseLLMAdapter):
else:
response = await self.client.generate(model=model, prompt=prompt, stream=False, **kwargs)
usage_stats = self._create_usage_stats(response)
usage_stats = self._create_usage_stats(response.model_dump())
return ChatResponse(
content=response["response"], model=model, finish_reason=response.get("done_reason"), usage=usage_stats
@ -312,7 +312,7 @@ class OllamaAdapter(BaseLLMAdapter):
# Create usage stats if available from the last response
usage_stats = None
if final_response and len(results) == 1:
usage_stats = self._create_usage_stats(final_response)
usage_stats = self._create_usage_stats(final_response.model_dump())
return EmbeddingResponse(data=results, model=model, usage=usage_stats)
@ -326,7 +326,7 @@ class OpenAIAdapter(BaseLLMAdapter):
def __init__(self, **config):
super().__init__(**config)
import openai
import openai # type: ignore
self.client = openai.AsyncOpenAI(api_key=config.get("api_key", os.getenv("OPENAI_API_KEY")))
@ -425,7 +425,7 @@ class AnthropicAdapter(BaseLLMAdapter):
def __init__(self, **config):
super().__init__(**config)
import anthropic
import anthropic # type: ignore
self.client = anthropic.AsyncAnthropic(api_key=config.get("api_key", os.getenv("ANTHROPIC_API_KEY")))
@ -524,7 +524,7 @@ class GeminiAdapter(BaseLLMAdapter):
def __init__(self, **config):
super().__init__(**config)
import google.generativeai as genai
import google.generativeai as genai # type: ignore
genai.configure(api_key=config.get("api_key", os.getenv("GEMINI_API_KEY")))
self.genai = genai
@ -720,7 +720,8 @@ class UnifiedLLMProxy:
if stream is False:
raise ValueError("stream must be True for chat_stream")
result = await self.chat(model, messages, provider, stream=True, **kwargs)
# Type checker now knows this is an AsyncGenerator due to stream=True
if isinstance(result, ChatResponse):
raise RuntimeError("Expected AsyncGenerator, got ChatResponse")
async for chunk in result:
yield chunk
@ -734,7 +735,8 @@ class UnifiedLLMProxy:
"""Get single chat response using specified or default provider"""
result = await self.chat(model, messages, provider, stream=False, **kwargs)
# Type checker now knows this is a ChatResponse due to stream=False
if not isinstance(result, ChatResponse):
raise RuntimeError("Expected ChatResponse, got AsyncGenerator")
return result
async def generate(
@ -753,6 +755,8 @@ class UnifiedLLMProxy:
"""Stream text generation using specified or default provider"""
result = await self.generate(model, prompt, provider, stream=True, **kwargs)
if isinstance(result, ChatResponse):
raise RuntimeError("Expected AsyncGenerator, got ChatResponse")
async for chunk in result:
yield chunk
@ -762,6 +766,8 @@ class UnifiedLLMProxy:
"""Get single generation response using specified or default provider"""
result = await self.generate(model, prompt, provider, stream=False, **kwargs)
if not isinstance(result, ChatResponse):
raise RuntimeError("Expected ChatResponse, got AsyncGenerator")
return result
async def embeddings(