commit 9c9578cc46
parent c643b0d8f8

Improved resume generation by reordering context
@@ -123,6 +123,29 @@ services:
     networks:
       - internal

+  ollama-intel:
+    image: intelanalytics/ipex-llm-inference-cpp-xpu:latest
+    container_name: ollama
+    restart: unless-stopped
+    env_file:
+      - .env
+    devices:
+      - /dev/dri:/dev/dri
+    volumes:
+      - ./cache:/root/.cache # Cache hub models and neo_compiler_cache
+      - ./ollama:/root/.ollama # Cache the ollama models
+    ports:
+      - 11434:11434
+    environment:
+      - OLLAMA_HOST=0.0.0.0
+      - DEVICE=Arc
+      - OLLAMA_INTEL_GPU=true
+      - OLLAMA_NUM_GPU=999
+      - ZES_ENABLE_SYSMAN=1
+      - ONEAPI_DEVICE_SELECTOR=level_zero:0
+      - TZ=America/Los_Angeles
+    command: sh -c 'mkdir -p /llm/ollama && cd /llm/ollama && init-ollama && exec ./ollama serve'
+
   ollama:
     build:
       context: .
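For reference, a minimal sketch of how client code might call the Ollama server this service exposes on port 11434. The host, model name, and top-level-await usage are assumptions for illustration, not part of this commit; `/api/generate` is Ollama's standard generation endpoint.

```typescript
// Minimal sketch: query the Ollama server exposed on port 11434 above.
// Host and model name are assumptions, not values from this commit.
async function generateWithOllama(prompt: string): Promise<string> {
  const response = await fetch('http://localhost:11434/api/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // stream: false returns a single JSON object instead of NDJSON chunks
    body: JSON.stringify({ model: 'llama3', prompt, stream: false }),
  });
  if (!response.ok) {
    throw new Error(`Ollama request failed: ${response.status}`);
  }
  const data = (await response.json()) as { response: string };
  return data.response;
}

// Example usage (assuming an ES module with top-level await):
// console.log(await generateWithOllama('Say hello'));
```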
@@ -12,6 +12,7 @@ import { StatusBox, StatusIcon } from './ui/StatusIcon';
 import { CopyBubble } from './CopyBubble';
 import { useAppState } from 'hooks/GlobalContext';
 import { StreamingOptions } from 'services/api-client';
+import { Navigate, useNavigate } from 'react-router-dom';

 interface ResumeGeneratorProps {
   job: Job;
@@ -23,6 +24,7 @@ interface ResumeGeneratorProps {
 const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorProps) => {
   const { job, candidate, skills, onComplete } = props;
   const { setSnack } = useAppState();
+  const navigate = useNavigate();
   const { apiClient, user } = useAuth();
   const [resume, setResume] = useState<string>('');
   const [prompt, setPrompt] = useState<string>('');
@@ -49,9 +51,10 @@ const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorProps) => {

   const generateResumeHandlers: StreamingOptions<Types.ChatMessageResume> = {
     onMessage: (message: Types.ChatMessageResume) => {
-      setSystemPrompt(message.systemPrompt || '');
-      setPrompt(message.prompt || '');
-      setResume(message.resume || '');
+      const resume: Types.Resume = message.resume;
+      setSystemPrompt(resume.systemPrompt || '');
+      setPrompt(resume.prompt || '');
+      setResume(resume.resume || '');
       setStatus('');
     },
     onStreaming: (chunk: Types.ChatMessageStreaming) => {
@@ -115,10 +118,18 @@ const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorProps) => {
       setSnack('Candidate or job ID is missing.');
       return;
     }
-    const controller = apiClient.saveResume(candidate.id, job.id, resume);
+    const submission: Types.Resume = {
+      jobId: job.id,
+      candidateId: candidate.id,
+      resume,
+      systemPrompt,
+      prompt,
+    };
+    const controller = apiClient.saveResume(submission);
     const result = await controller.promise;
     if (result.resume.id) {
       setSnack('Resume saved successfully!');
+      navigate(`/candidate/resumes/${result.resume.id}`);
     }
   } catch (error) {
     console.error('Error saving resume:', error);
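The change above stops passing candidate and job IDs as separate arguments and instead posts a full resume payload. A hedged sketch of the new call shape, using a local mirror of the fields rather than the real generated `Types.Resume` interface; the `apiClient`, `job`, and `candidate` names are assumed from the surrounding component.

```typescript
// Sketch only: a local mirror of the fields the new saveResume() call sends.
// The real app uses the generated Types.Resume interface from 'types/types'.
interface ResumeSubmission {
  jobId: string;
  candidateId: string;
  resume: string;
  systemPrompt?: string;
  prompt?: string;
}

function buildResumeSubmission(
  jobId: string,
  candidateId: string,
  resume: string,
  systemPrompt?: string,
  prompt?: string
): ResumeSubmission {
  // The payload carries the prompts alongside the generated markdown so the
  // backend can persist the full generation context.
  return { jobId, candidateId, resume, systemPrompt, prompt };
}

// Usage inside handleSave (assumed names):
// const controller = apiClient.saveResume(buildResumeSubmission(job.id, candidate.id, resume, systemPrompt, prompt));
// const result = await controller.promise;
```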
@@ -177,7 +188,7 @@ const ResumeGenerator: React.FC<ResumeGeneratorProps> = (props: ResumeGeneratorProps) => {

       {resume && !status && !error && (
         <Button onClick={handleSave} variant="contained" color="primary" sx={{ mt: 2 }}>
-          Save Resume
+          Save Resume and Edit
         </Button>
       )}
     </Box>
@@ -44,9 +44,8 @@ interface JobInfoProps {

 const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
   const { setSnack } = useAppState();
-  const { job } = props;
   const { user, apiClient } = useAuth();
-  const { sx, action = '', elevation = 1, variant = 'normal' } = props;
+  const { sx, action = '', elevation = 1, variant = 'normal', job } = props;
   const theme = useTheme();
   const isMobile = useMediaQuery(theme.breakpoints.down('md')) || variant === 'minimal';
   const isAdmin = user?.isAdmin;
@@ -236,6 +235,7 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {

   return (
     <Box
+      className="JobInfo"
       sx={{
         display: 'flex',
         borderColor: 'transparent',
@@ -1,4 +1,9 @@
+
 /* A4 Portrait simulation for MuiMarkdown */
+.a4-document .MuiTypography-root {
+  font-family: 'Roboto', 'Times New Roman', serif;
+}
+
 .a4-document {
   /* A4 dimensions: 210mm x 297mm */
   width: 210mm;
@@ -10,12 +15,12 @@

   /* Document styling */
   background: white;
-  padding: 8mm; /* 1/4" margins all around */
+  padding: 12mm; /* 1/4" margins all around */
   box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
   border: 1px solid #e0e0e0;

   /* Typography for document feel */
-  font-family: 'Times New Roman', serif;
+  font-family: 'Roboto', 'Times New Roman', serif;
   font-size: 12pt;
   line-height: 1.6;
   color: #333;
@@ -23,16 +28,12 @@
   /* Page break lines - repeating dotted lines every A4 height */
   background-image:
     repeating-linear-gradient(
-      transparent,
-      transparent calc(8mm - 1px),
-      #00f calc(8mm),
-      transparent calc(8mm + 1px),
-      transparent calc(288mm - 1px), /* 297mm - 8mm top/bottom margins */
-      #00f calc(288mm),
-      transparent calc(288mm + 1px),
-      transparent calc(296mm - 1px),
-      #000 calc(296mm),
-      transparent calc(296mm + 1px)
+      #ddd,
+      #ddd 12mm,
+      transparent calc(12mm + 1px),
+      transparent calc(285mm - 1px), /* 297mm - 8mm top/bottom margins */
+      #ddd calc(285mm),
+      #ddd 297mm
     );
   background-size: 100% 297mm;
   background-repeat: repeat-y;
@@ -26,6 +26,7 @@ import {
   DialogActions,
   Tabs,
   Tab,
+  Paper,
 } from '@mui/material';
 import PrintIcon from '@mui/icons-material/Print';
 import {
@@ -39,7 +40,10 @@ import {
   Schedule as ScheduleIcon,
   Visibility as VisibilityIcon,
   VisibilityOff as VisibilityOffIcon,
+  ModelTraining,
 } from '@mui/icons-material';
+import InputIcon from '@mui/icons-material/Input';
+import TuneIcon from '@mui/icons-material/Tune';
 import PreviewIcon from '@mui/icons-material/Preview';
 import EditDocumentIcon from '@mui/icons-material/EditDocument';

@@ -52,6 +56,10 @@ import { Resume } from 'types/types';
 import { BackstoryTextField } from 'components/BackstoryTextField';
 import { JobInfo } from './JobInfo';
 import './ResumeInfo.css';
+import { Scrollable } from 'components/Scrollable';
+import * as Types from 'types/types';
+import { StreamingOptions } from 'services/api-client';
+import { StatusBox, StatusIcon } from './StatusIcon';

 interface ResumeInfoProps {
   resume: Resume;
@@ -70,15 +78,18 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
   const isMobile = useMediaQuery(theme.breakpoints.down('md')) || variant === 'minimal';
   const isAdmin = user?.isAdmin;
   const [activeResume, setActiveResume] = useState<Resume>({ ...resume });
-  const [isContentExpanded, setIsContentExpanded] = useState(false);
-  const [shouldShowMoreButton, setShouldShowMoreButton] = useState(false);
   const [deleted, setDeleted] = useState<boolean>(false);
   const [editDialogOpen, setEditDialogOpen] = useState<boolean>(false);
   const [printDialogOpen, setPrintDialogOpen] = useState<boolean>(false);
   const [editContent, setEditContent] = useState<string>('');
+  const [editSystemPrompt, setEditSystemPrompt] = useState<string>('');
+  const [editPrompt, setEditPrompt] = useState<string>('');
   const [saving, setSaving] = useState<boolean>(false);
-  const contentRef = useRef<HTMLDivElement>(null);
   const [tabValue, setTabValue] = useState('markdown');
+  const [status, setStatus] = useState<string>('');
+  const [statusType, setStatusType] = useState<Types.ApiActivityType | null>(null);
+  const [error, setError] = useState<Types.ChatMessageError | null>(null);
+
   const printContentRef = useRef<HTMLDivElement>(null);
   const reactToPrintFn = useReactToPrint({
     contentRef: printContentRef,
@@ -92,13 +103,6 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
   }, [resume, activeResume]);

   // Check if content needs truncation
-  useEffect(() => {
-    if (contentRef.current && resume.resume) {
-      const element = contentRef.current;
-      setShouldShowMoreButton(element.scrollHeight > element.clientHeight);
-    }
-  }, [resume.resume]);
-
   const deleteResume = async (id: string | undefined) => {
     if (id) {
       try {
@@ -118,11 +122,17 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
   const handleSave = async () => {
     setSaving(true);
     try {
-      const result = await apiClient.updateResume(activeResume.id || '', editContent);
-      const updatedResume = {
+      const resumeUpdate = {
         ...activeResume,
         resume: editContent,
-        updatedAt: new Date(),
+        systemPrompt: editSystemPrompt,
+        prompt: editPrompt,
+      };
+      const result = await apiClient.updateResume(resumeUpdate);
+      console.log('Resume updated:', result);
+      const updatedResume = {
+        ...activeResume,
+        ...result
       };
       setActiveResume(updatedResume);
       setSnack('Resume updated successfully.');
@@ -135,6 +145,8 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {

   const handleEditOpen = () => {
     setEditContent(activeResume.resume);
+    setEditSystemPrompt(activeResume.systemPrompt || '');
+    setEditPrompt(activeResume.prompt || '');
     setEditDialogOpen(true);
   };

@@ -144,13 +156,60 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {

   const formatDate = (date: Date | undefined) => {
     if (!date) return 'N/A';
-    return new Intl.DateTimeFormat('en-US', {
-      month: 'short',
-      day: 'numeric',
-      year: 'numeric',
-      hour: '2-digit',
-      minute: '2-digit',
-    }).format(date);
+    try {
+      return new Intl.DateTimeFormat('en-US', {
+        month: 'short',
+        day: 'numeric',
+        year: 'numeric',
+        hour: '2-digit',
+        minute: '2-digit',
+      }).format(date);
+    } catch (error) {
+      console.error('Error formatting date:', error);
+      return 'Invalid date';
+    }
+  };
+
+  const generateResumeHandlers: StreamingOptions<Types.ChatMessageResume> = {
+    onMessage: (message: Types.ChatMessageResume) => {
+      const resume: Resume = message.resume;
+      setEditSystemPrompt(resume.systemPrompt || '');
+      setEditPrompt(resume.prompt || '');
+      setEditContent(resume.resume);
+      setActiveResume({ ...resume });
+      setStatus('');
+      setSnack('Resume generation completed successfully.');
+    },
+    onStreaming: (chunk: Types.ChatMessageStreaming) => {
+      if (status === '') {
+        setStatus('Generating resume...');
+        setStatusType('generating');
+      }
+      setEditContent(chunk.content);
+    },
+    onStatus: (status: Types.ChatMessageStatus) => {
+      console.log('status:', status.content);
+      setStatusType(status.activity);
+      setStatus(status.content);
+    },
+    onError: (error: Types.ChatMessageError) => {
+      console.log('error:', error);
+      setStatusType(null);
+      setStatus(error.content);
+      setError(error);
+    },
+  };
+
+  const generateResume = async (): Promise<void> => {
+    setStatusType('thinking');
+    setStatus('Starting resume generation...');
+    setActiveResume({ ...activeResume, resume: '' }); // Reset resume content
+    const request = await apiClient.generateResume(
+      activeResume.candidateId || '',
+      activeResume.jobId || '',
+      generateResumeHandlers
+    );
+    await request.promise;
   };

   const handleTabChange = (event: React.SyntheticEvent, newValue: string) => {
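The handlers added above follow the client's streaming-callback pattern: partial chunks update the editor, status and error events drive the progress UI, and the final message replaces the active resume. A hedged sketch of that pattern with a simplified stand-in for the real `StreamingOptions` type; the chunk/status shapes and the driver function are illustrative assumptions.

```typescript
// Sketch of the handler-object pattern used by generateResumeHandlers above.
// The StreamingHandlers shape is a simplified stand-in, not the real
// services/api-client StreamingOptions type.
interface StreamingHandlers<TFinal> {
  onStreaming?: (chunk: { content: string }) => void;
  onStatus?: (status: { content: string }) => void;
  onError?: (error: { content: string }) => void;
  onMessage?: (message: TFinal) => void;
}

async function consumeStream<TFinal>(
  chunks: string[],
  finalMessage: TFinal,
  handlers: StreamingHandlers<TFinal>
): Promise<void> {
  handlers.onStatus?.({ content: 'Starting resume generation...' });
  for (const content of chunks) {
    // Each partial chunk updates the UI before the final message arrives.
    handlers.onStreaming?.({ content });
  }
  // The final message carries the complete, structured result.
  handlers.onMessage?.(finalMessage);
}

// consumeStream(['# Resume', '# Resume\n\nSummary...'], { resume: '...' }, { onMessage: m => console.log(m) });
```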
@@ -158,6 +217,12 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
       reactToPrintFn();
       return;
     }
+    if (newValue === 'regenerate') {
+      // Handle resume regeneration logic here
+      setSnack('Regenerating resume...');
+      generateResume();
+      return;
+    }
     setTabValue(newValue);
   };

@@ -281,48 +346,23 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
         />
         <CardContent sx={{ p: 0 }}>
           <Box sx={{ position: 'relative' }}>
-            <Typography
-              ref={contentRef}
-              variant="body2"
-              component="div"
-              sx={{
-                display: '-webkit-box',
-                WebkitLineClamp: isContentExpanded
-                  ? 'unset'
-                  : variant === 'small'
-                    ? 5
-                    : variant === 'minimal'
-                      ? 3
-                      : 10,
-                WebkitBoxOrient: 'vertical',
-                overflow: 'hidden',
-                textOverflow: 'ellipsis',
-                lineHeight: 1.6,
-                fontSize: '0.875rem !important',
-                whiteSpace: 'pre-wrap',
-                fontFamily: 'monospace',
-                backgroundColor: theme.palette.action.hover,
-                p: 2,
-                borderRadius: 1,
-                border: `1px solid ${theme.palette.divider}`,
-              }}
-            >
-              {activeResume.resume}
-            </Typography>
-
-            {shouldShowMoreButton && variant !== 'all' && (
-              <Box sx={{ display: 'flex', justifyContent: 'center', mt: 1 }}>
-                <Button
-                  variant="text"
-                  size="small"
-                  onClick={() => setIsContentExpanded(!isContentExpanded)}
-                  startIcon={isContentExpanded ? <VisibilityOffIcon /> : <VisibilityIcon />}
-                  sx={{ fontSize: '0.75rem' }}
-                >
-                  {isContentExpanded ? 'Show Less' : 'Show More'}
-                </Button>
+            <Scrollable sx={{ maxHeight: '10rem', overflowY: 'auto' }}>
+              <Box
+                sx={{
+                  display: 'flex',
+                  lineHeight: 1.6,
+                  fontSize: '0.875rem !important',
+                  whiteSpace: 'pre-wrap',
+                  fontFamily: 'monospace',
+                  backgroundColor: theme.palette.action.hover,
+                  p: 2,
+                  borderRadius: 1,
+                  border: `1px solid ${theme.palette.divider}`,
+                }}
+              >
+                {activeResume.resume}
               </Box>
-            )}
+            </Scrollable>
           </Box>
         </CardContent>
       </Card>
@@ -451,81 +491,124 @@ const ResumeInfo: React.FC<ResumeInfoProps> = (props: ResumeInfoProps) => {
            height: '100%',
          }}
        >
-         <Tabs value={tabValue} onChange={handleTabChange} centered>
-           <Tab value="markdown" icon={<EditDocumentIcon />} label="Markdown" />
-           <Tab value="preview" icon={<PreviewIcon />} label="Preview" />
-           <Tab value="job" icon={<WorkIcon />} label="Job" />
-           <Tab value="print" icon={<PrintIcon />} label="Print" />
-         </Tabs>
-         <Box
-           ref={printContentRef}
-           sx={{
-             display: 'flex',
-             flexDirection: 'column',
-             height: '100%' /* Restrict to main-container's height */,
-             width: '100%',
-             minHeight: 0 /* Prevent flex overflow */,
-             //maxHeight: "min-content",
-             '& > *:not(.Scrollable)': {
-               flexShrink: 0 /* Prevent shrinking */,
-             },
-             position: 'relative',
-           }}
-         >
-           {tabValue === 'markdown' && (
-             <BackstoryTextField
-               value={editContent}
-               onChange={value => setEditContent(value)}
-               style={{
-                 position: 'relative',
-                 // maxHeight: "100%",
-                 height: '100%',
-                 width: '100%',
+         <Box sx={{ display: 'flex', flexDirection: 'row', height: '100%', gap: 1, pt: 1, width: '100%', position: 'relative', overflow: 'hidden' }}>
+           <Paper sx={{ p: 1, flex: 1, display: 'flex', flexDirection: 'column', position: 'relative', maxWidth: "100%", height: '100%', overflow: 'hidden' }}>
+             <Tabs value={tabValue} onChange={handleTabChange} centered>
+               <Tab value="markdown" icon={<EditDocumentIcon />} label="Markdown" />
+               {activeResume.systemPrompt && <Tab value="systemPrompt" icon={<TuneIcon />} label="System Prompt" />}
+               {activeResume.systemPrompt && <Tab value="prompt" icon={<InputIcon />} label="Prompt" />}
+               <Tab value="preview" icon={<PreviewIcon />} label="Preview" />
+               <Tab value="print" icon={<PrintIcon />} label="Print" />
+               <Tab value="regenerate" icon={<ModelTraining />} label="Regenerate" />
+             </Tabs>
+             {status && (
+               <Box sx={{ mt: 0, mb: 1 }}>
+                 <StatusBox>
+                   {statusType && <StatusIcon type={statusType} />}
+                   <Typography variant="body2" sx={{ ml: 1 }}>
+                     {status || 'Processing...'}
+                   </Typography>
+                 </StatusBox>
+                 {status && !error && <LinearProgress sx={{ mt: 1 }} />}
+               </Box>
+             )}
+             <Scrollable
+               sx={{
                  display: 'flex',
-                 minHeight: '100%',
-                 flexGrow: 1,
-                 flex: 1 /* Take remaining space in some-container */,
-                 overflowY: 'auto' /* Scroll if content overflows */,
+                 flexDirection: 'column',
+                 height: '100%' /* Restrict to main-container's height */,
+                 width: '100%',
+                 minHeight: 0 /* Prevent flex overflow */,
+                 //maxHeight: "min-content",
+                 '& > *:not(.Scrollable)': {
+                   flexShrink: 0 /* Prevent shrinking */,
+                 },
+                 position: 'relative',
                }}
-               placeholder="Enter resume content..."
-             />
-           )}
-           {tabValue === 'preview' && (
-             <Box className="document-container">
-               <Box className="a4-document">
-                 <StyledMarkdown
-                   sx={{
+             >
+               {tabValue === 'markdown' && (
+                 <BackstoryTextField
+                   value={editContent}
+                   onChange={value => setEditContent(value)}
+                   style={{
                      position: 'relative',
-                     maxHeight: '100%',
+                     maxHeight: "100%",
+                     height: '100%',
                      width: '100%',
                      display: 'flex',
+                     minHeight: '100%',

                      flexGrow: 1,
                      flex: 1 /* Take remaining space in some-container */,
-                     // overflowY: 'auto' /* Scroll if content overflows */,
+                     overflowY: 'auto' /* Scroll if content overflows */,
                    }}
-                   content={editContent}
+                   placeholder="Enter resume content..."
                  />
-               </Box>
-               <Box sx={{ p: 2 }}> </Box>
-             </Box>
-           )}
-           {tabValue === 'job' && activeResume.job && (
-             <JobInfo
-               variant="all"
-               job={activeResume.job}
-               sx={{
-                 p: 2,
-                 position: 'relative',
-                 maxHeight: '100%',
-                 width: '100%',
-                 display: 'flex',
-                 flexGrow: 1,
-                 flex: 1 /* Take remaining space in some-container */,
-                 overflowY: 'auto' /* Scroll if content overflows */,
-               }}
-             />
-           )}
+               )}
+               {tabValue === 'systemPrompt' && (
+                 <BackstoryTextField
+                   value={editSystemPrompt}
+                   onChange={value => setEditSystemPrompt(value)}
+                   style={{
+                     position: 'relative',
+                     maxHeight: "100%",
+                     // height: '100%',
+                     width: '100%',
+                     display: 'flex',
+                     minHeight: '100%',
+                     flexGrow: 1,
+                     flex: 1 /* Take remaining space in some-container */,
+                     overflowY: 'auto' /* Scroll if content overflows */,
+                   }}
+                   placeholder="Edit system prompt..."
+                 />
+               )}
+               {tabValue === 'prompt' && (
+                 <BackstoryTextField
+                   value={editPrompt}
+                   onChange={value => setEditPrompt(value)}
+                   style={{
+                     position: 'relative',
+                     maxHeight: "100%",
+                     height: '100%',
+                     width: '100%',
+                     display: 'flex',
+                     minHeight: '100%',
+
+                     flexGrow: 1,
+                     flex: 1 /* Take remaining space in some-container */,
+                     overflowY: 'auto' /* Scroll if content overflows */,
+                   }}
+                   placeholder="Edit prompt..."
+                 />
+               )}
+               {tabValue === 'preview' && (
+                 <Box className="document-container" ref={printContentRef}>
+                   <Box className="a4-document">
+                     <StyledMarkdown
+                       sx={{
+                         position: 'relative',
+                         maxHeight: '100%',
+                         display: 'flex',
+                         flexGrow: 1,
+                         flex: 1 /* Take remaining space in some-container */,
+                         // overflowY: 'auto' /* Scroll if content overflows */,
+                       }}
+                       content={editContent}
+                     />
+                   </Box>
+                   <Box sx={{ p: 2 }}> </Box>
+                 </Box>
+               )}
+             </Scrollable>
+           </Paper>
+           <Scrollable sx={{ flex: 1, display: 'flex', height: '100%', overflowY: 'auto', position: 'relative' }}>
+             <Paper sx={{ p: 1, flex: 1, display: 'flex', flexDirection: 'column', position: 'relative', backgroundColor: "#f8f0e0" }}>
+               {activeResume.job !== undefined && <JobInfo variant={"all"} job={activeResume.job} sx={{
+                 mt: 2, backgroundColor: "#f8f0e0", //theme.palette.background.paper,
+               }} />}
+             </Paper>
+           </Scrollable>
          </Box>
        </DialogContent>
        <DialogActions>
@@ -699,18 +699,11 @@ class ApiClient {
   }

   saveResume(
-    candidate_id: string,
-    job_id: string,
-    resume: string,
+    resume: Types.Resume,
     streamingOptions?: StreamingOptions<Types.ResumeMessage>
   ): StreamingResponse<Types.ResumeMessage> {
-    const body = JSON.stringify(resume);
-    return this.streamify<Types.ResumeMessage>(
-      `/resumes/${candidate_id}/${job_id}`,
-      body,
-      streamingOptions,
-      'Resume'
-    );
+    const body = JSON.stringify(formatApiRequest(resume));
+    return this.streamify<Types.ResumeMessage>(`/resumes`, body, streamingOptions, 'Resume');
   }

   // Additional API methods for Resume management
@@ -810,11 +803,12 @@ class ApiClient {
     return handleApiResponse<{ success: boolean; statistics: any }>(response);
   }

-  async updateResume(resumeId: string, content: string): Promise<Types.Resume> {
-    const response = await fetch(`${this.baseUrl}/resumes/${resumeId}`, {
-      method: 'PUT',
+  async updateResume(resume: Types.Resume): Promise<Types.Resume> {
+    const body = JSON.stringify(formatApiRequest(resume));
+    const response = await fetch(`${this.baseUrl}/resumes`, {
+      method: 'PATCH',
       headers: this.defaultHeaders,
-      body: JSON.stringify(content),
+      body: body,
     });

     return this.handleApiResponseWithConversion<Types.Resume>(response, 'Resume');
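Both client methods now send the whole resume object to a single `/resumes` resource instead of encoding IDs in the URL. A hedged sketch of that call shape with plain `fetch`; the real client's `formatApiRequest`, headers, and response conversion are omitted, and the use of POST for creation is an assumption (the commit only shows the streaming call for creation and PATCH for update).

```typescript
// Minimal sketch of the resource-style calls: create and update both carry
// the full resume payload. Not the real ApiClient implementation.
interface ResumePayload {
  id?: string;
  jobId: string;
  candidateId: string;
  resume: string;
  systemPrompt?: string;
  prompt?: string;
}

async function saveResumeSketch(baseUrl: string, resume: ResumePayload): Promise<ResumePayload> {
  // Assumption: creation is a POST to /resumes.
  const response = await fetch(`${baseUrl}/resumes`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(resume),
  });
  return (await response.json()) as ResumePayload;
}

async function updateResumeSketch(baseUrl: string, resume: ResumePayload): Promise<ResumePayload> {
  // Matches the PATCH /resumes shape shown in the hunk above.
  const response = await fetch(`${baseUrl}/resumes`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(resume),
  });
  return (await response.json()) as ResumePayload;
}
```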
@@ -1524,7 +1518,9 @@ class ApiClient {

         case 'done':
           const message = (
-            modelType ? convertFromApi<T>(incoming, modelType) : incoming
+            modelType
+              ? convertFromApi<T>(parseApiResponse<T>(incoming), modelType)
+              : incoming
           ) as T;
           finalMessage = message;
           try {
@@ -1,6 +1,6 @@
 // Generated TypeScript types from Pydantic models
 // Source: src/backend/models.py
-// Generated on: 2025-06-18T22:54:34.823060
+// Generated on: 2025-06-19T22:17:35.101284
 // DO NOT EDIT MANUALLY - This file is auto-generated

 // ============================
@@ -354,9 +354,7 @@ export interface ChatMessageResume {
   content: string;
   tunables?: Tunables;
   metadata: ChatMessageMetaData;
-  resume: string;
-  systemPrompt?: string;
-  prompt?: string;
+  resume: Resume;
 }

 export interface ChatMessageSkillAssessment {
@@ -976,6 +974,8 @@ export interface Resume {
   jobId: string;
   candidateId: string;
   resume: string;
+  systemPrompt?: string;
+  prompt?: string;
   createdAt?: Date;
   updatedAt?: Date;
   job?: Job;
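The generated `Resume` interface now carries the prompts used to produce it, which is what lets the edit dialog expose System Prompt and Prompt tabs. A hedged sketch of such an object using a local mirror of the fields; the IDs and text are hypothetical, and the `id`/date fields are treated as optional here purely for illustration.

```typescript
// Sketch: a resume record that now carries its generation context.
// Field names mirror the generated interface above; values are made up.
interface ResumeExample {
  jobId: string;
  candidateId: string;
  resume: string;
  systemPrompt?: string;
  prompt?: string;
  createdAt?: Date;
  updatedAt?: Date;
}

const example: ResumeExample = {
  jobId: 'job-123',        // hypothetical IDs
  candidateId: 'cand-456',
  resume: '# Jane Doe\n\nProfessional Summary...',
  systemPrompt: 'You are a professional resume writer...',
  prompt: 'Write a resume for the attached job description.',
};

// Persisting the prompts alongside the markdown lets the UI re-run or edit
// generation with the same context later.
console.log(Object.keys(example));
```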
@@ -1377,6 +1377,7 @@ export function convertChatMessageRagSearchFromApi(data: any): ChatMessageRagSearch {
 /**
  * Convert ChatMessageResume from API response
  * Date fields: timestamp
+ * Nested models: resume (Resume)
 */
 export function convertChatMessageResumeFromApi(data: any): ChatMessageResume {
   if (!data) return data;
@@ -1385,6 +1386,8 @@ export function convertChatMessageResumeFromApi(data: any): ChatMessageResume {
     ...data,
     // Convert timestamp from ISO string to Date
     timestamp: data.timestamp ? new Date(data.timestamp) : undefined,
+    // Convert nested Resume model
+    resume: convertResumeFromApi(data.resume),
   };
 }

 /**
@@ -19,6 +19,7 @@ from models import (
     ChatMessageError,
     ChatMessageResume,
     ChatMessageStatus,
+    Resume,
     SkillAssessment,
     SkillStrength,
 )
@@ -63,16 +64,13 @@ class GenerateResume(Agent):
             if skill and strength in skills_by_strength:
                 skills_by_strength[strength].append(skill)

-            # Collect experience evidence
+            if skill not in experience_evidence:
+                experience_evidence[skill] = []
+            # Collect experience evidence, grouped by skill
             for evidence in assessment.evidence_details:
-                source = evidence.source
-                if source:
-                    if source not in experience_evidence:
-                        experience_evidence[source] = []
-
-                    experience_evidence[source].append(
-                        {"skill": skill, "quote": evidence.quote, "context": evidence.context}
-                    )
+                experience_evidence[skill].append(
+                    {"source": evidence.source, "quote": evidence.quote, "context": evidence.context}
+                )

         # Build the system prompt
         system_prompt = f"""You are a professional resume writer with expertise in highlighting candidate strengths and experiences.
@@ -91,21 +89,21 @@ Phone: {self.user.phone or 'N/A'}
             system_prompt += f"""\

### Strong Skills (prominent in resume):
-{", ".join(skills_by_strength[SkillStrength.STRONG])}
+* {".\n* ".join(skills_by_strength[SkillStrength.STRONG])}
"""

         if len(skills_by_strength[SkillStrength.MODERATE]):
             system_prompt += f"""\

### Moderate Skills (demonstrated in resume):
-{", ".join(skills_by_strength[SkillStrength.MODERATE])}
+* {".\n* ".join(skills_by_strength[SkillStrength.MODERATE])}
"""

         if len(skills_by_strength[SkillStrength.WEAK]):
             system_prompt += f"""\

### Weaker Skills (mentioned or implied):
-{", ".join(skills_by_strength[SkillStrength.WEAK])}
+* {".\n* ".join(skills_by_strength[SkillStrength.WEAK])}
"""

         system_prompt += """\
@@ -114,10 +112,19 @@ Phone: {self.user.phone or 'N/A'}
"""

         # Add experience evidence by source/position
-        for source, evidences in experience_evidence.items():
-            system_prompt += f"\n### {source}:\n"
+        for skill, evidences in experience_evidence.items():
+            system_prompt += f"\n### {skill}:\n"
+            last_source = None
+            index = 0
+            sub_index = 1
             for evidence in evidences:
-                system_prompt += f"- {evidence['skill']}: {evidence['context']}\n"
+                if last_source != evidence['source']:
+                    index += 1
+                    last_source = evidence['source']
+                    system_prompt += f"{index}. Source: {last_source}:\n"
+                    sub_index = 1
+                system_prompt += f" {index}.{sub_index}. Quote: \"{evidence['quote']}\"\n Evidence: {evidence['context']}\n"
+                sub_index += 1

         # Add instructions for the resume creation
         system_prompt += """\
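The reworked loop above is the "reordering context" at the heart of this commit: evidence is now grouped under each skill, with sources numbered and quotes sub-numbered beneath their source. A hedged TypeScript sketch of the same formatting logic (TypeScript is used here only because the commit's generated types are TypeScript; the real implementation is the Python shown above, and all names below are illustrative).

```typescript
// Sketch of the numbering scheme the agent builds: evidence grouped by
// skill, sources numbered, quotes sub-indexed per source.
interface Evidence {
  source: string;
  quote: string;
  context: string;
}

function formatEvidence(skill: string, evidences: Evidence[]): string {
  let out = `\n### ${skill}:\n`;
  let lastSource: string | null = null;
  let index = 0;
  let subIndex = 1;
  for (const evidence of evidences) {
    if (lastSource !== evidence.source) {
      // New source: advance the outer number and reset the sub-index.
      index += 1;
      lastSource = evidence.source;
      out += `${index}. Source: ${lastSource}:\n`;
      subIndex = 1;
    }
    out += ` ${index}.${subIndex}. Quote: "${evidence.quote}"\n Evidence: ${evidence.context}\n`;
    subIndex += 1;
  }
  return out;
}

// console.log(formatEvidence('Python', [{ source: 'Acme Corp', quote: 'Built APIs', context: 'Backend role' }]));
```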
@@ -132,8 +139,8 @@ When sections lack data, output "Information not provided" or use placeholder text.
2. Format the resume in a clean, concise, and modern style that will pass ATS systems.
3. Include these sections:
   - Professional Summary (highlight strongest skills and experience level)
-   - Skills (organized by strength, under a single section). When listing skills, rephrase them so they are not identical to the original assessment.
-   - Professional Experience (focus on achievements and evidence of the skill)
+   - Skills (organized by strength, under a single section). When listing skills, rephrase them so they are not identical to the original assessment. Do not list the strengths explicitly, but rather integrate them into the skills section.
+   - Professional Experience (focus on achievements and evidence of the skill.) For the skills listed, identify content from the EXPERIENCE EVIDENCE and summarize experiences with specific details achievements where possible, ordering by date and job.
4. Optional sections, to include only if evidence is present:
   - Education section
   Certifications section
@@ -169,6 +176,11 @@ Format it in clean, ATS-friendly markdown. Provide ONLY the resume with no commentary.
     async def generate_resume(
         self, llm: Any, model: str, session_id: str, skills: List[SkillAssessment]
     ) -> AsyncGenerator[ApiMessage, None]:
+        if not self.user:
+            error_message = ChatMessageError(session_id=session_id, content="User must be set before generating resume.")
+            logger.error(f"⚠️ {error_message.content}")
+            yield error_message
+            return
         # Stage 1A: Analyze job requirements
         status_message = ChatMessageStatus(
             session_id=session_id, content="Analyzing job requirements", activity=ApiActivityType.THINKING
@@ -208,9 +220,13 @@ Format it in clean, ATS-friendly markdown. Provide ONLY the resume with no commentary.
             status=ApiStatusType.DONE,
             content="Resume generation completed successfully.",
             metadata=generated_message.metadata,
-            resume=generated_message.content,
-            prompt=prompt,
-            system_prompt=system_prompt,
+            resume=Resume(
+                job_id="N/A",
+                candidate_id=self.user.id,
+                resume=generated_message.content,
+                prompt=prompt,
+                system_prompt=system_prompt
+            )
         )
         yield resume_message
         logger.info("✅ Resume generation completed successfully.")
@@ -39,24 +39,25 @@ class JobRequirementsAgent(Agent):
         logger.info(f"{self.agent_type} - {inspect.stack()[0].function}")
         system_prompt = """
You are an objective job requirements analyzer. Your task is to extract and categorize the specific skills,
-experiences, and qualifications required in a job description WITHOUT any reference to any candidate.
+experiences, and qualifications required in a job description.

## INSTRUCTIONS:

-1. Analyze ONLY the job description provided.
+1. Analyze ONLY the <|job_description|> provided, and provide only requirements from that description.
2. Extract company information, job title, and all requirements.
-3. If a requirement is compound (e.g., "5+ years experience with React, Node.js and MongoDB" or "FastAPI/Django/React"), break it down into individual components.
-4. Categorize requirements into:
+3. If a requirement can be broken into multiple requirements, do so.
+4. Categorize each requirement into one and only one of the following categories:
   - Technical skills (required and preferred)
   - Experience requirements (required and preferred)
-   - Education requirements
-   - Soft skills
-   - Industry knowledge
-   - Responsibilities
+   - Soft skills (e.g., "excellent communication skills")
+   - Experience (e.g., "5+ years in software development")
+   - Eduction
+   - Certifications (e.g., "AWS Certified Solutions Architect")
+   - Preferred attributes (e.g., "team player", "self-motivated")
   - Company values
5. Extract and categorize all requirements and preferences.
-6. DO NOT consider any candidate information - this is a pure job analysis task.
-7. Provide the output in a structured JSON format as specified below.
+6. Provide the output in a structured JSON format as specified below.
+7. If there are no requirements in a category, leave it as an empty list.

## OUTPUT FORMAT:

@@ -85,17 +86,10 @@ experiences, and qualifications required in a job description WITHOUT any reference to any candidate.
```

Be specific and detailed in your extraction.
-If a requirement can be broken down into several separate requirements, split them.
-For example, the technical_skill of "Python/Django/FastAPI" should be separated into different requirements: Python, Django, and FastAPI.
-
-For example, if the job description mentions: "Python/Django/FastAPI", you should extract it as:
-
-"technical_skills": { "required": [ "Python", "Django", "FastAPI" ] },
-
Avoid vague categorizations and be precise about whether skills are explicitly required or just preferred.
"""

-        prompt = f"Job Description:\n{job_description}"
+        prompt = f"<|job_description|>\n{job_description}\n</|job_description|>\n"
         return system_prompt, prompt

     async def analyze_job_requirements(
@@ -5,7 +5,7 @@ from redis.asyncio import Redis
 if TYPE_CHECKING:
     pass

-from models import SkillAssessment
+from models import Resume, SkillAssessment


 class DatabaseProtocol(Protocol):
@@ -172,9 +172,6 @@ class DatabaseProtocol(Protocol):
     async def delete_all_candidate_documents(self, candidate_id: str) -> int:
         ...

-    async def delete_all_resumes_for_user(self, user_id: str) -> int:
-        ...
-
     async def delete_authentication(self, user_id: str) -> bool:
         ...

@@ -190,8 +187,6 @@ class DatabaseProtocol(Protocol):
     async def delete_job(self, job_id: str):
         ...

-    async def delete_resume(self, user_id: str, resume_id: str) -> bool:
-        ...

     async def delete_viewer(self, viewer_id: str):
         ...
@@ -229,12 +224,6 @@ class DatabaseProtocol(Protocol):
     async def get_all_jobs(self) -> Dict[str, Any]:
         ...

-    async def get_all_resumes_for_user(self, user_id: str) -> List[Dict]:
-        ...
-
-    async def get_all_resumes(self) -> Dict[str, List[Dict]]:
-        ...
-
     async def get_authentication(self, user_id: str) -> Optional[Dict[str, Any]]:
         ...

@@ -304,13 +293,34 @@ class DatabaseProtocol(Protocol):
     async def get_refresh_token(self, token: str) -> Optional[Dict[str, Any]]:
         ...

-    async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Dict]:
+    async def search_resumes_for_user(self, user_id: str, query: str) -> List[Resume]:
         ...

-    async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Dict]:
+    async def set_resume(self, user_id: str, resume_data: Dict) -> bool:
         ...

-    async def get_resume(self, user_id: str, resume_id: str) -> Optional[Dict]:
+    async def delete_all_resumes_for_user(self, user_id: str) -> int:
+        ...
+
+    async def get_all_resumes_for_user(self, user_id: str) -> List[Dict]:
+        ...
+
+    async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Resume]:
+        ...
+
+    async def delete_resume(self, user_id: str, resume_id: str) -> bool:
+        ...
+
+    async def get_all_resumes(self) -> Dict[str, List[Dict]]:
+        ...
+
+    async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Resume]:
+        ...
+
+    async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Resume]:
+        ...
+
+    async def get_resume(self, user_id: str, resume_id: str) -> Optional[Resume]:
         ...

     async def get_resume_statistics(self, user_id: str) -> Dict[str, Any]:
@@ -364,9 +374,6 @@ class DatabaseProtocol(Protocol):
     async def search_chat_messages(self, session_id: str, query: str) -> List[Dict]:
         ...

-    async def search_resumes_for_user(self, user_id: str, query: str) -> List[Dict]:
-        ...
-
     async def set_ai_parameters(self, param_id: str, param_data: Dict):
         ...

@@ -388,9 +395,6 @@ class DatabaseProtocol(Protocol):
     async def set_job(self, job_id: str, job_data: Dict):
         ...

-    async def set_resume(self, user_id: str, resume_data: Dict) -> bool:
-        ...
-
     async def set_viewer(self, viewer_id: str, viewer_data: Dict):
         ...

@@ -414,5 +418,3 @@ class DatabaseProtocol(Protocol):
     async def update_document(self, document_id: str, updates: Dict) -> Dict[Any, Any] | None:
         ...

-    async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Dict]:
-        ...
@@ -2,6 +2,8 @@ from datetime import UTC, datetime
 import logging
 from typing import Any, Dict, List, Optional

+from models import Resume
+
 from .protocols import DatabaseProtocol
 from ..constants import KEY_PREFIXES

@@ -14,27 +16,23 @@ class ResumeMixin(DatabaseProtocol):
     async def set_resume(self, user_id: str, resume_data: Dict) -> bool:
         """Save a resume for a user"""
         try:
-            # Generate resume_id if not present
-            if "id" not in resume_data:
-                raise ValueError("Resume data must include an 'id' field")
-
-            resume_id = resume_data["id"]
+            resume = Resume.model_validate(resume_data)

             # Store the resume data
-            key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume_id}"
+            key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume.id}"
             await self.redis.set(key, self._serialize(resume_data))

             # Add resume_id to user's resume list
             user_resumes_key = f"{KEY_PREFIXES['user_resumes']}{user_id}"
-            await self.redis.rpush(user_resumes_key, resume_id)  # type: ignore
+            await self.redis.rpush(user_resumes_key, resume.id)  # type: ignore

-            logger.info(f"📄 Saved resume {resume_id} for user {user_id}")
+            logger.info(f"📄 Saved resume {resume.id} for user {user_id}")
             return True
         except Exception as e:
             logger.error(f"❌ Error saving resume for user {user_id}: {e}")
             return False

-    async def get_resume(self, user_id: str, resume_id: str) -> Optional[Dict]:
+    async def get_resume(self, user_id: str, resume_id: str) -> Optional[Resume]:
         """Get a specific resume for a user"""
         try:
             key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume_id}"
@@ -42,7 +40,7 @@ class ResumeMixin(DatabaseProtocol):
             if data:
                 resume_data = self._deserialize(data)
                 logger.info(f"📄 Retrieved resume {resume_id} for user {user_id}")
-                return resume_data
+                return Resume.model_validate(resume_data)
             logger.info(f"📄 Resume {resume_id} not found for user {user_id}")
             return None
         except Exception as e:
@@ -178,7 +176,7 @@ class ResumeMixin(DatabaseProtocol):
             logger.error(f"❌ Error retrieving all resumes: {e}")
             return {}

-    async def search_resumes_for_user(self, user_id: str, query: str) -> List[Dict]:
+    async def search_resumes_for_user(self, user_id: str, query: str) -> List[Resume]:
         """Search resumes for a user by content, job title, or candidate name"""
         try:
             all_resumes = await self.get_all_resumes_for_user(user_id)
@@ -200,16 +198,16 @@ class ResumeMixin(DatabaseProtocol):
                     matching_resumes.append(resume)

             logger.info(f"📄 Found {len(matching_resumes)} matching resumes for user {user_id}")
-            return matching_resumes
+            return [Resume.model_validate(resume) for resume in matching_resumes]
         except Exception as e:
             logger.error(f"❌ Error searching resumes for user {user_id}: {e}")
             return []

-    async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Dict]:
+    async def get_resumes_by_candidate(self, user_id: str, candidate_id: str) -> List[Resume]:
         """Get all resumes for a specific candidate created by a user"""
         try:
             all_resumes = await self.get_all_resumes_for_user(user_id)
-            candidate_resumes = [resume for resume in all_resumes if resume.get("candidate_id") == candidate_id]
+            candidate_resumes = [Resume.model_validate(resume) for resume in all_resumes if resume.get("candidate_id") == candidate_id]

             logger.info(f"📄 Found {len(candidate_resumes)} resumes for candidate {candidate_id} by user {user_id}")
             return candidate_resumes
@@ -217,11 +215,11 @@ class ResumeMixin(DatabaseProtocol):
             logger.error(f"❌ Error retrieving resumes for candidate {candidate_id} by user {user_id}: {e}")
             return []

-    async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Dict]:
+    async def get_resumes_by_job(self, user_id: str, job_id: str) -> List[Resume]:
         """Get all resumes for a specific job created by a user"""
         try:
             all_resumes = await self.get_all_resumes_for_user(user_id)
-            job_resumes = [resume for resume in all_resumes if resume.get("job_id") == job_id]
+            job_resumes = [Resume.model_validate(resume) for resume in all_resumes if resume.get("job_id") == job_id]

             logger.info(f"📄 Found {len(job_resumes)} resumes for job {job_id} by user {user_id}")
             return job_resumes
@@ -274,19 +272,20 @@ class ResumeMixin(DatabaseProtocol):
                 "recent_resumes": [],
             }

-    async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Dict]:
+    async def update_resume(self, user_id: str, resume_id: str, updates: Dict) -> Optional[Resume]:
         """Update specific fields of a resume"""
         try:
             resume_data = await self.get_resume(user_id, resume_id)
             if resume_data:
-                resume_data.update(updates)
-                resume_data["updated_at"] = datetime.now(UTC).isoformat()
+                resume_dict = resume_data.model_dump()
+                resume_dict.update(updates)
+                resume_dict["updated_at"] = datetime.now(UTC).isoformat()

                 key = f"{KEY_PREFIXES['resumes']}{user_id}:{resume_id}"
-                await self.redis.set(key, self._serialize(resume_data))
+                await self.redis.set(key, self._serialize(resume_dict))

                 logger.info(f"📄 Updated resume {resume_id} for user {user_id}")
-                return resume_data
+                return Resume.model_validate(resume_dict)
             return None
         except Exception as e:
             logger.error(f"❌ Error updating resume {resume_id} for user {user_id}: {e}")
@ -1090,26 +1090,24 @@ class ChatMessageSkillAssessment(ChatMessageUser):
     skill_assessment: SkillAssessment = Field(..., alias=str("skillAssessment"))


-class ChatMessageResume(ChatMessageUser):
-    role: ChatSenderType = ChatSenderType.ASSISTANT
-    metadata: ChatMessageMetaData = Field(default=ChatMessageMetaData())
-    resume: str = Field(..., alias=str("resume"))
-    system_prompt: Optional[str] = Field(default=None, alias=str("systemPrompt"))
-    prompt: Optional[str] = Field(default=None, alias=str("prompt"))
-    model_config = ConfigDict(populate_by_name=True)
-
-
 class Resume(BaseModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()))
     job_id: str = Field(..., alias=str("jobId"))
     candidate_id: str = Field(..., alias=str("candidateId"))
-    resume: str = Field(..., alias=str("resume"))
+    resume: str
+    system_prompt: Optional[str] = Field(default=None)
+    prompt: Optional[str] = Field(default=None)
     created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias=str("createdAt"))
     updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias=str("updatedAt"))
     job: Optional[Job] = None
     candidate: Optional[Candidate] = None
     model_config = ConfigDict(populate_by_name=True)


+class ChatMessageResume(ChatMessageUser):
+    role: ChatSenderType = ChatSenderType.ASSISTANT
+    metadata: ChatMessageMetaData = Field(default=ChatMessageMetaData())
+    resume: Resume
+    model_config = ConfigDict(populate_by_name=True)
+
+
 class ResumeMessage(ChatMessageUser):
     role: ChatSenderType = ChatSenderType.ASSISTANT
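
With this reordering, ChatMessageResume wraps a full Resume object instead of duplicating its fields. A construction sketch only, importing both models from models; the ChatMessageUser keyword arguments (session_id, content) are an assumption based on the ChatMessageError usage later in this diff, and the values are placeholders:

    resume = Resume(job_id="<job-id>", candidate_id="<candidate-id>", resume="# Jane Doe\n...")
    message = ChatMessageResume(session_id="<session-id>", content="", resume=resume)
    print(message.resume.job_id)
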
@ -1850,6 +1850,7 @@ async def generate_resume(
             return

         resume: ChatMessageResume = final_message
+        resume.resume.job_id = job.id
         yield resume
         return
@ -17,7 +17,7 @@ from markitdown import MarkItDown, StreamInfo
 import backstory_traceback as backstory_traceback
 import defines
 from agents.base import CandidateEntity
-from utils.helpers import create_job_from_content, filter_and_paginate, get_document_type_from_filename
+from utils.helpers import filter_and_paginate, get_document_type_from_filename
 from database.manager import RedisDatabase
 from logger import logger
 from models import (
@ -67,7 +67,8 @@ async def reformat_as_markdown(database: RedisDatabase, candidate_entity: Candid
         prompt=content,
         system_prompt="""
 You are a document editor. Take the provided job description and reformat as legible markdown.
-Return only the markdown content, no other text. Make sure all content is included.
+Return only the markdown content, no other text. Make sure all content is included. If the
+content is already in markdown format, return it as is.
 """,
     ):
         pass
@ -2,8 +2,8 @@
 Resume Routes
 """
 import json
-from datetime import datetime, UTC
 from typing import List
+import uuid

 from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query
 from fastapi.responses import StreamingResponse
@ -18,37 +18,39 @@ from utils.responses import create_success_response
 # Create router for authentication endpoints
 router = APIRouter(prefix="/resumes", tags=["resumes"])


-@router.post("/{candidate_id}/{job_id}")
+@router.post("")
 async def create_candidate_resume(
-    candidate_id: str = Path(..., description="ID of the candidate"),
-    job_id: str = Path(..., description="ID of the job"),
-    resume_content: str = Body(...),
+    resume: Resume = Body(...),
     current_user=Depends(get_current_user),
     database: RedisDatabase = Depends(get_database),
 ):
     """Create a new resume for a candidate/job combination"""

-    async def message_stream_generator():
-        logger.info(f"🔍 Looking up candidate and job details for {candidate_id}/{job_id}")
+    # Ignore the resume ID if provided, generate a new one
+    resume.id = str(uuid.uuid4())
+
+    logger.info(f"📝 Creating resume for candidate {resume.candidate_id} for job {resume.job_id}")

-        candidate_data = await database.get_candidate(candidate_id)
+    async def message_stream_generator():
+        logger.info(f"🔍 Looking up candidate and job details for {resume.candidate_id}/{resume.job_id}")
+
+        candidate_data = await database.get_candidate(resume.candidate_id)
         if not candidate_data:
-            logger.error(f"❌ Candidate with ID '{candidate_id}' not found")
+            logger.error(f"❌ Candidate with ID '{resume.candidate_id}' not found")
             error_message = ChatMessageError(
                 session_id=MOCK_UUID,  # No session ID for document uploads
-                content=f"Candidate with ID '{candidate_id}' not found",
+                content=f"Candidate with ID '{resume.candidate_id}' not found",
             )
             yield error_message
             return
         candidate = Candidate.model_validate(candidate_data)

-        job_data = await database.get_job(job_id)
+        job_data = await database.get_job(resume.job_id)
         if not job_data:
-            logger.error(f"❌ Job with ID '{job_id}' not found")
+            logger.error(f"❌ Job with ID '{resume.job_id}' not found")
             error_message = ChatMessageError(
                 session_id=MOCK_UUID,  # No session ID for document uploads
-                content=f"Job with ID '{job_id}' not found",
+                content=f"Job with ID '{resume.job_id}' not found",
             )
             yield error_message
             return
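
The create endpoint now takes the Resume payload in the request body rather than path parameters. A hypothetical client call against the new route, using httpx purely for illustration (host, port, and auth header are assumptions; field names follow the aliases on the Resume model):

    import httpx

    payload = {"jobId": "<job-id>", "candidateId": "<candidate-id>", "resume": "# Jane Doe\n..."}
    response = httpx.post(
        "http://localhost:8000/resumes",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
    )
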
@ -58,12 +60,6 @@ async def create_candidate_resume(
             f"📄 Saving resume for candidate {candidate.first_name} {candidate.last_name} for job '{job.title}'"
         )

-        # Job and Candidate are valid. Save the resume
-        resume = Resume(
-            job_id=job_id,
-            candidate_id=candidate_id,
-            resume=resume_content,
-        )
         resume_message: ResumeMessage = ResumeMessage(
             session_id=MOCK_UUID,  # No session ID for document uploads
             resume=resume,
@ -240,29 +236,27 @@ async def get_resume_statistics(
         logger.error(f"❌ Error retrieving resume statistics for user {current_user.id}: {e}")
         raise HTTPException(status_code=500, detail="Failed to retrieve resume statistics")


-@router.put("/{resume_id}")
+@router.patch("")
 async def update_resume(
-    resume_id: str = Path(..., description="ID of the resume"),
-    resume: str = Body(..., description="Updated resume content"),
+    resume: Resume = Body(...),
     current_user=Depends(get_current_user),
     database: RedisDatabase = Depends(get_database),
 ):
     """Update the content of a specific resume"""
     try:
-        updates = {"resume": resume, "updated_at": datetime.now(UTC).isoformat()}
-
-        updated_resume_data = await database.update_resume(current_user.id, resume_id, updates)
+        updates = resume.model_dump()
+        updated_resume_data = await database.update_resume(current_user.id, resume.id, updates)
         if not updated_resume_data:
-            logger.warning(f"⚠️ Resume {resume_id} not found for user {current_user.id}")
+            logger.warning(f"⚠️ Resume {resume.id} not found for user {current_user.id}")
             raise HTTPException(status_code=404, detail="Resume not found")
         updated_resume = Resume.model_validate(updated_resume_data) if updated_resume_data else None
+        if not updated_resume:
+            logger.warning(f"⚠️ Resume {resume.id} could not be updated for user {current_user.id}")
+            raise HTTPException(status_code=400, detail="Failed to update resume")
+        return create_success_response(updated_resume.model_dump(by_alias=True))

-        return create_success_response(
-            {"success": True, "message": f"Resume {resume_id} updated successfully", "resume": updated_resume}
-        )
     except HTTPException:
         raise
     except Exception as e:
-        logger.error(f"❌ Error updating resume {resume_id} for user {current_user.id}: {e}")
+        logger.error(f"❌ Error updating resume {resume.id} for user {current_user.id}: {e}")
         raise HTTPException(status_code=500, detail="Failed to update resume")
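
The update endpoint likewise now takes the whole Resume in the body (PATCH on the collection root) and identifies the record by resume.id. A hypothetical client call, again with httpx for illustration (host and auth are assumptions; aliased field names follow the Resume model):

    import httpx

    payload = {
        "id": "<resume-id>",
        "jobId": "<job-id>",
        "candidateId": "<candidate-id>",
        "resume": "# Jane Doe\nRevised summary...",
    }
    response = httpx.patch(
        "http://localhost:8000/resumes",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
    )
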
@ -13,10 +13,8 @@ from fastapi.responses import StreamingResponse
 import defines
 from logger import logger
 from models import DocumentType
-from models import Job, ChatMessage, DocumentType, ApiStatusType
+from models import Job, ChatMessage, ApiStatusType

-from typing import List, Dict
-from models import Job
 import utils.llm_proxy as llm_manager

@ -230,7 +230,7 @@ class OllamaAdapter(BaseLLMAdapter):
         else:
             response = await self.client.chat(model=model, messages=ollama_messages, stream=False, **kwargs)

-        usage_stats = self._create_usage_stats(response)
+        usage_stats = self._create_usage_stats(response.model_dump())

         return ChatResponse(
             content=response["message"]["content"],
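
Context for these model_dump() calls: recent versions of the ollama Python client return pydantic response objects (which still support item access, as the response["message"]["content"] lookups show), so they are normalized to plain dicts before the dict-based usage helper runs. A hypothetical standalone illustration, assuming a local Ollama server and the ollama package; the model name is an assumption:

    import asyncio
    from ollama import AsyncClient

    async def main() -> None:
        response = await AsyncClient().chat(
            model="llama3.2",
            messages=[{"role": "user", "content": "Say hi"}],
            stream=False,
        )
        data = response.model_dump()  # pydantic response -> plain dict
        print(data.get("prompt_eval_count"), data.get("eval_count"))

    asyncio.run(main())
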
@ -267,7 +267,7 @@ class OllamaAdapter(BaseLLMAdapter):
         else:
             response = await self.client.generate(model=model, prompt=prompt, stream=False, **kwargs)

-        usage_stats = self._create_usage_stats(response)
+        usage_stats = self._create_usage_stats(response.model_dump())

         return ChatResponse(
             content=response["response"], model=model, finish_reason=response.get("done_reason"), usage=usage_stats
@ -312,7 +312,7 @@ class OllamaAdapter(BaseLLMAdapter):
         # Create usage stats if available from the last response
         usage_stats = None
         if final_response and len(results) == 1:
-            usage_stats = self._create_usage_stats(final_response)
+            usage_stats = self._create_usage_stats(final_response.model_dump())

         return EmbeddingResponse(data=results, model=model, usage=usage_stats)

@ -326,7 +326,7 @@ class OpenAIAdapter(BaseLLMAdapter):

     def __init__(self, **config):
         super().__init__(**config)
-        import openai
+        import openai  # type: ignore

         self.client = openai.AsyncOpenAI(api_key=config.get("api_key", os.getenv("OPENAI_API_KEY")))

@ -425,7 +425,7 @@ class AnthropicAdapter(BaseLLMAdapter):

     def __init__(self, **config):
         super().__init__(**config)
-        import anthropic
+        import anthropic  # type: ignore

         self.client = anthropic.AsyncAnthropic(api_key=config.get("api_key", os.getenv("ANTHROPIC_API_KEY")))

@ -524,7 +524,7 @@ class GeminiAdapter(BaseLLMAdapter):

     def __init__(self, **config):
         super().__init__(**config)
-        import google.generativeai as genai
+        import google.generativeai as genai  # type: ignore

         genai.configure(api_key=config.get("api_key", os.getenv("GEMINI_API_KEY")))
         self.genai = genai
@ -720,7 +720,8 @@ class UnifiedLLMProxy:
         if stream is False:
             raise ValueError("stream must be True for chat_stream")
         result = await self.chat(model, messages, provider, stream=True, **kwargs)
-        # Type checker now knows this is an AsyncGenerator due to stream=True
+        if isinstance(result, ChatResponse):
+            raise RuntimeError("Expected AsyncGenerator, got ChatResponse")
         async for chunk in result:
             yield chunk

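
These isinstance guards replace the earlier comments: the runtime check fails fast on an unexpected return type and also narrows the Union for static type checkers. A generic, self-contained sketch of the pattern (names here are illustrative, not from this codebase):

    import asyncio
    from typing import AsyncGenerator, Union

    async def _chunks() -> AsyncGenerator[str, None]:
        for piece in ("Hel", "lo"):
            yield piece

    async def chat(stream: bool) -> Union[str, AsyncGenerator[str, None]]:
        # Full string when stream=False, async generator when stream=True.
        return _chunks() if stream else "Hello"

    async def chat_stream() -> AsyncGenerator[str, None]:
        result = await chat(stream=True)
        if isinstance(result, str):  # guard doubles as a type narrower
            raise RuntimeError("Expected AsyncGenerator, got str")
        async for chunk in result:
            yield chunk

    async def main() -> None:
        async for chunk in chat_stream():
            print(chunk, end="")

    asyncio.run(main())
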
@ -734,7 +735,8 @@ class UnifiedLLMProxy:
         """Get single chat response using specified or default provider"""

         result = await self.chat(model, messages, provider, stream=False, **kwargs)
-        # Type checker now knows this is a ChatResponse due to stream=False
+        if not isinstance(result, ChatResponse):
+            raise RuntimeError("Expected ChatResponse, got AsyncGenerator")
         return result

     async def generate(
@ -753,6 +755,8 @@ class UnifiedLLMProxy:
         """Stream text generation using specified or default provider"""

         result = await self.generate(model, prompt, provider, stream=True, **kwargs)
+        if isinstance(result, ChatResponse):
+            raise RuntimeError("Expected AsyncGenerator, got ChatResponse")
         async for chunk in result:
             yield chunk

@ -762,6 +766,8 @@ class UnifiedLLMProxy:
         """Get single generation response using specified or default provider"""

         result = await self.generate(model, prompt, provider, stream=False, **kwargs)
+        if not isinstance(result, ChatResponse):
+            raise RuntimeError("Expected ChatResponse, got AsyncGenerator")
         return result

     async def embeddings(