Restructuring top level UI

parent 4f4187eba4
commit 3a21f2e510
@@ -112,15 +112,22 @@ const DocumentManager = (props: BackstoryElementProps) => {
    try {
      // Upload file (replace with actual API call)
      const controller = apiClient.uploadCandidateDocument(file, { includeInRAG: true, isJobDocument: false });
      const controller = apiClient.uploadCandidateDocument(file, { includeInRag: true, isJobDocument: false }, {
        onError: (error) => {
          console.error(error);
          setSnack(error.content, 'error');
        }
      });
      const result = await controller.promise;

      setDocuments(prev => [...prev, result.document]);
      setSnack(`Document uploaded: ${file.name}`, 'success');
      if (result && result.document) {
        setDocuments(prev => [...prev, result.document]);
        setSnack(`Document uploaded: ${file.name}`, 'success');
      }

      // Reset file input
      e.target.value = '';
    } catch (error) {
      console.error(error);
      setSnack('Failed to upload document', 'error');
    }
  }
@@ -147,20 +154,20 @@ const DocumentManager = (props: BackstoryElementProps) => {
  };

  // Handle RAG flag toggle
  const handleRAGToggle = async (document: Types.Document, includeInRAG: boolean) => {
  const handleRAGToggle = async (document: Types.Document, includeInRag: boolean) => {
    try {
      document.options = { includeInRAG };
      document.options = { includeInRag };
      // Call API to update RAG flag
      await apiClient.updateCandidateDocument(document);

      setDocuments(prev =>
        prev.map(doc =>
          doc.id === document.id
            ? { ...doc, includeInRAG }
            ? { ...doc, includeInRag }
            : doc
        )
      );
      setSnack(`Document ${includeInRAG ? 'included in' : 'excluded from'} RAG`, 'success');
      setSnack(`Document ${includeInRag ? 'included in' : 'excluded from'} RAG`, 'success');
    } catch (error) {
      setSnack('Failed to update RAG setting', 'error');
    }
@@ -292,7 +299,7 @@ const DocumentManager = (props: BackstoryElementProps) => {
        size="small"
        color={getFileTypeColor(doc.type)}
      />
      {doc.options?.includeInRAG && (
      {doc.options?.includeInRag && (
        <Chip
          label="RAG"
          size="small"
@@ -311,7 +318,7 @@ const DocumentManager = (props: BackstoryElementProps) => {
      <FormControlLabel
        control={
          <Switch
            checked={doc.options?.includeInRAG}
            checked={doc.options?.includeInRag}
            onChange={(e) => handleRAGToggle(doc, e.target.checked)}
            size="small"
          />
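For orientation, here is a minimal sketch of the upload flow this hunk switches to: uploadCandidateDocument now takes an options object plus streaming callbacks and returns a controller whose promise resolves with the uploaded document. It only composes identifiers shown in the diff above; `apiClient`, `setSnack`, and `setDocuments` are assumed to come from DocumentManager's component scope.

// Sketch only, assuming DocumentManager's scope (apiClient, setSnack, setDocuments).
const handleUpload = async (file: File) => {
  const controller = apiClient.uploadCandidateDocument(
    file,
    { includeInRag: true, isJobDocument: false },
    { onError: (error) => setSnack(error.content, 'error') }
  );
  const result = await controller.promise;
  if (result?.document) {
    setDocuments(prev => [...prev, result.document]);
    setSnack(`Document uploaded: ${file.name}`, 'success');
  }
};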
@@ -44,6 +44,7 @@ import * as Types from 'types/types';
import { StyledMarkdown } from './StyledMarkdown';
import { JobInfo } from './ui/JobInfo';
import { Scrollable } from './Scrollable';
import { StatusIcon, StatusBox } from 'components/ui/StatusIcon';

const VisuallyHiddenInput = styled('input')({
  clip: 'rect(0 0 0 0)',
@@ -71,42 +72,6 @@ const UploadBox = styled(Box)(({ theme }) => ({
  },
}));

const StatusBox = styled(Box)(({ theme }) => ({
  display: 'flex',
  alignItems: 'center',
  gap: theme.spacing(1),
  padding: theme.spacing(1, 2),
  backgroundColor: theme.palette.background.paper,
  borderRadius: theme.shape.borderRadius,
  border: `1px solid ${theme.palette.divider}`,
  minHeight: 48,
}));

const getIcon = (type: Types.ApiActivityType) => {
  switch (type) {
    case 'converting':
      return <SyncAlt color="primary" />;
    case 'heartbeat':
      return <Favorite color="error" />;
    case 'system':
      return <Settings color="action" />;
    case 'info':
      return <Info color="info" />;
    case 'searching':
      return <Search color="primary" />;
    case 'generating':
      return <AutoFixHigh color="secondary" />;
    case 'generating_image':
      return <Image color="primary" />;
    case 'thinking':
      return <Psychology color="secondary" />;
    case 'tooling':
      return <Build color="action" />;
    default:
      return <Info color="action" />;
  }
};

interface JobCreatorProps extends BackstoryElementProps {
  onSave?: (job: Types.Job) => void;
}
@@ -125,7 +90,7 @@ const JobCreator = (props: JobCreatorProps) => {
  const [summary, setSummary] = useState<string>('');
  const [job, setJob] = useState<Types.Job | null>(null);
  const [jobStatus, setJobStatus] = useState<string>('');
  const [jobStatusIcon, setJobStatusIcon] = useState<JSX.Element>(<></>);
  const [jobStatusType, setJobStatusType] = useState<Types.ApiActivityType | null>(null);
  const [isProcessing, setIsProcessing] = useState<boolean>(false);

  const fileInputRef = useRef<HTMLInputElement>(null);
@@ -140,7 +105,7 @@ const JobCreator = (props: JobCreatorProps) => {
  const jobStatusHandlers = {
    onStatus: (status: Types.ChatMessageStatus) => {
      console.log('status:', status.content);
      setJobStatusIcon(getIcon(status.activity));
      setJobStatusType(status.activity);
      setJobStatus(status.content);
    },
    onMessage: (jobMessage: Types.JobRequirementsMessage) => {
@@ -152,7 +117,7 @@ const JobCreator = (props: JobCreatorProps) => {
      setSummary(job.summary || '');
      setJobTitle(job.title || '');
      setJobRequirements(job.requirements || null);
      setJobStatusIcon(<></>);
      setJobStatusType(null);
      setJobStatus('');
    },
    onError: (error: Types.ChatMessageError) => {
@@ -161,7 +126,7 @@ const JobCreator = (props: JobCreatorProps) => {
      setIsProcessing(false);
    },
    onComplete: () => {
      setJobStatusIcon(<></>);
      setJobStatusType(null);
      setJobStatus('');
      setIsProcessing(false);
    }
@@ -421,7 +386,7 @@ const JobCreator = (props: JobCreatorProps) => {
      {(jobStatus || isProcessing) && (
        <Box sx={{ mt: 3 }}>
          <StatusBox>
            {jobStatusIcon}
            {jobStatusType && <StatusIcon type={jobStatusType} />}
            <Typography variant="body2" sx={{ ml: 1 }}>
              {jobStatus || 'Processing...'}
            </Typography>
@@ -30,6 +30,8 @@ import { StyledMarkdown } from './StyledMarkdown';
import { Scrollable } from './Scrollable';
import { useAppState } from 'hooks/GlobalContext';
import * as Types from 'types/types';
import JsonView from '@uiw/react-json-view';
import { VectorVisualizer } from './VectorVisualizer';

interface JobAnalysisProps extends BackstoryPageProps {
  job: Job;
@@ -106,6 +108,7 @@ const JobMatchAnalysis: React.FC<JobAnalysisProps> = (props: JobAnalysisProps) =

  const initialSkillMatches: SkillMatch[] = requirements.map(req => ({
    skill: req.requirement,
    skillModified: req.requirement,
    candidateId: candidate.id || "",
    domain: req.domain,
    status: 'waiting' as const,
@@ -487,6 +490,13 @@ const JobMatchAnalysis: React.FC<JobAnalysisProps> = (props: JobAnalysisProps) =
      <Typography paragraph>
        {match.description}
      </Typography>
      {/* { match.ragResults && match.ragResults.length !== 0 && <>
        <Typography variant="h6" gutterBottom>
          RAG Information
        </Typography>
        <VectorVisualizer inline rag={match.ragResults[0]} />
      </>
      } */}

    </Box>
  )}
@@ -27,7 +27,7 @@ import { useNavigate } from 'react-router-dom';

interface VectorVisualizerProps extends BackstoryPageProps {
  inline?: boolean;
  rag?: any;
  rag?: Types.ChromaDBGetResponse;
};

interface Metadata {
@@ -37,11 +37,16 @@ interface Metadata {
  distance?: number;
}

const emptyQuerySet = {
const emptyQuerySet: Types.ChromaDBGetResponse = {
  ids: [],
  documents: [],
  metadatas: [],
  embeddings: [],
  distances: [],
  name: "Empty",
  size: 0,
  dimensions: 2,
  query: ""
};

interface PlotData {
@@ -19,9 +19,10 @@ const BackstoryLogo = () => {
        letterSpacing: '.2rem',
        color: theme.palette.primary.contrastText,
        textDecoration: 'none',
        display: "flex",
        display: "inline-flex",
        flexDirection: "row",
        alignItems: "center",
        verticalAlign: "center",
        gap: 1,
        textTransform: "uppercase",
      }}
@@ -1,5 +1,5 @@
import React, { JSX } from 'react';
import { Box, Link, Typography, Avatar, Grid, SxProps, CardActions, Chip, Stack, CardHeader } from '@mui/material';
import React, { JSX, useActionState, useState } from 'react';
import { Box, Link, Typography, Avatar, Grid, SxProps, CardActions, Chip, Stack, CardHeader, Button, styled, LinearProgress, IconButton, Tooltip } from '@mui/material';
import {
  Card,
  CardContent,
@@ -8,23 +8,31 @@ import {
} from '@mui/material';
import DeleteIcon from '@mui/icons-material/Delete';
import { useMediaQuery } from '@mui/material';
import { Job, JobFull } from 'types/types';
import { Job } from 'types/types';
import { CopyBubble } from "components/CopyBubble";
import { rest } from 'lodash';
import { AIBanner } from 'components/ui/AIBanner';
import { useAuth } from 'hooks/AuthContext';
import { DeleteConfirmation } from '../DeleteConfirmation';
import { Build, CheckCircle, Description, Psychology, Star, Work } from '@mui/icons-material';
import ModelTrainingIcon from '@mui/icons-material/ModelTraining';
import { StatusIcon, StatusBox } from 'components/ui/StatusIcon';
import RestoreIcon from '@mui/icons-material/Restore';
import SaveIcon from '@mui/icons-material/Save';
import * as Types from "types/types";
import { useAppState } from 'hooks/GlobalContext';

interface JobInfoProps {
  job: Job | JobFull;
  job: Job;
  sx?: SxProps;
  action?: string;
  elevation?: number;
  variant?: "small" | "normal" | null
};


const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
  const { setSnack } = useAppState();
  const { job } = props;
  const { user, apiClient } = useAuth();
  const {
@@ -36,6 +44,9 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
  const theme = useTheme();
  const isMobile = useMediaQuery(theme.breakpoints.down('md'));
  const isAdmin = user?.isAdmin;
  const [adminStatus, setAdminStatus] = useState<string | null>(null);
  const [adminStatusType, setAdminStatusType] = useState<Types.ApiActivityType | null>(null);
  const [activeJob, setActiveJob] = useState<Types.Job>({ ...job }); /* Copy of job */

  const deleteJob = async (jobId: string | undefined) => {
    if (jobId) {
@@ -43,10 +54,52 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
    }
  }

  const handleReset = async () => {
    setActiveJob({ ...job });
  }

  if (!job) {
    return <Box>No user loaded.</Box>;
  }

  const handleSave = async () => {
    const newJob = await apiClient.updateJob(job.id || '', {
      description: activeJob.description,
      requirements: activeJob.requirements,
    });
    job.updatedAt = newJob.updatedAt;
    setActiveJob(newJob)
    setSnack('Job updated.');
  }

  const handleRefresh = () => {
    setAdminStatus("Re-extracting Job information...");
    const jobStatusHandlers = {
      onStatus: (status: Types.ChatMessageStatus) => {
        console.log('status:', status.content);
        setAdminStatusType(status.activity);
        setAdminStatus(status.content);
      },
      onMessage: (jobMessage: Types.JobRequirementsMessage) => {
        const newJob: Types.Job = jobMessage.job
        console.log('onMessage - job', newJob);
        newJob.id = job.id;
        newJob.createdAt = job.createdAt;
        setActiveJob(newJob);
      },
      onError: (error: Types.ChatMessageError) => {
        console.log('onError', error);
        setAdminStatusType(null);
        setAdminStatus(null);
      },
      onComplete: () => {
        setAdminStatusType(null);
        setAdminStatus(null);
      }
    };
    apiClient.createJobFromDescription(activeJob.description, jobStatusHandlers);
  };

  const renderRequirementSection = (title: string, items: string[] | undefined, icon: JSX.Element, required = false) => {
    if (!items || items.length === 0) return null;

@@ -75,10 +128,10 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
  };

  const renderJobRequirements = () => {
    if (!job.requirements) return null;
    if (!activeJob.requirements) return null;

    return (
      <Card elevation={0} sx={{ m: 0, p: 0, mt: 2, }}>
      <Card elevation={0} sx={{ m: 0, p: 0, mt: 2, background: "transparent !important" }}>
        <CardHeader
          title="Job Requirements Analysis"
          avatar={<CheckCircle color="success" />}
@@ -87,49 +140,49 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
        <CardContent sx={{ p: 0 }}>
          {renderRequirementSection(
            "Technical Skills (Required)",
            job.requirements.technicalSkills.required,
            activeJob.requirements.technicalSkills.required,
            <Build color="primary" />,
            true
          )}
          {renderRequirementSection(
            "Technical Skills (Preferred)",
            job.requirements.technicalSkills.preferred,
            activeJob.requirements.technicalSkills.preferred,
            <Build color="action" />
          )}
          {renderRequirementSection(
            "Experience Requirements (Required)",
            job.requirements.experienceRequirements.required,
            activeJob.requirements.experienceRequirements.required,
            <Work color="primary" />,
            true
          )}
          {renderRequirementSection(
            "Experience Requirements (Preferred)",
            job.requirements.experienceRequirements.preferred,
            activeJob.requirements.experienceRequirements.preferred,
            <Work color="action" />
          )}
          {renderRequirementSection(
            "Soft Skills",
            job.requirements.softSkills,
            activeJob.requirements.softSkills,
            <Psychology color="secondary" />
          )}
          {renderRequirementSection(
            "Experience",
            job.requirements.experience,
            activeJob.requirements.experience,
            <Star color="warning" />
          )}
          {renderRequirementSection(
            "Education",
            job.requirements.education,
            activeJob.requirements.education,
            <Description color="info" />
          )}
          {renderRequirementSection(
            "Certifications",
            job.requirements.certifications,
            activeJob.requirements.certifications,
            <CheckCircle color="success" />
          )}
          {renderRequirementSection(
            "Preferred Attributes",
            job.requirements.preferredAttributes,
            activeJob.requirements.preferredAttributes,
            <Star color="secondary" />
          )}
        </CardContent>
@@ -153,51 +206,92 @@ const JobInfo: React.FC<JobInfoProps> = (props: JobInfoProps) => {
    >
      <CardContent sx={{ display: "flex", flexGrow: 1, p: 3, height: '100%', flexDirection: 'column', alignItems: 'stretch', position: "relative" }}>
        {variant !== "small" && <>
          {'location' in job &&
          {activeJob.details &&
            <Typography variant="body2" sx={{ mb: 1 }}>
              <strong>Location:</strong> {job.location.city}, {job.location.state || job.location.country}
              <strong>Location:</strong> {activeJob.details.location.city}, {activeJob.details.location.state || activeJob.details.location.country}
            </Typography>
          }
          {job.title &&
          {activeJob.title &&
            <Typography variant="body2" sx={{ mb: 1 }}>
              <strong>Title:</strong> {job.title}
              <strong>Title:</strong> {activeJob.title}
            </Typography>
          }
          {job.company &&
          {activeJob.company &&
            <Typography variant="body2" sx={{ mb: 1 }}>
              <strong>Company:</strong> {job.company}
              <strong>Company:</strong> {activeJob.company}
            </Typography>
          }
          {job.summary && <Typography variant="body2">
            <strong>Summary:</strong> {job.summary}
          {activeJob.summary && <Typography variant="body2">
            <strong>Summary:</strong> {activeJob.summary}
          </Typography>
          }
          {job.createdAt && <Typography variant="body2">
            <strong>Created:</strong> {job.createdAt.toISOString()}
          </Typography>
          {activeJob.owner && <Typography variant="body2">
            <strong>Created by:</strong> {activeJob.owner.fullName}
          </Typography>}
          {activeJob.createdAt &&
            <Typography variant="caption">Created: {activeJob.createdAt.toISOString()}</Typography>
          }
          { job.owner && <Typography variant="body2">
            <strong>Created by:</strong> {job.owner.fullName}
          </Typography>
          {activeJob.updatedAt &&
            <Typography variant="caption">Updated: {activeJob.updatedAt.toISOString()}</Typography>
          }
          <Typography variant="caption">Job ID: {job.id}</Typography>
        </>}

        <Divider/>
        <Divider />
        {renderJobRequirements()}

      </CardContent>
      <CardActions>
        {isAdmin &&
          <DeleteConfirmation
            onDelete={() => { deleteJob(job.id); }}
            sx={{ minWidth: 'auto', px: 2, maxHeight: "min-content", color: "red" }}
            action="delete"
            label="job"
            title="Delete job"
            icon=<DeleteIcon />
            message={`Are you sure you want to delete ${job.id}? This action cannot be undone.`}
          />}
        {isAdmin &&
          <CardActions sx={{ display: "flex", flexDirection: "column", p: 1 }}>
            <Box sx={{ display: "flex", flexDirection: "row", pl: 1, pr: 1, gap: 1, alignContent: "center", height: "32px" }}>
              {(job.updatedAt && job.updatedAt.toISOString()) !== (activeJob.updatedAt && activeJob.updatedAt.toISOString()) &&
                <Tooltip title="Save Job">
                  <IconButton
                    size="small"
                    onClick={(e) => { e.stopPropagation(); handleSave(); }}
                  >
                    <SaveIcon />
                  </IconButton>
                </Tooltip>
              }
              <Tooltip title="Delete Job">
                <IconButton
                  size="small"
                  onClick={(e) => { e.stopPropagation(); deleteJob(job.id); }}
                >
                  <DeleteIcon />
                </IconButton>
              </Tooltip>
              <Tooltip title="Reset Job">
                <IconButton
                  size="small"
                  onClick={(e) => { e.stopPropagation(); handleReset(); }}
                >
                  <RestoreIcon />
                </IconButton>
              </Tooltip>
              <Tooltip title="Reprocess Job">
                <IconButton
                  size="small"
                  onClick={(e) => { e.stopPropagation(); handleRefresh(); }}
                >
                  <ModelTrainingIcon />
                </IconButton>
              </Tooltip>
            </Box>
            {adminStatus &&
              <Box sx={{ mt: 3 }}>
                <StatusBox>
                  {adminStatusType && <StatusIcon type={adminStatusType} />}
                  <Typography variant="body2" sx={{ ml: 1 }}>
                    {adminStatus || 'Processing...'}
                  </Typography>
                </StatusBox>
                {adminStatus && <LinearProgress sx={{ mt: 1 }} />}
              </Box>
            }
          </CardActions>
        }
      </Card>
    );
  };
@@ -5,12 +5,12 @@ import Box from '@mui/material/Box';

import { BackstoryElementProps } from 'components/BackstoryTab';
import { JobInfo } from 'components/ui/JobInfo';
import { Job, JobFull } from "types/types";
import { Job } from "types/types";
import { useAuth } from 'hooks/AuthContext';
import { useAppState, useSelectedJob } from 'hooks/GlobalContext';

interface JobPickerProps extends BackstoryElementProps {
  onSelect?: (job: JobFull) => void
  onSelect?: (job: Job) => void
};

const JobPicker = (props: JobPickerProps) => {
@@ -18,7 +18,7 @@ const JobPicker = (props: JobPickerProps) => {
  const { apiClient } = useAuth();
  const { selectedJob, setSelectedJob } = useSelectedJob();
  const { setSnack } = useAppState();
  const [jobs, setJobs] = useState<JobFull[] | null>(null);
  const [jobs, setJobs] = useState<Job[] | null>(null);

  useEffect(() => {
    if (jobs !== null) {
@@ -27,7 +27,7 @@ const JobPicker = (props: JobPickerProps) => {
  const getJobs = async () => {
    try {
      const results = await apiClient.getJobs();
      const jobs: JobFull[] = results.data;
      const jobs: Job[] = results.data;
      jobs.sort((a, b) => {
        let result = a.company?.localeCompare(b.company || '');
        if (result === 0) {
frontend/src/components/ui/StatusIcon.tsx (new file, 59 lines)
@@ -0,0 +1,59 @@
import React from 'react';
import {
  SyncAlt,
  Favorite,
  Settings,
  Info,
  Search,
  AutoFixHigh,
  Image,
  Psychology,
  Build,
} from '@mui/icons-material';
import { styled } from '@mui/material/styles';
import * as Types from 'types/types';
import { Box } from '@mui/material';

interface StatusIconProps {
  type: Types.ApiActivityType;
}

const StatusBox = styled(Box)(({ theme }) => ({
  display: 'flex',
  alignItems: 'center',
  gap: theme.spacing(1),
  padding: theme.spacing(1, 2),
  backgroundColor: theme.palette.background.paper,
  borderRadius: theme.shape.borderRadius,
  border: `1px solid ${theme.palette.divider}`,
  minHeight: 48,
}));

const StatusIcon = (props: StatusIconProps) => {
  const {type} = props;

  switch (type) {
    case 'converting':
      return <SyncAlt color="primary" />;
    case 'heartbeat':
      return <Favorite color="error" />;
    case 'system':
      return <Settings color="action" />;
    case 'info':
      return <Info color="info" />;
    case 'searching':
      return <Search color="primary" />;
    case 'generating':
      return <AutoFixHigh color="secondary" />;
    case 'generating_image':
      return <Image color="primary" />;
    case 'thinking':
      return <Psychology color="secondary" />;
    case 'tooling':
      return <Build color="action" />;
    default:
      return <Info color="action" />;
  }
};

export { StatusIcon, StatusBox };
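A minimal consumer sketch of the new shared component, mirroring how JobCreator and JobInfo use it elsewhere in this commit; the `ActivityStatus` wrapper and its props are hypothetical, everything else comes from the file above.

import { Typography } from '@mui/material';
import { StatusIcon, StatusBox } from 'components/ui/StatusIcon';
import * as Types from 'types/types';

// Hypothetical wrapper: renders the activity icon (when present) next to a status label.
const ActivityStatus = ({ activity, label }: { activity: Types.ApiActivityType | null; label: string }) => (
  <StatusBox>
    {activity && <StatusIcon type={activity} />}
    <Typography variant="body2" sx={{ ml: 1 }}>{label || 'Processing...'}</Typography>
  </StatusBox>
);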
@@ -16,6 +16,8 @@ import {
  Bookmark as BookmarkIcon,
  BubbleChart,
} from '@mui/icons-material';
import SchoolIcon from '@mui/icons-material/School';

import FaceRetouchingNaturalIcon from '@mui/icons-material/FaceRetouchingNatural';
import LibraryBooksIcon from '@mui/icons-material/LibraryBooks';
import { BackstoryLogo } from 'components/ui/BackstoryLogo';
@@ -36,6 +38,7 @@ import { NavigationConfig, NavigationItem } from 'types/navigation';
import { CandidateProfile } from 'pages/candidate/Profile';
import { DocumentManager } from 'components/DocumentManager';
import { VectorVisualizer } from 'components/VectorVisualizer';
import { HowItWorks } from 'pages/HowItWorks';

// Beta page components for placeholder routes
const SearchPage = () => (<BetaPage><Typography variant="h4">Search</Typography></BetaPage>);
@@ -49,16 +52,17 @@ const SettingsPage = () => (<BetaPage><Typography variant="h4">Settings</Typogra
export const navigationConfig: NavigationConfig = {
  items: [
    { id: 'home', label: <BackstoryLogo />, path: '/', component: <HomePage />, userTypes: ['guest', 'candidate', 'employer'], exact: true, },
    { id: 'how-it-works', label: 'How It Works', path: '/how-it-works', icon: <SchoolIcon />, component: <HowItWorks />, userTypes: ['guest', 'candidate', 'employer',], },
    { id: 'job-analysis', label: 'Job Analysis', path: '/job-analysis', icon: <WorkIcon />, component: <JobAnalysisPage />, userTypes: ['guest', 'candidate', 'employer',], },
    { id: 'chat', label: 'Candidate Chat', path: '/chat', icon: <ChatIcon />, component: <CandidateChatPage />, userTypes: ['guest', 'candidate', 'employer',], }, {
      id: 'candidate-menu', label: 'Tools', icon: <PersonIcon />, userTypes: ['candidate'], children: [
        { id: 'candidate-dashboard', label: 'Dashboard', path: '/candidate/dashboard', icon: <DashboardIcon />, component: <CandidateDashboard />, userTypes: ['candidate'] },
        { id: 'candidate-profile', label: 'Profile', icon: <PersonIcon />, path: '/candidate/profile', component: <CandidateProfile />, userTypes: ['candidate'] },
        { id: 'candidate-docs', label: 'Documents', icon: <BubbleChart />, path: '/candidate/documents', component: <Box sx={{ display: "flex", width: "100%", flexDirection: "column" }}><VectorVisualizer /><DocumentManager /></Box>, userTypes: ['candidate'] },
        { id: 'candidate-qa-setup', label: 'Q&A Setup', icon: <QuizIcon />, path: '/candidate/qa-setup', component: <BetaPage><Box>Candidate q&a setup page</Box></BetaPage>, userTypes: ['candidate'] },
        { id: 'candidate-analytics', label: 'Analytics', icon: <AnalyticsIcon />, path: '/candidate/analytics', component: <BetaPage><Box>Candidate analytics page</Box></BetaPage>, userTypes: ['candidate'] },
        { id: 'candidate-job-analysis', label: 'Job Analysis', path: '/candidate/job-analysis', icon: <WorkIcon />, component: <JobAnalysisPage />, userTypes: ['candidate'], },
        { id: 'candidate-resumes', label: 'Resumes', icon: <DescriptionIcon />, path: '/candidate/resumes', component: <BetaPage><Box>Candidate resumes page</Box></BetaPage>, userTypes: ['candidate'] },
        { id: 'candidate-content', label: 'Content', icon: <BubbleChart />, path: '/candidate/content', component: <Box sx={{ display: "flex", width: "100%", flexDirection: "column" }}><VectorVisualizer /><DocumentManager /></Box>, userTypes: ['candidate'] },
        { id: 'candidate-settings', label: 'Settings', path: '/candidate/settings', icon: <SettingsIcon />, component: <Settings />, userTypes: ['candidate'], },
      ],
    },
@@ -75,7 +79,6 @@ export const navigationConfig: NavigationConfig = {
      ],
    },
    // { id: 'find-candidate', label: 'Find a Candidate', path: '/find-a-candidate', icon: <PersonSearchIcon />, component: <CandidateListingPage />, userTypes: ['guest', 'candidate', 'employer'], },
    { id: 'docs', label: 'Docs', path: '/docs/*', icon: <LibraryBooksIcon />, component: <DocsPage />, userTypes: ['guest', 'candidate', 'employer'], },
    {
      id: 'admin-menu',
      label: 'Admin',
@@ -83,6 +86,7 @@ export const navigationConfig: NavigationConfig = {
      userTypes: ['admin'],
      children: [
        { id: 'generate-candidate', label: 'Generate Candidate', path: '/admin/generate-candidate', icon: <FaceRetouchingNaturalIcon />, component: <GenerateCandidate />, userTypes: ['admin'] },
        { id: 'docs', label: 'Docs', path: '/docs/*', icon: <LibraryBooksIcon />, component: <DocsPage />, userTypes: ['admin'], },
      ],
    },
    // Auth routes (special handling)
frontend/src/pages/HowItWorks.tsx (new file, 24 lines)
@@ -0,0 +1,24 @@
import React from 'react';
import { Box, Paper, Typography } from '@mui/material';
import { BackstoryLogo } from 'components/ui/BackstoryLogo';

const HowItWorks = () => {
  return (<Paper sx={{ m: 1, p: 1 }}>
    <Box sx={{ display: "flex", flexDirection: "column" }}>
      <Box sx={{ display: "flex", alignContent: "center", verticalAlign: "center", flexDirection: "row" }}>
        <Typography>Job Description ⇒</Typography><BackstoryLogo /><Typography>⇒ (Company Info, Job Summary, Job Requirements) ⇒ <strong>Job</strong></Typography>
      </Box>
      <Box sx={{ display: "flex", alignContent: "center", verticalAlign: "center", flexDirection: "row" }}>
        <Typography>User Content ⇒ </Typography><BackstoryLogo /><Typography>⇒ RAG Vector Database ⇒ <strong>Candidate</strong></Typography>
      </Box>
      <Box sx={{ display: "flex", alignContent: "center", verticalAlign: "center", flexDirection: "row" }}>
        <Typography><strong>Job</strong> + <strong>Candidate</strong> ⇒ </Typography><BackstoryLogo /><Typography>⇒ <strong>Skill Match</strong></Typography>
      </Box>
      <Box sx={{ display: "flex", alignContent: "center", verticalAlign: "center", flexDirection: "row" }}>
        <Typography><strong>Skill Match</strong> + <strong>Candidate</strong> ⇒ </Typography><BackstoryLogo /><Typography>⇒ <strong>Resume</strong></Typography>
      </Box>
    </Box>
  </Paper>);
}

export { HowItWorks };
@@ -644,9 +644,19 @@ class ApiClient {
    return this.handleApiResponseWithConversion<Types.Employer>(response, 'Employer');
  }


  // ============================
  // Job Methods with Date Conversion
  // ============================
  async updateJob(id: string, updates: Partial<Types.Job>): Promise<Types.Job> {
    const response = await fetch(`${this.baseUrl}/jobs/${id}`, {
      method: 'PATCH',
      headers: this.defaultHeaders,
      body: JSON.stringify(formatApiRequest(updates))
    });

    return this.handleApiResponseWithConversion<Types.Job>(response, 'Job');
  }

  createJobFromDescription(job_description: string, streamingOptions?: StreamingOptions<Types.JobRequirementsMessage>): StreamingResponse<Types.JobRequirementsMessage> {
    const body = JSON.stringify(job_description);
@@ -672,7 +682,7 @@ class ApiClient {
    return this.handleApiResponseWithConversion<Types.Job>(response, 'Job');
  }

  async getJobs(request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.JobFull>> {
  async getJobs(request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.Job>> {
    const paginatedRequest = createPaginatedRequest(request);
    const params = toUrlParams(formatApiRequest(paginatedRequest));

@@ -680,7 +690,7 @@ class ApiClient {
      headers: this.defaultHeaders
    });

    return this.handlePaginatedApiResponseWithConversion<Types.JobFull>(response, 'JobFull');
    return this.handlePaginatedApiResponseWithConversion<Types.Job>(response, 'Job');
  }

  async getJobsByEmployer(employerId: string, request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.Job>> {
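A caller sketch for the Job endpoints touched above: updateJob issues a PATCH to /jobs/{job_id} with a partial Job payload, and getJobs now returns a paginated list of plain Job records instead of JobFull. The `useJobTools` hook is hypothetical; `apiClient` is assumed to come from useAuth() as it does elsewhere in this commit.

import * as Types from 'types/types';
import { useAuth } from 'hooks/AuthContext';

// Hypothetical hook wrapping the two Job calls shown in this hunk.
const useJobTools = () => {
  const { apiClient } = useAuth();

  // PATCH /jobs/{job_id} with a partial Job payload; resolves with the converted Job.
  const renameJob = (jobId: string, title: string): Promise<Types.Job> =>
    apiClient.updateJob(jobId, { title });

  // Paginated list of Job records (JobFull was removed in this commit).
  const listCompanies = async (): Promise<string[]> => {
    const page = await apiClient.getJobs();
    return page.data.map((job: Types.Job) => job.company || 'Unknown');
  };

  return { renameJob, listCompanies };
};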
@@ -1,6 +1,6 @@
// Generated TypeScript types from Pydantic models
// Source: src/backend/models.py
// Generated on: 2025-06-10T02:48:12.087485
// Generated on: 2025-06-10T17:14:56.968033
// DO NOT EDIT MANUALLY - This file is auto-generated

// ============================
@@ -526,7 +526,7 @@ export interface DocumentMessage {
}

export interface DocumentOptions {
  includeInRAG: boolean;
  includeInRag: boolean;
  isJobDocument?: boolean;
  overwrite?: boolean;
}
@@ -716,6 +716,7 @@ export interface Job {
  requirements?: JobRequirements;
  createdAt?: Date;
  updatedAt?: Date;
  details?: JobDetails;
}

export interface JobApplication {
@@ -734,25 +735,14 @@ export interface JobApplication {
  decision?: ApplicationDecision;
}

export interface JobFull {
  id?: string;
  ownerId: string;
  ownerType: "candidate" | "employer" | "guest";
  owner?: BaseUser;
  title?: string;
  summary?: string;
  company?: string;
  description: string;
  requirements?: JobRequirements;
  createdAt?: Date;
  updatedAt?: Date;
export interface JobDetails {
  location: Location;
  salaryRange?: SalaryRange;
  employmentType: "full-time" | "part-time" | "contract" | "internship" | "freelance";
  datePosted?: Date;
  applicationDeadline?: Date;
  isActive: boolean;
  applicants?: Array<JobApplication>;
  applicants?: Array<any>;
  department?: string;
  reportsTo?: string;
  benefits?: Array<string>;
@@ -1010,6 +1000,7 @@ export interface Skill {
export interface SkillAssessment {
  candidateId: string;
  skill: string;
  skillModified?: string;
  evidenceFound: boolean;
  evidenceStrength: "strong" | "moderate" | "weak" | "none";
  assessment: string;
@@ -1017,6 +1008,7 @@ export interface SkillAssessment {
  evidenceDetails?: Array<EvidenceDetail>;
  createdAt?: Date;
  updatedAt?: Date;
  ragResults?: Array<ChromaDBGetResponse>;
}

export interface SocialLink {
@@ -1609,7 +1601,7 @@ export function convertInterviewScheduleFromApi(data: any): InterviewSchedule {
/**
 * Convert Job from API response
 * Date fields: createdAt, updatedAt
 * Nested models: owner (BaseUser)
 * Nested models: owner (BaseUser), details (JobDetails)
 */
export function convertJobFromApi(data: any): Job {
  if (!data) return data;
@@ -1622,6 +1614,8 @@ export function convertJobFromApi(data: any): Job {
    updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined,
    // Convert nested BaseUser model
    owner: data.owner ? convertBaseUserFromApi(data.owner) : undefined,
    // Convert nested JobDetails model
    details: data.details ? convertJobDetailsFromApi(data.details) : undefined,
  };
}
/**
@@ -1645,29 +1639,20 @@ export function convertJobApplicationFromApi(data: any): JobApplication {
  };
}
/**
 * Convert JobFull from API response
 * Date fields: createdAt, updatedAt, datePosted, applicationDeadline, featuredUntil
 * Nested models: owner (BaseUser), applicants (JobApplication)
 * Convert JobDetails from API response
 * Date fields: datePosted, applicationDeadline, featuredUntil
 */
export function convertJobFullFromApi(data: any): JobFull {
export function convertJobDetailsFromApi(data: any): JobDetails {
  if (!data) return data;

  return {
    ...data,
    // Convert createdAt from ISO string to Date
    createdAt: data.createdAt ? new Date(data.createdAt) : undefined,
    // Convert updatedAt from ISO string to Date
    updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined,
    // Convert datePosted from ISO string to Date
    datePosted: data.datePosted ? new Date(data.datePosted) : undefined,
    // Convert applicationDeadline from ISO string to Date
    applicationDeadline: data.applicationDeadline ? new Date(data.applicationDeadline) : undefined,
    // Convert featuredUntil from ISO string to Date
    featuredUntil: data.featuredUntil ? new Date(data.featuredUntil) : undefined,
    // Convert nested BaseUser model
    owner: data.owner ? convertBaseUserFromApi(data.owner) : undefined,
    // Convert nested JobApplication model
    applicants: data.applicants ? convertJobApplicationFromApi(data.applicants) : undefined,
  };
}
/**
@@ -1906,8 +1891,8 @@ export function convertFromApi<T>(data: any, modelType: string): T {
      return convertJobFromApi(data) as T;
    case 'JobApplication':
      return convertJobApplicationFromApi(data) as T;
    case 'JobFull':
      return convertJobFullFromApi(data) as T;
    case 'JobDetails':
      return convertJobDetailsFromApi(data) as T;
    case 'JobListResponse':
      return convertJobListResponseFromApi(data) as T;
    case 'JobRequirementsMessage':
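An illustrative sketch of the Job/JobDetails split introduced by these generated types: location and posting metadata now hang off job.details instead of a separate JobFull type, and convertJobFromApi converts ISO date strings and the nested details. Field values below are hypothetical, and the import path assumes the generated file is exposed as 'types/types' as it is elsewhere in this commit.

import { Job, convertJobFromApi } from 'types/types';

// Hypothetical raw API payload; only fields shown in the interfaces above are used.
const raw = {
  ownerId: 'candidate-1',
  ownerType: 'candidate',
  description: 'Senior embedded engineer...',
  createdAt: '2025-06-10T17:14:56.968033Z',
  details: { location: { city: 'Portland', state: 'OR' }, employmentType: 'full-time', isActive: true },
};

const job: Job = convertJobFromApi(raw);           // createdAt becomes a Date
const city = job.details?.location.city ?? 'n/a';  // nested JobDetails is optional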
@@ -22,6 +22,7 @@ import asyncio
from datetime import datetime, UTC
from prometheus_client import Counter, Summary, CollectorRegistry # type: ignore
import numpy as np # type: ignore
import json_extractor as json_extractor

from models import ( ApiActivityType, ChatMessageError, ChatMessageRagSearch, ChatMessageStatus, ChatMessageStreaming, LLMMessage, ChatQuery, ChatMessage, ChatOptions, ChatMessageUser, Tunables, ApiMessageType, ChatSenderType, ApiStatusType, ChatMessageMetaData, Candidate)
from logger import logger
@@ -373,6 +374,7 @@ Content: {content}

        rag_metadata = ChromaDBGetResponse(
            name=rag.name,
            query=prompt,
            query_embedding=query_embedding.tolist(),
            ids=chroma_results.get("ids", []),
            embeddings=chroma_results.get("embeddings", []),
@@ -406,7 +408,7 @@ Content: {content}
        temperature=0.7) -> AsyncGenerator[ChatMessageStatus | ChatMessageError | ChatMessageStreaming | ChatMessage, None]:

        self.set_optimal_context_size(
            llm=llm, model=model, prompt=prompt
            llm=llm, model=model, prompt=prompt+system_prompt
        )

        options = ChatOptions(
@@ -808,20 +810,16 @@ Content: {content}

        # return

    def extract_json_blocks(self, text: str, allow_multiple: bool = False) -> List[dict]:
        """
        Extract JSON blocks from text, even if surrounded by markdown or noisy text.
        If allow_multiple is True, returns all JSON blocks; otherwise, only the first.
        """
        return json_extractor.extract_json_blocks(text, allow_multiple)

    def extract_json_from_text(self, text: str) -> str:
        """Extract JSON string from text that may contain other content."""
        json_pattern = r"```json\s*([\s\S]*?)\s*```"
        match = re.search(json_pattern, text)
        if match:
            return match.group(1).strip()

        # Try to find JSON without the markdown code block
        json_pattern = r"({[\s\S]*})"
        match = re.search(json_pattern, text)
        if match:
            return match.group(1).strip()

        raise ValueError("No JSON found in the response")
        return json_extractor.extract_json_from_text(text)

    def extract_markdown_from_text(self, text: str) -> str:
        """Extract Markdown string from text that may contain other content."""
@@ -832,7 +830,5 @@ Content: {content}

        raise ValueError("No Markdown found in the response")



# Register the base agent
agent_registry.register(Agent._agent_type, Agent)
@@ -85,6 +85,13 @@ class JobRequirementsAgent(Agent):
    ) -> AsyncGenerator[ChatMessage | ChatMessageError, None]:
        """Analyze job requirements from job description."""
        system_prompt, prompt = self.create_job_analysis_prompt(prompt)
        status_message = ChatMessageStatus(
            session_id=session_id,
            content="Analyzing job requirements",
            activity=ApiActivityType.THINKING
        )
        yield status_message
        logger.info(f"🔍 {status_message.content}")
        generated_message = None
        async for generated_message in self.llm_one_shot(llm, model, session_id=session_id, prompt=prompt, system_prompt=system_prompt):
            if generated_message.status == ApiStatusType.ERROR:
@@ -152,9 +152,19 @@ JSON RESPONSE:"""
            logger.error(f"⚠️ {error_message.content}")
            yield error_message
            return
        # Stage 1A: Analyze job requirements

        skill = prompt.strip()
        if not skill:
            error_message = ChatMessageError(
                session_id=session_id,
                content="Skill cannot be empty."
            )
            logger.error(f"⚠️ {error_message.content}")
            yield error_message
            return

        rag_message = None
        async for rag_message in self.generate_rag_results(session_id=session_id, prompt=prompt):
        async for rag_message in self.generate_rag_results(session_id=session_id, prompt=skill):
            if rag_message.status == ApiStatusType.ERROR:
                yield rag_message
                return
@@ -172,7 +182,7 @@ JSON RESPONSE:"""

        rag_context = self.get_rag_context(rag_message)
        logger.info(f"🔍 RAG content retrieved {len(rag_context)} bytes of context")
        system_prompt, prompt = self.generate_skill_assessment_prompt(skill=prompt, rag_context=rag_context)
        system_prompt, prompt = self.generate_skill_assessment_prompt(skill=skill, rag_context=rag_context)

        skill_message = None
        async for skill_message in self.llm_one_shot(llm=llm, model=model, session_id=session_id, prompt=prompt, system_prompt=system_prompt, temperature=0.7):
@@ -199,11 +209,13 @@ JSON RESPONSE:"""
            skill_assessment_data = json.loads(json_str).get("skill_assessment", {})
            skill_assessment = SkillAssessment(
                candidate_id=self.user.id,
                skill=skill_assessment_data.get("skill", ""),
                skill=skill,
                skill_modified=skill_assessment_data.get("skill", ""),
                evidence_found=skill_assessment_data.get("evidence_found", False),
                evidence_strength=skill_assessment_data.get("evidence_strength", "NONE").lower(),
                assessment=skill_assessment_data.get("assessment", ""),
                description=skill_assessment_data.get("description", ""),
                rag_results=rag_message.content,
                evidence_details=[
                    EvidenceDetail(
                        source=evidence.get("source", ""),
@@ -215,13 +227,25 @@ JSON RESPONSE:"""
        except Exception as e:
            error_message = ChatMessageError(
                session_id=session_id,
                content=f"Failed to parse Skill assessment JSON: {str(e)}\n\n{skill_message.content}"
                content=f"Failed to parse Skill assessment JSON: {str(e)}\n\n{skill_message.content}\n\nJSON:\n{json_str}\n\n"
            )
            logger.error(traceback.format_exc())
            logger.error(f"⚠️ {error_message.content}")
            yield error_message
            return

        # if skill_assessment.evidence_strength == "none":
        #     logger.info("⚠️ No evidence found for skill assessment, returning NONE.")
        # with open("src/tmp.txt", "w") as f:
        #     f.write(f"Skill: {skill}\n\n")
        #     f.write(f"System prompt:\n{system_prompt}\n\n")
        #     f.write(f"Prompt:\n{prompt}\n\n")
        #     f.write(f"LLM response:\n{skill_message.content}\n\n")
        #     f.write(f"JSON portion:\n{json_str}\n\n")
        #     f.write(f"JSON parsed:\n{json.dumps(skill_assessment_data, indent=2)}\n\n")
        #     f.write(f"Skill assessment data:\n")
        #     f.write(skill_assessment.model_dump_json(indent=2))

        skill_assessment_message = ChatMessageSkillAssessment(
            session_id=session_id,
            status=ApiStatusType.DONE,
@@ -294,6 +294,7 @@ class RedisDatabase:
                # Convert to UTC if it's in a different timezone
                dt = dt.astimezone(timezone.utc)
                return dt
            logger.warning(f"⚠️ No RAG update time found for user {user_id}")
            return None
        except Exception as e:
            logger.error(f"❌ Error getting user RAG update time: {e}")
@@ -314,6 +315,7 @@ class RedisDatabase:
            # Store as ISO format with timezone info
            timestamp_str = update_time.isoformat()  # This includes timezone
            await self.redis.set(rag_update_key, timestamp_str)
            logger.info(f"✅ User RAG update time set for user {user_id}: {timestamp_str}")
            return True
        except Exception as e:
            logger.error(f"❌ Error setting user RAG update time: {e}")
@@ -412,7 +414,7 @@ class RedisDatabase:
    async def get_documents_by_rag_status(self, candidate_id: str, include_in_rag: bool = True) -> List[Dict]:
        """Get candidate documents filtered by RAG inclusion status"""
        all_documents = await self.get_candidate_documents(candidate_id)
        return [doc for doc in all_documents if doc.get("include_in_RAG", False) == include_in_rag]
        return [doc for doc in all_documents if doc.get("include_in_rag", False) == include_in_rag]

    async def bulk_update_document_rag_status(self, candidate_id: str, document_ids: List[str], include_in_rag: bool):
        """Bulk update RAG status for multiple documents"""
@@ -421,7 +423,7 @@ class RedisDatabase:
        for doc_id in document_ids:
            doc_data = await self.get_document(doc_id)
            if doc_data and doc_data.get("candidate_id") == candidate_id:
                doc_data["include_in_RAG"] = include_in_rag
                doc_data["include_in_rag"] = include_in_rag
                doc_data["updatedAt"] = datetime.now(UTC).isoformat()
                pipe.set(f"document:{doc_id}", self._serialize(doc_data))

@@ -3,12 +3,12 @@ import weakref
from datetime import datetime, timedelta
from typing import Dict, Optional, Any
from contextlib import asynccontextmanager
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field # type: ignore

from models import ( Candidate )
from .candidate_entity import CandidateEntity
from database import RedisDatabase
from prometheus_client import CollectorRegistry
from prometheus_client import CollectorRegistry # type: ignore

class EntityManager:
    """Manages lifecycle of CandidateEntity instances"""

src/backend/json_extractor.py (new file, 97 lines)
@@ -0,0 +1,97 @@
import json
import re
from typing import List, Union

def extract_json_blocks(text: str, allow_multiple: bool = False) -> List[dict]:
    """
    Extract JSON blocks from text, even if surrounded by markdown or noisy text.
    If allow_multiple is True, returns all JSON blocks; otherwise, only the first.
    """
    found = []

    # First try to extract from code blocks (most reliable)
    code_block_pattern = r"```(?:json)?\s*([\s\S]+?)\s*```"
    for match in re.finditer(code_block_pattern, text):
        block = match.group(1).strip()
        try:
            parsed = json.loads(block)
            found.append(parsed)
            if not allow_multiple:
                return [parsed]
        except json.JSONDecodeError:
            continue

    # If no valid code blocks found, look for standalone JSON objects/arrays
    if not found:
        standalone_json = _extract_standalone_json(text, allow_multiple)
        found.extend(standalone_json)

    if not found:
        raise ValueError("No valid JSON block found in the text")

    return found

def _extract_standalone_json(text: str, allow_multiple: bool = False) -> List[Union[dict, list]]:
    """Extract standalone JSON objects or arrays from text using proper brace counting."""
    found = []
    i = 0

    while i < len(text):
        if text[i] in '{[':
            # Found potential JSON start
            json_str = _extract_complete_json_at_position(text, i)
            if json_str:
                try:
                    parsed = json.loads(json_str)
                    found.append(parsed)
                    if not allow_multiple:
                        return [parsed]
                    # Move past this JSON block
                    i += len(json_str)
                    continue
                except json.JSONDecodeError:
                    pass
        i += 1

    return found

def _extract_complete_json_at_position(text: str, start_pos: int) -> str:
    """
    Extract a complete JSON object or array starting at the given position.
    Uses proper brace/bracket counting and string escape handling.
    """
    if start_pos >= len(text) or text[start_pos] not in '{[':
        return ""

    start_char = text[start_pos]
    end_char = '}' if start_char == '{' else ']'

    count = 1
    i = start_pos + 1
    in_string = False
    escape_next = False

    while i < len(text) and count > 0:
        char = text[i]

        if escape_next:
            escape_next = False
        elif char == '\\' and in_string:
            escape_next = True
        elif char == '"' and not escape_next:
            in_string = not in_string
        elif not in_string:
            if char == start_char:
                count += 1
            elif char == end_char:
                count -= 1

        i += 1

    if count == 0:
        return text[start_pos:i]
    return ""

def extract_json_from_text(text: str) -> str:
    """Extract JSON string from text that may contain other content."""
    return json.dumps(extract_json_blocks(text, allow_multiple=False)[0])
@ -70,6 +70,7 @@ import entities
|
||||
from email_service import VerificationEmailRateLimiter, email_service
|
||||
from device_manager import DeviceManager
|
||||
import agents
|
||||
from entities.candidate_entity import CandidateEntity
|
||||
|
||||
# =============================
|
||||
# Import Pydantic models
|
||||
@ -82,7 +83,7 @@ from models import (
|
||||
Candidate, Employer, BaseUserWithType, BaseUser, Guest, Authentication, AuthResponse, CandidateAI,
|
||||
|
||||
# Job models
|
||||
JobFull, JobApplication, ApplicationStatus,
|
||||
JobApplication, ApplicationStatus,
|
||||
|
||||
# Chat models
|
||||
ChatSession, ChatMessage, ChatContext, ChatQuery, ApiStatusType, ChatSenderType, ApiMessageType, ChatContextType,
|
||||
@ -2121,7 +2122,7 @@ async def upload_candidate_document(
|
||||
try:
|
||||
# Parse the JSON string and create DocumentOptions object
|
||||
options_dict = json.loads(options_data)
|
||||
options : DocumentOptions = DocumentOptions.model_validate(**options_dict)
|
||||
options : DocumentOptions = DocumentOptions.model_validate(options_dict)
|
||||
except (json.JSONDecodeError, ValidationError) as e:
|
||||
return StreamingResponse(
|
||||
iter([json.dumps(ChatMessageError(
|
||||
@ -2178,7 +2179,7 @@ async def upload_candidate_document(
|
||||
|
||||
logger.info(f"📁 Received file upload: filename='{file.filename}', content_type='{file.content_type}', size='{len(file_content)} bytes'")
|
||||
|
||||
directory = "rag-content" if options.include_in_RAG else "files"
|
||||
directory = "rag-content" if options.include_in_rag else "files"
|
||||
directory = "jobs" if options.is_job_document else directory
|
||||
|
||||
# Ensure the file does not already exist either in 'files' or in 'rag-content'
|
||||
@ -2332,6 +2333,52 @@ async def upload_candidate_document(
|
||||
media_type="text/event-stream"
|
||||
)
|
||||
|
||||
async def reformat_as_markdown(database: RedisDatabase, candidate_entity: CandidateEntity, content: str):
|
||||
chat_agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.JOB_REQUIREMENTS)
|
||||
if not chat_agent:
|
||||
error_message = ChatMessageError(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content="No agent found for job requirements chat type"
|
||||
)
|
||||
yield error_message
|
||||
return
|
||||
status_message = ChatMessageStatus(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content=f"Reformatting job description as markdown...",
|
||||
activity=ApiActivityType.CONVERTING
|
||||
)
|
||||
yield status_message
|
||||
|
||||
message = None
|
||||
async for message in chat_agent.llm_one_shot(
|
||||
llm=llm_manager.get_llm(),
|
||||
model=defines.model,
|
||||
session_id=MOCK_UUID,
|
||||
prompt=content,
|
||||
system_prompt="""
|
||||
You are a document editor. Take the provided job description and reformat as legible markdown.
|
||||
Return only the markdown content, no other text. Make sure all content is included.
|
||||
"""
|
||||
):
|
||||
pass
|
||||
|
||||
if not message or not isinstance(message, ChatMessage):
|
||||
logger.error("❌ Failed to reformat job description to markdown")
|
||||
error_message = ChatMessageError(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content="Failed to reformat job description"
|
||||
)
|
||||
yield error_message
|
||||
return
|
||||
chat_message : ChatMessage = message
|
||||
try:
|
||||
chat_message.content = chat_agent.extract_markdown_from_text(chat_message.content)
|
||||
except Exception as e:
|
||||
pass
|
||||
logger.info(f"✅ Successfully converted content to markdown")
|
||||
yield chat_message
|
||||
return
|
||||
|
||||
async def create_job_from_content(database: RedisDatabase, current_user: Candidate, content: str):
|
||||
status_message = ChatMessageStatus(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
@ -2342,6 +2389,20 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida
|
||||
await asyncio.sleep(0) # Let the status message propagate
|
||||
|
||||
async with entities.get_candidate_entity(candidate=current_user) as candidate_entity:
|
||||
message = None
|
||||
async for message in reformat_as_markdown(database, candidate_entity, content):
|
||||
# Only yield one final DONE message
|
||||
if message.status != ApiStatusType.DONE:
|
||||
yield message
|
||||
if not message or not isinstance(message, ChatMessage):
|
||||
error_message = ChatMessageError(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content="Failed to reformat job description"
|
||||
)
|
||||
yield error_message
|
||||
return
|
||||
markdown_message = message
|
||||
|
||||
chat_agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.JOB_REQUIREMENTS)
|
||||
if not chat_agent:
|
||||
error_message = ChatMessageError(
|
||||
@ -2350,7 +2411,6 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida
|
||||
)
|
||||
yield error_message
|
||||
return
|
||||
message = None
|
||||
status_message = ChatMessageStatus(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content=f"Analyzing document for company and requirement details...",
|
||||
@ -2358,13 +2418,15 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida
|
||||
)
|
||||
yield status_message
|
||||
|
||||
message = None
|
||||
async for message in chat_agent.generate(
|
||||
llm=llm_manager.get_llm(),
|
||||
model=defines.model,
|
||||
session_id=MOCK_UUID,
|
||||
prompt=content
|
||||
prompt=markdown_message.content
|
||||
):
|
||||
pass
|
||||
if message.status != ApiStatusType.DONE:
|
||||
yield message
|
||||
|
||||
if not message or not isinstance(message, JobRequirementsMessage):
|
||||
error_message = ChatMessageError(
|
||||
@ -2374,42 +2436,8 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida
|
||||
yield error_message
|
||||
return
|
||||
|
||||
status_message = ChatMessageStatus(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content=f"Reformatting job description as markdown...",
|
||||
activity=ApiActivityType.CONVERTING
|
||||
)
|
||||
yield status_message
|
||||
|
||||
job_requirements : JobRequirementsMessage = message
|
||||
async for message in chat_agent.llm_one_shot(
|
||||
llm=llm_manager.get_llm(),
|
||||
model=defines.model,
|
||||
session_id=MOCK_UUID,
|
||||
prompt=content,
|
||||
system_prompt="""
|
||||
You are a document editor. Take the provided job description and reformat as legible markdown.
|
||||
Return only the markdown content, no other text. Make sure all content is included.
|
||||
"""
|
||||
):
|
||||
pass
|
||||
|
||||
if not message or not isinstance(message, ChatMessage):
|
||||
logger.error("❌ Failed to reformat job description to markdown")
|
||||
error_message = ChatMessageError(
|
||||
sessionId=MOCK_UUID, # No session ID for document uploads
|
||||
content="Failed to reformat job description"
|
||||
)
|
||||
yield error_message
|
||||
return
|
||||
chat_message : ChatMessage = message
|
||||
markdown = chat_message.content
|
||||
try:
|
||||
markdown = chat_agent.extract_markdown_from_text(chat_message.content)
|
||||
except Exception as e:
|
||||
pass
|
||||
job_requirements.job.description = markdown
|
||||
logger.info(f"✅ Successfully saved job requirements job {job_requirements.id}")
|
||||
logger.info(f"✅ Successfully generated job requirements for job {job_requirements.id}")
|
||||
yield job_requirements
|
||||
return

@@ -2619,7 +2647,7 @@ async def get_document_content(
            content=create_error_response("FORBIDDEN", "Cannot access another candidate's document")
        )

    file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_RAG else "files", document.originalName)
    file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_rag else "files", document.originalName)
    file_path = pathlib.Path(file_path)
    if not document.type in [DocumentType.TXT, DocumentType.MARKDOWN]:
        file_path = file_path.with_suffix('.md')
@@ -2694,7 +2722,7 @@ async def update_document(
            content=create_error_response("FORBIDDEN", "Cannot update another candidate's document")
        )
    update_options = updates.options if updates.options else DocumentOptions()
    if document.options.include_in_RAG != update_options.include_in_RAG:
    if document.options.include_in_rag != update_options.include_in_rag:
        # If RAG status is changing, we need to handle file movement
        rag_dir = os.path.join(defines.user_dir, candidate.username, "rag-content")
        file_dir = os.path.join(defines.user_dir, candidate.username, "files")
@@ -2703,7 +2731,7 @@ async def update_document(
        rag_path = os.path.join(rag_dir, document.originalName)
        file_path = os.path.join(file_dir, document.originalName)

        if update_options.include_in_RAG:
        if update_options.include_in_rag:
            src = pathlib.Path(file_path)
            dst = pathlib.Path(rag_path)
            # Move to RAG directory
@@ -2731,8 +2759,8 @@ async def update_document(
    update_dict = {}
    if updates.filename is not None:
        update_dict["filename"] = updates.filename.strip()
    if update_options.include_in_RAG is not None:
        update_dict["include_in_RAG"] = update_options.include_in_RAG
    if update_options.include_in_rag is not None:
        update_dict["include_in_rag"] = update_options.include_in_rag

    if not update_dict:
        return JSONResponse(
@@ -2802,7 +2830,7 @@ async def delete_document(
        )

    # Delete file from disk
    file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_RAG else "files", document.originalName)
    file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_rag else "files", document.originalName)
    file_path = pathlib.Path(file_path)

    try:
@@ -3264,10 +3292,7 @@ async def create_candidate_job(
    is_employer = isinstance(current_user, Employer)

    try:
        if is_employer:
            job = JobFull.model_validate(job_data)
        else:
            job = Job.model_validate(job_data)
        job = Job.model_validate(job_data)

        # Add required fields
        job.id = str(uuid.uuid4())
@@ -3286,6 +3311,50 @@ async def create_candidate_job(
    )

@api_router.patch("/jobs/{job_id}")
async def update_job(
    job_id: str = Path(...),
    updates: Dict[str, Any] = Body(...),
    current_user = Depends(get_current_user),
    database: RedisDatabase = Depends(get_database)
):
    """Update a job"""
    try:
        job_data = await database.get_job(job_id)
        if not job_data:
            logger.warning(f"⚠️ Job not found for update: {job_id}")
            return JSONResponse(
                status_code=404,
                content=create_error_response("NOT_FOUND", "Job not found")
            )

        job = Job.model_validate(job_data)

        # Check authorization (user can only update their own job)
        if current_user.is_admin is False and job.owner_id != current_user.id:
            logger.warning(f"⚠️ Unauthorized update attempt by user {current_user.id} on job {job_id}")
            return JSONResponse(
                status_code=403,
                content=create_error_response("FORBIDDEN", "Cannot update another user's job")
            )

        # Apply updates
        updates["updatedAt"] = datetime.now(UTC).isoformat()
        logger.info(f"🔄 Updating job {job_id} with data: {updates}")
        job_dict = job.model_dump()
        job_dict.update(updates)
        updated_job = Job.model_validate(job_dict)
        await database.set_job(job_id, updated_job.model_dump())

        return create_success_response(updated_job.model_dump(by_alias=True))

    except Exception as e:
        logger.error(f"❌ Update job error: {e}")
        return JSONResponse(
            status_code=400,
            content=create_error_response("UPDATE_FAILED", str(e))
        )
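
A usage sketch for the new PATCH route; only the `/jobs/{job_id}` path and the JSON body of partial field updates come from this diff — the base URL, router prefix, and bearer-token auth are assumptions:

```python
import httpx

async def rename_job(base_url: str, token: str, job_id: str) -> dict:
    # Send a partial update; the server stamps updatedAt and re-validates the Job model.
    async with httpx.AsyncClient(base_url=base_url) as client:
        response = await client.patch(
            f"/jobs/{job_id}",
            json={"title": "Senior Backend Engineer"},
            headers={"Authorization": f"Bearer {token}"},
        )
        response.raise_for_status()
        return response.json()
```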

@api_router.post("/jobs/from-content")
async def create_job_from_description(
    content: str = Body(...),
@@ -3306,7 +3375,14 @@ async def create_job_from_description(

    logger.info(f"📁 Received file content: size='{len(content)} bytes'")

    last_yield_was_streaming = False
    async for message in create_job_from_content(database=database, current_user=current_user, content=content):
        if message.status != ApiStatusType.STREAMING:
            last_yield_was_streaming = False
        else:
            if last_yield_was_streaming:
                continue
            last_yield_was_streaming = True
        logger.info(f"📄 Yielding job creation message status: {message.status}")
        yield message
    return
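
The loop above throttles the stream: every non-streaming message passes through, while each run of consecutive STREAMING messages is collapsed to its first one. A standalone sketch of the same filter, assuming the codebase's ApiStatusType enum:

```python
async def drop_repeated_streaming(messages):
    # Yield every non-streaming message; for streaming runs, yield only the first.
    last_was_streaming = False
    async for message in messages:
        if message.status != ApiStatusType.STREAMING:
            last_was_streaming = False
        else:
            if last_was_streaming:
                continue
            last_was_streaming = True
        yield message
```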
@@ -3526,10 +3602,7 @@ async def get_jobs(
    all_jobs_data = await database.get_all_jobs()
    jobs_list = []
    for job in all_jobs_data.values():
        if job.get("user_type") == "employer":
            jobs_list.append(JobFull.model_validate(job))
        else:
            jobs_list.append(Job.model_validate(job))
        jobs_list.append(Job.model_validate(job))

    paginated_jobs, total = filter_and_paginate(
        jobs_list, page, limit, sortBy, sortOrder, filter_dict
@@ -4537,12 +4610,12 @@ def get_endpoint_rate_limiter(rate_limiter: RateLimiter = Depends(get_rate_limit
@api_router.post("/candidates/{candidate_id}/skill-match")
async def get_candidate_skill_match(
    candidate_id: str = Path(...),
    requirement: str = Body(...),
    skill: str = Body(...),
    current_user = Depends(get_current_user_or_guest),
    database: RedisDatabase = Depends(get_database)
) -> StreamingResponse:

    """Get skill match for a candidate against a requirement with caching"""
    """Get skill match for a candidate against a skill with caching"""
    async def message_stream_generator():
        candidate_data = await database.get_candidate(candidate_id)
        if not candidate_data:
@@ -4555,18 +4628,21 @@ async def get_candidate_skill_match(

        candidate = Candidate.model_validate(candidate_data)

        # Create cache key for this specific candidate + requirement combination
        requirement_hash = hashlib.md5(requirement.encode()).hexdigest()[:8]
        cache_key = f"skill_match:{candidate.id}:{requirement_hash}"
        # Create cache key for this specific candidate + skill combination
        skill_hash = hashlib.md5(skill.lower().encode()).hexdigest()[:8]
        cache_key = f"skill_match:{candidate.id}:{skill_hash}"

        # Get cached assessment if it exists
        assessment : SkillAssessment | None = await database.get_cached_skill_match(cache_key)

        if assessment and assessment.skill.lower() != skill.lower():
            logger.warning(f"❌ Cached skill match for {candidate.username} does not match requested skill: {assessment.skill} != {skill} ({cache_key}). Regenerating...")
            assessment = None

        # Determine if we need to regenerate the assessment
        cached_date = None
        if assessment:
            # Get the latest RAG data update time for the current user
            user_rag_update_time = await database.get_user_rag_update_time(current_user.id)
            user_rag_update_time = await database.get_user_rag_update_time(candidate.id)

            updated = assessment.updated_at if "updated_at" in assessment else assessment.created_at
            # Check if cached result is still valid
@@ -4575,14 +4651,11 @@ async def get_candidate_skill_match(
                logger.info(f"🔄 Out-of-date cached entry for {candidate.username} skill {assessment.skill}")
                assessment = None
            else:
                cached_date = updated
                logger.info(f"✅ Using cached skill match for {candidate.username} skill {assessment.skill}: {cache_key}")
        else:
            logger.info(f"💾 No cached skill match data: {cache_key}, {candidate.id}, {requirement}")
            logger.info(f"💾 No cached skill match data: {cache_key}, {candidate.id}, {skill}")

        if assessment:
            logger.info(f"✅ Found cached skill match for candidate {candidate.username} against requirement: {requirement}")
            logger.info(f"💾 Cached skill match data: {assessment.evidence_strength}")

            # Return cached assessment
            skill_message = ChatMessageSkillAssessment(
                sessionId=MOCK_UUID, # No session ID for document uploads
@@ -4592,7 +4665,7 @@ async def get_candidate_skill_match(
            yield skill_message
            return

        logger.info(f"🔍 Generating skill match for candidate {candidate.username} against requirement: {requirement}")
        logger.info(f"🔍 Generating skill match for candidate {candidate.username} for skill: {skill}")

        async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
            agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.SKILL_MATCH)
@@ -4610,11 +4683,15 @@ async def get_candidate_skill_match(
                llm=llm_manager.get_llm(),
                model=defines.model,
                session_id=MOCK_UUID,
                prompt=requirement,
                prompt=skill,
            ):
                if generated_message.status == ApiStatusType.ERROR:
                    error_message = ChatMessageError(
                        sessionId=MOCK_UUID, # No session ID for document uploads
                        content=f"AI generation error: {generated_message.content}"
                    )
                    logger.error(f"❌ AI generation error: {generated_message.content}")
                    yield f"data: {json.dumps({'status': 'error'})}\n\n"
                    yield error_message
                    return

                # If the message is not done, convert it to a ChatMessageBase to remove
@@ -4634,7 +4711,7 @@ async def get_candidate_skill_match(
            if final_message is None:
                error_message = ChatMessageError(
                    sessionId=MOCK_UUID, # No session ID for document uploads
                    content=f"No skill match found for the given requirement"
                    content=f"No match found for the given skill"
                )
                yield error_message
                return
@@ -4658,8 +4735,8 @@ async def get_candidate_skill_match(
                return

            await database.cache_skill_match(cache_key, assessment)
            logger.info(f"💾 Cached new skill match for candidate {candidate.id}")
            logger.info(f"✅ Skill match found for candidate {candidate.id}: {assessment.evidence_strength}")
            logger.info(f"💾 Cached new skill match for candidate {candidate.id} as {cache_key}")
            logger.info(f"✅ Skill match: {assessment.evidence_strength} {skill}")
            yield skill_match
            return
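
The cache key for skill matches now hashes the normalized skill instead of the free-form requirement text, so repeat lookups for the same skill hit the cache regardless of casing. A minimal sketch of the derivation, taken directly from the code above:

```python
import hashlib

def skill_match_cache_key(candidate_id: str, skill: str) -> str:
    # Lower-case the skill so "Python" and "python" map to the same cache entry.
    skill_hash = hashlib.md5(skill.lower().encode()).hexdigest()[:8]
    return f"skill_match:{candidate_id}:{skill_hash}"

# skill_match_cache_key("abc123", "Kubernetes") -> "skill_match:abc123:<8 hex chars>"
```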

@@ -98,9 +98,26 @@ class EvidenceDetail(BaseModel):
        "populate_by_name": True, # Allow both field names and aliases
    }

class ChromaDBGetResponse(BaseModel):
    # Chroma fields
    ids: List[str] = []
    embeddings: List[List[float]] = []
    documents: List[str] = []
    metadatas: List[Dict[str, Any]] = []
    distances: List[float] = []
    # Additional fields
    name: str = ""
    size: int = 0
    dimensions: int = 2 | 3
    query: str = ""
    query_embedding: Optional[List[float]] = Field(default=None, alias="queryEmbedding")
    umap_embedding_2d: Optional[List[float]] = Field(default=None, alias="umapEmbedding2D")
    umap_embedding_3d: Optional[List[float]] = Field(default=None, alias="umapEmbedding3D")

class SkillAssessment(BaseModel):
    candidate_id: str = Field(..., alias='candidateId')
    skill: str = Field(..., alias="skill", description="The skill being assessed")
    skill_modified: Optional[str] = Field(default="", alias="skillModified", description="The skill rephrased by LLM during skill match")
    evidence_found: bool = Field(..., alias="evidenceFound", description="Whether evidence was found for the skill")
    evidence_strength: SkillStrength = Field(..., alias="evidenceStrength", description="Strength of evidence found for the skill")
    assessment: str = Field(..., alias="assessment", description="Short (one to two sentence) assessment of the candidate's proficiency with the skill")
@@ -108,6 +125,7 @@ class SkillAssessment(BaseModel):
    evidence_details: List[EvidenceDetail] = Field(default_factory=list, alias="evidenceDetails", description="List of evidence details supporting the skill assessment")
    created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias='createdAt')
    updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias='updatedAt')
    rag_results: List[ChromaDBGetResponse] = Field(default_factory=list, alias="ragResults")
    model_config = {
        "populate_by_name": True, # Allow both field names and aliases
    }
@@ -523,7 +541,7 @@ class DocumentType(str, Enum):
    IMAGE = "image"

class DocumentOptions(BaseModel):
    include_in_RAG: bool = Field(default=True, alias="includeInRAG")
    include_in_rag: bool = Field(default=True, alias="includeInRag")
    is_job_document: Optional[bool] = Field(default=False, alias="isJobDocument")
    overwrite: Optional[bool] = Field(default=False, alias="overwrite")
    model_config = {
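
The rename above changes both the Python attribute (include_in_RAG → include_in_rag) and its JSON alias (includeInRAG → includeInRag). Because populate_by_name is enabled, either the snake_case name or the new camelCase alias validates. A minimal standalone Pydantic v2 sketch, reduced to the one field in question:

```python
from pydantic import BaseModel, Field

class DocumentOptions(BaseModel):
    include_in_rag: bool = Field(default=True, alias="includeInRag")
    model_config = {"populate_by_name": True}

# The alias (as sent by the TypeScript client) and the field name both populate the model.
assert DocumentOptions.model_validate({"includeInRag": False}).include_in_rag is False
assert DocumentOptions(include_in_rag=False).include_in_rag is False
# Serialize with by_alias=True to emit camelCase for the frontend.
assert DocumentOptions().model_dump(by_alias=True) == {"includeInRag": True}
```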

@@ -680,23 +698,7 @@ class JobRequirements(BaseModel):
        "populate_by_name": True # Allow both field names and aliases
    }

class Job(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    owner_id: str = Field(..., alias="ownerId")
    owner_type: UserType = Field(..., alias="ownerType")
    owner: Optional[BaseUser] = None
    title: Optional[str]
    summary: Optional[str]
    company: Optional[str]
    description: str
    requirements: Optional[JobRequirements]
    created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="createdAt")
    updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="updatedAt")
    model_config = {
        "populate_by_name": True # Allow both field names and aliases
    }

class JobFull(Job):
class JobDetails(BaseModel):
    location: Location
    salary_range: Optional[SalaryRange] = Field(None, alias="salaryRange")
    employment_type: EmploymentType = Field(..., alias="employmentType")
@@ -712,6 +714,24 @@ class JobFull(Job):
    views: int = 0
    application_count: int = Field(0, alias="applicationCount")

class Job(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    owner_id: str = Field(..., alias="ownerId")
    owner_type: UserType = Field(..., alias="ownerType")
    owner: Optional[BaseUser] = None
    title: Optional[str]
    summary: Optional[str]
    company: Optional[str]
    description: str
    requirements: Optional[JobRequirements]
    created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="createdAt")
    updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="updatedAt")
    details: Optional[JobDetails] = Field(None, alias="details")
    model_config = {
        "populate_by_name": True # Allow both field names and aliases
    }
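
With this restructuring, the employer-specific fields that previously lived on the JobFull subclass move into a standalone JobDetails model referenced from Job.details, so API consumers deal with a single Job type. A hedged construction sketch — the import path and the UserType member name are assumptions, while the field set comes from the model above:

```python
from models import Job, UserType  # assumed module path

job = Job(
    ownerId="candidate-123",
    ownerType=UserType.CANDIDATE,  # assumed enum member
    title="Backend Engineer",
    summary=None,
    company="Acme",
    description="Own the FastAPI services behind the candidate portal.",
    requirements=None,
    details=None,  # employer flows can attach a JobDetails instance here
)
print(job.model_dump(by_alias=True)["ownerType"])
```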

class InterviewFeedback(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    interview_id: str = Field(..., alias="interviewId")
@@ -765,22 +785,6 @@ class JobApplication(BaseModel):
        "populate_by_name": True # Allow both field names and aliases
    }

class ChromaDBGetResponse(BaseModel):
    # Chroma fields
    ids: List[str] = []
    embeddings: List[List[float]] = []
    documents: List[str] = []
    metadatas: List[Dict[str, Any]] = []
    distances: List[float] = []
    # Additional fields
    name: str = ""
    size: int = 0
    dimensions: int = 2 | 3
    query: str = ""
    query_embedding: Optional[List[float]] = Field(default=None, alias="queryEmbedding")
    umap_embedding_2d: Optional[List[float]] = Field(default=None, alias="umapEmbedding2D")
    umap_embedding_3d: Optional[List[float]] = Field(default=None, alias="umapEmbedding3D")

class GuestSessionResponse(BaseModel):
    """Response for guest session creation"""
    access_token: str = Field(..., alias="accessToken")
@@ -1270,4 +1274,4 @@ Candidate.update_forward_refs()
Employer.update_forward_refs()
ChatSession.update_forward_refs()
JobApplication.update_forward_refs()
JobFull.update_forward_refs()
Job.update_forward_refs()
@@ -309,7 +309,7 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
            include=["embeddings", "documents", "metadatas"]
        ))
        if not self._umap_collection or not len(self._umap_collection.embeddings):
            logging.warning("No embeddings found in the collection.")
            logging.warning("⚠️ No embeddings found in the collection.")
            return

        # During initialization
@@ -455,7 +455,7 @@ class ChromaDBFileWatcher(FileSystemEventHandler):
            meta["chunk_end"] = end
            return "".join(lines[start:end])
        except:
            logging.warning(f"Unable to open {source_file}")
            logging.warning(f"⚠️ Unable to open {source_file}")
            return None

# Cosine Distance Equivalent Similarity Retrieval Characteristics