From 3a21f2e51073b3ff5fb68837de6ed8e004510217 Mon Sep 17 00:00:00 2001 From: James Ketrenos Date: Tue, 10 Jun 2025 11:24:00 -0700 Subject: [PATCH] Restructuring top level UI --- frontend/src/components/DocumentManager.tsx | 27 ++- frontend/src/components/JobCreator.tsx | 47 +--- frontend/src/components/JobMatchAnalysis.tsx | 10 + frontend/src/components/VectorVisualizer.tsx | 9 +- frontend/src/components/ui/BackstoryLogo.tsx | 5 +- frontend/src/components/ui/JobInfo.tsx | 178 +++++++++++---- frontend/src/components/ui/JobPicker.tsx | 8 +- frontend/src/components/ui/StatusIcon.tsx | 59 +++++ frontend/src/config/navigationConfig.tsx | 8 +- frontend/src/pages/HowItWorks.tsx | 24 ++ frontend/src/services/api-client.ts | 14 +- frontend/src/types/types.ts | 45 ++-- src/backend/agents/base.py | 26 +-- src/backend/agents/job_requirements.py | 7 + src/backend/agents/skill_match.py | 34 ++- src/backend/database.py | 6 +- src/backend/entities/entity_manager.py | 4 +- src/backend/json_extractor.py | 97 ++++++++ src/backend/main.py | 225 +++++++++++++------ src/backend/models.py | 74 +++--- src/backend/rag/rag.py | 4 +- 21 files changed, 641 insertions(+), 270 deletions(-) create mode 100644 frontend/src/components/ui/StatusIcon.tsx create mode 100644 frontend/src/pages/HowItWorks.tsx create mode 100644 src/backend/json_extractor.py diff --git a/frontend/src/components/DocumentManager.tsx b/frontend/src/components/DocumentManager.tsx index c68889c..dc964ce 100644 --- a/frontend/src/components/DocumentManager.tsx +++ b/frontend/src/components/DocumentManager.tsx @@ -112,15 +112,22 @@ const DocumentManager = (props: BackstoryElementProps) => { try { // Upload file (replace with actual API call) - const controller = apiClient.uploadCandidateDocument(file, { includeInRAG: true, isJobDocument: false }); + const controller = apiClient.uploadCandidateDocument(file, { includeInRag: true, isJobDocument: false }, { + onError: (error) => { + console.error(error); + setSnack(error.content, 
'error'); + } + }); const result = await controller.promise; - - setDocuments(prev => [...prev, result.document]); - setSnack(`Document uploaded: ${file.name}`, 'success'); + if (result && result.document) { + setDocuments(prev => [...prev, result.document]); + setSnack(`Document uploaded: ${file.name}`, 'success'); + } // Reset file input e.target.value = ''; } catch (error) { + console.error(error); setSnack('Failed to upload document', 'error'); } } @@ -147,20 +154,20 @@ const DocumentManager = (props: BackstoryElementProps) => { }; // Handle RAG flag toggle - const handleRAGToggle = async (document: Types.Document, includeInRAG: boolean) => { + const handleRAGToggle = async (document: Types.Document, includeInRag: boolean) => { try { - document.options = { includeInRAG }; + document.options = { includeInRag }; // Call API to update RAG flag await apiClient.updateCandidateDocument(document); setDocuments(prev => prev.map(doc => doc.id === document.id - ? { ...doc, includeInRAG } + ? { ...doc, includeInRag } : doc ) ); - setSnack(`Document ${includeInRAG ? 'included in' : 'excluded from'} RAG`, 'success'); + setSnack(`Document ${includeInRag ? 
'included in' : 'excluded from'} RAG`, 'success'); } catch (error) { setSnack('Failed to update RAG setting', 'error'); } @@ -292,7 +299,7 @@ const DocumentManager = (props: BackstoryElementProps) => { size="small" color={getFileTypeColor(doc.type)} /> - {doc.options?.includeInRAG && ( + {doc.options?.includeInRag && ( { handleRAGToggle(doc, e.target.checked)} size="small" /> diff --git a/frontend/src/components/JobCreator.tsx b/frontend/src/components/JobCreator.tsx index 39e15a3..62f1e88 100644 --- a/frontend/src/components/JobCreator.tsx +++ b/frontend/src/components/JobCreator.tsx @@ -44,6 +44,7 @@ import * as Types from 'types/types'; import { StyledMarkdown } from './StyledMarkdown'; import { JobInfo } from './ui/JobInfo'; import { Scrollable } from './Scrollable'; +import { StatusIcon, StatusBox } from 'components/ui/StatusIcon'; const VisuallyHiddenInput = styled('input')({ clip: 'rect(0 0 0 0)', @@ -71,42 +72,6 @@ const UploadBox = styled(Box)(({ theme }) => ({ }, })); -const StatusBox = styled(Box)(({ theme }) => ({ - display: 'flex', - alignItems: 'center', - gap: theme.spacing(1), - padding: theme.spacing(1, 2), - backgroundColor: theme.palette.background.paper, - borderRadius: theme.shape.borderRadius, - border: `1px solid ${theme.palette.divider}`, - minHeight: 48, -})); - -const getIcon = (type: Types.ApiActivityType) => { - switch (type) { - case 'converting': - return ; - case 'heartbeat': - return ; - case 'system': - return ; - case 'info': - return ; - case 'searching': - return ; - case 'generating': - return ; - case 'generating_image': - return ; - case 'thinking': - return ; - case 'tooling': - return ; - default: - return ; - } -}; - interface JobCreatorProps extends BackstoryElementProps { onSave?: (job: Types.Job) => void; } @@ -125,7 +90,7 @@ const JobCreator = (props: JobCreatorProps) => { const [summary, setSummary] = useState(''); const [job, setJob] = useState(null); const [jobStatus, setJobStatus] = useState(''); - const 
[jobStatusIcon, setJobStatusIcon] = useState(<>); + const [jobStatusType, setJobStatusType] = useState(null); const [isProcessing, setIsProcessing] = useState(false); const fileInputRef = useRef(null); @@ -140,7 +105,7 @@ const JobCreator = (props: JobCreatorProps) => { const jobStatusHandlers = { onStatus: (status: Types.ChatMessageStatus) => { console.log('status:', status.content); - setJobStatusIcon(getIcon(status.activity)); + setJobStatusType(status.activity); setJobStatus(status.content); }, onMessage: (jobMessage: Types.JobRequirementsMessage) => { @@ -152,7 +117,7 @@ const JobCreator = (props: JobCreatorProps) => { setSummary(job.summary || ''); setJobTitle(job.title || ''); setJobRequirements(job.requirements || null); - setJobStatusIcon(<>); + setJobStatusType(null); setJobStatus(''); }, onError: (error: Types.ChatMessageError) => { @@ -161,7 +126,7 @@ const JobCreator = (props: JobCreatorProps) => { setIsProcessing(false); }, onComplete: () => { - setJobStatusIcon(<>); + setJobStatusType(null); setJobStatus(''); setIsProcessing(false); } @@ -421,7 +386,7 @@ const JobCreator = (props: JobCreatorProps) => { {(jobStatus || isProcessing) && ( - {jobStatusIcon} + {jobStatusType && } {jobStatus || 'Processing...'} diff --git a/frontend/src/components/JobMatchAnalysis.tsx b/frontend/src/components/JobMatchAnalysis.tsx index 6ed79ce..d9df632 100644 --- a/frontend/src/components/JobMatchAnalysis.tsx +++ b/frontend/src/components/JobMatchAnalysis.tsx @@ -30,6 +30,8 @@ import { StyledMarkdown } from './StyledMarkdown'; import { Scrollable } from './Scrollable'; import { useAppState } from 'hooks/GlobalContext'; import * as Types from 'types/types'; +import JsonView from '@uiw/react-json-view'; +import { VectorVisualizer } from './VectorVisualizer'; interface JobAnalysisProps extends BackstoryPageProps { job: Job; @@ -106,6 +108,7 @@ const JobMatchAnalysis: React.FC = (props: JobAnalysisProps) = const initialSkillMatches: SkillMatch[] = requirements.map(req => ({ 
skill: req.requirement, + skillModified: req.requirement, candidateId: candidate.id || "", domain: req.domain, status: 'waiting' as const, @@ -487,6 +490,13 @@ const JobMatchAnalysis: React.FC = (props: JobAnalysisProps) = {match.description} + {/* { match.ragResults && match.ragResults.length !== 0 && <> + + RAG Information + + + + } */} )} diff --git a/frontend/src/components/VectorVisualizer.tsx b/frontend/src/components/VectorVisualizer.tsx index a826892..17c3037 100644 --- a/frontend/src/components/VectorVisualizer.tsx +++ b/frontend/src/components/VectorVisualizer.tsx @@ -27,7 +27,7 @@ import { useNavigate } from 'react-router-dom'; interface VectorVisualizerProps extends BackstoryPageProps { inline?: boolean; - rag?: any; + rag?: Types.ChromaDBGetResponse; }; interface Metadata { @@ -37,11 +37,16 @@ interface Metadata { distance?: number; } -const emptyQuerySet = { +const emptyQuerySet: Types.ChromaDBGetResponse = { ids: [], documents: [], metadatas: [], embeddings: [], + distances: [], + name: "Empty", + size: 0, + dimensions: 2, + query: "" }; interface PlotData { diff --git a/frontend/src/components/ui/BackstoryLogo.tsx b/frontend/src/components/ui/BackstoryLogo.tsx index 0889482..d5c881a 100644 --- a/frontend/src/components/ui/BackstoryLogo.tsx +++ b/frontend/src/components/ui/BackstoryLogo.tsx @@ -19,9 +19,10 @@ const BackstoryLogo = () => { letterSpacing: '.2rem', color: theme.palette.primary.contrastText, textDecoration: 'none', - display: "flex", + display: "inline-flex", flexDirection: "row", - alignItems: "center", + alignItems: "center", + verticalAlign: "center", gap: 1, textTransform: "uppercase", }} diff --git a/frontend/src/components/ui/JobInfo.tsx b/frontend/src/components/ui/JobInfo.tsx index b1ea437..f3e66ec 100644 --- a/frontend/src/components/ui/JobInfo.tsx +++ b/frontend/src/components/ui/JobInfo.tsx @@ -1,5 +1,5 @@ -import React, { JSX } from 'react'; -import { Box, Link, Typography, Avatar, Grid, SxProps, CardActions, Chip, Stack, 
CardHeader } from '@mui/material'; +import React, { JSX, useActionState, useState } from 'react'; +import { Box, Link, Typography, Avatar, Grid, SxProps, CardActions, Chip, Stack, CardHeader, Button, styled, LinearProgress, IconButton, Tooltip } from '@mui/material'; import { Card, CardContent, @@ -8,23 +8,31 @@ import { } from '@mui/material'; import DeleteIcon from '@mui/icons-material/Delete'; import { useMediaQuery } from '@mui/material'; -import { Job, JobFull } from 'types/types'; +import { Job } from 'types/types'; import { CopyBubble } from "components/CopyBubble"; import { rest } from 'lodash'; import { AIBanner } from 'components/ui/AIBanner'; import { useAuth } from 'hooks/AuthContext'; import { DeleteConfirmation } from '../DeleteConfirmation'; import { Build, CheckCircle, Description, Psychology, Star, Work } from '@mui/icons-material'; +import ModelTrainingIcon from '@mui/icons-material/ModelTraining'; +import { StatusIcon, StatusBox } from 'components/ui/StatusIcon'; +import RestoreIcon from '@mui/icons-material/Restore'; +import SaveIcon from '@mui/icons-material/Save'; +import * as Types from "types/types"; +import { useAppState } from 'hooks/GlobalContext'; interface JobInfoProps { - job: Job | JobFull; + job: Job; sx?: SxProps; action?: string; elevation?: number; variant?: "small" | "normal" | null }; + const JobInfo: React.FC = (props: JobInfoProps) => { + const { setSnack } = useAppState(); const { job } = props; const { user, apiClient } = useAuth(); const { @@ -36,6 +44,9 @@ const JobInfo: React.FC = (props: JobInfoProps) => { const theme = useTheme(); const isMobile = useMediaQuery(theme.breakpoints.down('md')); const isAdmin = user?.isAdmin; + const [adminStatus, setAdminStatus] = useState(null); + const [adminStatusType, setAdminStatusType] = useState(null); + const [activeJob, setActiveJob] = useState({ ...job }); /* Copy of job */ const deleteJob = async (jobId: string | undefined) => { if (jobId) { @@ -43,10 +54,52 @@ const JobInfo: 
React.FC = (props: JobInfoProps) => { } } + const handleReset = async () => { + setActiveJob({ ...job }); + } + if (!job) { return No user loaded.; } + const handleSave = async () => { + const newJob = await apiClient.updateJob(job.id || '', { + description: activeJob.description, + requirements: activeJob.requirements, + }); + job.updatedAt = newJob.updatedAt; + setActiveJob(newJob) + setSnack('Job updated.'); + } + + const handleRefresh = () => { + setAdminStatus("Re-extracting Job information..."); + const jobStatusHandlers = { + onStatus: (status: Types.ChatMessageStatus) => { + console.log('status:', status.content); + setAdminStatusType(status.activity); + setAdminStatus(status.content); + }, + onMessage: (jobMessage: Types.JobRequirementsMessage) => { + const newJob: Types.Job = jobMessage.job + console.log('onMessage - job', newJob); + newJob.id = job.id; + newJob.createdAt = job.createdAt; + setActiveJob(newJob); + }, + onError: (error: Types.ChatMessageError) => { + console.log('onError', error); + setAdminStatusType(null); + setAdminStatus(null); + }, + onComplete: () => { + setAdminStatusType(null); + setAdminStatus(null); + } + }; + apiClient.createJobFromDescription(activeJob.description, jobStatusHandlers); + }; + const renderRequirementSection = (title: string, items: string[] | undefined, icon: JSX.Element, required = false) => { if (!items || items.length === 0) return null; @@ -75,10 +128,10 @@ const JobInfo: React.FC = (props: JobInfoProps) => { }; const renderJobRequirements = () => { - if (!job.requirements) return null; + if (!activeJob.requirements) return null; return ( - + } @@ -87,49 +140,49 @@ const JobInfo: React.FC = (props: JobInfoProps) => { {renderRequirementSection( "Technical Skills (Required)", - job.requirements.technicalSkills.required, + activeJob.requirements.technicalSkills.required, , true )} {renderRequirementSection( "Technical Skills (Preferred)", - job.requirements.technicalSkills.preferred, + 
activeJob.requirements.technicalSkills.preferred, )} {renderRequirementSection( "Experience Requirements (Required)", - job.requirements.experienceRequirements.required, + activeJob.requirements.experienceRequirements.required, , true )} {renderRequirementSection( "Experience Requirements (Preferred)", - job.requirements.experienceRequirements.preferred, + activeJob.requirements.experienceRequirements.preferred, )} {renderRequirementSection( "Soft Skills", - job.requirements.softSkills, + activeJob.requirements.softSkills, )} {renderRequirementSection( "Experience", - job.requirements.experience, + activeJob.requirements.experience, )} {renderRequirementSection( "Education", - job.requirements.education, + activeJob.requirements.education, )} {renderRequirementSection( "Certifications", - job.requirements.certifications, + activeJob.requirements.certifications, )} {renderRequirementSection( "Preferred Attributes", - job.requirements.preferredAttributes, + activeJob.requirements.preferredAttributes, )} @@ -153,51 +206,92 @@ const JobInfo: React.FC = (props: JobInfoProps) => { > {variant !== "small" && <> - {'location' in job && + {activeJob.details && - Location: {job.location.city}, {job.location.state || job.location.country} + Location: {activeJob.details.location.city}, {activeJob.details.location.state || activeJob.details.location.country} } - {job.title && + {activeJob.title && - Title: {job.title} + Title: {activeJob.title} } - {job.company && + {activeJob.company && - Company: {job.company} + Company: {activeJob.company} } - {job.summary && - Summary: {job.summary} + {activeJob.summary && + Summary: {activeJob.summary} } - {job.createdAt && - Created: {job.createdAt.toISOString()} - + {activeJob.owner && + Created by: {activeJob.owner.fullName} + } + {activeJob.createdAt && + Created: {activeJob.createdAt.toISOString()} } - { job.owner && - Created by: {job.owner.fullName} - - } + {activeJob.updatedAt && + Updated: {activeJob.updatedAt.toISOString()} + } + 
Job ID: {job.id} } - + {renderJobRequirements()} - - {isAdmin && - { deleteJob(job.id); }} - sx={{ minWidth: 'auto', px: 2, maxHeight: "min-content", color: "red" }} - action="delete" - label="job" - title="Delete job" - icon= - message={`Are you sure you want to delete ${job.id}? This action cannot be undone.`} - />} + {isAdmin && + + + {(job.updatedAt && job.updatedAt.toISOString()) !== (activeJob.updatedAt && activeJob.updatedAt.toISOString()) && + + { e.stopPropagation(); handleSave(); }} + > + + + + } + + { e.stopPropagation(); deleteJob(job.id); }} + > + + + + + { e.stopPropagation(); handleReset(); }} + > + + + + + { e.stopPropagation(); handleRefresh(); }} + > + + + + + {adminStatus && + + + {adminStatusType && } + + {adminStatus || 'Processing...'} + + + {adminStatus && } + + } + } ); }; diff --git a/frontend/src/components/ui/JobPicker.tsx b/frontend/src/components/ui/JobPicker.tsx index 14351f4..160fe8d 100644 --- a/frontend/src/components/ui/JobPicker.tsx +++ b/frontend/src/components/ui/JobPicker.tsx @@ -5,12 +5,12 @@ import Box from '@mui/material/Box'; import { BackstoryElementProps } from 'components/BackstoryTab'; import { JobInfo } from 'components/ui/JobInfo'; -import { Job, JobFull } from "types/types"; +import { Job } from "types/types"; import { useAuth } from 'hooks/AuthContext'; import { useAppState, useSelectedJob } from 'hooks/GlobalContext'; interface JobPickerProps extends BackstoryElementProps { - onSelect?: (job: JobFull) => void + onSelect?: (job: Job) => void }; const JobPicker = (props: JobPickerProps) => { @@ -18,7 +18,7 @@ const JobPicker = (props: JobPickerProps) => { const { apiClient } = useAuth(); const { selectedJob, setSelectedJob } = useSelectedJob(); const { setSnack } = useAppState(); - const [jobs, setJobs] = useState(null); + const [jobs, setJobs] = useState(null); useEffect(() => { if (jobs !== null) { @@ -27,7 +27,7 @@ const JobPicker = (props: JobPickerProps) => { const getJobs = async () => { try { const results = 
await apiClient.getJobs(); - const jobs: JobFull[] = results.data; + const jobs: Job[] = results.data; jobs.sort((a, b) => { let result = a.company?.localeCompare(b.company || ''); if (result === 0) { diff --git a/frontend/src/components/ui/StatusIcon.tsx b/frontend/src/components/ui/StatusIcon.tsx new file mode 100644 index 0000000..94a8a44 --- /dev/null +++ b/frontend/src/components/ui/StatusIcon.tsx @@ -0,0 +1,59 @@ +import React from 'react'; +import { + SyncAlt, + Favorite, + Settings, + Info, + Search, + AutoFixHigh, + Image, + Psychology, + Build, +} from '@mui/icons-material'; +import { styled } from '@mui/material/styles'; +import * as Types from 'types/types'; +import { Box } from '@mui/material'; + +interface StatusIconProps { + type: Types.ApiActivityType; +} + +const StatusBox = styled(Box)(({ theme }) => ({ + display: 'flex', + alignItems: 'center', + gap: theme.spacing(1), + padding: theme.spacing(1, 2), + backgroundColor: theme.palette.background.paper, + borderRadius: theme.shape.borderRadius, + border: `1px solid ${theme.palette.divider}`, + minHeight: 48, +})); + +const StatusIcon = (props: StatusIconProps) => { + const {type} = props; + + switch (type) { + case 'converting': + return ; + case 'heartbeat': + return ; + case 'system': + return ; + case 'info': + return ; + case 'searching': + return ; + case 'generating': + return ; + case 'generating_image': + return ; + case 'thinking': + return ; + case 'tooling': + return ; + default: + return ; + } +}; + +export { StatusIcon, StatusBox }; \ No newline at end of file diff --git a/frontend/src/config/navigationConfig.tsx b/frontend/src/config/navigationConfig.tsx index e58db2c..d5e1cc3 100644 --- a/frontend/src/config/navigationConfig.tsx +++ b/frontend/src/config/navigationConfig.tsx @@ -16,6 +16,8 @@ import { Bookmark as BookmarkIcon, BubbleChart, } from '@mui/icons-material'; +import SchoolIcon from '@mui/icons-material/School'; + import FaceRetouchingNaturalIcon from 
'@mui/icons-material/FaceRetouchingNatural'; import LibraryBooksIcon from '@mui/icons-material/LibraryBooks'; import { BackstoryLogo } from 'components/ui/BackstoryLogo'; @@ -36,6 +38,7 @@ import { NavigationConfig, NavigationItem } from 'types/navigation'; import { CandidateProfile } from 'pages/candidate/Profile'; import { DocumentManager } from 'components/DocumentManager'; import { VectorVisualizer } from 'components/VectorVisualizer'; +import { HowItWorks } from 'pages/HowItWorks'; // Beta page components for placeholder routes const SearchPage = () => (Search); @@ -49,16 +52,17 @@ const SettingsPage = () => (Settings, path: '/', component: , userTypes: ['guest', 'candidate', 'employer'], exact: true, }, + { id: 'how-it-works', label: 'How It Works', path: '/how-it-works', icon: , component: , userTypes: ['guest', 'candidate', 'employer',], }, { id: 'job-analysis', label: 'Job Analysis', path: '/job-analysis', icon: , component: , userTypes: ['guest', 'candidate', 'employer',], }, { id: 'chat', label: 'Candidate Chat', path: '/chat', icon: , component: , userTypes: ['guest', 'candidate', 'employer',], }, { id: 'candidate-menu', label: 'Tools', icon: , userTypes: ['candidate'], children: [ { id: 'candidate-dashboard', label: 'Dashboard', path: '/candidate/dashboard', icon: , component: , userTypes: ['candidate'] }, { id: 'candidate-profile', label: 'Profile', icon: , path: '/candidate/profile', component: , userTypes: ['candidate'] }, + { id: 'candidate-docs', label: 'Documents', icon: , path: '/candidate/documents', component: , userTypes: ['candidate'] }, { id: 'candidate-qa-setup', label: 'Q&A Setup', icon: , path: '/candidate/qa-setup', component: Candidate q&a setup page, userTypes: ['candidate'] }, { id: 'candidate-analytics', label: 'Analytics', icon: , path: '/candidate/analytics', component: Candidate analytics page, userTypes: ['candidate'] }, { id: 'candidate-job-analysis', label: 'Job Analysis', path: '/candidate/job-analysis', icon: , component: , 
userTypes: ['candidate'], }, { id: 'candidate-resumes', label: 'Resumes', icon: , path: '/candidate/resumes', component: Candidate resumes page, userTypes: ['candidate'] }, - { id: 'candidate-content', label: 'Content', icon: , path: '/candidate/content', component: , userTypes: ['candidate'] }, { id: 'candidate-settings', label: 'Settings', path: '/candidate/settings', icon: , component: , userTypes: ['candidate'], }, ], }, @@ -75,7 +79,6 @@ export const navigationConfig: NavigationConfig = { ], }, // { id: 'find-candidate', label: 'Find a Candidate', path: '/find-a-candidate', icon: , component: , userTypes: ['guest', 'candidate', 'employer'], }, - { id: 'docs', label: 'Docs', path: '/docs/*', icon: , component: , userTypes: ['guest', 'candidate', 'employer'], }, { id: 'admin-menu', label: 'Admin', @@ -83,6 +86,7 @@ export const navigationConfig: NavigationConfig = { userTypes: ['admin'], children: [ { id: 'generate-candidate', label: 'Generate Candidate', path: '/admin/generate-candidate', icon: , component: , userTypes: ['admin'] }, + { id: 'docs', label: 'Docs', path: '/docs/*', icon: , component: , userTypes: ['admin'], }, ], }, // Auth routes (special handling) diff --git a/frontend/src/pages/HowItWorks.tsx b/frontend/src/pages/HowItWorks.tsx new file mode 100644 index 0000000..d9cfb96 --- /dev/null +++ b/frontend/src/pages/HowItWorks.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { Box, Paper, Typography } from '@mui/material'; +import { BackstoryLogo } from 'components/ui/BackstoryLogo'; + +const HowItWorks = () => { + return ( + + + Job Description ⇒⇒ (Company Info, Job Summary, Job Requirements) ⇒ Job + + + User Content ⇒ ⇒ RAG Vector Database ⇒ Candidate + + + Job + CandidateSkill Match + + + Skill Match + CandidateResume + + + ); +} + +export { HowItWorks }; \ No newline at end of file diff --git a/frontend/src/services/api-client.ts b/frontend/src/services/api-client.ts index 62a998e..27fd548 100644 --- a/frontend/src/services/api-client.ts 
+++ b/frontend/src/services/api-client.ts @@ -644,9 +644,19 @@ class ApiClient { return this.handleApiResponseWithConversion(response, 'Employer'); } + // ============================ // Job Methods with Date Conversion // ============================ + async updateJob(id: string, updates: Partial): Promise { + const response = await fetch(`${this.baseUrl}/jobs/${id}`, { + method: 'PATCH', + headers: this.defaultHeaders, + body: JSON.stringify(formatApiRequest(updates)) + }); + + return this.handleApiResponseWithConversion(response, 'Job'); + } createJobFromDescription(job_description: string, streamingOptions?: StreamingOptions): StreamingResponse { const body = JSON.stringify(job_description); @@ -672,7 +682,7 @@ class ApiClient { return this.handleApiResponseWithConversion(response, 'Job'); } - async getJobs(request: Partial = {}): Promise> { + async getJobs(request: Partial = {}): Promise> { const paginatedRequest = createPaginatedRequest(request); const params = toUrlParams(formatApiRequest(paginatedRequest)); @@ -680,7 +690,7 @@ class ApiClient { headers: this.defaultHeaders }); - return this.handlePaginatedApiResponseWithConversion(response, 'JobFull'); + return this.handlePaginatedApiResponseWithConversion(response, 'Job'); } async getJobsByEmployer(employerId: string, request: Partial = {}): Promise> { diff --git a/frontend/src/types/types.ts b/frontend/src/types/types.ts index c012bdf..4bf3c93 100644 --- a/frontend/src/types/types.ts +++ b/frontend/src/types/types.ts @@ -1,6 +1,6 @@ // Generated TypeScript types from Pydantic models // Source: src/backend/models.py -// Generated on: 2025-06-10T02:48:12.087485 +// Generated on: 2025-06-10T17:14:56.968033 // DO NOT EDIT MANUALLY - This file is auto-generated // ============================ @@ -526,7 +526,7 @@ export interface DocumentMessage { } export interface DocumentOptions { - includeInRAG: boolean; + includeInRag: boolean; isJobDocument?: boolean; overwrite?: boolean; } @@ -716,6 +716,7 @@ export 
interface Job { requirements?: JobRequirements; createdAt?: Date; updatedAt?: Date; + details?: JobDetails; } export interface JobApplication { @@ -734,25 +735,14 @@ export interface JobApplication { decision?: ApplicationDecision; } -export interface JobFull { - id?: string; - ownerId: string; - ownerType: "candidate" | "employer" | "guest"; - owner?: BaseUser; - title?: string; - summary?: string; - company?: string; - description: string; - requirements?: JobRequirements; - createdAt?: Date; - updatedAt?: Date; +export interface JobDetails { location: Location; salaryRange?: SalaryRange; employmentType: "full-time" | "part-time" | "contract" | "internship" | "freelance"; datePosted?: Date; applicationDeadline?: Date; isActive: boolean; - applicants?: Array; + applicants?: Array; department?: string; reportsTo?: string; benefits?: Array; @@ -1010,6 +1000,7 @@ export interface Skill { export interface SkillAssessment { candidateId: string; skill: string; + skillModified?: string; evidenceFound: boolean; evidenceStrength: "strong" | "moderate" | "weak" | "none"; assessment: string; @@ -1017,6 +1008,7 @@ export interface SkillAssessment { evidenceDetails?: Array; createdAt?: Date; updatedAt?: Date; + ragResults?: Array; } export interface SocialLink { @@ -1609,7 +1601,7 @@ export function convertInterviewScheduleFromApi(data: any): InterviewSchedule { /** * Convert Job from API response * Date fields: createdAt, updatedAt - * Nested models: owner (BaseUser) + * Nested models: owner (BaseUser), details (JobDetails) */ export function convertJobFromApi(data: any): Job { if (!data) return data; @@ -1622,6 +1614,8 @@ export function convertJobFromApi(data: any): Job { updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined, // Convert nested BaseUser model owner: data.owner ? convertBaseUserFromApi(data.owner) : undefined, + // Convert nested JobDetails model + details: data.details ? 
convertJobDetailsFromApi(data.details) : undefined, }; } /** @@ -1645,29 +1639,20 @@ export function convertJobApplicationFromApi(data: any): JobApplication { }; } /** - * Convert JobFull from API response - * Date fields: createdAt, updatedAt, datePosted, applicationDeadline, featuredUntil - * Nested models: owner (BaseUser), applicants (JobApplication) + * Convert JobDetails from API response + * Date fields: datePosted, applicationDeadline, featuredUntil */ -export function convertJobFullFromApi(data: any): JobFull { +export function convertJobDetailsFromApi(data: any): JobDetails { if (!data) return data; return { ...data, - // Convert createdAt from ISO string to Date - createdAt: data.createdAt ? new Date(data.createdAt) : undefined, - // Convert updatedAt from ISO string to Date - updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined, // Convert datePosted from ISO string to Date datePosted: data.datePosted ? new Date(data.datePosted) : undefined, // Convert applicationDeadline from ISO string to Date applicationDeadline: data.applicationDeadline ? new Date(data.applicationDeadline) : undefined, // Convert featuredUntil from ISO string to Date featuredUntil: data.featuredUntil ? new Date(data.featuredUntil) : undefined, - // Convert nested BaseUser model - owner: data.owner ? convertBaseUserFromApi(data.owner) : undefined, - // Convert nested JobApplication model - applicants: data.applicants ? 
convertJobApplicationFromApi(data.applicants) : undefined, }; } /** @@ -1906,8 +1891,8 @@ export function convertFromApi(data: any, modelType: string): T { return convertJobFromApi(data) as T; case 'JobApplication': return convertJobApplicationFromApi(data) as T; - case 'JobFull': - return convertJobFullFromApi(data) as T; + case 'JobDetails': + return convertJobDetailsFromApi(data) as T; case 'JobListResponse': return convertJobListResponseFromApi(data) as T; case 'JobRequirementsMessage': diff --git a/src/backend/agents/base.py b/src/backend/agents/base.py index 11ca727..000eca4 100644 --- a/src/backend/agents/base.py +++ b/src/backend/agents/base.py @@ -22,6 +22,7 @@ import asyncio from datetime import datetime, UTC from prometheus_client import Counter, Summary, CollectorRegistry # type: ignore import numpy as np # type: ignore +import json_extractor as json_extractor from models import ( ApiActivityType, ChatMessageError, ChatMessageRagSearch, ChatMessageStatus, ChatMessageStreaming, LLMMessage, ChatQuery, ChatMessage, ChatOptions, ChatMessageUser, Tunables, ApiMessageType, ChatSenderType, ApiStatusType, ChatMessageMetaData, Candidate) from logger import logger @@ -373,6 +374,7 @@ Content: {content} rag_metadata = ChromaDBGetResponse( name=rag.name, + query=prompt, query_embedding=query_embedding.tolist(), ids=chroma_results.get("ids", []), embeddings=chroma_results.get("embeddings", []), @@ -406,7 +408,7 @@ Content: {content} temperature=0.7) -> AsyncGenerator[ChatMessageStatus | ChatMessageError | ChatMessageStreaming | ChatMessage, None]: self.set_optimal_context_size( - llm=llm, model=model, prompt=prompt + llm=llm, model=model, prompt=prompt+system_prompt ) options = ChatOptions( @@ -808,20 +810,16 @@ Content: {content} # return + def extract_json_blocks(self, text: str, allow_multiple: bool = False) -> List[dict]: + """ + Extract JSON blocks from text, even if surrounded by markdown or noisy text. 
+ If allow_multiple is True, returns all JSON blocks; otherwise, only the first. + """ + return json_extractor.extract_json_blocks(text, allow_multiple) + def extract_json_from_text(self, text: str) -> str: """Extract JSON string from text that may contain other content.""" - json_pattern = r"```json\s*([\s\S]*?)\s*```" - match = re.search(json_pattern, text) - if match: - return match.group(1).strip() - - # Try to find JSON without the markdown code block - json_pattern = r"({[\s\S]*})" - match = re.search(json_pattern, text) - if match: - return match.group(1).strip() - - raise ValueError("No JSON found in the response") + return json_extractor.extract_json_from_text(text) def extract_markdown_from_text(self, text: str) -> str: """Extract Markdown string from text that may contain other content.""" @@ -832,7 +830,5 @@ Content: {content} raise ValueError("No Markdown found in the response") - - # Register the base agent agent_registry.register(Agent._agent_type, Agent) diff --git a/src/backend/agents/job_requirements.py b/src/backend/agents/job_requirements.py index b59aa6b..55d5f28 100644 --- a/src/backend/agents/job_requirements.py +++ b/src/backend/agents/job_requirements.py @@ -85,6 +85,13 @@ class JobRequirementsAgent(Agent): ) -> AsyncGenerator[ChatMessage | ChatMessageError, None]: """Analyze job requirements from job description.""" system_prompt, prompt = self.create_job_analysis_prompt(prompt) + status_message = ChatMessageStatus( + session_id=session_id, + content="Analyzing job requirements", + activity=ApiActivityType.THINKING + ) + yield status_message + logger.info(f"🔍 {status_message.content}") generated_message = None async for generated_message in self.llm_one_shot(llm, model, session_id=session_id, prompt=prompt, system_prompt=system_prompt): if generated_message.status == ApiStatusType.ERROR: diff --git a/src/backend/agents/skill_match.py b/src/backend/agents/skill_match.py index 2cc7f96..60f7ede 100644 --- a/src/backend/agents/skill_match.py 
+++ b/src/backend/agents/skill_match.py @@ -152,9 +152,19 @@ JSON RESPONSE:""" logger.error(f"⚠️ {error_message.content}") yield error_message return - # Stage 1A: Analyze job requirements + + skill = prompt.strip() + if not skill: + error_message = ChatMessageError( + session_id=session_id, + content="Skill cannot be empty." + ) + logger.error(f"⚠️ {error_message.content}") + yield error_message + return + rag_message = None - async for rag_message in self.generate_rag_results(session_id=session_id, prompt=prompt): + async for rag_message in self.generate_rag_results(session_id=session_id, prompt=skill): if rag_message.status == ApiStatusType.ERROR: yield rag_message return @@ -172,7 +182,7 @@ JSON RESPONSE:""" rag_context = self.get_rag_context(rag_message) logger.info(f"🔍 RAG content retrieved {len(rag_context)} bytes of context") - system_prompt, prompt = self.generate_skill_assessment_prompt(skill=prompt, rag_context=rag_context) + system_prompt, prompt = self.generate_skill_assessment_prompt(skill=skill, rag_context=rag_context) skill_message = None async for skill_message in self.llm_one_shot(llm=llm, model=model, session_id=session_id, prompt=prompt, system_prompt=system_prompt, temperature=0.7): @@ -199,11 +209,13 @@ JSON RESPONSE:""" skill_assessment_data = json.loads(json_str).get("skill_assessment", {}) skill_assessment = SkillAssessment( candidate_id=self.user.id, - skill=skill_assessment_data.get("skill", ""), + skill=skill, + skill_modified=skill_assessment_data.get("skill", ""), evidence_found=skill_assessment_data.get("evidence_found", False), evidence_strength=skill_assessment_data.get("evidence_strength", "NONE").lower(), assessment=skill_assessment_data.get("assessment", ""), description=skill_assessment_data.get("description", ""), + rag_results=rag_message.content, evidence_details=[ EvidenceDetail( source=evidence.get("source", ""), @@ -215,13 +227,25 @@ JSON RESPONSE:""" except Exception as e: error_message = ChatMessageError( 
session_id=session_id, - content=f"Failed to parse Skill assessment JSON: {str(e)}\n\n{skill_message.content}" + content=f"Failed to parse Skill assessment JSON: {str(e)}\n\n{skill_message.content}\n\nJSON:\n{json_str}\n\n" ) logger.error(traceback.format_exc()) logger.error(f"⚠️ {error_message.content}") yield error_message return + # if skill_assessment.evidence_strength == "none": + # logger.info("⚠️ No evidence found for skill assessment, returning NONE.") + # with open("src/tmp.txt", "w") as f: + # f.write(f"Skill: {skill}\n\n") + # f.write(f"System prompt:\n{system_prompt}\n\n") + # f.write(f"Prompt:\n{prompt}\n\n") + # f.write(f"LLM response:\n{skill_message.content}\n\n") + # f.write(f"JSON portion:\n{json_str}\n\n") + # f.write(f"JSON parsed:\n{json.dumps(skill_assessment_data, indent=2)}\n\n") + # f.write(f"Skill assessment data:\n") + # f.write(skill_assessment.model_dump_json(indent=2)) + skill_assessment_message = ChatMessageSkillAssessment( session_id=session_id, status=ApiStatusType.DONE, diff --git a/src/backend/database.py b/src/backend/database.py index f631ba5..9cac753 100644 --- a/src/backend/database.py +++ b/src/backend/database.py @@ -294,6 +294,7 @@ class RedisDatabase: # Convert to UTC if it's in a different timezone dt = dt.astimezone(timezone.utc) return dt + logger.warning(f"⚠️ No RAG update time found for user {user_id}") return None except Exception as e: logger.error(f"❌ Error getting user RAG update time: {e}") @@ -314,6 +315,7 @@ class RedisDatabase: # Store as ISO format with timezone info timestamp_str = update_time.isoformat() # This includes timezone await self.redis.set(rag_update_key, timestamp_str) + logger.info(f"✅ User RAG update time set for user {user_id}: {timestamp_str}") return True except Exception as e: logger.error(f"❌ Error setting user RAG update time: {e}") @@ -412,7 +414,7 @@ class RedisDatabase: async def get_documents_by_rag_status(self, candidate_id: str, include_in_rag: bool = True) -> List[Dict]: """Get 
candidate documents filtered by RAG inclusion status""" all_documents = await self.get_candidate_documents(candidate_id) - return [doc for doc in all_documents if doc.get("include_in_RAG", False) == include_in_rag] + return [doc for doc in all_documents if doc.get("include_in_rag", False) == include_in_rag] async def bulk_update_document_rag_status(self, candidate_id: str, document_ids: List[str], include_in_rag: bool): """Bulk update RAG status for multiple documents""" @@ -421,7 +423,7 @@ class RedisDatabase: for doc_id in document_ids: doc_data = await self.get_document(doc_id) if doc_data and doc_data.get("candidate_id") == candidate_id: - doc_data["include_in_RAG"] = include_in_rag + doc_data["include_in_rag"] = include_in_rag doc_data["updatedAt"] = datetime.now(UTC).isoformat() pipe.set(f"document:{doc_id}", self._serialize(doc_data)) diff --git a/src/backend/entities/entity_manager.py b/src/backend/entities/entity_manager.py index ba64517..4b535b4 100644 --- a/src/backend/entities/entity_manager.py +++ b/src/backend/entities/entity_manager.py @@ -3,12 +3,12 @@ import weakref from datetime import datetime, timedelta from typing import Dict, Optional, Any from contextlib import asynccontextmanager -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field # type: ignore from models import ( Candidate ) from .candidate_entity import CandidateEntity from database import RedisDatabase -from prometheus_client import CollectorRegistry +from prometheus_client import CollectorRegistry # type: ignore class EntityManager: """Manages lifecycle of CandidateEntity instances""" diff --git a/src/backend/json_extractor.py b/src/backend/json_extractor.py new file mode 100644 index 0000000..7694559 --- /dev/null +++ b/src/backend/json_extractor.py @@ -0,0 +1,97 @@ +import json +import re +from typing import List, Union + +def extract_json_blocks(text: str, allow_multiple: bool = False) -> List[dict]: + """ + Extract JSON blocks from text, even if surrounded 
by markdown or noisy text. + If allow_multiple is True, returns all JSON blocks; otherwise, only the first. + """ + found = [] + + # First try to extract from code blocks (most reliable) + code_block_pattern = r"```(?:json)?\s*([\s\S]+?)\s*```" + for match in re.finditer(code_block_pattern, text): + block = match.group(1).strip() + try: + parsed = json.loads(block) + found.append(parsed) + if not allow_multiple: + return [parsed] + except json.JSONDecodeError: + continue + + # If no valid code blocks found, look for standalone JSON objects/arrays + if not found: + standalone_json = _extract_standalone_json(text, allow_multiple) + found.extend(standalone_json) + + if not found: + raise ValueError("No valid JSON block found in the text") + + return found + +def _extract_standalone_json(text: str, allow_multiple: bool = False) -> List[Union[dict, list]]: + """Extract standalone JSON objects or arrays from text using proper brace counting.""" + found = [] + i = 0 + + while i < len(text): + if text[i] in '{[': + # Found potential JSON start + json_str = _extract_complete_json_at_position(text, i) + if json_str: + try: + parsed = json.loads(json_str) + found.append(parsed) + if not allow_multiple: + return [parsed] + # Move past this JSON block + i += len(json_str) + continue + except json.JSONDecodeError: + pass + i += 1 + + return found + +def _extract_complete_json_at_position(text: str, start_pos: int) -> str: + """ + Extract a complete JSON object or array starting at the given position. + Uses proper brace/bracket counting and string escape handling. 
+ """ + if start_pos >= len(text) or text[start_pos] not in '{[': + return "" + + start_char = text[start_pos] + end_char = '}' if start_char == '{' else ']' + + count = 1 + i = start_pos + 1 + in_string = False + escape_next = False + + while i < len(text) and count > 0: + char = text[i] + + if escape_next: + escape_next = False + elif char == '\\' and in_string: + escape_next = True + elif char == '"' and not escape_next: + in_string = not in_string + elif not in_string: + if char == start_char: + count += 1 + elif char == end_char: + count -= 1 + + i += 1 + + if count == 0: + return text[start_pos:i] + return "" + +def extract_json_from_text(text: str) -> str: + """Extract JSON string from text that may contain other content.""" + return json.dumps(extract_json_blocks(text, allow_multiple=False)[0]) diff --git a/src/backend/main.py b/src/backend/main.py index 0cb0408..303c096 100644 --- a/src/backend/main.py +++ b/src/backend/main.py @@ -70,6 +70,7 @@ import entities from email_service import VerificationEmailRateLimiter, email_service from device_manager import DeviceManager import agents +from entities.candidate_entity import CandidateEntity # ============================= # Import Pydantic models @@ -82,7 +83,7 @@ from models import ( Candidate, Employer, BaseUserWithType, BaseUser, Guest, Authentication, AuthResponse, CandidateAI, # Job models - JobFull, JobApplication, ApplicationStatus, + JobApplication, ApplicationStatus, # Chat models ChatSession, ChatMessage, ChatContext, ChatQuery, ApiStatusType, ChatSenderType, ApiMessageType, ChatContextType, @@ -2121,7 +2122,7 @@ async def upload_candidate_document( try: # Parse the JSON string and create DocumentOptions object options_dict = json.loads(options_data) - options : DocumentOptions = DocumentOptions.model_validate(**options_dict) + options : DocumentOptions = DocumentOptions.model_validate(options_dict) except (json.JSONDecodeError, ValidationError) as e: return StreamingResponse( 
iter([json.dumps(ChatMessageError( @@ -2178,7 +2179,7 @@ async def upload_candidate_document( logger.info(f"📁 Received file upload: filename='{file.filename}', content_type='{file.content_type}', size='{len(file_content)} bytes'") - directory = "rag-content" if options.include_in_RAG else "files" + directory = "rag-content" if options.include_in_rag else "files" directory = "jobs" if options.is_job_document else directory # Ensure the file does not already exist either in 'files' or in 'rag-content' @@ -2332,6 +2333,52 @@ async def upload_candidate_document( media_type="text/event-stream" ) +async def reformat_as_markdown(database: RedisDatabase, candidate_entity: CandidateEntity, content: str): + chat_agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.JOB_REQUIREMENTS) + if not chat_agent: + error_message = ChatMessageError( + sessionId=MOCK_UUID, # No session ID for document uploads + content="No agent found for job requirements chat type" + ) + yield error_message + return + status_message = ChatMessageStatus( + sessionId=MOCK_UUID, # No session ID for document uploads + content=f"Reformatting job description as markdown...", + activity=ApiActivityType.CONVERTING + ) + yield status_message + + message = None + async for message in chat_agent.llm_one_shot( + llm=llm_manager.get_llm(), + model=defines.model, + session_id=MOCK_UUID, + prompt=content, + system_prompt=""" +You are a document editor. Take the provided job description and reformat as legible markdown. +Return only the markdown content, no other text. Make sure all content is included. 
+""" + ): + pass + + if not message or not isinstance(message, ChatMessage): + logger.error("❌ Failed to reformat job description to markdown") + error_message = ChatMessageError( + sessionId=MOCK_UUID, # No session ID for document uploads + content="Failed to reformat job description" + ) + yield error_message + return + chat_message : ChatMessage = message + try: + chat_message.content = chat_agent.extract_markdown_from_text(chat_message.content) + except Exception as e: + pass + logger.info(f"✅ Successfully converted content to markdown") + yield chat_message + return + async def create_job_from_content(database: RedisDatabase, current_user: Candidate, content: str): status_message = ChatMessageStatus( sessionId=MOCK_UUID, # No session ID for document uploads @@ -2342,6 +2389,20 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida await asyncio.sleep(0) # Let the status message propagate async with entities.get_candidate_entity(candidate=current_user) as candidate_entity: + message = None + async for message in reformat_as_markdown(database, candidate_entity, content): + # Only yield one final DONE message + if message.status != ApiStatusType.DONE: + yield message + if not message or not isinstance(message, ChatMessage): + error_message = ChatMessageError( + sessionId=MOCK_UUID, # No session ID for document uploads + content="Failed to reformat job description" + ) + yield error_message + return + markdown_message = message + chat_agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.JOB_REQUIREMENTS) if not chat_agent: error_message = ChatMessageError( @@ -2350,7 +2411,6 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida ) yield error_message return - message = None status_message = ChatMessageStatus( sessionId=MOCK_UUID, # No session ID for document uploads content=f"Analyzing document for company and requirement details...", @@ -2358,13 +2418,15 @@ async def 
create_job_from_content(database: RedisDatabase, current_user: Candida ) yield status_message + message = None async for message in chat_agent.generate( llm=llm_manager.get_llm(), model=defines.model, session_id=MOCK_UUID, - prompt=content + prompt=markdown_message.content ): - pass + if message.status != ApiStatusType.DONE: + yield message if not message or not isinstance(message, JobRequirementsMessage): error_message = ChatMessageError( @@ -2374,42 +2436,8 @@ async def create_job_from_content(database: RedisDatabase, current_user: Candida yield error_message return - status_message = ChatMessageStatus( - sessionId=MOCK_UUID, # No session ID for document uploads - content=f"Reformatting job description as markdown...", - activity=ApiActivityType.CONVERTING - ) - yield status_message - job_requirements : JobRequirementsMessage = message - async for message in chat_agent.llm_one_shot( - llm=llm_manager.get_llm(), - model=defines.model, - session_id=MOCK_UUID, - prompt=content, - system_prompt=""" -You are a document editor. Take the provided job description and reformat as legible markdown. -Return only the markdown content, no other text. Make sure all content is included. 
-""" - ): - pass - - if not message or not isinstance(message, ChatMessage): - logger.error("❌ Failed to reformat job description to markdown") - error_message = ChatMessageError( - sessionId=MOCK_UUID, # No session ID for document uploads - content="Failed to reformat job description" - ) - yield error_message - return - chat_message : ChatMessage = message - markdown = chat_message.content - try: - markdown = chat_agent.extract_markdown_from_text(chat_message.content) - except Exception as e: - pass - job_requirements.job.description = markdown - logger.info(f"✅ Successfully saved job requirements job {job_requirements.id}") + logger.info(f"✅ Successfully generated job requirements for job {job_requirements.id}") yield job_requirements return @@ -2619,7 +2647,7 @@ async def get_document_content( content=create_error_response("FORBIDDEN", "Cannot access another candidate's document") ) - file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_RAG else "files", document.originalName) + file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_rag else "files", document.originalName) file_path = pathlib.Path(file_path) if not document.type in [DocumentType.TXT, DocumentType.MARKDOWN]: file_path = file_path.with_suffix('.md') @@ -2694,7 +2722,7 @@ async def update_document( content=create_error_response("FORBIDDEN", "Cannot update another candidate's document") ) update_options = updates.options if updates.options else DocumentOptions() - if document.options.include_in_RAG != update_options.include_in_RAG: + if document.options.include_in_rag != update_options.include_in_rag: # If RAG status is changing, we need to handle file movement rag_dir = os.path.join(defines.user_dir, candidate.username, "rag-content") file_dir = os.path.join(defines.user_dir, candidate.username, "files") @@ -2703,7 +2731,7 @@ async def update_document( rag_path = os.path.join(rag_dir, 
document.originalName) file_path = os.path.join(file_dir, document.originalName) - if update_options.include_in_RAG: + if update_options.include_in_rag: src = pathlib.Path(file_path) dst = pathlib.Path(rag_path) # Move to RAG directory @@ -2731,8 +2759,8 @@ async def update_document( update_dict = {} if updates.filename is not None: update_dict["filename"] = updates.filename.strip() - if update_options.include_in_RAG is not None: - update_dict["include_in_RAG"] = update_options.include_in_RAG + if update_options.include_in_rag is not None: + update_dict["include_in_rag"] = update_options.include_in_rag if not update_dict: return JSONResponse( @@ -2802,7 +2830,7 @@ async def delete_document( ) # Delete file from disk - file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_RAG else "files", document.originalName) + file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_rag else "files", document.originalName) file_path = pathlib.Path(file_path) try: @@ -3264,10 +3292,7 @@ async def create_candidate_job( is_employer = isinstance(current_user, Employer) try: - if is_employer: - job = JobFull.model_validate(job_data) - else: - job = Job.model_validate(job_data) + job = Job.model_validate(job_data) # Add required fields job.id = str(uuid.uuid4()) @@ -3286,6 +3311,50 @@ async def create_candidate_job( ) +@api_router.patch("/jobs/{job_id}") +async def update_job( + job_id: str = Path(...), + updates: Dict[str, Any] = Body(...), + current_user = Depends(get_current_user), + database: RedisDatabase = Depends(get_database) +): + """Update a job""" + try: + job_data = await database.get_job(job_id) + if not job_data: + logger.warning(f"⚠️ Job not found for update: {job_data}") + return JSONResponse( + status_code=404, + content=create_error_response("NOT_FOUND", "Job not found") + ) + + job = Job.model_validate(job_data) + + # Check authorization (user can only 
update their own jobs) + if current_user.is_admin is False and job.owner_id != current_user.id: + logger.warning(f"⚠️ Unauthorized update attempt by user {current_user.id} on job {job_id}") + return JSONResponse( + status_code=403, + content=create_error_response("FORBIDDEN", "Cannot update another user's job") + ) + + # Apply updates + updates["updatedAt"] = datetime.now(UTC).isoformat() + logger.info(f"🔄 Updating job {job_id} with data: {updates}") + job_dict = job.model_dump() + job_dict.update(updates) + updated_job = Job.model_validate(job_dict) + await database.set_job(job_id, updated_job.model_dump()) + + return create_success_response(updated_job.model_dump(by_alias=True)) + + except Exception as e: + logger.error(f"❌ Update job error: {e}") + return JSONResponse( + status_code=400, + content=create_error_response("UPDATE_FAILED", str(e)) + ) + @api_router.post("/jobs/from-content") async def create_job_from_description( content: str = Body(...), @@ -3306,7 +3375,14 @@ async def create_job_from_description( logger.info(f"📁 Received file content: size='{len(content)} bytes'") + last_yield_was_streaming = False async for message in create_job_from_content(database=database, current_user=current_user, content=content): + if message.status != ApiStatusType.STREAMING: + last_yield_was_streaming = False + else: + if last_yield_was_streaming: + continue + last_yield_was_streaming = True logger.info(f"📄 Yielding job creation message status: {message.status}") yield message return @@ -3526,10 +3602,7 @@ async def get_jobs( all_jobs_data = await database.get_all_jobs() jobs_list = [] for job in all_jobs_data.values(): - if job.get("user_type") == "employer": - jobs_list.append(JobFull.model_validate(job)) - else: - jobs_list.append(Job.model_validate(job)) + jobs_list.append(Job.model_validate(job)) paginated_jobs, total = filter_and_paginate( jobs_list, page, limit, sortBy, sortOrder, filter_dict @@ -4537,12 +4610,12 @@ def get_endpoint_rate_limiter(rate_limiter: 
RateLimiter = Depends(get_rate_limit @api_router.post("/candidates/{candidate_id}/skill-match") async def get_candidate_skill_match( candidate_id: str = Path(...), - requirement: str = Body(...), + skill: str = Body(...), current_user = Depends(get_current_user_or_guest), database: RedisDatabase = Depends(get_database) ) -> StreamingResponse: - """Get skill match for a candidate against a requirement with caching""" + """Get skill match for a candidate against a skill with caching""" async def message_stream_generator(): candidate_data = await database.get_candidate(candidate_id) if not candidate_data: @@ -4555,18 +4628,21 @@ async def get_candidate_skill_match( candidate = Candidate.model_validate(candidate_data) - # Create cache key for this specific candidate + requirement combination - requirement_hash = hashlib.md5(requirement.encode()).hexdigest()[:8] - cache_key = f"skill_match:{candidate.id}:{requirement_hash}" + # Create cache key for this specific candidate + skill combination + skill_hash = hashlib.md5(skill.lower().encode()).hexdigest()[:8] + cache_key = f"skill_match:{candidate.id}:{skill_hash}" # Get cached assessment if it exists assessment : SkillAssessment | None = await database.get_cached_skill_match(cache_key) - + + if assessment and assessment.skill.lower() != skill.lower(): + logger.warning(f"❌ Cached skill match for {candidate.username} does not match requested skill: {assessment.skill} != {skill} ({cache_key}). 
Regenerating...") + assessment = None + # Determine if we need to regenerate the assessment - cached_date = None if assessment: # Get the latest RAG data update time for the current user - user_rag_update_time = await database.get_user_rag_update_time(current_user.id) + user_rag_update_time = await database.get_user_rag_update_time(candidate.id) updated = assessment.updated_at if "updated_at" in assessment else assessment.created_at # Check if cached result is still valid @@ -4575,14 +4651,11 @@ async def get_candidate_skill_match( logger.info(f"🔄 Out-of-date cached entry for {candidate.username} skill {assessment.skill}") assessment = None else: - cached_date = updated + logger.info(f"✅ Using cached skill match for {candidate.username} skill {assessment.skill}: {cache_key}") else: - logger.info(f"💾 No cached skill match data: {cache_key}, {candidate.id}, {requirement}") + logger.info(f"💾 No cached skill match data: {cache_key}, {candidate.id}, {skill}") if assessment: - logger.info(f"✅ Found cached skill match for candidate {candidate.username} against requirement: {requirement}") - logger.info(f"💾 Cached skill match data: {assessment.evidence_strength}") - # Return cached assessment skill_message = ChatMessageSkillAssessment( sessionId=MOCK_UUID, # No session ID for document uploads @@ -4592,7 +4665,7 @@ async def get_candidate_skill_match( yield skill_message return - logger.info(f"🔍 Generating skill match for candidate {candidate.username} against requirement: {requirement}") + logger.info(f"🔍 Generating skill match for candidate {candidate.username} for skill: {skill}") async with entities.get_candidate_entity(candidate=candidate) as candidate_entity: agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.SKILL_MATCH) @@ -4610,11 +4683,15 @@ async def get_candidate_skill_match( llm=llm_manager.get_llm(), model=defines.model, session_id=MOCK_UUID, - prompt=requirement, + prompt=skill, ): if generated_message.status == ApiStatusType.ERROR: + 
error_message = ChatMessageError( + sessionId=MOCK_UUID, # No session ID for document uploads + content=f"AI generation error: {generated_message.content}" + ) logger.error(f"❌ AI generation error: {generated_message.content}") - yield f"data: {json.dumps({'status': 'error'})}\n\n" + yield error_message return # If the message is not done, convert it to a ChatMessageBase to remove @@ -4634,7 +4711,7 @@ async def get_candidate_skill_match( if final_message is None: error_message = ChatMessageError( sessionId=MOCK_UUID, # No session ID for document uploads - content=f"No skill match found for the given requirement" + content=f"No match found for the given skill" ) yield error_message return @@ -4658,8 +4735,8 @@ async def get_candidate_skill_match( return await database.cache_skill_match(cache_key, assessment) - logger.info(f"💾 Cached new skill match for candidate {candidate.id}") - logger.info(f"✅ Skill match found for candidate {candidate.id}: {assessment.evidence_strength}") + logger.info(f"💾 Cached new skill match for candidate {candidate.id} as {cache_key}") + logger.info(f"✅ Skill match: {assessment.evidence_strength} {skill}") yield skill_match return diff --git a/src/backend/models.py b/src/backend/models.py index 9030ac0..484e6a0 100644 --- a/src/backend/models.py +++ b/src/backend/models.py @@ -98,9 +98,26 @@ class EvidenceDetail(BaseModel): "populate_by_name": True, # Allow both field names and aliases } +class ChromaDBGetResponse(BaseModel): + # Chroma fields + ids: List[str] = [] + embeddings: List[List[float]] = [] + documents: List[str] = [] + metadatas: List[Dict[str, Any]] = [] + distances: List[float] = [] + # Additional fields + name: str = "" + size: int = 0 + dimensions: int = 2 | 3  # NOTE(review): "2 | 3" is bitwise OR and evaluates to 3; use Literal[2, 3] if "2 or 3" was intended + query: str = "" + query_embedding: Optional[List[float]] = Field(default=None, alias="queryEmbedding") + umap_embedding_2d: Optional[List[float]] = Field(default=None, alias="umapEmbedding2D") + umap_embedding_3d: Optional[List[float]] = Field(default=None, 
alias="umapEmbedding3D") + class SkillAssessment(BaseModel): candidate_id: str = Field(..., alias='candidateId') skill: str = Field(..., alias="skill", description="The skill being assessed") + skill_modified: Optional[str] = Field(default="", alias="skillModified", description="The skill rephrased by LLM during skill match") evidence_found: bool = Field(..., alias="evidenceFound", description="Whether evidence was found for the skill") evidence_strength: SkillStrength = Field(..., alias="evidenceStrength", description="Strength of evidence found for the skill") assessment: str = Field(..., alias="assessment", description="Short (one to two sentence) assessment of the candidate's proficiency with the skill") @@ -108,6 +125,7 @@ class SkillAssessment(BaseModel): evidence_details: List[EvidenceDetail] = Field(default_factory=list, alias="evidenceDetails", description="List of evidence details supporting the skill assessment") created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias='createdAt') updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias='updatedAt') + rag_results: List[ChromaDBGetResponse] = Field(default_factory=list, alias="ragResults") model_config = { "populate_by_name": True, # Allow both field names and aliases } @@ -523,7 +541,7 @@ class DocumentType(str, Enum): IMAGE = "image" class DocumentOptions(BaseModel): - include_in_RAG: bool = Field(default=True, alias="includeInRAG") + include_in_rag: bool = Field(default=True, alias="includeInRag") is_job_document: Optional[bool] = Field(default=False, alias="isJobDocument") overwrite: Optional[bool] = Field(default=False, alias="overwrite") model_config = { @@ -680,23 +698,7 @@ class JobRequirements(BaseModel): "populate_by_name": True # Allow both field names and aliases } -class Job(BaseModel): - id: str = Field(default_factory=lambda: str(uuid.uuid4())) - owner_id: str = Field(..., alias="ownerId") - owner_type: UserType = Field(..., alias="ownerType") - 
owner: Optional[BaseUser] = None - title: Optional[str] - summary: Optional[str] - company: Optional[str] - description: str - requirements: Optional[JobRequirements] - created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="createdAt") - updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="updatedAt") - model_config = { - "populate_by_name": True # Allow both field names and aliases - } - -class JobFull(Job): +class JobDetails(BaseModel): location: Location salary_range: Optional[SalaryRange] = Field(None, alias="salaryRange") employment_type: EmploymentType = Field(..., alias="employmentType") @@ -712,6 +714,24 @@ class JobFull(Job): views: int = 0 application_count: int = Field(0, alias="applicationCount") +class Job(BaseModel): + id: str = Field(default_factory=lambda: str(uuid.uuid4())) + owner_id: str = Field(..., alias="ownerId") + owner_type: UserType = Field(..., alias="ownerType") + owner: Optional[BaseUser] = None + title: Optional[str] + summary: Optional[str] + company: Optional[str] + description: str + requirements: Optional[JobRequirements] + created_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="createdAt") + updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC), alias="updatedAt") + details: Optional[JobDetails] = Field(None, alias="details") + model_config = { + "populate_by_name": True # Allow both field names and aliases + } + + class InterviewFeedback(BaseModel): id: str = Field(default_factory=lambda: str(uuid.uuid4())) interview_id: str = Field(..., alias="interviewId") @@ -765,22 +785,6 @@ class JobApplication(BaseModel): "populate_by_name": True # Allow both field names and aliases } -class ChromaDBGetResponse(BaseModel): - # Chroma fields - ids: List[str] = [] - embeddings: List[List[float]] = [] - documents: List[str] = [] - metadatas: List[Dict[str, Any]] = [] - distances: List[float] = [] - # Additional fields - name: str = "" - size: int = 0 - 
dimensions: int = 2 | 3 - query: str = "" - query_embedding: Optional[List[float]] = Field(default=None, alias="queryEmbedding") - umap_embedding_2d: Optional[List[float]] = Field(default=None, alias="umapEmbedding2D") - umap_embedding_3d: Optional[List[float]] = Field(default=None, alias="umapEmbedding3D") - class GuestSessionResponse(BaseModel): """Response for guest session creation""" access_token: str = Field(..., alias="accessToken") @@ -1270,4 +1274,4 @@ Candidate.update_forward_refs() Employer.update_forward_refs() ChatSession.update_forward_refs() JobApplication.update_forward_refs() -JobFull.update_forward_refs() \ No newline at end of file +Job.update_forward_refs() \ No newline at end of file diff --git a/src/backend/rag/rag.py b/src/backend/rag/rag.py index 0a6d3c1..3220d76 100644 --- a/src/backend/rag/rag.py +++ b/src/backend/rag/rag.py @@ -309,7 +309,7 @@ class ChromaDBFileWatcher(FileSystemEventHandler): include=["embeddings", "documents", "metadatas"] )) if not self._umap_collection or not len(self._umap_collection.embeddings): - logging.warning("No embeddings found in the collection.") + logging.warning("⚠️ No embeddings found in the collection.") return # During initialization @@ -455,7 +455,7 @@ class ChromaDBFileWatcher(FileSystemEventHandler): meta["chunk_end"] = end return "".join(lines[start:end]) except: - logging.warning(f"Unable to open {source_file}") + logging.warning(f"⚠️ Unable to open {source_file}") return None # Cosine Distance Equivalent Similarity Retrieval Characteristics