diff --git a/frontend/src/components/VectorVisualizer.tsx b/frontend/src/components/VectorVisualizer.tsx index e10021c..4585cb2 100644 --- a/frontend/src/components/VectorVisualizer.tsx +++ b/frontend/src/components/VectorVisualizer.tsx @@ -21,6 +21,15 @@ import { connectionBase } from '../utils/Global'; import './VectorVisualizer.css'; import { BackstoryPageProps } from './BackstoryTab'; +import { useAuth } from 'hooks/AuthContext'; +import * as Types from 'types/types'; +import { useSelectedCandidate } from 'hooks/GlobalContext'; +import { useNavigate } from 'react-router-dom'; +import { Message } from './Message'; +const defaultMessage: Types.ChatMessageBase = { + type: "preparing", status: "done", sender: "system", sessionId: "", timestamp: new Date(), content: "" +}; + interface VectorVisualizerProps extends BackstoryPageProps { inline?: boolean; @@ -29,23 +38,11 @@ interface VectorVisualizerProps extends BackstoryPageProps { interface Metadata { id: string; - doc_type: string; + docType: string; content: string; distance?: number; } -type QuerySet = { - ids: string[], - documents: string[], - metadatas: Metadata[], - embeddings: (number[])[], - distances?: (number | undefined)[], - dimensions?: number; - query?: string; - umap_embedding_2d?: number[]; - umap_embedding_3d?: number[]; -}; - const emptyQuerySet = { ids: [], documents: [], @@ -173,25 +170,27 @@ const DEFAULT_UNFOCUS_SIZE = 2.; type Node = { id: string, content: string, // Portion of content that was used for embedding - full_content: string | undefined, // Portion of content plus/minus buffer + fullContent: string | undefined, // Portion of content plus/minus buffer emoji: string, - doc_type: string, + docType: string, source_file: string, distance: number | undefined, path: string, - chunk_begin: number, - line_begin: number, - chunk_end: number, - line_end: number, + chunkBegin: number, + lineBegin: number, + chunkEnd: number, + lineEnd: number, sx: SxProps, }; const VectorVisualizer: React.FC 
= (props: VectorVisualizerProps) => { - const { setSnack, rag, inline, sx } = props; + const { user, apiClient } = useAuth(); + const { setSnack, submitQuery, rag, inline, sx } = props; + const backstoryProps = { setSnack, submitQuery }; const [plotData, setPlotData] = useState(null); const [newQuery, setNewQuery] = useState(''); - const [querySet, setQuerySet] = useState(rag || emptyQuerySet); - const [result, setResult] = useState(undefined); + const [querySet, setQuerySet] = useState(rag || emptyQuerySet); + const [result, setResult] = useState(null); const [view2D, setView2D] = useState(true); const plotlyRef = useRef(null); const boxRef = useRef(null); @@ -199,6 +198,9 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz const theme = useTheme(); const isMobile = useMediaQuery(theme.breakpoints.down('md')); const [plotDimensions, setPlotDimensions] = useState({ width: 0, height: 0 }); + const navigate = useNavigate(); + + const candidate: Types.Candidate | null = user?.userType === 'candidate' ? user : null; /* Force resize of Plotly as it tends to not be the correct size if it is initially rendered * off screen (eg., the VectorVisualizer is not on the tab the app loads to) */ @@ -225,21 +227,16 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz // Get the collection to visualize useEffect(() => { - if ((result !== undefined && result.dimensions !== (view2D ? 3 : 2))) { + if (result) { return; } const fetchCollection = async () => { + if (!candidate) { + return; + } try { - const response = await fetch(connectionBase + `/api/umap/`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ dimensions: view2D ? 2 : 3 }), - }); - const data: QuerySet = await response.json(); - data.dimensions = view2D ? 2 : 3; - setResult(data); + const result = await apiClient.getCandidateVectors(view2D ? 
2 : 3); + setResult(result); } catch (error) { console.error('Error obtaining collection information:', error); setSnack("Unable to obtain collection information.", "error"); @@ -253,7 +250,8 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz if (!result || !result.embeddings) return; if (result.embeddings.length === 0) return; - const full: QuerySet = { + const full: Types.ChromaDBGetResponse = { + ...result, ids: [...result.ids || []], documents: [...result.documents || []], embeddings: [...result.embeddings], @@ -270,18 +268,27 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz return; } - let query: QuerySet = { + let query: Types.ChromaDBGetResponse = { ids: [], documents: [], embeddings: [], metadatas: [], distances: [], + query: '', + size: 0, + dimensions: 2, + name: '' }; - let filtered: QuerySet = { + let filtered: Types.ChromaDBGetResponse = { ids: [], documents: [], embeddings: [], metadatas: [], + distances: [], + query: '', + size: 0, + dimensions: 2, + name: '' }; /* Loop through all items and divide into two groups: @@ -310,30 +317,30 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz } }); - if (view2D && querySet.umap_embedding_2d && querySet.umap_embedding_2d.length) { + if (view2D && querySet.umapEmbedding2D && querySet.umapEmbedding2D.length) { query.ids.unshift('query'); - query.metadatas.unshift({ id: 'query', doc_type: 'query', content: querySet.query || '', distance: 0 }); - query.embeddings.unshift(querySet.umap_embedding_2d); + query.metadatas.unshift({ id: 'query', docType: 'query', content: querySet.query || '', distance: 0 }); + query.embeddings.unshift(querySet.umapEmbedding2D); } - if (!view2D && querySet.umap_embedding_3d && querySet.umap_embedding_3d.length) { + if (!view2D && querySet.umapEmbedding3D && querySet.umapEmbedding3D.length) { query.ids.unshift('query'); - query.metadatas.unshift({ id: 'query', doc_type: 'query', content: querySet.query || '', distance: 0 }); - 
query.embeddings.unshift(querySet.umap_embedding_3d); + query.metadatas.unshift({ id: 'query', docType: 'query', content: querySet.query || '', distance: 0 }); + query.embeddings.unshift(querySet.umapEmbedding3D); } - const filtered_doc_types = filtered.metadatas.map(m => m.doc_type || 'unknown') - const query_doc_types = query.metadatas.map(m => m.doc_type || 'unknown') + const filtered_docTypes = filtered.metadatas.map(m => m.docType || 'unknown') + const query_docTypes = query.metadatas.map(m => m.docType || 'unknown') const has_query = query.metadatas.length > 0; const filtered_sizes = filtered.metadatas.map(m => has_query ? DEFAULT_UNFOCUS_SIZE : DEFAULT_SIZE); - const filtered_colors = filtered_doc_types.map(type => colorMap[type] || '#ff8080'); + const filtered_colors = filtered_docTypes.map(type => colorMap[type] || '#4d4d4d'); const filtered_x = normalizeDimension(filtered.embeddings.map((v: number[]) => v[0])); const filtered_y = normalizeDimension(filtered.embeddings.map((v: number[]) => v[1])); const filtered_z = is3D ? normalizeDimension(filtered.embeddings.map((v: number[]) => v[2])) : undefined; const query_sizes = query.metadatas.map(m => DEFAULT_SIZE + 2. * DEFAULT_SIZE * Math.pow((1. - (m.distance || 1.)), 3)); - const query_colors = query_doc_types.map(type => colorMap[type] || '#ff8080'); + const query_colors = query_docTypes.map(type => colorMap[type] || '#4d4d4d'); const query_x = normalizeDimension(query.embeddings.map((v: number[]) => v[0])); const query_y = normalizeDimension(query.embeddings.map((v: number[]) => v[1])); const query_z = is3D ? 
normalizeDimension(query.embeddings.map((v: number[]) => v[2])) : undefined; @@ -388,22 +395,14 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz const sendQuery = async (query: string) => { if (!query.trim()) return; setNewQuery(''); + try { - const response = await fetch(`${connectionBase}/api/similarity/`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - query: query, - dimensions: view2D ? 2 : 3, - }) - }); - const data = await response.json(); - setQuerySet(data); + const result = await apiClient.getCandidateSimilarContent(query); + console.log(result); + setQuerySet(result); } catch (error) { - console.error('Error obtaining query similarity information:', error); - setSnack("Unable to obtain query similarity information.", "error"); + const msg = `Error obtaining similar content to ${query}.` + setSnack(msg, "error"); }; }; @@ -413,18 +412,18 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz ); + if (!candidate) return ( + +
No candidate selected. Please first.
+
+ ); + const fetchRAGMeta = async (node: Node) => { try { - const response = await fetch(connectionBase + `/api/umap/entry/${node.id}`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - }, - }); - + const result = await apiClient.getCandidateContent(node.id); const update: Node = { ...node, - full_content: await response.json() + fullContent: result.content } setNode(update); } catch (error) { @@ -436,14 +435,15 @@ const VectorVisualizer: React.FC = (props: VectorVisualiz const onNodeSelected = (metadata: any) => { let node: Node; - if (metadata.doc_type === 'query') { + console.log(metadata); + if (metadata.docType === 'query') { node = { ...metadata, content: `Similarity results for the query **${querySet.query || ''}** The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '2' : '3'}-dimensional space. Larger dots represent relative similarity in N-dimensional space. `, - emoji: emojiMap[metadata.doc_type], + emoji: emojiMap[metadata.docType], sx: { m: 0.5, p: 2, @@ -453,7 +453,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? ' justifyContent: "center", flexGrow: 0, flexWrap: "wrap", - backgroundColor: colorMap[metadata.doc_type] || '#ff8080', + backgroundColor: colorMap[metadata.docType] || '#ff8080', } } setNode(node); @@ -463,7 +463,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? ' node = { content: `Loading...`, ...metadata, - emoji: emojiMap[metadata.doc_type] || '❓', + emoji: emojiMap[metadata.docType] || '❓', } setNode(node); @@ -499,7 +499,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? 
' flexBasis: 0, flexGrow: 0 }} - control={} onChange={() => setView2D(!view2D)} label="3D" /> + control={} onChange={() => { setView2D(!view2D); setResult(null); }} label="3D" /> { onNodeSelected(event.points[0].customdata); }} @@ -528,7 +528,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? ' Type - {node.emoji} {node.doc_type} + {node.emoji} {node.docType} {node.source_file !== undefined && File @@ -560,7 +560,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? ' Click a point in the scatter-graph to see information about that node. } - {node !== null && node.full_content && + {node !== null && node.fullContent && { - node.full_content.split('\n').map((line, index) => { - index += 1 + node.chunk_begin; - const bgColor = (index > node.line_begin && index <= node.line_end) ? '#f0f0f0' : 'auto'; + node.fullContent.split('\n').map((line, index) => { + index += 1 + node.chunkBegin; + const bgColor = (index > node.lineBegin && index <= node.lineEnd) ? '#f0f0f0' : 'auto'; return {index}
{line || " "}
; }) } - {!node.line_begin &&
{node.content}
} + {!node.lineBegin &&
{node.content}
}
} diff --git a/frontend/src/components/layout/BackstoryRoutes.tsx b/frontend/src/components/layout/BackstoryRoutes.tsx index c7eb709..d2941da 100644 --- a/frontend/src/components/layout/BackstoryRoutes.tsx +++ b/frontend/src/components/layout/BackstoryRoutes.tsx @@ -20,8 +20,8 @@ import { ControlsPage } from 'pages/ControlsPage'; import { LoginPage } from "pages/LoginPage"; import { CandidateDashboardPage } from "pages/CandidateDashboardPage" import { EmailVerificationPage } from "components/EmailVerificationComponents"; +import { CandidateProfilePage } from "pages/candidate/Profile"; -const ProfilePage = () => (Profile); const BackstoryPage = () => (Backstory); const ResumesPage = () => (Resumes); const QASetupPage = () => (Q&A Setup); @@ -69,7 +69,7 @@ const getBackstoryDynamicRoutes = (props: BackstoryDynamicRoutesProps): ReactNod if (user.userType === 'candidate') { routes.splice(-1, 0, ...[ } />, - } />, + } />, } />, } />, } />, diff --git a/frontend/src/hooks/AuthContext.tsx b/frontend/src/hooks/AuthContext.tsx index bd69566..ef8b509 100644 --- a/frontend/src/hooks/AuthContext.tsx +++ b/frontend/src/hooks/AuthContext.tsx @@ -23,13 +23,6 @@ interface LoginRequest { password: string; } -interface MFAVerificationRequest { - email: string; - code: string; - deviceId: string; - rememberDevice?: boolean; -} - interface EmailVerificationRequest { token: string; } @@ -418,7 +411,7 @@ function useAuthenticationLogic() { }, [apiClient]); // MFA verification - const verifyMFA = useCallback(async (mfaData: MFAVerificationRequest): Promise => { + const verifyMFA = useCallback(async (mfaData: Types.MFAVerifyRequest): Promise => { setAuthState(prev => ({ ...prev, isLoading: true, error: null })); try { @@ -742,7 +735,7 @@ function ProtectedRoute({ } export type { - AuthState, LoginRequest, MFAVerificationRequest, EmailVerificationRequest, ResendVerificationRequest, PasswordResetRequest + AuthState, LoginRequest, EmailVerificationRequest, ResendVerificationRequest, 
PasswordResetRequest } export type { CreateCandidateRequest, CreateEmployerRequest } from '../services/api-client'; diff --git a/frontend/src/pages/GenerateCandidate.tsx b/frontend/src/pages/GenerateCandidate.tsx index 0965a81..de0dd8b 100644 --- a/frontend/src/pages/GenerateCandidate.tsx +++ b/frontend/src/pages/GenerateCandidate.tsx @@ -43,7 +43,10 @@ const emptyUser: Candidate = { education: [], preferredJobTypes: [], languages: [], - certifications: [] + certifications: [], + isAdmin: false, + hasProfile: false, + ragContentSize: 0 }; const GenerateCandidate = (props: BackstoryElementProps) => { diff --git a/frontend/src/pages/LoginPage.tsx b/frontend/src/pages/LoginPage.tsx index 735425e..12d8247 100644 --- a/frontend/src/pages/LoginPage.tsx +++ b/frontend/src/pages/LoginPage.tsx @@ -28,8 +28,10 @@ import { BackstoryPageProps } from 'components/BackstoryTab'; import { LoginForm } from "components/EmailVerificationComponents"; import { CandidateRegistrationForm } from "components/RegistrationForms"; +import { useNavigate } from 'react-router-dom'; const LoginPage: React.FC = (props: BackstoryPageProps) => { + const navigate = useNavigate(); const { setSnack } = props; const [tabValue, setTabValue] = useState(0); const [loading, setLoading] = useState(false); @@ -62,68 +64,10 @@ const LoginPage: React.FC = (props: BackstoryPageProps) => { setSuccess(null); }; - // If user is logged in, show their profile + // If user is logged in, navigate to the profile page if (user) { - return ( - - - - - - - - - User Profile - - - - - - - - - Username: {name} - - - - - Email: {user.email} - - - - - {/* Status: {user.status} */} - - - - - Phone: {user.phone || 'Not provided'} - - - - - Account type: {user.userType} - - - - - Last Login: { - user.lastLogin - ? 
user.lastLogin.toLocaleString() - : 'N/A' - } - - - - - Member Since: {user.createdAt.toLocaleDateString()} - - - - - - - ); + navigate('/candidate/profile'); + return (<>); } return ( diff --git a/frontend/src/pages/candidate/Profile.tsx b/frontend/src/pages/candidate/Profile.tsx new file mode 100644 index 0000000..98c6ce1 --- /dev/null +++ b/frontend/src/pages/candidate/Profile.tsx @@ -0,0 +1,1056 @@ +import React, { useState, useEffect } from 'react'; +import { + Box, + Button, + Container, + Grid, + Paper, + TextField, + Typography, + Avatar, + IconButton, + Tabs, + Tab, + useMediaQuery, + CircularProgress, + Snackbar, + Alert, + Card, + CardContent, + CardActions, + Chip, + Divider, + List, + ListItem, + ListItemText, + ListItemSecondaryAction, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + MenuItem, + Select, + FormControl, + InputLabel, + Switch, + FormControlLabel +} from '@mui/material'; +import { styled } from '@mui/material/styles'; +import { + CloudUpload, + PhotoCamera, + Edit, + Save, + Cancel, + Add, + Delete, + Work, + School, + Language, + EmojiEvents, + LocationOn, + Phone, + Email, + AccountCircle, + BubbleChart +} from '@mui/icons-material'; +import { useTheme } from '@mui/material/styles'; +import { useAuth } from "hooks/AuthContext"; +import * as Types from 'types/types'; +import { ComingSoon } from 'components/ui/ComingSoon'; +import { VectorVisualizer } from 'components/VectorVisualizer'; +import { BackstoryPageProps } from 'components/BackstoryTab'; + +// Styled components +const VisuallyHiddenInput = styled('input')({ + clip: 'rect(0 0 0 0)', + clipPath: 'inset(50%)', + height: 1, + overflow: 'hidden', + position: 'absolute', + bottom: 0, + left: 0, + whiteSpace: 'nowrap', + width: 1, +}); + +interface TabPanelProps { + children?: React.ReactNode; + index: number; + value: number; +} + +function TabPanel(props: TabPanelProps) { + const { children, value, index, ...other } = props; + + return ( + + ); +} + +const 
CandidateProfilePage: React.FC = (props: BackstoryPageProps) => { + const { setSnack, submitQuery } = props; + const backstoryProps = { setSnack, submitQuery }; + const theme = useTheme(); + const isMobile = useMediaQuery(theme.breakpoints.down('sm')); + const { user, /*updateUser,*/ apiClient } = useAuth(); + + // Check if user is a candidate + const candidate = user?.userType === 'candidate' ? user as Types.Candidate : null; + + // State management + const [tabValue, setTabValue] = useState(0); + const [editMode, setEditMode] = useState<{ [key: string]: boolean }>({}); + const [loading, setLoading] = useState(false); + const [snackbar, setSnackbar] = useState<{ + open: boolean; + message: string; + severity: "success" | "error" | "info" | "warning"; + }>({ + open: false, + message: '', + severity: 'success' + }); + + // Form data state + const [formData, setFormData] = useState>({}); + const [profileImage, setProfileImage] = useState(null); + const [resumeFile, setResumeFile] = useState(null); + + // Dialog states + const [skillDialog, setSkillDialog] = useState(false); + const [experienceDialog, setExperienceDialog] = useState(false); + const [educationDialog, setEducationDialog] = useState(false); + const [languageDialog, setLanguageDialog] = useState(false); + const [certificationDialog, setCertificationDialog] = useState(false); + + // New item states + const [newSkill, setNewSkill] = useState>({ + name: '', + category: '', + level: 'beginner', + yearsOfExperience: 0 + }); + const [newExperience, setNewExperience] = useState>({ + companyName: '', + position: '', + startDate: new Date(), + isCurrent: false, + description: '', + skills: [], + location: { city: '', country: '' } + }); + const [newEducation, setNewEducation] = useState>({ + institution: '', + degree: '', + fieldOfStudy: '', + startDate: new Date(), + isCurrent: false + }); + const [newLanguage, setNewLanguage] = useState>({ + language: '', + proficiency: 'basic' + }); + const [newCertification, 
setNewCertification] = useState>({ + name: '', + issuingOrganization: '', + issueDate: new Date() + }); + + useEffect(() => { + if (candidate) { + setFormData(candidate); + setProfileImage(candidate.profileImage || null); + } + }, [candidate]); + + if (!candidate) { + return ( + + + Access denied. This page is only available for candidates. + + + ); + } + + // Handle tab change + const handleTabChange = (event: React.SyntheticEvent, newValue: number) => { + setTabValue(newValue); + }; + + // Handle form input changes + const handleInputChange = (field: string, value: any) => { + setFormData({ + ...formData, + [field]: value, + }); + }; + + // Handle profile image upload + const handleImageUpload = (e: React.ChangeEvent) => { + if (e.target.files && e.target.files[0]) { + const reader = new FileReader(); + reader.onload = (event) => { + if (event.target?.result) { + setProfileImage(event.target.result.toString()); + } + }; + reader.readAsDataURL(e.target.files[0]); + } + }; + + // Handle resume upload + const handleResumeUpload = (e: React.ChangeEvent) => { + if (e.target.files && e.target.files[0]) { + setResumeFile(e.target.files[0]); + setSnackbar({ + open: true, + message: `Resume uploaded: ${e.target.files[0].name}`, + severity: 'success' + }); + } + }; + + // Toggle edit mode for a section + const toggleEditMode = (section: string) => { + setEditMode({ + ...editMode, + [section]: !editMode[section] + }); + }; + + // Save changes + const handleSave = async (section: string) => { + setLoading(true); + try { + if (candidate.id) { + const updatedCandidate = await apiClient.updateCandidate(candidate.id, formData); +// updateUser(updatedCandidate); + setSnackbar({ + open: true, + message: 'Profile updated successfully!', + severity: 'success' + }); + toggleEditMode(section); + } + } catch (error) { + setSnackbar({ + open: true, + message: 'Failed to update profile. 
Please try again.', + severity: 'error' + }); + } finally { + setLoading(false); + } + }; + + // Cancel edit + const handleCancel = (section: string) => { + setFormData(candidate); + toggleEditMode(section); + }; + + // Add new skill + const handleAddSkill = () => { + if (newSkill.name && newSkill.category) { + const updatedSkills = [...(formData.skills || []), newSkill as Types.Skill]; + setFormData({ ...formData, skills: updatedSkills }); + setNewSkill({ name: '', category: '', level: 'beginner', yearsOfExperience: 0 }); + setSkillDialog(false); + } + }; + + // Remove skill + const handleRemoveSkill = (index: number) => { + const updatedSkills = (formData.skills || []).filter((_, i) => i !== index); + setFormData({ ...formData, skills: updatedSkills }); + }; + + // Add new work experience + const handleAddExperience = () => { + if (newExperience.companyName && newExperience.position) { + const updatedExperience = [...(formData.experience || []), newExperience as Types.WorkExperience]; + setFormData({ ...formData, experience: updatedExperience }); + setNewExperience({ + companyName: '', + position: '', + startDate: new Date(), + isCurrent: false, + description: '', + skills: [], + location: { city: '', country: '' } + }); + setExperienceDialog(false); + } + }; + + // Remove work experience + const handleRemoveExperience = (index: number) => { + const updatedExperience = (formData.experience || []).filter((_, i) => i !== index); + setFormData({ ...formData, experience: updatedExperience }); + }; + + // Basic Information Tab + const renderBasicInfo = () => ( + + + + + {!profileImage && !candidate.profileImage && } + + {editMode.basic && ( + <> + + + + + + Update profile photo + + + )} + + + + + {editMode.basic ? ( + handleInputChange('firstName', e.target.value)} + variant="outlined" + /> + ) : ( + + First Name: {candidate.firstName} + + )} + + + + {editMode.basic ? 
( + handleInputChange('lastName', e.target.value)} + variant="outlined" + /> + ) : ( + + Last Name: {candidate.lastName} + + )} + + + + {editMode.basic ? ( + handleInputChange('email', e.target.value)} + variant="outlined" + /> + ) : ( + + + Email: {candidate.email} + + )} + + + + {editMode.basic ? ( + handleInputChange('phone', e.target.value)} + variant="outlined" + /> + ) : ( + + + Phone: {candidate.phone || 'Not provided'} + + )} + + + + {editMode.basic ? ( + handleInputChange('summary', e.target.value)} + variant="outlined" + /> + ) : ( + + Professional Summary:
+ {candidate.summary || 'No summary provided'} +
+ )} +
+ + {/* + {editMode.basic ? ( + handleInputChange('location', { + ...formData.location, + city: e.target.value + })} + variant="outlined" + placeholder="City, State, Country" + /> + ) : ( + + + Location: {candidate.location?.city || 'Not specified'}, {candidate.location?.country || ''} + + )} + */} + + + + {editMode.basic ? ( + <> + + + + ) : ( + + )} + + +
+ ); + + // Skills Tab + const renderSkills = () => ( + + + Skills & Expertise + + + + + {(formData.skills || []).map((skill, index) => ( + + + + + + + {skill.name} + + + {skill.category} + + + {skill.yearsOfExperience && ( + + {skill.yearsOfExperience} years experience + + )} + + handleRemoveSkill(index)} + color="error" + sx={{ ml: 1 }} + > + + + + + + + ))} + + + {(!formData.skills || formData.skills.length === 0) && ( + + No skills added yet. Click "Add Skill" to get started. + + )} + + ); + + // Experience Tab + const renderExperience = () => ( + + + Work Experience + + + + {(formData.experience || []).map((exp, index) => ( + + + + + + {exp.position} + + + {exp.companyName} + + + {exp.startDate?.toLocaleDateString()} - {exp.isCurrent ? 'Present' : exp.endDate?.toLocaleDateString()} + + + {exp.description} + + {exp.skills && exp.skills.length > 0 && ( + + {exp.skills.map((skill, skillIndex) => ( + + ))} + + )} + + handleRemoveExperience(index)} + color="error" + size="small" + sx={{ + alignSelf: { xs: 'flex-end', sm: 'flex-start' }, + ml: { sm: 1 } + }} + > + + + + + + ))} + + {(!formData.experience || formData.experience.length === 0) && ( + + No work experience added yet. Click "Add Experience" to get started. + + )} + + ); + + // Resume Tab + const renderResume = () => ( + + Resume & Documents + + + + + + + Current Resume + + {candidate.resume ? ( + + Resume on file: {candidate.resume} + + ) : ( + + No resume uploaded + + )} + + + + + {resumeFile && ( + + New file selected: {resumeFile.name} + + )} + + + + + + + ); + + return ( + + + + + } + iconPosition={isMobile ? "top" : "start"} + /> + } + iconPosition={isMobile ? "top" : "start"} + /> + } + iconPosition={isMobile ? "top" : "start"} + /> + } + iconPosition={isMobile ? "top" : "start"} + /> + } + iconPosition={isMobile ? "top" : "start"} + /> + } + iconPosition={isMobile ? 
"top" : "start"} + /> + + + + + {renderBasicInfo()} + + + + {renderSkills()} + + + + {renderExperience()} + + + + + Education (Coming Soon) + + Education management will be available in a future update. + + + + + + {renderResume()} + + + + + + + + {/* Add Skill Dialog */} + setSkillDialog(false)} + maxWidth="sm" + fullWidth + fullScreen={isMobile} + PaperProps={{ + sx: { + ...(isMobile && { + margin: 0, + width: '100%', + height: '100%', + maxHeight: '100%' + }) + } + }} + > + Add New Skill + + + + setNewSkill({ ...newSkill, name: e.target.value })} + size={isMobile ? "small" : "medium"} + /> + + + setNewSkill({ ...newSkill, category: e.target.value })} + placeholder="e.g., Programming, Design, Marketing" + size={isMobile ? "small" : "medium"} + /> + + + + Proficiency Level + + + + + setNewSkill({ ...newSkill, yearsOfExperience: parseInt(e.target.value) || 0 })} + size={isMobile ? "small" : "medium"} + /> + + + + + + + + + + {/* Add Experience Dialog */} + setExperienceDialog(false)} + maxWidth="md" + fullWidth + fullScreen={isMobile} + PaperProps={{ + sx: { + ...(isMobile && { + margin: 0, + width: '100%', + height: '100%', + maxHeight: '100%' + }) + } + }} + > + Add Work Experience + + + + setNewExperience({ ...newExperience, companyName: e.target.value })} + size={isMobile ? "small" : "medium"} + /> + + + setNewExperience({ ...newExperience, position: e.target.value })} + size={isMobile ? "small" : "medium"} + /> + + + setNewExperience({ ...newExperience, startDate: new Date(e.target.value) })} + InputLabelProps={{ shrink: true }} + size={isMobile ? "small" : "medium"} + /> + + + setNewExperience({ ...newExperience, isCurrent: e.target.checked })} + size={isMobile ? 
"small" : "medium"} + /> + } + label="Currently working here" + sx={{ + '& .MuiFormControlLabel-label': { + fontSize: { xs: '0.875rem', sm: '1rem' } + } + }} + /> + + + setNewExperience({ ...newExperience, description: e.target.value })} + placeholder="Describe your responsibilities and achievements..." + size={isMobile ? "small" : "medium"} + /> + + + + + + + + + + {/* Snackbar for notifications */} + setSnackbar({ ...snackbar, open: false })} + > + setSnackbar({ ...snackbar, open: false })} + severity={snackbar.severity} + sx={{ width: '100%' }} + > + {snackbar.message} + + + + ); +}; + +export { CandidateProfilePage }; \ No newline at end of file diff --git a/frontend/src/services/api-client.ts b/frontend/src/services/api-client.ts index 1c24f99..a49bc00 100644 --- a/frontend/src/services/api-client.ts +++ b/frontend/src/services/api-client.ts @@ -33,6 +33,7 @@ import { convertFromApi, convertArrayFromApi } from 'types/types'; +import internal from 'stream'; // ============================ // Streaming Types and Interfaces @@ -290,14 +291,7 @@ class ApiClient { body: JSON.stringify(formatApiRequest(auth)) }); - // This could return either a full auth response or MFA request - const data = await response.json(); - - if (!response.ok) { - throw new Error(data.error?.message || 'Login failed'); - } - - return data.data; + return handleApiResponse(response); } /** @@ -524,10 +518,11 @@ class ApiClient { } async updateCandidate(id: string, updates: Partial): Promise { + const request = formatApiRequest(updates); const response = await fetch(`${this.baseUrl}/candidates/${id}`, { method: 'PATCH', headers: this.defaultHeaders, - body: JSON.stringify(formatApiRequest(updates)) + body: JSON.stringify(request) }); return this.handleApiResponseWithConversion(response, 'Candidate'); @@ -739,6 +734,47 @@ class ApiClient { return result; } + async getCandidateSimilarContent(query: string + ): Promise { + const response = await fetch(`${this.baseUrl}/candidates/rag-search`, { + 
method: 'POST', + headers: this.defaultHeaders, + body: JSON.stringify(query) + }); + + const result = await handleApiResponse(response); + + return result; + } + + async getCandidateVectors( + dimensions: number, + ): Promise { + const response = await fetch(`${this.baseUrl}/candidates/rag-vectors`, { + method: 'POST', + headers: this.defaultHeaders, + body: JSON.stringify(dimensions) + }); + + const result = await handleApiResponse(response); + + return result; + } + + async getCandidateContent( + doc_id: string, + ): Promise { + const response = await fetch(`${this.baseUrl}/candidates/rag-content`, { + method: 'POST', + headers: this.defaultHeaders, + body: JSON.stringify(doc_id) + }); + + const result = await handleApiResponse(response); + + return result; + } + /** * Create a chat session about a specific candidate */ @@ -809,7 +845,7 @@ class ApiClient { * Send message with streaming response support and date conversion */ sendMessageStream( - chatMessage: Types.ChatMessageUser, + chatMessage: Types.ChatMessageBase, options: StreamingOptions = {} ): StreamingResponse { const abortController = new AbortController(); diff --git a/frontend/src/types/types.ts b/frontend/src/types/types.ts index 37898e4..1078078 100644 --- a/frontend/src/types/types.ts +++ b/frontend/src/types/types.ts @@ -1,6 +1,6 @@ // Generated TypeScript types from Pydantic models // Source: src/backend/models.py -// Generated on: 2025-06-01T20:40:46.797024 +// Generated on: 2025-06-02T18:30:16.709256 // DO NOT EDIT MANUALLY - This file is auto-generated // ============================ @@ -13,9 +13,9 @@ export type ActivityType = "login" | "search" | "view_job" | "apply_job" | "mess export type ApplicationStatus = "applied" | "reviewing" | "interview" | "offer" | "rejected" | "accepted" | "withdrawn"; -export type ChatContextType = "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile"; +export type ChatContextType = 
"job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile" | "rag_search"; -export type ChatMessageType = "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "system" | "thinking" | "tooling" | "user"; +export type ChatMessageType = "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "rag_result" | "system" | "thinking" | "tooling" | "user"; export type ChatSenderType = "user" | "assistant" | "system"; @@ -145,7 +145,7 @@ export interface BaseUser { lastLogin?: Date; profileImage?: string; status: "active" | "inactive" | "pending" | "banned"; - isAdmin?: boolean; + isAdmin: boolean; } export interface BaseUserWithType { @@ -161,7 +161,7 @@ export interface BaseUserWithType { lastLogin?: Date; profileImage?: string; status: "active" | "inactive" | "pending" | "banned"; - isAdmin?: boolean; + isAdmin: boolean; userType: "candidate" | "employer" | "guest"; } @@ -178,7 +178,7 @@ export interface Candidate { lastLogin?: Date; profileImage?: string; status: "active" | "inactive" | "pending" | "banned"; - isAdmin?: boolean; + isAdmin: boolean; userType: "candidate"; username: string; description?: string; @@ -194,9 +194,9 @@ export interface Candidate { languages?: Array; certifications?: Array; jobApplications?: Array; - hasProfile?: boolean; + hasProfile: boolean; rags?: Array; - ragContentSize?: number; + ragContentSize: number; age?: number; gender?: "female" | "male"; ethnicity?: string; @@ -237,7 +237,7 @@ export interface Certification { } export interface ChatContext { - type: "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile"; + type: "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile" | "rag_search"; relatedEntityId?: string; relatedEntityType?: "job" | "candidate" | "employer"; 
additionalContext?: Record; @@ -248,11 +248,11 @@ export interface ChatMessage { sessionId: string; senderId?: string; status: "initializing" | "streaming" | "done" | "error"; - type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "system" | "thinking" | "tooling" | "user"; + type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "rag_result" | "system" | "thinking" | "tooling" | "user"; sender: "user" | "assistant" | "system"; timestamp: Date; tunables?: Tunables; - content?: string; + content: string; metadata?: ChatMessageMetaData; } @@ -261,32 +261,45 @@ export interface ChatMessageBase { sessionId: string; senderId?: string; status: "initializing" | "streaming" | "done" | "error"; - type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "system" | "thinking" | "tooling" | "user"; + type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "rag_result" | "system" | "thinking" | "tooling" | "user"; sender: "user" | "assistant" | "system"; timestamp: Date; tunables?: Tunables; - content?: string; + content: string; } export interface ChatMessageMetaData { model: "qwen2.5"; - temperature?: number; - maxTokens?: number; - topP?: number; + temperature: number; + maxTokens: number; + topP: number; frequencyPenalty?: number; presencePenalty?: number; stopSequences?: Array; ragResults?: Array; llmHistory?: Array; - evalCount?: number; - evalDuration?: number; - promptEvalCount?: number; - promptEvalDuration?: number; + evalCount: number; + evalDuration: number; + promptEvalCount: number; + promptEvalDuration: number; options?: ChatOptions; tools?: Record; timers?: Record; } +export interface ChatMessageRagSearch { + id?: string; + sessionId: string; + senderId?: string; + status: "done"; + type: "rag_result"; + sender: "user"; + timestamp: Date; + tunables?: Tunables; + content: string; + dimensions: number; 
+} + export interface ChatMessageUser { id?: string; sessionId: string; @@ -296,7 +309,7 @@ export interface ChatMessageUser { sender: "user"; timestamp: Date; tunables?: Tunables; - content?: string; + content: string; } export interface ChatOptions { @@ -320,23 +333,46 @@ export interface ChatSession { title?: string; context: ChatContext; messages?: Array; - isArchived?: boolean; + isArchived: boolean; systemPrompt?: string; } export interface ChromaDBGetResponse { - ids?: Array; - embeddings?: Array>; - documents?: Array; - metadatas?: Array>; - name?: string; - size?: number; - query?: string; + ids: Array; + embeddings: Array>; + documents: Array; + metadatas: Array>; + distances: Array; + name: string; + size: number; + dimensions: number; + query: string; queryEmbedding?: Array; umapEmbedding2D?: Array; umapEmbedding3D?: Array; } +export interface CreateCandidateRequest { + email: string; + username: string; + password: string; + firstName: string; + lastName: string; + phone?: string; +} + +export interface CreateEmployerRequest { + email: string; + username: string; + password: string; + companyName: string; + industry: string; + companySize: string; + companyDescription: string; + websiteUrl?: string; + phone?: string; +} + export interface CustomQuestion { question: string; answer: string; @@ -398,7 +434,7 @@ export interface Employer { lastLogin?: Date; profileImage?: string; status: "active" | "inactive" | "pending" | "banned"; - isAdmin?: boolean; + isAdmin: boolean; userType: "employer"; companyName: string; industry: string; @@ -486,8 +522,8 @@ export interface Job { benefits?: Array; visaSponsorship?: boolean; featuredUntil?: Date; - views?: number; - applicationCount?: number; + views: number; + applicationCount: number; } export interface JobApplication { @@ -521,8 +557,8 @@ export interface JobResponse { } export interface LLMMessage { - role?: string; - content?: string; + role: string; + content: string; toolCalls?: Array>; } @@ -572,7 +608,7 
@@ export interface MFAVerifyRequest { email: string; code: string; deviceId: string; - rememberDevice?: boolean; + rememberDevice: boolean; } export interface MessageReaction { @@ -588,8 +624,8 @@ export interface NotificationPreference { } export interface PaginatedRequest { - page?: number; - limit?: number; + page: number; + limit: number; sortBy?: string; sortOrder?: "asc" | "desc"; filters?: Record; @@ -634,10 +670,26 @@ export interface RAGConfiguration { isActive: boolean; } +export interface RagContentMetadata { + sourceFile: string; + lineBegin: number; + lineEnd: number; + lines: number; + chunkBegin?: number; + chunkEnd?: number; + metadata?: Record; +} + +export interface RagContentResponse { + id: string; + content: string; + metadata: RagContentMetadata; +} + export interface RagEntry { name: string; - description?: string; - enabled?: boolean; + description: string; + enabled: boolean; } export interface RefreshToken { @@ -674,8 +726,8 @@ export interface SalaryRange { export interface SearchQuery { query: string; filters?: Record; - page?: number; - limit?: number; + page: number; + limit: number; sortBy?: string; sortOrder?: "asc" | "desc"; } @@ -700,9 +752,9 @@ export interface SocialLink { } export interface Tunables { - enableRAG?: boolean; - enableTools?: boolean; - enableContext?: boolean; + enableRAG: boolean; + enableTools: boolean; + enableContext: boolean; } export interface UserActivity { @@ -898,6 +950,19 @@ export function convertChatMessageBaseFromApi(data: any): ChatMessageBase { timestamp: new Date(data.timestamp), }; } +/** + * Convert ChatMessageRagSearch from API response, parsing date fields + * Date fields: timestamp + */ +export function convertChatMessageRagSearchFromApi(data: any): ChatMessageRagSearch { + if (!data) return data; + + return { + ...data, + // Convert timestamp from ISO string to Date + timestamp: new Date(data.timestamp), + }; +} /** * Convert ChatMessageUser from API response, parsing date fields * Date 
fields: timestamp @@ -1159,6 +1224,8 @@ export function convertFromApi(data: any, modelType: string): T { return convertChatMessageFromApi(data) as T; case 'ChatMessageBase': return convertChatMessageBaseFromApi(data) as T; + case 'ChatMessageRagSearch': + return convertChatMessageRagSearchFromApi(data) as T; case 'ChatMessageUser': return convertChatMessageUserFromApi(data) as T; case 'ChatSession': diff --git a/src/backend/agents/base.py b/src/backend/agents/base.py index 472ae6d..3ed7fab 100644 --- a/src/backend/agents/base.py +++ b/src/backend/agents/base.py @@ -60,7 +60,7 @@ class Agent(BaseModel, ABC): return self # Agent properties - system_prompt: str # Mandatory + system_prompt: str = "" context_tokens: int = 0 # context_size is shared across all subclasses diff --git a/src/backend/agents/rag_search.py b/src/backend/agents/rag_search.py new file mode 100644 index 0000000..663ed72 --- /dev/null +++ b/src/backend/agents/rag_search.py @@ -0,0 +1,98 @@ +from __future__ import annotations +from typing import Literal, AsyncGenerator, ClassVar, Optional, Any, List +from datetime import datetime, UTC +import inspect + +from .base import Agent, agent_registry +from logger import logger + +from .registry import agent_registry +from models import ( ChatMessage, ChatStatusType, ChatMessage, ChatOptions, ChatMessageType, ChatSenderType, ChatStatusType, ChatMessageMetaData, Candidate ) +from rag import ( ChromaDBGetResponse ) + +class Chat(Agent): + """ + Chat Agent + """ + + agent_type: Literal["rag_search"] = "rag_search" # type: ignore + _agent_type: ClassVar[str] = agent_type # Add this for registration + + async def generate( + self, llm: Any, model: str, user_message: ChatMessage, user: Candidate, temperature=0.7 + ) -> AsyncGenerator[ChatMessage, None]: + """ + Generate a response based on the user message and the provided LLM. + + Args: + llm: The language model to use for generation. + model: The specific model to use. + user_message: The message from the user. 
+ user: Optional user information. + temperature: The temperature setting for generation. + + Yields: + ChatMessage: The generated response. + """ + logger.info(f"{self.agent_type} - {inspect.stack()[0].function}") + + if user.id != user_message.sender_id: + logger.error(f"User {user.username} id does not match message {user_message.sender_id}") + raise ValueError("User does not match message sender") + + chat_message = ChatMessage( + session_id=user_message.session_id, + tunables=user_message.tunables, + status=ChatStatusType.INITIALIZING, + type=ChatMessageType.PREPARING, + sender=ChatSenderType.ASSISTANT, + content="", + timestamp=datetime.now(UTC) + ) + + chat_message.metadata = ChatMessageMetaData() + chat_message.metadata.options = ChatOptions( + seed=8911, + num_ctx=self.context_size, + temperature=temperature, # Higher temperature to encourage tool usage + ) + + # Create a dict for storing various timing stats + chat_message.metadata.timers = {} + + self.metrics.generate_count.labels(agent=self.agent_type).inc() + with self.metrics.generate_duration.labels(agent=self.agent_type).time(): + + rag_message : Optional[ChatMessage] = None + async for rag_message in self.generate_rag_results(chat_message=user_message): + if rag_message.status == ChatStatusType.ERROR: + chat_message.status = rag_message.status + chat_message.content = rag_message.content + yield chat_message + return + yield rag_message + + if rag_message: + chat_message.content = "" + rag_results: List[ChromaDBGetResponse] = rag_message.metadata.rag_results + chat_message.metadata.rag_results = rag_results + for chroma_results in rag_results: + for index, metadata in enumerate(chroma_results.metadatas): + content = "\n".join([ + line.strip() + for line in chroma_results.documents[index].split("\n") + if line + ]).strip() + chat_message.content += f""" +Source: {metadata.get("doc_type", "unknown")}: {metadata.get("path", "")} +Document reference: {chroma_results.ids[index]} +Content: { content } + 
+""" + + chat_message.status = ChatStatusType.DONE + chat_message.type = ChatMessageType.RAG_RESULT + yield chat_message + +# Register the base agent +agent_registry.register(Chat._agent_type, Chat) diff --git a/src/backend/entities/candidate_entity.py b/src/backend/entities/candidate_entity.py index 20b6381..260ed63 100644 --- a/src/backend/entities/candidate_entity.py +++ b/src/backend/entities/candidate_entity.py @@ -63,7 +63,7 @@ class CandidateEntity(Candidate): # Check if file exists return user_info_path.is_file() - def get_or_create_agent(self, agent_type: ChatContextType, **kwargs) -> agents.Agent: + def get_or_create_agent(self, agent_type: ChatContextType, **kwargs) -> agents.Agent: """ Get or create an agent of the specified type for this candidate. diff --git a/src/backend/generate_types.py b/src/backend/generate_types.py index a355e30..904005a 100644 --- a/src/backend/generate_types.py +++ b/src/backend/generate_types.py @@ -382,10 +382,11 @@ def is_field_optional(field_info: Any, field_type: Any, debug: bool = False) -> print(f" └─ RESULT: Required (has specific enum default: {default_val.value})") return False - # Any other actual default value makes it optional + # FIXED: Fields with actual default values (like [], "", 0) should be REQUIRED + # because they will always have a value (either provided or the default) if debug: - print(f" └─ RESULT: Optional (has actual default value)") - return True + print(f" └─ RESULT: Required (has actual default value - field will always have a value)") + return False # Changed from True to False else: if debug: print(f" └─ No default attribute found") diff --git a/src/backend/main.py b/src/backend/main.py index 30c7d3a..9416e29 100644 --- a/src/backend/main.py +++ b/src/backend/main.py @@ -50,6 +50,7 @@ from llm_manager import llm_manager import entities from email_service import VerificationEmailRateLimiter, email_service from device_manager import DeviceManager +import agents # ============================= # 
Import Pydantic models @@ -65,10 +66,11 @@ from models import ( Job, JobApplication, ApplicationStatus, # Chat models - ChatSession, ChatMessage, ChatContext, ChatQuery, ChatStatusType, ChatMessageBase, ChatMessageUser, ChatSenderType, ChatMessageType, + ChatSession, ChatMessage, ChatContext, ChatQuery, ChatStatusType, ChatMessageBase, ChatMessageUser, ChatSenderType, ChatMessageType, ChatContextType, + ChatMessageRagSearch, # Supporting models - Location, MFARequest, MFAData, MFARequestResponse, MFAVerifyRequest, ResendVerificationRequest, Skill, WorkExperience, Education, + Location, MFARequest, MFAData, MFARequestResponse, MFAVerifyRequest, RagContentResponse, ResendVerificationRequest, Skill, WorkExperience, Education, # Email EmailVerificationRequest @@ -161,10 +163,10 @@ ALGORITHM = "HS256" @app.exception_handler(RequestValidationError) async def validation_exception_handler(request: Request, exc: RequestValidationError): logger.error(traceback.format_exc()) - logger.error("❌ Validation error:", exc.errors()) + logger.error(f"❌ Validation error {request.method} {request.url.path}: {str(exc)}") return JSONResponse( status_code=HTTP_422_UNPROCESSABLE_ENTITY, - content=json.dumps({"detail": exc.errors()}), + content=json.dumps({"detail": str(exc)}), ) # ============================ @@ -228,13 +230,16 @@ async def get_current_user( # Check candidates candidate = await database.get_candidate(user_id) if candidate: + # logger.info(f"🔑 Current user is candidate: {candidate['id']}") return Candidate.model_validate(candidate) # Check employers employer = await database.get_employer(user_id) if employer: + # logger.info(f"🔑 Current user is employer: {employer['id']}") return Employer.model_validate(employer) + logger.warning(f"⚠️ User {user_id} not found in database") raise HTTPException(status_code=404, detail="User not found") except Exception as e: @@ -324,6 +329,65 @@ def filter_and_paginate( return paginated_items, total +async def 
stream_agent_response(chat_agent: agents.Agent, + user_message: ChatMessageUser, + candidate: Candidate, + chat_session_data: Dict[str, Any] | None = None, + database: RedisDatabase | None = None) -> StreamingResponse: + async def message_stream_generator(): + """Generator to stream messages with persistence""" + last_log = None + final_message = None + + async for generated_message in chat_agent.generate( + llm=llm_manager.get_llm(), + model=defines.model, + user_message=user_message, + user=candidate, + ): + # Store reference to the complete AI message + if generated_message.status == ChatStatusType.DONE: + final_message = generated_message + + # If the message is not done, convert it to a ChatMessageBase to remove + # metadata and other unnecessary fields for streaming + if generated_message.status != ChatStatusType.DONE: + generated_message = model_cast.cast_to_model(ChatMessageBase, generated_message) + + json_data = generated_message.model_dump(mode='json', by_alias=True, exclude_unset=True) + json_str = json.dumps(json_data) + + log = f"🔗 Message status={generated_message.status}, sender={getattr(generated_message, 'sender', 'unknown')}" + if last_log != log: + last_log = log + logger.info(log) + + yield f"data: {json_str}\n\n" + + # After streaming is complete, persist the final AI message to database + if final_message and final_message.status == ChatStatusType.DONE: + try: + if database and chat_session_data: + await database.add_chat_message(final_message.session_id, final_message.model_dump()) + logger.info(f"🤖 Message saved to database for session {final_message.session_id}") + + # Update session last activity again + chat_session_data["lastActivity"] = datetime.now(UTC).isoformat() + await database.set_chat_session(final_message.session_id, chat_session_data) + + except Exception as e: + logger.error(f"❌ Failed to save message to database: {e}") + + return StreamingResponse( + message_stream_generator(), + media_type="text/event-stream", + headers={ + 
"Cache-Control": "no-cache", + "Connection": "keep-alive", + "X-Accel-Buffering": "no", + }, + ) + # ============================ # API Router Setup # ============================ @@ -709,12 +773,14 @@ async def verify_email( verification_data = await database.get_email_verification_token(request.token) if not verification_data: + logger.warning(f"⚠️ Invalid verification token: {request.token}") return JSONResponse( status_code=400, content=create_error_response("INVALID_TOKEN", "Invalid or expired verification token") ) if verification_data.get("verified"): + logger.warning(f"⚠️ Attempt to verify already verified email: {verification_data['email']}") return JSONResponse( status_code=400, content=create_error_response("ALREADY_VERIFIED", "Email already verified") @@ -723,6 +789,7 @@ async def verify_email( # Check expiration expires_at = datetime.fromisoformat(verification_data["expires_at"]) if datetime.now(timezone.utc) > expires_at: + logger.warning(f"⚠️ Verification token expired for: {verification_data['email']}") return JSONResponse( status_code=400, content=create_error_response("TOKEN_EXPIRED", "Verification token has expired") @@ -1398,6 +1465,93 @@ async def get_candidate( content=create_error_response("FETCH_ERROR", str(e)) ) +@api_router.post("/candidates/rag-content") +async def post_candidate_vector_content( + doc_id: str = Body(...), + current_user = Depends(get_current_user) +): + try: + if current_user.user_type != "candidate": + return JSONResponse( + status_code=403, + content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint") + ) + candidate : Candidate = current_user + + async with entities.get_candidate_entity(candidate=candidate) as candidate_entity: + collection = candidate_entity.umap_collection + if not collection: + return JSONResponse( + {"error": "No UMAP collection found"}, status_code=404 + ) + + if not collection.get("metadatas", None): + return JSONResponse(f"Document id {doc_id} not found.", 404) + + for 
index, id in enumerate(collection.get("ids", [])): + if id == doc_id: + metadata = collection.get("metadatas", [])[index].copy() + content = candidate_entity.file_watcher.prepare_metadata(metadata) + rag_response = RagContentResponse(id=id, content=content, metadata=metadata) + logger.info(f"✅ Fetched RAG content for document id {id} for candidate {candidate.username}") + return create_success_response(rag_response.model_dump(by_alias=True, exclude_unset=True)) + + return JSONResponse(f"Document id {doc_id} not found.", 404) + except Exception as e: + logger.error(f"❌ Post candidate content error: {e}") + return JSONResponse( + status_code=500, + content=create_error_response("FETCH_ERROR", str(e)) + ) + +@api_router.post("/candidates/rag-vectors") +async def post_candidate_vectors( + dimensions: int = Body(...), + current_user = Depends(get_current_user) +): + try: + if current_user.user_type != "candidate": + return JSONResponse( + status_code=403, + content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint") + ) + candidate : Candidate = current_user + + async with entities.get_candidate_entity(candidate=candidate) as candidate_entity: + collection = candidate_entity.umap_collection + if not collection: + logger.error(f"❌ Candidate collection not found") + return JSONResponse( + status_code=404, + content=create_error_response("NOT_FOUND", "Candidate collection not found") + ) + if dimensions == 2: + umap_embedding = candidate_entity.file_watcher.umap_embedding_2d + else: + umap_embedding = candidate_entity.file_watcher.umap_embedding_3d + + if len(umap_embedding) == 0: + return JSONResponse( + status_code=404, + content=create_error_response("NOT_FOUND", "Candidate collection embedding not found") + ) + result = { + "ids": collection.get("ids", []), + "metadatas": collection.get("metadatas", []), + "documents": collection.get("documents", []), + "embeddings": umap_embedding.tolist(), + "size": 
candidate_entity.file_watcher.collection.count() + } + + return create_success_response(result) + + except Exception as e: + logger.error(f"❌ Post candidate vectors error: {e}") + return JSONResponse( + status_code=500, + content=create_error_response("FETCH_ERROR", str(e)) + ) + @api_router.patch("/candidates/{candidate_id}") async def update_candidate( candidate_id: str = Path(...), @@ -1418,6 +1572,7 @@ async def update_candidate( # Check authorization (user can only update their own profile) if candidate.id != current_user.id: + logger.warning(f"⚠️ Unauthorized update attempt by user {current_user.id} on candidate {candidate_id}") return JSONResponse( status_code=403, content=create_error_response("FORBIDDEN", "Cannot update another user's profile") @@ -1772,6 +1927,56 @@ async def get_chat_statistics( content=create_error_response("STATS_ERROR", str(e)) ) +@api_router.post("/candidates/rag-search") +async def post_candidate_rag_search( + query: str = Body(...), + current_user = Depends(get_current_user) +): + """Get chat activity summary for a candidate""" + try: + if current_user.user_type != "candidate": + logger.warning(f"⚠️ Unauthorized RAG search attempt by user {current_user.id}") + return JSONResponse( + status_code=403, + content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint") + ) + + candidate : Candidate = current_user + chat_type = ChatContextType.RAG_SEARCH + # Get RAG search data + async with entities.get_candidate_entity(candidate=candidate) as candidate_entity: + # Entity automatically released when done + chat_agent = candidate_entity.get_or_create_agent(agent_type=chat_type) + if not chat_agent: + return JSONResponse( + status_code=400, + content=create_error_response("AGENT_NOT_FOUND", "No agent found for this chat type") + ) + + user_message = ChatMessageUser(sender_id=candidate.id, session_id="", content=query, timestamp=datetime.now(UTC)) + rag_message = None + async for generated_message in 
chat_agent.generate( + llm=llm_manager.get_llm(), + model=defines.model, + user_message=user_message, + user=candidate, + ): + rag_message = generated_message + + if not rag_message: + return JSONResponse( + status_code=500, + content=create_error_response("NO_RESPONSE", "No response generated for the RAG search") + ) + return create_success_response(rag_message.metadata.rag_results[0].model_dump(by_alias=True, exclude_unset=True)) + + except Exception as e: + logger.error(f"❌ Get candidate chat summary error: {e}") + return JSONResponse( + status_code=500, + content=create_error_response("SUMMARY_ERROR", str(e)) + ) + @api_router.get("/candidates/{username}/chat-summary") async def get_candidate_chat_summary( username: str = Path(...), @@ -1985,58 +2190,13 @@ async def post_chat_session_message_stream( chat_session_data["lastActivity"] = datetime.now(UTC).isoformat() await database.set_chat_session(user_message.session_id, chat_session_data) - async def message_stream_generator(): - """Generator to stream messages with persistence""" - last_log = None - final_message = None - - async for generated_message in chat_agent.generate( - llm=llm_manager.get_llm(), - model=defines.model, - user_message=user_message, - user=current_user, - ): - # Store reference to the complete AI message - if generated_message.status == ChatStatusType.DONE: - final_message = generated_message - - # If the message is not done, convert it to a ChatMessageBase to remove - # metadata and other unnecessary fields for streaming - if generated_message.status != ChatStatusType.DONE: - generated_message = model_cast.cast_to_model(ChatMessageBase, generated_message) - - json_data = generated_message.model_dump(mode='json', by_alias=True, exclude_unset=True) - json_str = json.dumps(json_data) - - log = f"🔗 Message status={generated_message.status}, sender={getattr(generated_message, 'sender', 'unknown')}" - if last_log != log: - last_log = log - logger.info(log) - - yield f"data: {json_str}\n\n" - - 
# After streaming is complete, persist the final AI message to database - if final_message and final_message.status == ChatStatusType.DONE: - try: - await database.add_chat_message(final_message.session_id, final_message.model_dump()) - logger.info(f"🤖 AI message saved to database for session {final_message.session_id}") - - # Update session last activity again - chat_session_data["lastActivity"] = datetime.now(UTC).isoformat() - await database.set_chat_session(final_message.session_id, chat_session_data) - - except Exception as e: - logger.error(f"❌ Failed to save AI message to database: {e}") - - return StreamingResponse( - message_stream_generator(), - media_type="text/event-stream", - headers={ - "Cache-Control": "no-cache", - "Connection": "keep-alive", - "X-Accel-Buffering": "no", - }, - ) + return await stream_agent_response( + chat_agent=chat_agent, + user_message=user_message, + candidate=candidate, + database=database, + chat_session_data=chat_session_data, + ) except Exception as e: logger.error(traceback.format_exc()) @@ -2544,10 +2704,11 @@ logger.info(f"Debug mode is {'enabled' if defines.debug else 'disabled'}") async def log_requests(request: Request, call_next): try: if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path): - logger.info(f"Request path: {request.url.path}, Method: {request.method}, Remote: {request.client.host}") + logger.info(f"📝 Request {request.method}: {request.url.path}, Remote: {request.client.host}") response = await call_next(request) if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path): - logger.info(f"Response status: {response.status_code}, Path: {request.url.path}, Method: {request.method}") + if response.status_code < 200 or response.status_code >= 300: + logger.warning(f"⚠️ Response {request.method} {response.status_code}: Path: {request.url.path}") return response except Exception as e: logger.error(f"❌ Error processing request: {str(e)}, Path: 
{request.url.path}, Method: {request.method}") diff --git a/src/backend/models.py b/src/backend/models.py index d9ea954..64fd8ef 100644 --- a/src/backend/models.py +++ b/src/backend/models.py @@ -81,6 +81,7 @@ class ChatMessageType(str, Enum): PROCESSING = "processing" RESPONSE = "response" SEARCHING = "searching" + RAG_RESULT = "rag_result" SYSTEM = "system" THINKING = "thinking" TOOLING = "tooling" @@ -100,6 +101,7 @@ class ChatContextType(str, Enum): GENERAL = "general" GENERATE_PERSONA = "generate_persona" GENERATE_PROFILE = "generate_profile" + RAG_SEARCH = "rag_search" class AIModelType(str, Enum): QWEN2_5 = "qwen2.5" @@ -461,6 +463,23 @@ class RagEntry(BaseModel): description: str = "" enabled: bool = True +class RagContentMetadata(BaseModel): + source_file: str = Field(..., alias="sourceFile") + line_begin: int = Field(..., alias="lineBegin") + line_end: int = Field(..., alias="lineEnd") + lines: int + chunk_begin: Optional[int] = Field(None, alias="chunkBegin") + chunk_end: Optional[int] = Field(None, alias="chunkEnd") + metadata: Dict[str, Any] = Field(default_factory=dict) + model_config = { + "populate_by_name": True, # Allow both field names and aliases + } + +class RagContentResponse(BaseModel): + id: str + content: str + metadata: RagContentMetadata + class Candidate(BaseUser): user_type: Literal[UserType.CANDIDATE] = Field(UserType.CANDIDATE, alias="userType") username: str @@ -618,12 +637,14 @@ class JobApplication(BaseModel): class ChromaDBGetResponse(BaseModel): # Chroma fields ids: List[str] = [] - embeddings: List[List[float]] = Field(default=[]) + embeddings: List[List[float]] = [] documents: List[str] = [] metadatas: List[Dict[str, Any]] = [] + distances: List[float] = [] # Additional fields name: str = "" size: int = 0 + dimensions: int = 3 # 2 (2D) or 3 (3D); '2 | 3' bitwise-ORs to 3 query: str = "" query_embedding: Optional[List[float]] = Field(default=None, alias="queryEmbedding") umap_embedding_2d: Optional[List[float]] = Field(default=None, alias="umapEmbedding2D") @@ -663,6 
+684,12 @@ class ChatMessageBase(BaseModel): "populate_by_name": True # Allow both field names and aliases } +class ChatMessageRagSearch(ChatMessageBase): + status: ChatStatusType = ChatStatusType.DONE + type: ChatMessageType = ChatMessageType.RAG_RESULT + sender: ChatSenderType = ChatSenderType.USER + dimensions: int = 3 # 2 (2D) or 3 (3D); '2 | 3' bitwise-ORs to 3 + class ChatMessageMetaData(BaseModel): model: AIModelType = AIModelType.QWEN2_5 temperature: float = 0.7