RAG working in candidate page

parent: bb84709f44
commit: 149bbdf73b
@@ -21,6 +21,15 @@ import { connectionBase } from '../utils/Global';
 import './VectorVisualizer.css';
 import { BackstoryPageProps } from './BackstoryTab';
+import { useAuth } from 'hooks/AuthContext';
+import * as Types from 'types/types';
+import { useSelectedCandidate } from 'hooks/GlobalContext';
+import { useNavigate } from 'react-router-dom';
+import { Message } from './Message';
+
+const defaultMessage: Types.ChatMessageBase = {
+    type: "preparing", status: "done", sender: "system", sessionId: "", timestamp: new Date(), content: ""
+};
 
 interface VectorVisualizerProps extends BackstoryPageProps {
     inline?: boolean;
@@ -29,23 +38,11 @@ interface VectorVisualizerProps extends BackstoryPageProps {
 interface Metadata {
     id: string;
-    doc_type: string;
+    docType: string;
     content: string;
     distance?: number;
 }
 
-type QuerySet = {
-    ids: string[],
-    documents: string[],
-    metadatas: Metadata[],
-    embeddings: (number[])[],
-    distances?: (number | undefined)[],
-    dimensions?: number;
-    query?: string;
-    umap_embedding_2d?: number[];
-    umap_embedding_3d?: number[];
-};
-
 const emptyQuerySet = {
     ids: [],
     documents: [],
@@ -173,25 +170,27 @@ const DEFAULT_UNFOCUS_SIZE = 2.;
 type Node = {
     id: string,
     content: string, // Portion of content that was used for embedding
-    full_content: string | undefined, // Portion of content plus/minus buffer
+    fullContent: string | undefined, // Portion of content plus/minus buffer
     emoji: string,
-    doc_type: string,
+    docType: string,
     source_file: string,
     distance: number | undefined,
     path: string,
-    chunk_begin: number,
-    line_begin: number,
-    chunk_end: number,
-    line_end: number,
+    chunkBegin: number,
+    lineBegin: number,
+    chunkEnd: number,
+    lineEnd: number,
     sx: SxProps,
 };
 
 const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualizerProps) => {
-    const { setSnack, rag, inline, sx } = props;
+    const { user, apiClient } = useAuth();
+    const { setSnack, submitQuery, rag, inline, sx } = props;
+    const backstoryProps = { setSnack, submitQuery };
     const [plotData, setPlotData] = useState<PlotData | null>(null);
     const [newQuery, setNewQuery] = useState<string>('');
-    const [querySet, setQuerySet] = useState<QuerySet>(rag || emptyQuerySet);
-    const [result, setResult] = useState<QuerySet | undefined>(undefined);
+    const [querySet, setQuerySet] = useState<Types.ChromaDBGetResponse>(rag || emptyQuerySet);
+    const [result, setResult] = useState<Types.ChromaDBGetResponse | null>(null);
     const [view2D, setView2D] = useState<boolean>(true);
     const plotlyRef = useRef(null);
     const boxRef = useRef<HTMLElement>(null);
@@ -199,6 +198,9 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
     const theme = useTheme();
     const isMobile = useMediaQuery(theme.breakpoints.down('md'));
     const [plotDimensions, setPlotDimensions] = useState({ width: 0, height: 0 });
+    const navigate = useNavigate();
 
+    const candidate: Types.Candidate | null = user?.userType === 'candidate' ? user : null;
+
     /* Force resize of Plotly as it tends to not be the correct size if it is initially rendered
      * off screen (eg., the VectorVisualizer is not on the tab the app loads to) */
@@ -225,21 +227,16 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
 
     // Get the collection to visualize
     useEffect(() => {
-        if ((result !== undefined && result.dimensions !== (view2D ? 3 : 2))) {
+        if (result) {
             return;
         }
         const fetchCollection = async () => {
+            if (!candidate) {
+                return;
+            }
             try {
-                const response = await fetch(connectionBase + `/api/umap/`, {
-                    method: 'PUT',
-                    headers: {
-                        'Content-Type': 'application/json',
-                    },
-                    body: JSON.stringify({ dimensions: view2D ? 2 : 3 }),
-                });
-                const data: QuerySet = await response.json();
-                data.dimensions = view2D ? 2 : 3;
-                setResult(data);
+                const result = await apiClient.getCandidateVectors(view2D ? 2 : 3);
+                setResult(result);
             } catch (error) {
                 console.error('Error obtaining collection information:', error);
                 setSnack("Unable to obtain collection information.", "error");
@@ -253,7 +250,8 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
         if (!result || !result.embeddings) return;
         if (result.embeddings.length === 0) return;
 
-        const full: QuerySet = {
+        const full: Types.ChromaDBGetResponse = {
+            ...result,
             ids: [...result.ids || []],
             documents: [...result.documents || []],
             embeddings: [...result.embeddings],
@@ -270,18 +268,27 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
             return;
         }
 
-        let query: QuerySet = {
+        let query: Types.ChromaDBGetResponse = {
             ids: [],
             documents: [],
             embeddings: [],
             metadatas: [],
             distances: [],
+            query: '',
+            size: 0,
+            dimensions: 2,
+            name: ''
         };
-        let filtered: QuerySet = {
+        let filtered: Types.ChromaDBGetResponse = {
             ids: [],
             documents: [],
             embeddings: [],
             metadatas: [],
+            distances: [],
+            query: '',
+            size: 0,
+            dimensions: 2,
+            name: ''
         };
 
         /* Loop through all items and divide into two groups:
@@ -310,30 +317,30 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
             }
         });
 
-        if (view2D && querySet.umap_embedding_2d && querySet.umap_embedding_2d.length) {
+        if (view2D && querySet.umapEmbedding2D && querySet.umapEmbedding2D.length) {
             query.ids.unshift('query');
-            query.metadatas.unshift({ id: 'query', doc_type: 'query', content: querySet.query || '', distance: 0 });
-            query.embeddings.unshift(querySet.umap_embedding_2d);
+            query.metadatas.unshift({ id: 'query', docType: 'query', content: querySet.query || '', distance: 0 });
+            query.embeddings.unshift(querySet.umapEmbedding2D);
         }
 
-        if (!view2D && querySet.umap_embedding_3d && querySet.umap_embedding_3d.length) {
+        if (!view2D && querySet.umapEmbedding3D && querySet.umapEmbedding3D.length) {
             query.ids.unshift('query');
-            query.metadatas.unshift({ id: 'query', doc_type: 'query', content: querySet.query || '', distance: 0 });
-            query.embeddings.unshift(querySet.umap_embedding_3d);
+            query.metadatas.unshift({ id: 'query', docType: 'query', content: querySet.query || '', distance: 0 });
+            query.embeddings.unshift(querySet.umapEmbedding3D);
         }
 
-        const filtered_doc_types = filtered.metadatas.map(m => m.doc_type || 'unknown')
-        const query_doc_types = query.metadatas.map(m => m.doc_type || 'unknown')
+        const filtered_docTypes = filtered.metadatas.map(m => m.docType || 'unknown')
+        const query_docTypes = query.metadatas.map(m => m.docType || 'unknown')
 
         const has_query = query.metadatas.length > 0;
         const filtered_sizes = filtered.metadatas.map(m => has_query ? DEFAULT_UNFOCUS_SIZE : DEFAULT_SIZE);
-        const filtered_colors = filtered_doc_types.map(type => colorMap[type] || '#ff8080');
+        const filtered_colors = filtered_docTypes.map(type => colorMap[type] || '#4d4d4d');
         const filtered_x = normalizeDimension(filtered.embeddings.map((v: number[]) => v[0]));
         const filtered_y = normalizeDimension(filtered.embeddings.map((v: number[]) => v[1]));
         const filtered_z = is3D ? normalizeDimension(filtered.embeddings.map((v: number[]) => v[2])) : undefined;
 
         const query_sizes = query.metadatas.map(m => DEFAULT_SIZE + 2. * DEFAULT_SIZE * Math.pow((1. - (m.distance || 1.)), 3));
-        const query_colors = query_doc_types.map(type => colorMap[type] || '#ff8080');
+        const query_colors = query_docTypes.map(type => colorMap[type] || '#4d4d4d');
         const query_x = normalizeDimension(query.embeddings.map((v: number[]) => v[0]));
         const query_y = normalizeDimension(query.embeddings.map((v: number[]) => v[1]));
         const query_z = is3D ? normalizeDimension(query.embeddings.map((v: number[]) => v[2])) : undefined;
@@ -388,22 +395,14 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
     const sendQuery = async (query: string) => {
         if (!query.trim()) return;
         setNewQuery('');
 
         try {
-            const response = await fetch(`${connectionBase}/api/similarity/`, {
-                method: 'PUT',
-                headers: {
-                    'Content-Type': 'application/json',
-                },
-                body: JSON.stringify({
-                    query: query,
-                    dimensions: view2D ? 2 : 3,
-                })
-            });
-            const data = await response.json();
-            setQuerySet(data);
+            const result = await apiClient.getCandidateSimilarContent(query);
+            console.log(result);
+            setQuerySet(result);
         } catch (error) {
-            console.error('Error obtaining query similarity information:', error);
-            setSnack("Unable to obtain query similarity information.", "error");
+            const msg = `Error obtaining similar content to ${query}.`
+            setSnack(msg, "error");
         };
     };
 
@@ -413,18 +412,18 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
         </Box>
     );
 
+    if (!candidate) return (
+        <Box sx={{ display: 'flex', flexGrow: 1, justifyContent: 'center', alignItems: 'center' }}>
+            <div>No candidate selected. Please <Button onClick={() => navigate('/find-a-candidate')}>select a candidate</Button> first.</div>
+        </Box>
+    );
+
     const fetchRAGMeta = async (node: Node) => {
         try {
-            const response = await fetch(connectionBase + `/api/umap/entry/${node.id}`, {
-                method: 'GET',
-                headers: {
-                    'Content-Type': 'application/json',
-                },
-            });
-
+            const result = await apiClient.getCandidateContent(node.id);
             const update: Node = {
                 ...node,
-                full_content: await response.json()
+                fullContent: result.content
             }
             setNode(update);
         } catch (error) {
@@ -436,14 +435,15 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = (props: VectorVisualiz
 
     const onNodeSelected = (metadata: any) => {
         let node: Node;
-        if (metadata.doc_type === 'query') {
+        console.log(metadata);
+        if (metadata.docType === 'query') {
             node = {
                 ...metadata,
                 content: `Similarity results for the query **${querySet.query || ''}**
 
 The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '2' : '3'}-dimensional space. Larger dots represent relative similarity in N-dimensional space.
 `,
-                emoji: emojiMap[metadata.doc_type],
+                emoji: emojiMap[metadata.docType],
                 sx: {
                     m: 0.5,
                     p: 2,
@@ -453,7 +453,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '
                     justifyContent: "center",
                     flexGrow: 0,
                     flexWrap: "wrap",
-                    backgroundColor: colorMap[metadata.doc_type] || '#ff8080',
+                    backgroundColor: colorMap[metadata.docType] || '#ff8080',
                 }
             }
             setNode(node);
@@ -463,7 +463,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '
             node = {
                 content: `Loading...`,
                 ...metadata,
-                emoji: emojiMap[metadata.doc_type] || '❓',
+                emoji: emojiMap[metadata.docType] || '❓',
             }
 
             setNode(node);
@@ -499,7 +499,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '
                     flexBasis: 0,
                     flexGrow: 0
                 }}
-                control={<Switch checked={!view2D} />} onChange={() => setView2D(!view2D)} label="3D" />
+                control={<Switch checked={!view2D} />} onChange={() => { setView2D(!view2D); setResult(null); }} label="3D" />
             <Plot
                 ref={plotlyRef}
                 onClick={(event: any) => { onNodeSelected(event.points[0].customdata); }}
@@ -528,7 +528,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '
             <TableBody sx={{ '& td': { verticalAlign: "top", fontSize: "0.75rem", }, '& td:first-of-type': { whiteSpace: "nowrap", width: "1rem" } }}>
                 <TableRow>
                     <TableCell>Type</TableCell>
-                    <TableCell>{node.emoji} {node.doc_type}</TableCell>
+                    <TableCell>{node.emoji} {node.docType}</TableCell>
                 </TableRow>
                 {node.source_file !== undefined && <TableRow>
                     <TableCell>File</TableCell>
@@ -560,7 +560,7 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '
                     Click a point in the scatter-graph to see information about that node.
                 </Paper>
             }
-            {node !== null && node.full_content &&
+            {node !== null && node.fullContent &&
                 <Scrollable
                     autoscroll={false}
                     sx={{
@@ -575,16 +575,16 @@ The scatter graph shows the query in N-dimensional space, mapped to ${view2D ? '
                     }}
                 >
                     {
-                        node.full_content.split('\n').map((line, index) => {
-                            index += 1 + node.chunk_begin;
-                            const bgColor = (index > node.line_begin && index <= node.line_end) ? '#f0f0f0' : 'auto';
+                        node.fullContent.split('\n').map((line, index) => {
+                            index += 1 + node.chunkBegin;
+                            const bgColor = (index > node.lineBegin && index <= node.lineEnd) ? '#f0f0f0' : 'auto';
                             return <Box key={index} sx={{ display: "flex", flexDirection: "row", borderBottom: '1px solid #d0d0d0', ':first-of-type': { borderTop: '1px solid #d0d0d0' }, backgroundColor: bgColor }}>
                                 <Box sx={{ fontFamily: 'courier', fontSize: "0.8rem", minWidth: "2rem", pt: "0.1rem", align: "left", verticalAlign: "top" }}>{index}</Box>
                                 <pre style={{ margin: 0, padding: 0, border: "none", minHeight: "1rem", overflow: "hidden" }} >{line || " "}</pre>
                             </Box>;
                         })
                     }
-                    {!node.line_begin && <pre style={{ margin: 0, padding: 0, border: "none" }}>{node.content}</pre>}
+                    {!node.lineBegin && <pre style={{ margin: 0, padding: 0, border: "none" }}>{node.content}</pre>}
                 </Scrollable>
             }
         </Box>
@@ -20,8 +20,8 @@ import { ControlsPage } from 'pages/ControlsPage';
 import { LoginPage } from "pages/LoginPage";
 import { CandidateDashboardPage } from "pages/CandidateDashboardPage"
 import { EmailVerificationPage } from "components/EmailVerificationComponents";
+import { CandidateProfilePage } from "pages/candidate/Profile";
 
-const ProfilePage = () => (<BetaPage><Typography variant="h4">Profile</Typography></BetaPage>);
 const BackstoryPage = () => (<BetaPage><Typography variant="h4">Backstory</Typography></BetaPage>);
 const ResumesPage = () => (<BetaPage><Typography variant="h4">Resumes</Typography></BetaPage>);
 const QASetupPage = () => (<BetaPage><Typography variant="h4">Q&A Setup</Typography></BetaPage>);
@@ -69,7 +69,7 @@ const getBackstoryDynamicRoutes = (props: BackstoryDynamicRoutesProps): ReactNod
     if (user.userType === 'candidate') {
         routes.splice(-1, 0, ...[
             <Route key={`${index++}`} path="/candidate/dashboard" element={<BetaPage><CandidateDashboardPage {...backstoryProps} /></BetaPage>} />,
-            <Route key={`${index++}`} path="/candidate/profile" element={<ProfilePage />} />,
+            <Route key={`${index++}`} path="/candidate/profile" element={<CandidateProfilePage {...backstoryProps} />} />,
             <Route key={`${index++}`} path="/candidate/backstory" element={<BackstoryPage />} />,
             <Route key={`${index++}`} path="/candidate/resumes" element={<ResumesPage />} />,
             <Route key={`${index++}`} path="/candidate/qa-setup" element={<QASetupPage />} />,
@@ -23,13 +23,6 @@ interface LoginRequest {
     password: string;
 }
 
-interface MFAVerificationRequest {
-    email: string;
-    code: string;
-    deviceId: string;
-    rememberDevice?: boolean;
-}
-
 interface EmailVerificationRequest {
     token: string;
 }
@@ -418,7 +411,7 @@ function useAuthenticationLogic() {
     }, [apiClient]);
 
     // MFA verification
-    const verifyMFA = useCallback(async (mfaData: MFAVerificationRequest): Promise<boolean> => {
+    const verifyMFA = useCallback(async (mfaData: Types.MFAVerifyRequest): Promise<boolean> => {
         setAuthState(prev => ({ ...prev, isLoading: true, error: null }));
 
         try {
@@ -742,7 +735,7 @@ function ProtectedRoute({
 }
 
 export type {
-    AuthState, LoginRequest, MFAVerificationRequest, EmailVerificationRequest, ResendVerificationRequest, PasswordResetRequest
+    AuthState, LoginRequest, EmailVerificationRequest, ResendVerificationRequest, PasswordResetRequest
 }
 
 export type { CreateCandidateRequest, CreateEmployerRequest } from '../services/api-client';
@@ -43,7 +43,10 @@ const emptyUser: Candidate = {
     education: [],
     preferredJobTypes: [],
     languages: [],
-    certifications: []
+    certifications: [],
+    isAdmin: false,
+    hasProfile: false,
+    ragContentSize: 0
 };
 
 const GenerateCandidate = (props: BackstoryElementProps) => {
@@ -28,8 +28,10 @@ import { BackstoryPageProps } from 'components/BackstoryTab';
 
 import { LoginForm } from "components/EmailVerificationComponents";
 import { CandidateRegistrationForm } from "components/RegistrationForms";
+import { useNavigate } from 'react-router-dom';
 
 const LoginPage: React.FC<BackstoryPageProps> = (props: BackstoryPageProps) => {
+    const navigate = useNavigate();
     const { setSnack } = props;
     const [tabValue, setTabValue] = useState(0);
     const [loading, setLoading] = useState(false);
@@ -62,68 +64,10 @@ const LoginPage: React.FC<BackstoryPageProps> = (props: BackstoryPageProps) => {
         setSuccess(null);
     };
 
-    // If user is logged in, show their profile
+    // If user is logged in, navigate to the profile page
     if (user) {
-        return (
-            <Container maxWidth="md" sx={{ mt: 4 }}>
-                <Card elevation={3}>
-                    <CardContent>
-                        <Box sx={{ display: 'flex', alignItems: 'center', mb: 3 }}>
-                            <Avatar sx={{ mr: 2, bgcolor: 'primary.main' }}>
-                                <AccountCircle />
-                            </Avatar>
-                            <Typography variant="h4" component="h1">
-                                User Profile
-                            </Typography>
-                        </Box>
-
-                        <Divider sx={{ mb: 3 }} />
-
-                        <Grid container spacing={3}>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    <strong>Username:</strong> {name}
-                                </Typography>
-                            </Grid>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    <strong>Email:</strong> {user.email}
-                                </Typography>
-                            </Grid>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    {/* <strong>Status:</strong> {user.status} */}
-                                </Typography>
-                            </Grid>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    <strong>Phone:</strong> {user.phone || 'Not provided'}
-                                </Typography>
-                            </Grid>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    <strong>Account type:</strong> {user.userType}
-                                </Typography>
-                            </Grid>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    <strong>Last Login:</strong> {
-                                        user.lastLogin
-                                            ? user.lastLogin.toLocaleString()
-                                            : 'N/A'
-                                    }
-                                </Typography>
-                            </Grid>
-                            <Grid size={{ xs: 12, md: 6 }}>
-                                <Typography variant="body1" sx={{ mb: 1 }}>
-                                    <strong>Member Since:</strong> {user.createdAt.toLocaleDateString()}
-                                </Typography>
-                            </Grid>
-                        </Grid>
-                    </CardContent>
-                </Card>
-            </Container>
-        );
+        navigate('/candidate/profile');
+        return (<></>);
     }
 
     return (
frontend/src/pages/candidate/Profile.tsx (new file, 1056 lines): file diff suppressed because it is too large.
@@ -33,6 +33,7 @@ import {
     convertFromApi,
     convertArrayFromApi
 } from 'types/types';
+import internal from 'stream';
 
 // ============================
 // Streaming Types and Interfaces
@@ -290,14 +291,7 @@ class ApiClient {
             body: JSON.stringify(formatApiRequest(auth))
         });
 
-        // This could return either a full auth response or MFA request
-        const data = await response.json();
-
-        if (!response.ok) {
-            throw new Error(data.error?.message || 'Login failed');
-        }
-
-        return data.data;
+        return handleApiResponse<Types.AuthResponse | Types.MFARequestResponse>(response);
     }
 
     /**
@@ -524,10 +518,11 @@ class ApiClient {
     }
 
     async updateCandidate(id: string, updates: Partial<Types.Candidate>): Promise<Types.Candidate> {
+        const request = formatApiRequest(updates);
         const response = await fetch(`${this.baseUrl}/candidates/${id}`, {
             method: 'PATCH',
             headers: this.defaultHeaders,
-            body: JSON.stringify(formatApiRequest(updates))
+            body: JSON.stringify(request)
         });
 
         return this.handleApiResponseWithConversion<Types.Candidate>(response, 'Candidate');
@@ -739,6 +734,47 @@ class ApiClient {
         return result;
     }
 
+    async getCandidateSimilarContent(query: string
+    ): Promise<Types.ChromaDBGetResponse> {
+        const response = await fetch(`${this.baseUrl}/candidates/rag-search`, {
+            method: 'POST',
+            headers: this.defaultHeaders,
+            body: JSON.stringify(query)
+        });
+
+        const result = await handleApiResponse<Types.ChromaDBGetResponse>(response);
+
+        return result;
+    }
+
+    async getCandidateVectors(
+        dimensions: number,
+    ): Promise<Types.ChromaDBGetResponse> {
+        const response = await fetch(`${this.baseUrl}/candidates/rag-vectors`, {
+            method: 'POST',
+            headers: this.defaultHeaders,
+            body: JSON.stringify(dimensions)
+        });
+
+        const result = await handleApiResponse<Types.ChromaDBGetResponse>(response);
+
+        return result;
+    }
+
+    async getCandidateContent(
+        doc_id: string,
+    ): Promise<Types.RagContentResponse> {
+        const response = await fetch(`${this.baseUrl}/candidates/rag-content`, {
+            method: 'POST',
+            headers: this.defaultHeaders,
+            body: JSON.stringify(doc_id)
+        });
+
+        const result = await handleApiResponse<Types.RagContentResponse>(response);
+
+        return result;
+    }
+
     /**
      * Create a chat session about a specific candidate
      */
@@ -809,7 +845,7 @@ class ApiClient {
      * Send message with streaming response support and date conversion
      */
     sendMessageStream(
-        chatMessage: Types.ChatMessageUser,
+        chatMessage: Types.ChatMessageBase,
         options: StreamingOptions = {}
     ): StreamingResponse {
         const abortController = new AbortController();
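A minimal usage sketch of the three RAG helpers added above (illustrative only, not part of the commit; it assumes an authenticated ApiClient instance such as the apiClient obtained from useAuth() in the VectorVisualizer changes):

    // Illustrative only. Assumes an authenticated ApiClient instance (e.g. the apiClient from useAuth()).
    async function loadCandidateRag(apiClient: any): Promise<void> {
        // 2-D UMAP projection of the signed-in candidate's RAG collection (as plotted by VectorVisualizer).
        const vectors = await apiClient.getCandidateVectors(2);
        console.log(vectors.ids.length, 'chunks in', vectors.name);

        // Similarity search; the response reuses the same ChromaDBGetResponse shape.
        const matches = await apiClient.getCandidateSimilarContent('kubernetes experience');

        // Full chunk content plus line/chunk metadata for one hit.
        if (matches.ids.length > 0) {
            const doc = await apiClient.getCandidateContent(matches.ids[0]);
            console.log(doc.metadata.sourceFile, doc.content);
        }
    }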
@@ -1,6 +1,6 @@
 // Generated TypeScript types from Pydantic models
 // Source: src/backend/models.py
-// Generated on: 2025-06-01T20:40:46.797024
+// Generated on: 2025-06-02T18:30:16.709256
 // DO NOT EDIT MANUALLY - This file is auto-generated
 
 // ============================
@@ -13,9 +13,9 @@ export type ActivityType = "login" | "search" | "view_job" | "apply_job" | "mess
 
 export type ApplicationStatus = "applied" | "reviewing" | "interview" | "offer" | "rejected" | "accepted" | "withdrawn";
 
-export type ChatContextType = "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile";
+export type ChatContextType = "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile" | "rag_search";
 
-export type ChatMessageType = "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "system" | "thinking" | "tooling" | "user";
+export type ChatMessageType = "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "rag_result" | "system" | "thinking" | "tooling" | "user";
 
 export type ChatSenderType = "user" | "assistant" | "system";
 
@@ -145,7 +145,7 @@ export interface BaseUser {
     lastLogin?: Date;
     profileImage?: string;
     status: "active" | "inactive" | "pending" | "banned";
-    isAdmin?: boolean;
+    isAdmin: boolean;
 }
 
 export interface BaseUserWithType {
@@ -161,7 +161,7 @@ export interface BaseUserWithType {
     lastLogin?: Date;
     profileImage?: string;
     status: "active" | "inactive" | "pending" | "banned";
-    isAdmin?: boolean;
+    isAdmin: boolean;
     userType: "candidate" | "employer" | "guest";
 }
 
@@ -178,7 +178,7 @@ export interface Candidate {
     lastLogin?: Date;
     profileImage?: string;
     status: "active" | "inactive" | "pending" | "banned";
-    isAdmin?: boolean;
+    isAdmin: boolean;
     userType: "candidate";
     username: string;
     description?: string;
@@ -194,9 +194,9 @@
     languages?: Array<Language>;
     certifications?: Array<Certification>;
     jobApplications?: Array<JobApplication>;
-    hasProfile?: boolean;
+    hasProfile: boolean;
     rags?: Array<RagEntry>;
-    ragContentSize?: number;
+    ragContentSize: number;
     age?: number;
     gender?: "female" | "male";
     ethnicity?: string;
@@ -237,7 +237,7 @@ export interface Certification {
 }
 
 export interface ChatContext {
-    type: "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile";
+    type: "job_search" | "candidate_chat" | "interview_prep" | "resume_review" | "general" | "generate_persona" | "generate_profile" | "rag_search";
     relatedEntityId?: string;
     relatedEntityType?: "job" | "candidate" | "employer";
     additionalContext?: Record<string, any>;
@@ -248,11 +248,11 @@ export interface ChatMessage {
     sessionId: string;
     senderId?: string;
     status: "initializing" | "streaming" | "done" | "error";
-    type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "system" | "thinking" | "tooling" | "user";
+    type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "rag_result" | "system" | "thinking" | "tooling" | "user";
     sender: "user" | "assistant" | "system";
     timestamp: Date;
     tunables?: Tunables;
-    content?: string;
+    content: string;
     metadata?: ChatMessageMetaData;
 }
 
@@ -261,32 +261,45 @@ export interface ChatMessageBase {
     sessionId: string;
     senderId?: string;
     status: "initializing" | "streaming" | "done" | "error";
-    type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "system" | "thinking" | "tooling" | "user";
+    type: "error" | "generating" | "info" | "preparing" | "processing" | "response" | "searching" | "rag_result" | "system" | "thinking" | "tooling" | "user";
     sender: "user" | "assistant" | "system";
     timestamp: Date;
     tunables?: Tunables;
-    content?: string;
+    content: string;
 }
 
 export interface ChatMessageMetaData {
     model: "qwen2.5";
-    temperature?: number;
-    maxTokens?: number;
-    topP?: number;
+    temperature: number;
+    maxTokens: number;
+    topP: number;
     frequencyPenalty?: number;
     presencePenalty?: number;
     stopSequences?: Array<string>;
     ragResults?: Array<ChromaDBGetResponse>;
     llmHistory?: Array<LLMMessage>;
-    evalCount?: number;
-    evalDuration?: number;
-    promptEvalCount?: number;
-    promptEvalDuration?: number;
+    evalCount: number;
+    evalDuration: number;
+    promptEvalCount: number;
+    promptEvalDuration: number;
     options?: ChatOptions;
     tools?: Record<string, any>;
     timers?: Record<string, number>;
 }
 
+export interface ChatMessageRagSearch {
+    id?: string;
+    sessionId: string;
+    senderId?: string;
+    status: "done";
+    type: "rag_result";
+    sender: "user";
+    timestamp: Date;
+    tunables?: Tunables;
+    content: string;
+    dimensions: number;
+}
+
 export interface ChatMessageUser {
     id?: string;
     sessionId: string;
@@ -296,7 +309,7 @@ export interface ChatMessageUser {
     sender: "user";
     timestamp: Date;
     tunables?: Tunables;
-    content?: string;
+    content: string;
 }
 
 export interface ChatOptions {
@@ -320,23 +333,46 @@ export interface ChatSession {
     title?: string;
     context: ChatContext;
     messages?: Array<ChatMessage>;
-    isArchived?: boolean;
+    isArchived: boolean;
     systemPrompt?: string;
 }
 
 export interface ChromaDBGetResponse {
-    ids?: Array<string>;
-    embeddings?: Array<Array<number>>;
-    documents?: Array<string>;
-    metadatas?: Array<Record<string, any>>;
-    name?: string;
-    size?: number;
-    query?: string;
+    ids: Array<string>;
+    embeddings: Array<Array<number>>;
+    documents: Array<string>;
+    metadatas: Array<Record<string, any>>;
+    distances: Array<number>;
+    name: string;
+    size: number;
+    dimensions: number;
+    query: string;
     queryEmbedding?: Array<number>;
     umapEmbedding2D?: Array<number>;
     umapEmbedding3D?: Array<number>;
 }
 
+export interface CreateCandidateRequest {
+    email: string;
+    username: string;
+    password: string;
+    firstName: string;
+    lastName: string;
+    phone?: string;
+}
+
+export interface CreateEmployerRequest {
+    email: string;
+    username: string;
+    password: string;
+    companyName: string;
+    industry: string;
+    companySize: string;
+    companyDescription: string;
+    websiteUrl?: string;
+    phone?: string;
+}
+
 export interface CustomQuestion {
     question: string;
     answer: string;
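With the collection-shaped fields on ChromaDBGetResponse now required rather than optional, placeholder values have to spell out every field, mirroring the emptyQuerySet and query/filtered initializers in the VectorVisualizer changes; a minimal sketch:

    // Sketch of an "empty" result now that the collection fields are required.
    const emptyResult: ChromaDBGetResponse = {
        ids: [], embeddings: [], documents: [], metadatas: [], distances: [],
        name: '', size: 0, dimensions: 2, query: ''
        // queryEmbedding, umapEmbedding2D and umapEmbedding3D remain optional.
    };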
@@ -398,7 +434,7 @@ export interface Employer {
     lastLogin?: Date;
     profileImage?: string;
     status: "active" | "inactive" | "pending" | "banned";
-    isAdmin?: boolean;
+    isAdmin: boolean;
     userType: "employer";
     companyName: string;
     industry: string;
@@ -486,8 +522,8 @@ export interface Job {
     benefits?: Array<string>;
     visaSponsorship?: boolean;
     featuredUntil?: Date;
-    views?: number;
-    applicationCount?: number;
+    views: number;
+    applicationCount: number;
 }
 
 export interface JobApplication {
@@ -521,8 +557,8 @@ export interface JobResponse {
 }
 
 export interface LLMMessage {
-    role?: string;
-    content?: string;
+    role: string;
+    content: string;
     toolCalls?: Array<Record<string, any>>;
 }
 
@@ -572,7 +608,7 @@ export interface MFAVerifyRequest {
     email: string;
     code: string;
     deviceId: string;
-    rememberDevice?: boolean;
+    rememberDevice: boolean;
 }
 
 export interface MessageReaction {
@@ -588,8 +624,8 @@ export interface NotificationPreference {
 }
 
 export interface PaginatedRequest {
-    page?: number;
-    limit?: number;
+    page: number;
+    limit: number;
     sortBy?: string;
     sortOrder?: "asc" | "desc";
     filters?: Record<string, any>;
@@ -634,10 +670,26 @@ export interface RAGConfiguration {
     isActive: boolean;
 }
 
+export interface RagContentMetadata {
+    sourceFile: string;
+    lineBegin: number;
+    lineEnd: number;
+    lines: number;
+    chunkBegin?: number;
+    chunkEnd?: number;
+    metadata?: Record<string, any>;
+}
+
+export interface RagContentResponse {
+    id: string;
+    content: string;
+    metadata: RagContentMetadata;
+}
+
 export interface RagEntry {
     name: string;
-    description?: string;
-    enabled?: boolean;
+    description: string;
+    enabled: boolean;
 }
 
 export interface RefreshToken {
@@ -674,8 +726,8 @@ export interface SalaryRange {
 export interface SearchQuery {
     query: string;
     filters?: Record<string, any>;
-    page?: number;
-    limit?: number;
+    page: number;
+    limit: number;
     sortBy?: string;
     sortOrder?: "asc" | "desc";
 }
@@ -700,9 +752,9 @@ export interface SocialLink {
 }
 
 export interface Tunables {
-    enableRAG?: boolean;
-    enableTools?: boolean;
-    enableContext?: boolean;
+    enableRAG: boolean;
+    enableTools: boolean;
+    enableContext: boolean;
 }
 
 export interface UserActivity {
@@ -898,6 +950,19 @@ export function convertChatMessageBaseFromApi(data: any): ChatMessageBase {
         timestamp: new Date(data.timestamp),
     };
 }
+/**
+ * Convert ChatMessageRagSearch from API response, parsing date fields
+ * Date fields: timestamp
+ */
+export function convertChatMessageRagSearchFromApi(data: any): ChatMessageRagSearch {
+    if (!data) return data;
+
+    return {
+        ...data,
+        // Convert timestamp from ISO string to Date
+        timestamp: new Date(data.timestamp),
+    };
+}
 /**
  * Convert ChatMessageUser from API response, parsing date fields
  * Date fields: timestamp
@@ -1159,6 +1224,8 @@ export function convertFromApi<T>(data: any, modelType: string): T {
             return convertChatMessageFromApi(data) as T;
         case 'ChatMessageBase':
             return convertChatMessageBaseFromApi(data) as T;
+        case 'ChatMessageRagSearch':
+            return convertChatMessageRagSearchFromApi(data) as T;
         case 'ChatMessageUser':
             return convertChatMessageUserFromApi(data) as T;
         case 'ChatSession':
@@ -60,7 +60,7 @@ class Agent(BaseModel, ABC):
         return self
 
     # Agent properties
-    system_prompt: str # Mandatory
+    system_prompt: str = ""
    context_tokens: int = 0
 
    # context_size is shared across all subclasses
src/backend/agents/rag_search.py (new file, 98 lines)
@@ -0,0 +1,98 @@
+from __future__ import annotations
+from typing import Literal, AsyncGenerator, ClassVar, Optional, Any, List
+from datetime import datetime, UTC
+import inspect
+
+from .base import Agent, agent_registry
+from logger import logger
+
+from .registry import agent_registry
+from models import ( ChatMessage, ChatStatusType, ChatMessage, ChatOptions, ChatMessageType, ChatSenderType, ChatStatusType, ChatMessageMetaData, Candidate )
+from rag import ( ChromaDBGetResponse )
+
+class Chat(Agent):
+    """
+    Chat Agent
+    """
+
+    agent_type: Literal["rag_search"] = "rag_search" # type: ignore
+    _agent_type: ClassVar[str] = agent_type # Add this for registration
+
+    async def generate(
+        self, llm: Any, model: str, user_message: ChatMessage, user: Candidate, temperature=0.7
+    ) -> AsyncGenerator[ChatMessage, None]:
+        """
+        Generate a response based on the user message and the provided LLM.
+
+        Args:
+            llm: The language model to use for generation.
+            model: The specific model to use.
+            user_message: The message from the user.
+            user: Optional user information.
+            temperature: The temperature setting for generation.
+
+        Yields:
+            ChatMessage: The generated response.
+        """
+        logger.info(f"{self.agent_type} - {inspect.stack()[0].function}")
+
+        if user.id != user_message.sender_id:
+            logger.error(f"User {user.username} id does not match message {user_message.sender_id}")
+            raise ValueError("User does not match message sender")
+
+        chat_message = ChatMessage(
+            session_id=user_message.session_id,
+            tunables=user_message.tunables,
+            status=ChatStatusType.INITIALIZING,
+            type=ChatMessageType.PREPARING,
+            sender=ChatSenderType.ASSISTANT,
+            content="",
+            timestamp=datetime.now(UTC)
+        )
+
+        chat_message.metadata = ChatMessageMetaData()
+        chat_message.metadata.options = ChatOptions(
+            seed=8911,
+            num_ctx=self.context_size,
+            temperature=temperature, # Higher temperature to encourage tool usage
+        )
+
+        # Create a dict for storing various timing stats
+        chat_message.metadata.timers = {}
+
+        self.metrics.generate_count.labels(agent=self.agent_type).inc()
+        with self.metrics.generate_duration.labels(agent=self.agent_type).time():
+
+            rag_message : Optional[ChatMessage] = None
+            async for rag_message in self.generate_rag_results(chat_message=user_message):
+                if rag_message.status == ChatStatusType.ERROR:
+                    chat_message.status = rag_message.status
+                    chat_message.content = rag_message.content
+                    yield chat_message
+                    return
+                yield rag_message
+
+            if rag_message:
+                chat_message.content = ""
+                rag_results: List[ChromaDBGetResponse] = rag_message.metadata.rag_results
+                chat_message.metadata.rag_results = rag_results
+                for chroma_results in rag_results:
+                    for index, metadata in enumerate(chroma_results.metadatas):
+                        content = "\n".join([
+                            line.strip()
+                            for line in chroma_results.documents[index].split("\n")
+                            if line
+                        ]).strip()
+                        chat_message.content += f"""
+Source: {metadata.get("doc_type", "unknown")}: {metadata.get("path", "")}
+Document reference: {chroma_results.ids[index]}
+Content: { content }
+
+"""
+
+            chat_message.status = ChatStatusType.DONE
+            chat_message.type = ChatMessageType.RAG_RESULT
+            yield chat_message
+
+# Register the base agent
+agent_registry.register(Chat._agent_type, Chat)
@@ -382,10 +382,11 @@ def is_field_optional(field_info: Any, field_type: Any, debug: bool = False) ->
                 print(f" └─ RESULT: Required (has specific enum default: {default_val.value})")
             return False
 
-        # Any other actual default value makes it optional
+        # FIXED: Fields with actual default values (like [], "", 0) should be REQUIRED
+        # because they will always have a value (either provided or the default)
         if debug:
-            print(f" └─ RESULT: Optional (has actual default value)")
-        return True
+            print(f" └─ RESULT: Required (has actual default value - field will always have a value)")
+        return False # Changed from True to False
     else:
         if debug:
             print(f" └─ No default attribute found")
@@ -50,6 +50,7 @@ from llm_manager import llm_manager
 import entities
 from email_service import VerificationEmailRateLimiter, email_service
 from device_manager import DeviceManager
+import agents
 
 # =============================
 # Import Pydantic models
@@ -65,10 +66,11 @@ from models import (
     Job, JobApplication, ApplicationStatus,
 
     # Chat models
-    ChatSession, ChatMessage, ChatContext, ChatQuery, ChatStatusType, ChatMessageBase, ChatMessageUser, ChatSenderType, ChatMessageType,
+    ChatSession, ChatMessage, ChatContext, ChatQuery, ChatStatusType, ChatMessageBase, ChatMessageUser, ChatSenderType, ChatMessageType, ChatContextType,
+    ChatMessageRagSearch,
 
     # Supporting models
-    Location, MFARequest, MFAData, MFARequestResponse, MFAVerifyRequest, ResendVerificationRequest, Skill, WorkExperience, Education,
+    Location, MFARequest, MFAData, MFARequestResponse, MFAVerifyRequest, RagContentResponse, ResendVerificationRequest, Skill, WorkExperience, Education,
 
     # Email
     EmailVerificationRequest
@@ -161,10 +163,10 @@ ALGORITHM = "HS256"
 @app.exception_handler(RequestValidationError)
 async def validation_exception_handler(request: Request, exc: RequestValidationError):
     logger.error(traceback.format_exc())
-    logger.error("❌ Validation error:", exc.errors())
+    logger.error(f"❌ Validation error {request.method} {request.url.path}: {str(exc)}")
     return JSONResponse(
         status_code=HTTP_422_UNPROCESSABLE_ENTITY,
-        content=json.dumps({"detail": exc.errors()}),
+        content=json.dumps({"detail": str(exc)}),
     )
 
 # ============================
@@ -228,13 +230,16 @@ async def get_current_user(
         # Check candidates
         candidate = await database.get_candidate(user_id)
         if candidate:
+            # logger.info(f"🔑 Current user is candidate: {candidate['id']}")
             return Candidate.model_validate(candidate)
 
         # Check employers
         employer = await database.get_employer(user_id)
         if employer:
+            # logger.info(f"🔑 Current user is employer: {employer['id']}")
            return Employer.model_validate(employer)
 
+        logger.warning(f"⚠️ User {user_id} not found in database")
         raise HTTPException(status_code=404, detail="User not found")
 
     except Exception as e:
@ -324,6 +329,65 @@ def filter_and_paginate(
 
     return paginated_items, total
 
+async def stream_agent_response(chat_agent: agents.Agent,
+                                user_message: ChatMessageUser,
+                                candidate: Candidate,
+                                chat_session_data: Dict[str, Any] | None = None,
+                                database: RedisDatabase | None = None) -> StreamingResponse:
+    async def message_stream_generator():
+        """Generator to stream messages with persistence"""
+        last_log = None
+        final_message = None
+
+        async for generated_message in chat_agent.generate(
+            llm=llm_manager.get_llm(),
+            model=defines.model,
+            user_message=user_message,
+            user=candidate,
+        ):
+            # Store reference to the complete AI message
+            if generated_message.status == ChatStatusType.DONE:
+                final_message = generated_message
+
+            # If the message is not done, convert it to a ChatMessageBase to remove
+            # metadata and other unnecessary fields for streaming
+            if generated_message.status != ChatStatusType.DONE:
+                generated_message = model_cast.cast_to_model(ChatMessageBase, generated_message)
+
+            json_data = generated_message.model_dump(mode='json', by_alias=True, exclude_unset=True)
+            json_str = json.dumps(json_data)
+
+            log = f"🔗 Message status={generated_message.status}, sender={getattr(generated_message, 'sender', 'unknown')}"
+            if last_log != log:
+                last_log = log
+                logger.info(log)
+
+            yield f"data: {json_str}\n\n"
+
+        # After streaming is complete, persist the final AI message to database
+        if final_message and final_message.status == ChatStatusType.DONE:
+            try:
+                if database and chat_session_data:
+                    await database.add_chat_message(final_message.session_id, final_message.model_dump())
+                    logger.info(f"🤖 Message saved to database for session {final_message.session_id}")
+
+                    # Update session last activity again
+                    chat_session_data["lastActivity"] = datetime.now(UTC).isoformat()
+                    await database.set_chat_session(final_message.session_id, chat_session_data)
+
+            except Exception as e:
+                logger.error(f"❌ Failed to save message to database: {e}")
+
+    return StreamingResponse(
+        message_stream_generator(),
+        media_type="text/event-stream",
+        headers={
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive",
+            "X-Accel-Buffering": "no",
+        },
+    )
+
 # ============================
 # API Router Setup
 # ============================
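
The helper added above emits standard Server-Sent Events, one "data: <json>" frame per generated message. A minimal consumer sketch (not part of the commit), assuming an httpx client, a placeholder endpoint path, and a bearer token:

    import json
    import httpx

    def read_message_stream(base_url: str, session_id: str, token: str, content: str):
        # Hypothetical path; the actual route is the chat-session streaming endpoint further down.
        url = f"{base_url}/chat/sessions/{session_id}/messages/stream"
        with httpx.stream("POST", url, json={"content": content},
                          headers={"Authorization": f"Bearer {token}"}, timeout=None) as response:
            for line in response.iter_lines():
                if line.startswith("data: "):
                    # Each frame carries one streamed chat message (camelCase keys via by_alias=True)
                    yield json.loads(line[len("data: "):])
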
@ -709,12 +773,14 @@ async def verify_email(
         verification_data = await database.get_email_verification_token(request.token)
 
         if not verification_data:
+            logger.warning(f"⚠️ Invalid verification token: {request.token}")
             return JSONResponse(
                 status_code=400,
                 content=create_error_response("INVALID_TOKEN", "Invalid or expired verification token")
             )
 
         if verification_data.get("verified"):
+            logger.warning(f"⚠️ Attempt to verify already verified email: {verification_data['email']}")
             return JSONResponse(
                 status_code=400,
                 content=create_error_response("ALREADY_VERIFIED", "Email already verified")
@ -723,6 +789,7 @@ async def verify_email(
         # Check expiration
         expires_at = datetime.fromisoformat(verification_data["expires_at"])
         if datetime.now(timezone.utc) > expires_at:
+            logger.warning(f"⚠️ Verification token expired for: {verification_data['email']}")
             return JSONResponse(
                 status_code=400,
                 content=create_error_response("TOKEN_EXPIRED", "Verification token has expired")
@ -1398,6 +1465,93 @@ async def get_candidate(
             content=create_error_response("FETCH_ERROR", str(e))
         )
 
+@api_router.post("/candidates/rag-content")
+async def post_candidate_vector_content(
+    doc_id: str = Body(...),
+    current_user = Depends(get_current_user)
+):
+    try:
+        if current_user.user_type != "candidate":
+            return JSONResponse(
+                status_code=403,
+                content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint")
+            )
+        candidate : Candidate = current_user
+
+        async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
+            collection = candidate_entity.umap_collection
+            if not collection:
+                return JSONResponse(
+                    {"error": "No UMAP collection found"}, status_code=404
+                )
+
+            if not collection.get("metadatas", None):
+                return JSONResponse(f"Document id {doc_id} not found.", 404)
+
+            for index, id in enumerate(collection.get("ids", [])):
+                if id == doc_id:
+                    metadata = collection.get("metadatas", [])[index].copy()
+                    content = candidate_entity.file_watcher.prepare_metadata(metadata)
+                    rag_response = RagContentResponse(id=id, content=content, metadata=metadata)
+                    logger.info(f"✅ Fetched RAG content for document id {id} for candidate {candidate.username}")
+                    return create_success_response(rag_response.model_dump(by_alias=True, exclude_unset=True))
+
+            return JSONResponse(f"Document id {doc_id} not found.", 404)
+    except Exception as e:
+        logger.error(f"❌ Post candidate content error: {e}")
+        return JSONResponse(
+            status_code=500,
+            content=create_error_response("FETCH_ERROR", str(e))
+        )
+
+@api_router.post("/candidates/rag-vectors")
+async def post_candidate_vectors(
+    dimensions: int = Body(...),
+    current_user = Depends(get_current_user)
+):
+    try:
+        if current_user.user_type != "candidate":
+            return JSONResponse(
+                status_code=403,
+                content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint")
+            )
+        candidate : Candidate = current_user
+
+        async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
+            collection = candidate_entity.umap_collection
+            if not collection:
+                logger.error(f"❌ Candidate collection not found")
+                return JSONResponse(
+                    status_code=404,
+                    content=create_error_response("NOT_FOUND", "Candidate collection not found")
+                )
+            if dimensions == 2:
+                umap_embedding = candidate_entity.file_watcher.umap_embedding_2d
+            else:
+                umap_embedding = candidate_entity.file_watcher.umap_embedding_3d
+
+            if len(umap_embedding) == 0:
+                return JSONResponse(
+                    status_code=404,
+                    content=create_error_response("NOT_FOUND", "Candidate collection embedding not found")
+                )
+            result = {
+                "ids": collection.get("ids", []),
+                "metadatas": collection.get("metadatas", []),
+                "documents": collection.get("documents", []),
+                "embeddings": umap_embedding.tolist(),
+                "size": candidate_entity.file_watcher.collection.count()
+            }
+
+            return create_success_response(result)
+
+    except Exception as e:
+        logger.error(f"❌ Post candidate vectors error: {e}")
+        return JSONResponse(
+            status_code=500,
+            content=create_error_response("FETCH_ERROR", str(e))
+        )
+
 @api_router.patch("/candidates/{candidate_id}")
 async def update_candidate(
     candidate_id: str = Path(...),
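
A small client sketch (not part of the commit) for exercising the two new routes. The base URL, token, and the bare-value request bodies are assumptions: each endpoint declares a single non-embedded Body(...) parameter, so FastAPI is expected to read the raw JSON value rather than a keyed object.

    import httpx

    BASE = "http://localhost:8000/api"  # assumed prefix

    def fetch_rag_vectors(token: str, dimensions: int = 2) -> dict:
        headers = {"Authorization": f"Bearer {token}"}
        response = httpx.post(f"{BASE}/candidates/rag-vectors", headers=headers, json=dimensions)
        response.raise_for_status()
        return response.json()  # ids / metadatas / documents / embeddings / size

    def fetch_rag_content(token: str, doc_id: str) -> dict:
        headers = {"Authorization": f"Bearer {token}"}
        response = httpx.post(f"{BASE}/candidates/rag-content", headers=headers, json=doc_id)
        response.raise_for_status()
        return response.json()  # RagContentResponse wrapped by create_success_response
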
@ -1418,6 +1572,7 @@ async def update_candidate(
 
         # Check authorization (user can only update their own profile)
         if candidate.id != current_user.id:
+            logger.warning(f"⚠️ Unauthorized update attempt by user {current_user.id} on candidate {candidate_id}")
             return JSONResponse(
                 status_code=403,
                 content=create_error_response("FORBIDDEN", "Cannot update another user's profile")
@ -1772,6 +1927,56 @@ async def get_chat_statistics(
             content=create_error_response("STATS_ERROR", str(e))
         )
 
+@api_router.post("/candidates/rag-search")
+async def post_candidate_rag_search(
+    query: str = Body(...),
+    current_user = Depends(get_current_user)
+):
+    """Get chat activity summary for a candidate"""
+    try:
+        if current_user.user_type != "candidate":
+            logger.warning(f"⚠️ Unauthorized RAG search attempt by user {current_user.id}")
+            return JSONResponse(
+                status_code=403,
+                content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint")
+            )
+
+        candidate : Candidate = current_user
+        chat_type = ChatContextType.RAG_SEARCH
+        # Get RAG search data
+        async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
+            # Entity automatically released when done
+            chat_agent = candidate_entity.get_or_create_agent(agent_type=chat_type)
+            if not chat_agent:
+                return JSONResponse(
+                    status_code=400,
+                    content=create_error_response("AGENT_NOT_FOUND", "No agent found for this chat type")
+                )
+
+            user_message = ChatMessageUser(sender_id=candidate.id, session_id="", content=query, timestamp=datetime.now(UTC))
+            rag_message = None
+            async for generated_message in chat_agent.generate(
+                llm=llm_manager.get_llm(),
+                model=defines.model,
+                user_message=user_message,
+                user=candidate,
+            ):
+                rag_message = generated_message
+
+            if not rag_message:
+                return JSONResponse(
+                    status_code=500,
+                    content=create_error_response("NO_RESPONSE", "No response generated for the RAG search")
+                )
+            return create_success_response(rag_message.metadata.rag_results[0].model_dump(by_alias=True, exclude_unset=True))
+
+    except Exception as e:
+        logger.error(f"❌ Get candidate chat summary error: {e}")
+        return JSONResponse(
+            status_code=500,
+            content=create_error_response("SUMMARY_ERROR", str(e))
+        )
+
 @api_router.get("/candidates/{username}/chat-summary")
 async def get_candidate_chat_summary(
     username: str = Path(...),
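
As with the vector endpoints, here is a hedged sketch (not part of the commit) of calling the new rag-search route and iterating the ChromaDB-style result; the URL prefix and the success-envelope shape are assumptions.

    import httpx

    def rag_search(base_url: str, token: str, query: str) -> None:
        response = httpx.post(
            f"{base_url}/candidates/rag-search",
            headers={"Authorization": f"Bearer {token}"},
            json=query,  # single Body(...) parameter: send the bare query string
        )
        response.raise_for_status()
        result = response.json()  # assumed to expose ids / documents / distances directly
        for doc_id, document, distance in zip(result.get("ids", []),
                                              result.get("documents", []),
                                              result.get("distances", [])):
            print(f"{doc_id} (distance={distance:.3f}): {document[:80]}")
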
@ -1985,57 +2190,12 @@ async def post_chat_session_message_stream(
         chat_session_data["lastActivity"] = datetime.now(UTC).isoformat()
         await database.set_chat_session(user_message.session_id, chat_session_data)
 
-        async def message_stream_generator():
-            """Generator to stream messages with persistence"""
-            last_log = None
-            final_message = None
-
-            async for generated_message in chat_agent.generate(
-                llm=llm_manager.get_llm(),
-                model=defines.model,
-                user_message=user_message,
-                user=current_user,
-            ):
-                # Store reference to the complete AI message
-                if generated_message.status == ChatStatusType.DONE:
-                    final_message = generated_message
-
-                # If the message is not done, convert it to a ChatMessageBase to remove
-                # metadata and other unnecessary fields for streaming
-                if generated_message.status != ChatStatusType.DONE:
-                    generated_message = model_cast.cast_to_model(ChatMessageBase, generated_message)
-
-                json_data = generated_message.model_dump(mode='json', by_alias=True, exclude_unset=True)
-                json_str = json.dumps(json_data)
-
-                log = f"🔗 Message status={generated_message.status}, sender={getattr(generated_message, 'sender', 'unknown')}"
-                if last_log != log:
-                    last_log = log
-                    logger.info(log)
-
-                yield f"data: {json_str}\n\n"
-
-            # After streaming is complete, persist the final AI message to database
-            if final_message and final_message.status == ChatStatusType.DONE:
-                try:
-                    await database.add_chat_message(final_message.session_id, final_message.model_dump())
-                    logger.info(f"🤖 AI message saved to database for session {final_message.session_id}")
-
-                    # Update session last activity again
-                    chat_session_data["lastActivity"] = datetime.now(UTC).isoformat()
-                    await database.set_chat_session(final_message.session_id, chat_session_data)
-
-                except Exception as e:
-                    logger.error(f"❌ Failed to save AI message to database: {e}")
-
-        return StreamingResponse(
-            message_stream_generator(),
-            media_type="text/event-stream",
-            headers={
-                "Cache-Control": "no-cache",
-                "Connection": "keep-alive",
-                "X-Accel-Buffering": "no",
-            },
+        return stream_agent_response(
+            chat_agent=chat_agent,
+            user_message=user_message,
+            candidate=candidate,
+            database=database,
+            chat_session_data=chat_session_data,
         )
 
     except Exception as e:
@ -2544,10 +2704,11 @@ logger.info(f"Debug mode is {'enabled' if defines.debug else 'disabled'}")
 async def log_requests(request: Request, call_next):
     try:
         if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path):
-            logger.info(f"Request path: {request.url.path}, Method: {request.method}, Remote: {request.client.host}")
+            logger.info(f"📝 Request {request.method}: {request.url.path}, Remote: {request.client.host}")
         response = await call_next(request)
         if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path):
-            logger.info(f"Response status: {response.status_code}, Path: {request.url.path}, Method: {request.method}")
+            if response.status_code < 200 or response.status_code >= 300:
+                logger.warning(f"⚠️ Response {request.method} {response.status_code}: Path: {request.url.path}")
         return response
     except Exception as e:
         logger.error(f"❌ Error processing request: {str(e)}, Path: {request.url.path}, Method: {request.method}")
@ -81,6 +81,7 @@ class ChatMessageType(str, Enum):
     PROCESSING = "processing"
     RESPONSE = "response"
     SEARCHING = "searching"
+    RAG_RESULT = "rag_result"
     SYSTEM = "system"
     THINKING = "thinking"
     TOOLING = "tooling"
@ -100,6 +101,7 @@ class ChatContextType(str, Enum):
     GENERAL = "general"
     GENERATE_PERSONA = "generate_persona"
     GENERATE_PROFILE = "generate_profile"
+    RAG_SEARCH = "rag_search"
 
 class AIModelType(str, Enum):
     QWEN2_5 = "qwen2.5"
@ -461,6 +463,23 @@ class RagEntry(BaseModel):
     description: str = ""
     enabled: bool = True
 
+class RagContentMetadata(BaseModel):
+    source_file: str = Field(..., alias="sourceFile")
+    line_begin: int = Field(..., alias="lineBegin")
+    line_end: int = Field(..., alias="lineEnd")
+    lines: int
+    chunk_begin: Optional[int] = Field(None, alias="chunkBegin")
+    chunk_end: Optional[int] = Field(None, alias="chunkEnd")
+    metadata: Dict[str, Any] = Field(default_factory=dict)
+    model_config = {
+        "populate_by_name": True,  # Allow both field names and aliases
+    }
+
+class RagContentResponse(BaseModel):
+    id: str
+    content: str
+    metadata: RagContentMetadata
+
 class Candidate(BaseUser):
     user_type: Literal[UserType.CANDIDATE] = Field(UserType.CANDIDATE, alias="userType")
     username: str
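
A brief usage sketch (not part of the commit) showing the alias handling these models enable; the field values are made up.

    # Aliases accepted on input, snake_case names usable in code (populate_by_name=True)
    metadata = RagContentMetadata.model_validate({
        "sourceFile": "resume.md",
        "lineBegin": 10,
        "lineEnd": 24,
        "lines": 15,
    })
    assert metadata.source_file == "resume.md"
    response = RagContentResponse(id="doc-123", content="...chunk text...", metadata=metadata)
    # by_alias=True mirrors what the rag-content endpoint returns to the frontend
    print(response.model_dump(by_alias=True, exclude_unset=True))
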
@ -618,12 +637,14 @@ class JobApplication(BaseModel):
 class ChromaDBGetResponse(BaseModel):
     # Chroma fields
     ids: List[str] = []
-    embeddings: List[List[float]] = Field(default=[])
+    embeddings: List[List[float]] = []
     documents: List[str] = []
     metadatas: List[Dict[str, Any]] = []
+    distances: List[float] = []
     # Additional fields
     name: str = ""
     size: int = 0
+    dimensions: int = 2 | 3
     query: str = ""
     query_embedding: Optional[List[float]] = Field(default=None, alias="queryEmbedding")
     umap_embedding_2d: Optional[List[float]] = Field(default=None, alias="umapEmbedding2D")
@ -663,6 +684,12 @@ class ChatMessageBase(BaseModel):
         "populate_by_name": True # Allow both field names and aliases
     }
 
+class ChatMessageRagSearch(ChatMessageBase):
+    status: ChatStatusType = ChatStatusType.DONE
+    type: ChatMessageType = ChatMessageType.RAG_RESULT
+    sender: ChatSenderType = ChatSenderType.USER
+    dimensions: int = 2 | 3
+
 class ChatMessageMetaData(BaseModel):
     model: AIModelType = AIModelType.QWEN2_5
     temperature: float = 0.7
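
One detail worth flagging in the two models above: `dimensions: int = 2 | 3` applies `|` to two ints, which Python evaluates as bitwise OR, so the default is simply 3 rather than "2 or 3"; a `Literal[2, 3]` annotation would likely express the intended constraint. A quick check:

    assert (2 | 3) == 3  # bitwise OR, not a union of allowed values
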