AI generation and deletion working
This commit is contained in:
parent 357b42ea7c
commit 05c53653ed
@@ -84,6 +84,16 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
            }}
          />
        </Grid>
        {isAdmin && ai &&
          <DeleteConfirmation
            onDelete={() => { deleteCandidate(candidate.id); }}
            sx={{ minWidth: 'auto', px: 2, maxHeight: "min-content", color: "red" }}
            action="delete"
            label="user"
            title="Delete AI user"
            icon={<DeleteIcon />}
            message={`Are you sure you want to delete ${candidate.username}? This action cannot be undone.`}
          />}

        <Grid size={{ xs: 12, sm: 10 }}>
          <Box
@@ -143,17 +153,6 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
            }
          </>}
        </Grid>
        {isAdmin && ai &&
          <DeleteConfirmation
            onDelete={() => { deleteCandidate(candidate.id); }}
            sx={{ minWidth: 'auto', px: 2, maxHeight: "min-content", color: "red" }}
            action="delete"
            label="user"
            title="Delete AI user"
            icon={<DeleteIcon />}
            message={`Are you sure you want to delete ${candidate.username}? This action cannot be undone.`}
          />}

      </Grid>
    </CardContent>
  </Card>

@@ -1,6 +1,5 @@
import React, { createContext, useContext, useState, useCallback, useEffect } from 'react';
import * as Types from 'types/types';
import { formatApiRequest, toCamelCase } from 'types/conversion';

// ============================
// App State Interface
@@ -26,107 +25,6 @@ export interface AppStateActions {

export type AppStateContextType = AppState & AppStateActions;

// ============================
// Storage Constants
// ============================

const APP_STORAGE = {
  SELECTED_CANDIDATE: 'selectedCandidate',
  SELECTED_JOB: 'selectedJob',
  SELECTED_EMPLOYER: 'selectedEmployer'
} as const;

// ============================
// Storage Utilities with Date Conversion
// ============================

function storeCandidate(candidate: Types.Candidate | null): void {
  try {
    if (candidate) {
      const candidateForStorage = formatApiRequest(candidate);
      localStorage.setItem(APP_STORAGE.SELECTED_CANDIDATE, JSON.stringify(candidateForStorage));
    } else {
      localStorage.removeItem(APP_STORAGE.SELECTED_CANDIDATE);
    }
  } catch (error) {
    console.error('Failed to store selected candidate:', error);
  }
}

function getStoredCandidate(): Types.Candidate | null {
  try {
    const candidateStr = localStorage.getItem(APP_STORAGE.SELECTED_CANDIDATE);
    if (candidateStr) {
      const rawData = JSON.parse(candidateStr);
      return toCamelCase<Types.Candidate>(rawData);
    }
  } catch (error) {
    console.error('Failed to parse stored candidate:', error);
    localStorage.removeItem(APP_STORAGE.SELECTED_CANDIDATE);
  }
  return null;
}

function storeJob(job: Types.Job | null): void {
  try {
    if (job) {
      const jobForStorage = formatApiRequest(job);
      localStorage.setItem(APP_STORAGE.SELECTED_JOB, JSON.stringify(jobForStorage));
    } else {
      localStorage.removeItem(APP_STORAGE.SELECTED_JOB);
    }
  } catch (error) {
    console.error('Failed to store selected job:', error);
  }
}

function getStoredJob(): Types.Job | null {
  try {
    const jobStr = localStorage.getItem(APP_STORAGE.SELECTED_JOB);
    if (jobStr) {
      const rawData = JSON.parse(jobStr);
      return toCamelCase<Types.Job>(rawData);
    }
  } catch (error) {
    console.error('Failed to parse stored job:', error);
    localStorage.removeItem(APP_STORAGE.SELECTED_JOB);
  }
  return null;
}

function storeEmployer(employer: Types.Employer | null): void {
  try {
    if (employer) {
      const employerForStorage = formatApiRequest(employer);
      localStorage.setItem(APP_STORAGE.SELECTED_EMPLOYER, JSON.stringify(employerForStorage));
    } else {
      localStorage.removeItem(APP_STORAGE.SELECTED_EMPLOYER);
    }
  } catch (error) {
    console.error('Failed to store selected employer:', error);
  }
}

function getStoredEmployer(): Types.Employer | null {
  try {
    const employerStr = localStorage.getItem(APP_STORAGE.SELECTED_EMPLOYER);
    if (employerStr) {
      const rawData = JSON.parse(employerStr);
      return toCamelCase<Types.Employer>(rawData);
    }
  } catch (error) {
    console.error('Failed to parse stored employer:', error);
    localStorage.removeItem(APP_STORAGE.SELECTED_EMPLOYER);
  }
  return null;
}

function clearAllStoredSelections(): void {
  localStorage.removeItem(APP_STORAGE.SELECTED_CANDIDATE);
  localStorage.removeItem(APP_STORAGE.SELECTED_JOB);
  localStorage.removeItem(APP_STORAGE.SELECTED_EMPLOYER);
}

// ============================
// App State Hook
// ============================
@@ -136,31 +34,8 @@ export function useAppStateLogic(): AppStateContextType {
  const [selectedJob, setSelectedJobState] = useState<Types.Job | null>(null);
  const [selectedEmployer, setSelectedEmployerState] = useState<Types.Employer | null>(null);

  // Initialize state from localStorage on mount
  useEffect(() => {
    const storedCandidate = getStoredCandidate();
    const storedJob = getStoredJob();
    const storedEmployer = getStoredEmployer();

    if (storedCandidate) {
      setSelectedCandidateState(storedCandidate);
      console.log('Restored selected candidate from storage:', storedCandidate);
    }

    if (storedJob) {
      setSelectedJobState(storedJob);
      console.log('Restored selected job from storage:', storedJob);
    }

    if (storedEmployer) {
      setSelectedEmployerState(storedEmployer);
      console.log('Restored selected employer from storage:', storedEmployer);
    }
  }, []);

  const setSelectedCandidate = useCallback((candidate: Types.Candidate | null) => {
    setSelectedCandidateState(candidate);
    storeCandidate(candidate);

    if (candidate) {
      console.log('Selected candidate:', candidate);
@@ -171,7 +46,6 @@ export function useAppStateLogic(): AppStateContextType {

  const setSelectedJob = useCallback((job: Types.Job | null) => {
    setSelectedJobState(job);
    storeJob(job);

    if (job) {
      console.log('Selected job:', job);
@@ -182,7 +56,6 @@ export function useAppStateLogic(): AppStateContextType {

  const setSelectedEmployer = useCallback((employer: Types.Employer | null) => {
    setSelectedEmployerState(employer);
    storeEmployer(employer);

    if (employer) {
      console.log('Selected employer:', employer);
@@ -195,7 +68,6 @@ export function useAppStateLogic(): AppStateContextType {
    setSelectedCandidateState(null);
    setSelectedJobState(null);
    setSelectedEmployerState(null);
    clearAllStoredSelections();
    console.log('Cleared all selections');
  }, []);

@@ -527,7 +527,7 @@ class ApiClient {

  async deleteCandidate(id: string): Promise<DeleteCandidateResponse> {
    const response = await fetch(`${this.baseUrl}/candidates/${id}`, {
      method: 'PATCH',
      method: 'DELETE',
      headers: this.defaultHeaders,
      body: JSON.stringify({ id })
    });

@@ -217,6 +217,40 @@ class RedisDatabase:
        key = f"document:{document_id}"
        await self.redis.delete(key)

    async def delete_all_candidate_documents(self, candidate_id: str) -> int:
        """Delete all documents for a specific candidate and return count of deleted documents"""
        try:
            # Get all document IDs for this candidate
            key = f"{self.KEY_PREFIXES['candidate_documents']}{candidate_id}"
            document_ids = await self.redis.lrange(key, 0, -1)

            if not document_ids:
                logger.info(f"No documents found for candidate {candidate_id}")
                return 0

            deleted_count = 0

            # Use pipeline for efficient batch operations
            pipe = self.redis.pipeline()

            # Delete each document's metadata
            for doc_id in document_ids:
                pipe.delete(f"document:{doc_id}")
                deleted_count += 1

            # Delete the candidate's document list
            pipe.delete(key)

            # Execute all operations
            await pipe.execute()

            logger.info(f"Successfully deleted {deleted_count} documents for candidate {candidate_id}")
            return deleted_count

        except Exception as e:
            logger.error(f"Error deleting all documents for candidate {candidate_id}: {e}")
            raise
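
    # Usage sketch (illustrative assumption, not code from this commit): with a
    # connected RedisDatabase instance `db`, the helper above can be called on its
    # own to purge a candidate's uploads before further cleanup; `candidate_id`
    # here is a hypothetical value.
    #
    #     removed = await db.delete_all_candidate_documents(candidate_id)
    #     logger.info(f"Purged {removed} documents for candidate {candidate_id}")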

    async def get_candidate_documents(self, candidate_id: str) -> List[Dict]:
        """Get all documents for a specific candidate"""
        key = f"{self.KEY_PREFIXES['candidate_documents']}{candidate_id}"
@@ -368,11 +402,322 @@ class RedisDatabase:

        return result

    async def delete_candidate(self, candidate_id: str):
        """Delete candidate"""
        key = f"{self.KEY_PREFIXES['candidates']}{candidate_id}"
        await self.redis.delete(key)

    async def delete_candidate(self, candidate_id: str) -> Dict[str, int]:
        """
        Delete candidate and all related records in a cascading manner
        Returns a dictionary with counts of deleted items for each category
        """
        try:
            deletion_stats = {
                "documents": 0,
                "chat_sessions": 0,
                "chat_messages": 0,
                "job_applications": 0,
                "user_records": 0,
                "auth_records": 0,
                "security_logs": 0,
                "ai_parameters": 0,
                "candidate_record": 0
            }

            logger.info(f"🗑️ Starting cascading delete for candidate {candidate_id}")

            # 1. Get candidate data first to retrieve associated information
            candidate_data = await self.get_candidate(candidate_id)
            if not candidate_data:
                logger.warning(f"⚠️ Candidate {candidate_id} not found")
                return deletion_stats

            candidate_email = candidate_data.get("email", "").lower()
            candidate_username = candidate_data.get("username", "").lower()

            # 2. Delete all candidate documents and their metadata
            try:
                documents_deleted = await self.delete_all_candidate_documents(candidate_id)
                deletion_stats["documents"] = documents_deleted
                logger.info(f"🗑️ Deleted {documents_deleted} documents for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting candidate documents: {e}")

            # 3. Delete all chat sessions related to this candidate
            try:
                candidate_sessions = await self.get_chat_sessions_by_candidate(candidate_id)
                messages_deleted = 0

                for session in candidate_sessions:
                    session_id = session.get("id")
                    if session_id:
                        # Count messages before deletion
                        message_count = await self.get_chat_message_count(session_id)
                        messages_deleted += message_count

                        # Delete chat session and its messages
                        await self.delete_chat_session_completely(session_id)

                deletion_stats["chat_sessions"] = len(candidate_sessions)
                deletion_stats["chat_messages"] = messages_deleted
                logger.info(f"🗑️ Deleted {len(candidate_sessions)} chat sessions and {messages_deleted} messages for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting chat sessions: {e}")

            # 4. Delete job applications from this candidate
            try:
                all_applications = await self.get_all_job_applications()
                candidate_applications = []

                for app_id, app_data in all_applications.items():
                    if app_data.get("candidateId") == candidate_id:
                        candidate_applications.append(app_id)

                # Delete each application
                for app_id in candidate_applications:
                    await self.delete_job_application(app_id)

                deletion_stats["job_applications"] = len(candidate_applications)
                logger.info(f"🗑️ Deleted {len(candidate_applications)} job applications for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting job applications: {e}")

            # 5. Delete user records (by email and username if they exist)
            try:
                user_records_deleted = 0

                # Delete by email
                if candidate_email and await self.user_exists_by_email(candidate_email):
                    await self.delete_user(candidate_email)
                    user_records_deleted += 1
                    logger.debug(f"🗑️ Deleted user record by email: {candidate_email}")

                # Delete by username (if different from email)
                if (candidate_username and
                        candidate_username != candidate_email and
                        await self.user_exists_by_username(candidate_username)):
                    await self.delete_user(candidate_username)
                    user_records_deleted += 1
                    logger.debug(f"🗑️ Deleted user record by username: {candidate_username}")

                # Delete user by ID if exists
                user_by_id = await self.get_user_by_id(candidate_id)
                if user_by_id:
                    key = f"user_by_id:{candidate_id}"
                    await self.redis.delete(key)
                    user_records_deleted += 1
                    logger.debug(f"🗑️ Deleted user record by ID: {candidate_id}")

                deletion_stats["user_records"] = user_records_deleted
                logger.info(f"🗑️ Deleted {user_records_deleted} user records for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting user records: {e}")

            # 6. Delete authentication records
            try:
                auth_deleted = await self.delete_authentication(candidate_id)
                if auth_deleted:
                    deletion_stats["auth_records"] = 1
                    logger.debug(f"🗑️ Deleted authentication record for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting authentication records: {e}")

            # 7. Revoke all refresh tokens for this user
            try:
                await self.revoke_all_user_tokens(candidate_id)
                logger.debug(f"🗑️ Revoked all refresh tokens for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error revoking refresh tokens: {e}")

            # 8. Delete security logs for this user
            try:
                security_logs_deleted = 0
                # Security logs are stored by date, so we need to scan for them
                pattern = f"security_log:{candidate_id}:*"
                cursor = 0

                while True:
                    cursor, keys = await self.redis.scan(cursor, match=pattern, count=100)

                    if keys:
                        await self.redis.delete(*keys)
                        security_logs_deleted += len(keys)

                    if cursor == 0:
                        break

                deletion_stats["security_logs"] = security_logs_deleted
                if security_logs_deleted > 0:
                    logger.debug(f"🗑️ Deleted {security_logs_deleted} security log entries for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting security logs: {e}")

            # 9. Delete AI parameters that might be specific to this candidate
            try:
                all_ai_params = await self.get_all_ai_parameters()
                candidate_ai_params = []

                for param_id, param_data in all_ai_params.items():
                    if (param_data.get("candidateId") == candidate_id or
                            param_data.get("userId") == candidate_id):
                        candidate_ai_params.append(param_id)

                # Delete each AI parameter set
                for param_id in candidate_ai_params:
                    await self.delete_ai_parameters(param_id)

                deletion_stats["ai_parameters"] = len(candidate_ai_params)
                if len(candidate_ai_params) > 0:
                    logger.info(f"🗑️ Deleted {len(candidate_ai_params)} AI parameter sets for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting AI parameters: {e}")

            # 10. Delete email verification tokens if any exist
            try:
                if candidate_email:
                    # Clean up any pending verification tokens
                    pattern = "email_verification:*"
                    cursor = 0
                    tokens_deleted = 0

                    while True:
                        cursor, keys = await self.redis.scan(cursor, match=pattern, count=100)

                        for key in keys:
                            token_data = await self.redis.get(key)
                            if token_data:
                                verification_info = json.loads(token_data)
                                if verification_info.get("email", "").lower() == candidate_email:
                                    await self.redis.delete(key)
                                    tokens_deleted += 1

                        if cursor == 0:
                            break

                    if tokens_deleted > 0:
                        logger.debug(f"🗑️ Deleted {tokens_deleted} email verification tokens for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting email verification tokens: {e}")

            # 11. Delete password reset tokens if any exist
            try:
                if candidate_email:
                    pattern = "password_reset:*"
                    cursor = 0
                    tokens_deleted = 0

                    while True:
                        cursor, keys = await self.redis.scan(cursor, match=pattern, count=100)

                        for key in keys:
                            token_data = await self.redis.get(key)
                            if token_data:
                                reset_info = json.loads(token_data)
                                if reset_info.get("email", "").lower() == candidate_email:
                                    await self.redis.delete(key)
                                    tokens_deleted += 1

                        if cursor == 0:
                            break

                    if tokens_deleted > 0:
                        logger.debug(f"🗑️ Deleted {tokens_deleted} password reset tokens for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting password reset tokens: {e}")

            # 12. Delete MFA codes if any exist
            try:
                if candidate_email:
                    pattern = f"mfa_code:{candidate_email}:*"
                    cursor = 0
                    mfa_codes_deleted = 0

                    while True:
                        cursor, keys = await self.redis.scan(cursor, match=pattern, count=100)

                        if keys:
                            await self.redis.delete(*keys)
                            mfa_codes_deleted += len(keys)

                        if cursor == 0:
                            break

                    if mfa_codes_deleted > 0:
                        logger.debug(f"🗑️ Deleted {mfa_codes_deleted} MFA codes for candidate {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting MFA codes: {e}")

            # 13. Finally, delete the candidate record itself
            try:
                key = f"{self.KEY_PREFIXES['candidates']}{candidate_id}"
                result = await self.redis.delete(key)
                deletion_stats["candidate_record"] = result
                logger.info(f"🗑️ Deleted candidate record for {candidate_id}")
            except Exception as e:
                logger.error(f"❌ Error deleting candidate record: {e}")

            # 14. Log the deletion as a security event (if we have admin/system user context)
            try:
                total_items_deleted = sum(deletion_stats.values())
                logger.info(f"✅ Completed cascading delete for candidate {candidate_id}. "
                            f"Total items deleted: {total_items_deleted}")
                logger.info(f"📊 Deletion breakdown: {deletion_stats}")
            except Exception as e:
                logger.error(f"❌ Error logging deletion summary: {e}")

            return deletion_stats

        except Exception as e:
            logger.error(f"❌ Critical error during candidate deletion {candidate_id}: {e}")
            raise
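
    # Caller-side sketch (an assumption for illustration, not part of this diff):
    # an admin-only handler could invoke the cascading delete above and surface the
    # per-category counts it returns. `db` is assumed to be a connected
    # RedisDatabase; create_success_response is the response helper used elsewhere
    # in this commit.
    #
    #     stats = await db.delete_candidate(candidate_id)
    #     return create_success_response({
    #         "deleted": stats,
    #         "totalItemsDeleted": sum(stats.values())
    #     })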

    async def delete_candidate_batch(self, candidate_ids: List[str]) -> Dict[str, Dict[str, int]]:
        """
        Delete multiple candidates in batch with detailed reporting
        Returns deletion stats for each candidate
        """
        try:
            batch_results = {}
            total_stats = {
                "documents": 0,
                "chat_sessions": 0,
                "chat_messages": 0,
                "job_applications": 0,
                "user_records": 0,
                "auth_records": 0,
                "security_logs": 0,
                "ai_parameters": 0,
                "candidate_record": 0
            }

            logger.info(f"🗑️ Starting batch deletion for {len(candidate_ids)} candidates")

            for candidate_id in candidate_ids:
                try:
                    deletion_stats = await self.delete_candidate(candidate_id)
                    batch_results[candidate_id] = deletion_stats

                    # Add to totals
                    for key, value in deletion_stats.items():
                        total_stats[key] += value

                except Exception as e:
                    logger.error(f"❌ Failed to delete candidate {candidate_id}: {e}")
                    batch_results[candidate_id] = {"error": str(e)}

            logger.info(f"✅ Completed batch deletion. Total items deleted: {sum(total_stats.values())}")
            logger.info(f"📊 Batch totals: {total_stats}")

            return {
                "individual_results": batch_results,
                "totals": total_stats,
                "summary": {
                    "total_candidates_processed": len(candidate_ids),
                    "successful_deletions": len([r for r in batch_results.values() if "error" not in r]),
                    "failed_deletions": len([r for r in batch_results.values() if "error" in r]),
                    "total_items_deleted": sum(total_stats.values())
                }
            }

        except Exception as e:
            logger.error(f"❌ Critical error during batch candidate deletion: {e}")
            raise
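
    # Usage sketch (illustrative assumption, not from this commit): deleting several
    # AI-generated candidates at once and checking the summary block of the returned
    # dict; `db` and `ai_candidate_ids` are assumed to exist in the calling code.
    #
    #     report = await db.delete_candidate_batch(ai_candidate_ids)
    #     if report["summary"]["failed_deletions"]:
    #         logger.warning(f"{report['summary']['failed_deletions']} deletions failed")
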
    # Employers operations
    async def get_employer(self, employer_id: str) -> Optional[Dict]:
        """Get employer by ID"""

@@ -1853,7 +1853,6 @@ async def get_candidate_profile_image(
    database: RedisDatabase = Depends(get_database)
):
    """Get profile image of a candidate by username"""
    logger.info(f"🔍 Fetching profile image for candidate: {username}")
    try:
        all_candidates_data = await database.get_all_candidates()
        candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]
@@ -2406,7 +2405,9 @@ async def update_candidate(
                content=create_error_response("NOT_FOUND", "Candidate not found")
            )

        candidate = Candidate.model_validate(candidate_data) if not candidate_data.get("is_AI") else CandidateAI.model_validate(candidate_data)
        is_AI = candidate_data.get("is_AI", False)
        logger.info(json.dumps(candidate_data, indent=2))
        candidate = CandidateAI.model_validate(candidate_data) if is_AI else Candidate.model_validate(candidate_data)

        # Check authorization (user can only update their own profile)
        if current_user.is_admin is False and candidate.id != current_user.id:
@@ -2421,7 +2422,7 @@ async def update_candidate(
        logger.info(f"🔄 Updating candidate {candidate_id} with data: {updates}")
        candidate_dict = candidate.model_dump()
        candidate_dict.update(updates)
        updated_candidate = Candidate.model_validate(candidate_dict)
        updated_candidate = CandidateAI.model_validate(candidate_dict) if is_AI else Candidate.model_validate(candidate_dict)
        await database.set_candidate(candidate_id, updated_candidate.model_dump())

        return create_success_response(updated_candidate.model_dump(by_alias=True, exclude_unset=True))