import hashlib
import time
from fastapi import (  # type: ignore
    FastAPI, HTTPException, Depends, Query, Path, Body, status, APIRouter,
    Request, BackgroundTasks, File, UploadFile, Form,
)
from fastapi.middleware.cors import CORSMiddleware  # type: ignore
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials  # type: ignore
from fastapi.exceptions import RequestValidationError  # type: ignore
from fastapi.responses import JSONResponse, StreamingResponse, FileResponse  # type: ignore
from fastapi.staticfiles import StaticFiles  # type: ignore
from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY  # type: ignore
from functools import wraps
from typing import Any, Callable, Dict, List, Optional

import schedule  # type: ignore

import os
import shutil
from enum import Enum
import uuid
import pathlib

from markitdown import MarkItDown, StreamInfo  # type: ignore
import io

import uvicorn  # type: ignore
from datetime import datetime, timedelta, timezone, UTC
from contextlib import asynccontextmanager
import redis.asyncio as redis  # type: ignore
import re
import asyncio
import signal
import json
import jwt
import logging
from pydantic import BaseModel, EmailStr, field_validator, ValidationError  # type: ignore

# Prometheus
from prometheus_client import Summary, CollectorRegistry, Counter  # type: ignore
from prometheus_fastapi_instrumentator import Instrumentator  # type: ignore

import secrets

import backstory_traceback
from rate_limiter import RateLimiter, RateLimitResult, RateLimitConfig
from background_tasks import BackgroundTaskManager

# =============================
# Import custom modules
# =============================
from auth_utils import (
    AuthenticationManager,
    validate_password_strength,
    sanitize_login_input,
    SecurityConfig,
)
import model_cast
import defines
from logger import logger
from database import RedisDatabase, redis_manager, DatabaseManager
from metrics import Metrics
import llm_proxy as llm_manager
import entities
from email_service import VerificationEmailRateLimiter, email_service
from device_manager import DeviceManager
import agents

# =============================
# Import Pydantic models
# =============================
from models import (
    # API
    MOCK_UUID, ApiActivityType, ChatMessageError, ChatMessageResume,
    ChatMessageSkillAssessment, ChatMessageStatus, ChatMessageStreaming,
    ChatMessageUser, DocumentMessage, DocumentOptions, Job, JobRequirements,
    JobRequirementsMessage, LoginRequest, CreateCandidateRequest,
    CreateEmployerRequest,

    # User models
    Candidate, Employer, BaseUserWithType, BaseUser, Guest, Authentication,
    AuthResponse, CandidateAI,

    # Job models
    JobFull, JobApplication, ApplicationStatus,

    # Chat models
    ChatSession, ChatMessage, ChatContext, ChatQuery, ApiStatusType,
    ChatSenderType, ApiMessageType, ChatContextType, ChatMessageRagSearch,

    # Document models
    Document, DocumentType, DocumentListResponse, DocumentUpdateRequest,
    DocumentContentResponse,

    # Supporting models
    Location, MFARequest, MFAData, MFARequestResponse, MFAVerifyRequest,
    RagContentMetadata, RagContentResponse, ResendVerificationRequest, Skill,
    SkillAssessment, SystemInfo, WorkExperience, Education,

    # Email
    EmailVerificationRequest,
)


# ============================
# Startup / Shutdown Handling
# ============================
db_manager = DatabaseManager()

# Save any previously installed signal handlers so ours can chain to them
prev_int = signal.getsignal(signal.SIGINT)
prev_term = signal.getsignal(signal.SIGTERM)


def signal_handler(signum, frame):
    logger.info(f"⚠️ Received signal {signum!r}, shutting down…")
    # Chain to the old handler (it might raise KeyboardInterrupt or exit)
    if signum == signal.SIGINT and callable(prev_int):
        prev_int(signum, frame)
    elif signum == signal.SIGTERM and callable(prev_term):
        prev_term(signum, frame)


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup
    logger.info("🚀 Starting Backstory API")
    logger.info(f"📝 API Documentation available at: http://{defines.host}:{defines.port}{defines.api_prefix}/docs")
    logger.info("🔗 API endpoints prefixed with: /api/1.0")
    if os.path.exists(defines.static_content):
        logger.info(f"📁 Serving static files from: {defines.static_content}")

    try:
        # Initialize database
        await db_manager.initialize()
        entities.entity_manager.initialize(prometheus_collector, database=db_manager.get_database())

        # Install shutdown handlers once the database is up
        signal.signal(signal.SIGTERM, signal_handler)
        signal.signal(signal.SIGINT, signal_handler)

        logger.info("🚀 Application startup completed")

        yield  # Application is running

    except Exception as e:
        logger.error(f"❌ Failed to start application: {e}")
        raise

    finally:
        # Shutdown
        logger.info("Application shutdown requested")
        await db_manager.graceful_shutdown()


# Global background task manager
background_task_manager: Optional[BackgroundTaskManager] = None

app = FastAPI(
    lifespan=lifespan,
    title="Backstory API",
    description="FastAPI backend for Backstory platform with TypeScript frontend",
    version="1.0.0",
    docs_url=f"{defines.api_prefix}/docs",
    redoc_url=f"{defines.api_prefix}/redoc",
    openapi_url=f"{defines.api_prefix}/openapi.json",
)

ssl_enabled = os.getenv("SSL_ENABLED", "true").lower() == "true"
if ssl_enabled:
    allow_origins = ["https://battle-linux.ketrenos.com:3000",
                     "https://backstory-beta.ketrenos.com"]
else:
    allow_origins = ["http://battle-linux.ketrenos.com:3000",
                     "http://backstory-beta.ketrenos.com"]

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=allow_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Security
security = HTTPBearer()
JWT_SECRET_KEY = os.getenv("JWT_SECRET_KEY", "")
if JWT_SECRET_KEY == "":
    raise ValueError("JWT_SECRET_KEY environment variable is not set")
ALGORITHM = "HS256"

# ============================
# Debug data type failures
# ============================
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    logger.error(backstory_traceback.format_exc())
    logger.error(f"❌ Validation error {request.method} {request.url.path}: {str(exc)}")
    # Hand JSONResponse a plain dict; passing json.dumps(...) here would
    # double-encode the payload into a JSON string literal.
    return JSONResponse(
        status_code=HTTP_422_UNPROCESSABLE_ENTITY,
        content={"detail": str(exc)},
    )


# ============================
# Authentication Utilities
# ============================

def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    """Create a signed JWT; defaults to a 24-hour expiry when none is given."""
    to_encode = data.copy()
    if expires_delta:
        expire = datetime.now(UTC) + expires_delta
    else:
        expire = datetime.now(UTC) + timedelta(hours=24)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, JWT_SECRET_KEY, algorithm=ALGORITHM)
    return encoded_jwt
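
# Usage elsewhere in this module, e.g. guest sessions get a 48-hour access
# token and a 14-day refresh token:
#
#   access_token = create_access_token(
#       data={"sub": guest_id, "type": "guest"},
#       expires_delta=timedelta(hours=48),
#   )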


async def verify_token_with_blacklist(credentials: HTTPAuthorizationCredentials = Depends(security)):
    """Enhanced token verification with guest session recovery"""
    try:
        # First decode the token
        payload = jwt.decode(credentials.credentials, JWT_SECRET_KEY, algorithms=[ALGORITHM])
        user_id: Optional[str] = payload.get("sub")
        token_type: str = payload.get("type", "access")

        if user_id is None:
            raise HTTPException(status_code=401, detail="Invalid authentication credentials")

        # Check if token is blacklisted
        redis = redis_manager.get_client()
        blacklist_key = f"blacklisted_token:{credentials.credentials}"

        is_blacklisted = await redis.exists(blacklist_key)
        if is_blacklisted:
            logger.warning(f"🚫 Attempt to use blacklisted token for user {user_id}")
            raise HTTPException(status_code=401, detail="Token has been revoked")

        # For guest tokens, verify guest still exists and update activity
        if token_type == "guest":
            database = db_manager.get_database()
            guest_data = await database.get_guest(user_id)

            if not guest_data:
                logger.warning(f"🚫 Guest session not found for token: {user_id}")
                raise HTTPException(status_code=401, detail="Guest session expired")

            # Update guest activity
            guest_data["last_activity"] = datetime.now(UTC).isoformat()
            await database.set_guest(user_id, guest_data)
            logger.debug(f"🔄 Guest activity updated: {user_id}")

        return user_id

    except jwt.PyJWTError as e:
        logger.warning(f"⚠️ JWT decode error: {e}")
        raise HTTPException(status_code=401, detail="Invalid authentication credentials")
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Token verification error: {e}")
        raise HTTPException(status_code=401, detail="Token verification failed")
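
# Blacklist entries are written by /auth/logout under the key scheme
# "blacklisted_token:<raw JWT>" with a TTL matching the token's remaining
# lifetime, so revoked tokens age out of Redis on their own.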


async def get_current_user(
    user_id: str = Depends(verify_token_with_blacklist),
    database: RedisDatabase = Depends(lambda: db_manager.get_database())
) -> BaseUserWithType:
    """Get current user from database"""
    try:
        # Check candidates
        candidate_data = await database.get_candidate(user_id)
        if candidate_data:
            if candidate_data.get("is_AI"):
                return model_cast.cast_to_base_user_with_type(CandidateAI.model_validate(candidate_data))
            else:
                return model_cast.cast_to_base_user_with_type(Candidate.model_validate(candidate_data))

        # Check employers
        employer = await database.get_employer(user_id)
        if employer:
            return Employer.model_validate(employer)

        logger.warning(f"⚠️ User {user_id} not found in database")
        raise HTTPException(status_code=404, detail="User not found")

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error getting current user: {e}")
        raise HTTPException(status_code=404, detail="User not found")


async def get_current_user_or_guest(
    user_id: str = Depends(verify_token_with_blacklist),
    database: RedisDatabase = Depends(lambda: db_manager.get_database())
) -> BaseUserWithType:
    """Get current user (including guests) from database"""
    try:
        # Check candidates first
        candidate_data = await database.get_candidate(user_id)
        if candidate_data:
            return Candidate.model_validate(candidate_data) if not candidate_data.get("is_AI") else CandidateAI.model_validate(candidate_data)

        # Check employers
        employer_data = await database.get_employer(user_id)
        if employer_data:
            return Employer.model_validate(employer_data)

        # Check guests
        guest_data = await database.get_guest(user_id)
        if guest_data:
            return Guest.model_validate(guest_data)

        logger.warning(f"⚠️ User {user_id} not found in database")
        raise HTTPException(status_code=404, detail="User not found")

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error getting current user: {e}")
        raise HTTPException(status_code=404, detail="User not found")


async def get_current_admin(
    user_id: str = Depends(verify_token_with_blacklist),
    database: RedisDatabase = Depends(lambda: db_manager.get_database())
) -> BaseUserWithType:
    user = await get_current_user(user_id=user_id, database=database)
    if isinstance(user, (Candidate, Employer)) and user.is_admin:
        return user
    logger.warning(f"⚠️ User {user_id} is not an admin")
    raise HTTPException(status_code=403, detail="Admin access required")


# ============================
# Helper Functions
# ============================
async def get_database() -> RedisDatabase:
    """
    FastAPI dependency to get database instance with shutdown protection
    """
    return db_manager.get_database()


async def get_last_item(generator):
    """Drain an async generator and return only its final item (or None)."""
    last_item = None
    async for item in generator:
        last_item = item
    return last_item
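
# Useful when only an agent's final message matters, e.g. (sketch; arguments
# mirror the chat_agent.generate(...) calls used elsewhere in this module):
#
#   final = await get_last_item(chat_agent.generate(llm=..., model=..., session_id=..., prompt=...))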


def create_success_response(data: Any, meta: Optional[Dict] = None) -> Dict:
    return {
        "success": True,
        "data": data,
        "meta": meta
    }


def create_error_response(code: str, message: str, details: Any = None) -> Dict:
    return {
        "success": False,
        "error": {
            "code": code,
            "message": message,
            "details": details
        }
    }


def create_paginated_response(
    data: List[Any],
    page: int,
    limit: int,
    total: int
) -> Dict:
    total_pages = (total + limit - 1) // limit
    has_more = page < total_pages

    return {
        "data": data,
        "total": total,
        "page": page,
        "limit": limit,
        "totalPages": total_pages,
        "hasMore": has_more
    }
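
# total_pages uses ceiling division, (total + limit - 1) // limit, so a partial
# final page still counts. Example shape for page=1, limit=20, total=45:
#   {"data": [...], "total": 45, "page": 1, "limit": 20,
#    "totalPages": 3, "hasMore": True}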


def filter_and_paginate(
    items: List[Any],
    page: int = 1,
    limit: int = 20,
    sort_by: Optional[str] = None,
    sort_order: str = "desc",
    filters: Optional[Dict] = None
) -> tuple:
    """Filter, sort, and paginate items"""
    filtered_items = items.copy()

    # Apply filters (simplified filtering logic); stop early if no items
    # remain, since the checks below inspect filtered_items[0]
    if filters:
        for key, value in filters.items():
            if not filtered_items:
                break
            if isinstance(filtered_items[0], dict) and key in filtered_items[0]:
                filtered_items = [item for item in filtered_items if item.get(key) == value]
            elif hasattr(filtered_items[0], key):
                filtered_items = [item for item in filtered_items
                                  if getattr(item, key, None) == value]

    # Sort items
    if sort_by and filtered_items:
        reverse = sort_order.lower() == "desc"
        try:
            if isinstance(filtered_items[0], dict):
                filtered_items.sort(key=lambda x: x.get(sort_by, ""), reverse=reverse)
            else:
                filtered_items.sort(key=lambda x: getattr(x, sort_by, ""), reverse=reverse)
        except (AttributeError, TypeError):
            pass  # Skip sorting if attribute doesn't exist or isn't comparable

    # Paginate
    total = len(filtered_items)
    start = (page - 1) * limit
    end = start + limit
    paginated_items = filtered_items[start:end]

    return paginated_items, total
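
# Typical use (sketch; all_jobs is a hypothetical in-memory list):
#
#   jobs_page, total = filter_and_paginate(
#       all_jobs, page=2, limit=10, sort_by="created_at",
#       filters={"status": "active"},
#   )
#   response = create_paginated_response(jobs_page, page=2, limit=10, total=total)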


async def stream_agent_response(chat_agent: agents.Agent,
                                user_message: ChatMessageUser,
                                chat_session_data: Dict[str, Any] | None = None,
                                database: RedisDatabase | None = None) -> StreamingResponse:
    async def message_stream_generator():
        """Generator to stream messages with persistence"""
        final_message = None

        async for generated_message in chat_agent.generate(
            llm=llm_manager.get_llm(),
            model=defines.model,
            session_id=user_message.session_id,
            prompt=user_message.content,
        ):
            if generated_message.status == ApiStatusType.ERROR:
                logger.error(f"❌ AI generation error: {generated_message.content}")
                yield f"data: {json.dumps({'status': 'error'})}\n\n"
                return

            # Store reference to the complete AI message
            if generated_message.status == ApiStatusType.DONE:
                final_message = generated_message

            # If the message is not done, convert it to a ChatMessageBase to remove
            # metadata and other unnecessary fields for streaming
            if generated_message.status != ApiStatusType.DONE:
                if not isinstance(generated_message, ChatMessageStreaming) and not isinstance(generated_message, ChatMessageStatus):
                    raise TypeError(
                        f"Expected ChatMessageStreaming or ChatMessageStatus, got {type(generated_message)}"
                    )

                json_data = generated_message.model_dump(mode='json', by_alias=True)
                json_str = json.dumps(json_data)

                yield f"data: {json_str}\n\n"

        # After streaming is complete, persist the final AI message to database
        if final_message and final_message.status == ApiStatusType.DONE:
            try:
                if database and chat_session_data:
                    await database.add_chat_message(final_message.session_id, final_message.model_dump())
                    logger.info(f"🤖 Message saved to database for session {final_message.session_id}")

                    # Update session last activity again
                    chat_session_data["lastActivity"] = datetime.now(UTC).isoformat()
                    await database.set_chat_session(final_message.session_id, chat_session_data)

            except Exception as e:
                logger.error(f"❌ Failed to save message to database: {e}")

    return StreamingResponse(
        message_stream_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache, no-store, must-revalidate",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",  # Nginx
            "X-Content-Type-Options": "nosniff",
            "Access-Control-Allow-Origin": "*",  # Adjust for your CORS needs
            "Transfer-Encoding": "chunked",
        },
    )
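
# Each chunk is framed as a Server-Sent Event: a "data: <json>" line followed
# by a blank line. A browser client can consume the stream with EventSource,
# and any HTTP client can split the body on "\n\n" and strip the "data: "
# prefix from each frame.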


# Helper functions
def get_candidate_files_dir(username: str) -> pathlib.Path:
    """Get the files directory for a candidate"""
    files_dir = pathlib.Path(defines.user_dir) / username / "files"
    files_dir.mkdir(parents=True, exist_ok=True)
    return files_dir


def get_document_type_from_filename(filename: str) -> DocumentType:
    """Determine document type from filename extension"""
    extension = pathlib.Path(filename).suffix.lower()

    type_mapping = {
        '.pdf': DocumentType.PDF,
        '.docx': DocumentType.DOCX,
        '.doc': DocumentType.DOCX,
        '.txt': DocumentType.TXT,
        '.md': DocumentType.MARKDOWN,
        '.markdown': DocumentType.MARKDOWN,
        '.png': DocumentType.IMAGE,
        '.jpg': DocumentType.IMAGE,
        '.jpeg': DocumentType.IMAGE,
        '.gif': DocumentType.IMAGE,
    }

    return type_mapping.get(extension, DocumentType.TXT)
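
# Examples:
#   get_document_type_from_filename("resume.md")  -> DocumentType.MARKDOWN
#   get_document_type_from_filename("photo.JPG")  -> DocumentType.IMAGE   (suffix is lowercased)
#   get_document_type_from_filename("notes.xyz")  -> DocumentType.TXT     (fallback)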


# ============================
# Rate Limiting Dependencies
# ============================

async def get_rate_limiter(database: RedisDatabase = Depends(get_database)) -> RateLimiter:
    """Dependency to get rate limiter instance"""
    return RateLimiter(database)


async def apply_rate_limiting(
    request: Request,
    rate_limiter: RateLimiter = Depends(get_rate_limiter),
    current_user: Optional[BaseUserWithType] = None
) -> RateLimitResult:
    """
    Apply rate limiting based on user type
    Can be used as a dependency in endpoints
    """
    try:
        # Determine user info for rate limiting
        if current_user:
            user_id = current_user.id
            user_type = current_user.user_type
            is_admin = getattr(current_user, 'is_admin', False)
        else:
            # For unauthenticated requests, use IP address as identifier
            user_id = request.client.host if request.client else "unknown"
            user_type = "anonymous"
            is_admin = False

        # Extract endpoint for specific rate limiting if needed
        endpoint = request.url.path

        # Check rate limits
        result = await rate_limiter.check_rate_limit(
            user_id=user_id,
            user_type=user_type,
            is_admin=is_admin,
            endpoint=endpoint
        )

        if not result.allowed:
            logger.warning(f"🚫 Rate limit exceeded for {user_type} {user_id}: {result.reason}")
            raise HTTPException(
                status_code=429,
                detail={
                    "error": "Rate limit exceeded",
                    "message": result.reason,
                    "retryAfter": result.retry_after_seconds,
                    "remaining": result.remaining_requests
                },
                headers={"Retry-After": str(result.retry_after_seconds or 60)}
            )

        return result

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Rate limiting error: {e}")
        # Fail open - allow request if rate limiting fails
        return RateLimitResult(allowed=True, reason="Rate limiting system error")


async def rate_limit_dependency(
    request: Request,
    rate_limiter: RateLimiter = Depends(get_rate_limiter)
):
    """
    Rate limiting dependency that can be applied to any endpoint
    Usage: dependencies=[Depends(rate_limit_dependency)]
    """
    try:
        # Try to get current user from token if present
        current_user = None
        if "authorization" in request.headers:
            try:
                auth_header = request.headers["authorization"]
                if auth_header.startswith("Bearer "):
                    token = auth_header[7:]
                    payload = jwt.decode(token, JWT_SECRET_KEY, algorithms=[ALGORITHM])
                    user_id = payload.get("sub")
                    if user_id:
                        database = db_manager.get_database()
                        # Quick user lookup for rate limiting
                        candidate_data = await database.get_candidate(user_id)
                        if candidate_data:
                            current_user = Candidate.model_validate(candidate_data)
                        else:
                            employer_data = await database.get_employer(user_id)
                            if employer_data:
                                current_user = Employer.model_validate(employer_data)
            except Exception:
                # Ignore auth errors for rate limiting - treat as anonymous
                pass

        await apply_rate_limiting(request, rate_limiter, current_user)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Rate limit dependency error: {e}")
        # Fail open
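
# Example (hypothetical route) showing how to attach the dependency so the
# endpoint is rate limited before its handler runs:
#
#   @api_router.get("/ping", dependencies=[Depends(rate_limit_dependency)])
#   async def ping():
#       return create_success_response({"status": "ok"})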


# ============================
# API Router Setup
# ============================

# Create API router with prefix
api_router = APIRouter(prefix="/api/1.0")


# ============================
# Authentication Endpoints
# ============================
@api_router.post("/auth/guest")
async def create_guest_session_enhanced(
    request: Request,
    database: RedisDatabase = Depends(get_database),
    rate_limiter: RateLimiter = Depends(get_rate_limiter)
):
    """Create a guest session with enhanced validation and persistence"""
    try:
        # Apply rate limiting for guest creation
        ip_address = request.client.host if request.client else "unknown"

        # Check rate limits for guest session creation
        rate_result = await rate_limiter.check_rate_limit(
            user_id=ip_address,
            user_type="guest_creation",
            is_admin=False,
            endpoint="/auth/guest"
        )

        if not rate_result.allowed:
            logger.warning(f"🚫 Guest creation rate limit exceeded for IP {ip_address}")
            return JSONResponse(
                status_code=429,
                content=create_error_response(
                    "RATE_LIMITED",
                    rate_result.reason or "Too many guest sessions created"
                ),
                headers={"Retry-After": str(rate_result.retry_after_seconds or 300)}
            )

        # Generate unique guest identifier with timestamp for uniqueness
        current_time = datetime.now(UTC)
        guest_id = str(uuid.uuid4())
        session_id = f"guest_{int(current_time.timestamp())}_{secrets.token_hex(8)}"
        guest_username = f"guest-{session_id[-12:]}"

        # Verify username is unique (unlikely but possible collision)
        while True:
            existing_user = await database.get_user(guest_username)
            if existing_user:
                # Regenerate if collision
                session_id = f"guest_{int(current_time.timestamp())}_{secrets.token_hex(12)}"
                guest_username = f"guest-{session_id[-16:]}"
            else:
                break

        # Create guest user data with comprehensive info
        guest_data = {
            "id": guest_id,
            "session_id": session_id,
            "username": guest_username,
            "email": f"{guest_username}@guest.backstory.ketrenos.com",
            "first_name": "Guest",
            "last_name": "User",
            "full_name": "Guest User",
            "user_type": "guest",
            "created_at": current_time.isoformat(),
            "updated_at": current_time.isoformat(),
            "last_activity": current_time.isoformat(),
            "last_login": current_time.isoformat(),
            "status": "active",
            "is_admin": False,
            "ip_address": ip_address,
            "user_agent": request.headers.get("user-agent", "Unknown"),
            "converted_to_user_id": None,
            "browser_session": True,  # Mark as browser session
            "persistent": True,  # Mark as persistent
        }

        # Store guest with enhanced persistence
        await database.set_guest(guest_id, guest_data)

        # Create user lookup records
        user_auth_data = {
            "id": guest_id,
            "type": "guest",
            "email": guest_data["email"],
            "username": guest_username,
            "session_id": session_id,
            "created_at": current_time.isoformat()
        }

        await database.set_user(guest_data["email"], user_auth_data)
        await database.set_user(guest_username, user_auth_data)
        await database.set_user_by_id(guest_id, user_auth_data)

        # Create authentication tokens with longer expiry for guests
        access_token = create_access_token(
            data={"sub": guest_id, "type": "guest"},
            expires_delta=timedelta(hours=48)  # Longer expiry for guests
        )
        refresh_token = create_access_token(
            data={"sub": guest_id, "type": "refresh_guest"},
            expires_delta=timedelta(days=14)  # 2 weeks refresh for guests
        )

        # Verify guest was stored correctly
        verification = await database.get_guest(guest_id)
        if not verification:
            logger.error(f"❌ Failed to verify guest storage: {guest_id}")
            return JSONResponse(
                status_code=500,
                content=create_error_response("STORAGE_ERROR", "Failed to create guest session")
            )

        # Create guest object for response
        guest = Guest.model_validate(guest_data)

        # Log successful creation
        logger.info(f"👤 Guest session created and verified: {guest_username} (ID: {guest_id}) from IP: {ip_address}")

        # Create auth response
        auth_response = {
            "accessToken": access_token,
            "refreshToken": refresh_token,
            "user": guest.model_dump(by_alias=True),
            "expiresAt": int((current_time + timedelta(hours=48)).timestamp()),
            "userType": "guest",
            "isGuest": True
        }

        return create_success_response(auth_response)

    except Exception as e:
        logger.error(f"❌ Guest session creation error: {e}")
        logger.error(backstory_traceback.format_exc())
        return JSONResponse(
            status_code=500,
            content=create_error_response("GUEST_CREATION_FAILED", "Failed to create guest session")
        )


@api_router.post("/auth/guest/convert")
async def convert_guest_to_user(
    registration_data: Dict[str, Any] = Body(...),
    current_user = Depends(get_current_user),
    database: RedisDatabase = Depends(get_database)
):
    """Convert a guest session to a permanent user account"""
    try:
        # Verify current user is a guest
        if current_user.user_type != "guest":
            return JSONResponse(
                status_code=400,
                content=create_error_response("NOT_GUEST", "Only guest users can be converted")
            )

        guest: Guest = current_user
        account_type = registration_data.get("accountType", "candidate")

        if account_type == "candidate":
            # Validate candidate registration data
            try:
                candidate_request = CreateCandidateRequest.model_validate(registration_data)
            except ValidationError as e:
                return JSONResponse(
                    status_code=400,
                    content=create_error_response("VALIDATION_ERROR", str(e))
                )

            # Check if email/username already exists
            auth_manager = AuthenticationManager(database)
            user_exists, conflict_field = await auth_manager.check_user_exists(
                candidate_request.email,
                candidate_request.username
            )

            if user_exists:
                return JSONResponse(
                    status_code=409,
                    content=create_error_response(
                        "USER_EXISTS",
                        f"A user with this {conflict_field} already exists"
                    )
                )

            # Create candidate
            candidate_id = str(uuid.uuid4())
            current_time = datetime.now(timezone.utc)

            candidate_data = {
                "id": candidate_id,
                "user_type": "candidate",
                "email": candidate_request.email,
                "username": candidate_request.username,
                "first_name": candidate_request.first_name,
                "last_name": candidate_request.last_name,
                "full_name": f"{candidate_request.first_name} {candidate_request.last_name}",
                "phone": candidate_request.phone,
                "created_at": current_time.isoformat(),
                "updated_at": current_time.isoformat(),
                "status": "active",
                "is_admin": False,
                "converted_from_guest": guest.id
            }

            candidate = Candidate.model_validate(candidate_data)

            # Create authentication
            await auth_manager.create_user_authentication(candidate_id, candidate_request.password)

            # Store candidate
            await database.set_candidate(candidate_id, candidate.model_dump())

            # Update user lookup records
            user_auth_data = {
                "id": candidate_id,
                "type": "candidate",
                "email": candidate.email,
                "username": candidate.username
            }

            await database.set_user(candidate.email, user_auth_data)
            await database.set_user(candidate.username, user_auth_data)
            await database.set_user_by_id(candidate_id, user_auth_data)

            # Mark guest as converted
            guest_data = guest.model_dump()
            guest_data["converted_to_user_id"] = candidate_id
            guest_data["updated_at"] = current_time.isoformat()
            await database.set_guest(guest.id, guest_data)

            # Create new tokens for the candidate
            access_token = create_access_token(data={"sub": candidate_id})
            refresh_token = create_access_token(
                data={"sub": candidate_id, "type": "refresh"},
                expires_delta=timedelta(days=SecurityConfig.REFRESH_TOKEN_EXPIRY_DAYS)
            )

            auth_response = AuthResponse(
                accessToken=access_token,
                refreshToken=refresh_token,
                user=candidate,
                expiresAt=int((current_time + timedelta(hours=SecurityConfig.TOKEN_EXPIRY_HOURS)).timestamp())
            )

            logger.info(f"✅ Guest {guest.session_id} converted to candidate {candidate.username}")

            return create_success_response({
                "message": "Guest account successfully converted to candidate",
                "auth": auth_response.model_dump(by_alias=True),
                "conversionType": "candidate"
            })

        else:
            return JSONResponse(
                status_code=400,
                content=create_error_response("INVALID_TYPE", "Only candidate conversion is currently supported")
            )

    except Exception as e:
        logger.error(f"❌ Guest conversion error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CONVERSION_FAILED", "Failed to convert guest account")
        )


@api_router.post("/auth/logout")
async def logout(
    access_token: str = Body(..., alias="accessToken"),
    refresh_token: str = Body(..., alias="refreshToken"),
    current_user = Depends(get_current_user),
    database: RedisDatabase = Depends(get_database)
):
    """Logout endpoint - revokes both access and refresh tokens"""
    logger.info(f"🔑 User {current_user.id} is logging out")
    try:
        # Verify refresh token
        try:
            refresh_payload = jwt.decode(refresh_token, JWT_SECRET_KEY, algorithms=[ALGORITHM])
            user_id = refresh_payload.get("sub")
            token_type = refresh_payload.get("type")
            refresh_exp = refresh_payload.get("exp")

            if not user_id or token_type != "refresh":
                return JSONResponse(
                    status_code=401,
                    content=create_error_response("INVALID_TOKEN", "Invalid refresh token")
                )
        except jwt.PyJWTError as e:
            logger.warning(f"⚠️ Invalid refresh token during logout: {e}")
            return JSONResponse(
                status_code=401,
                content=create_error_response("INVALID_TOKEN", "Invalid refresh token")
            )

        # Verify that the refresh token belongs to the current user
        if user_id != current_user.id:
            return JSONResponse(
                status_code=403,
                content=create_error_response("FORBIDDEN", "Token does not belong to current user")
            )

        # Get Redis client
        redis = redis_manager.get_client()

        # Revoke refresh token (blacklist it until its natural expiration);
        # treat a missing "exp" claim as already expired
        refresh_ttl = max(0, (refresh_exp or 0) - int(datetime.now(UTC).timestamp()))
        if refresh_ttl > 0:
            await redis.setex(
                f"blacklisted_token:{refresh_token}",
                refresh_ttl,
                json.dumps({
                    "user_id": user_id,
                    "token_type": "refresh",
                    "revoked_at": datetime.now(UTC).isoformat(),
                    "reason": "user_logout"
                })
            )
            logger.info(f"🔒 Blacklisted refresh token for user {user_id}")

        # If access token is provided, revoke it too
        if access_token:
            try:
                access_payload = jwt.decode(access_token, JWT_SECRET_KEY, algorithms=[ALGORITHM])
                access_user_id = access_payload.get("sub")
                access_exp = access_payload.get("exp")

                # Verify access token belongs to same user
                if access_user_id == user_id:
                    access_ttl = max(0, (access_exp or 0) - int(datetime.now(UTC).timestamp()))
                    if access_ttl > 0:
                        await redis.setex(
                            f"blacklisted_token:{access_token}",
                            access_ttl,
                            json.dumps({
                                "user_id": user_id,
                                "token_type": "access",
                                "revoked_at": datetime.now(UTC).isoformat(),
                                "reason": "user_logout"
                            })
                        )
                        logger.info(f"🔒 Blacklisted access token for user {user_id}")
                else:
                    logger.warning(f"⚠️ Access token user mismatch during logout: {access_user_id} != {user_id}")
            except jwt.PyJWTError as e:
                logger.warning(f"⚠️ Invalid access token during logout (non-critical): {e}")
                # Don't fail logout if access token is invalid

        # Optional: Revoke all tokens for this user (for "logout from all devices")
        # Uncomment the following lines if you want to implement this feature:
        #
        # await redis.setex(
        #     f"user_tokens_revoked:{user_id}",
        #     timedelta(days=30).total_seconds(),  # Max refresh token lifetime
        #     datetime.now(UTC).isoformat()
        # )

        logger.info(f"🔑 User {user_id} logged out successfully")
        return create_success_response({
            "message": "Logged out successfully",
            "tokensRevoked": {
                "refreshToken": True,
                "accessToken": bool(access_token)
            }
        })

    except Exception as e:
        logger.error(f"❌ Logout error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("LOGOUT_ERROR", str(e))
        )


@api_router.post("/auth/logout-all")
async def logout_all_devices(
    current_user = Depends(get_current_admin),
    database: RedisDatabase = Depends(get_database)
):
    """Logout from all devices by revoking all tokens for the user"""
    try:
        redis = redis_manager.get_client()

        # Set a timestamp that invalidates all tokens issued before this moment
        await redis.setex(
            f"user_tokens_revoked:{current_user.id}",
            int(timedelta(days=30).total_seconds()),  # Max refresh token lifetime
            datetime.now(UTC).isoformat()
        )

        logger.info(f"🔒 All tokens revoked for user {current_user.id}")
        return create_success_response({
            "message": "Logged out from all devices successfully"
        })

    except Exception as e:
        logger.error(f"❌ Logout all devices error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("LOGOUT_ALL_ERROR", str(e))
        )
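
# Note: the "user_tokens_revoked:<user_id>" marker above only takes effect if
# token verification compares each token's issue time against this timestamp.
# verify_token_with_blacklist does not currently perform that comparison (and
# create_access_token does not emit an "iat" claim), so this endpoint sketches
# the intended revocation path rather than fully enforcing it.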


@api_router.post("/auth/refresh")
async def refresh_token_endpoint(
    refreshToken: str = Body(..., alias="refreshToken"),
    database: RedisDatabase = Depends(get_database)
):
    """Refresh token endpoint"""
    try:
        # Verify refresh token
        payload = jwt.decode(refreshToken, JWT_SECRET_KEY, algorithms=[ALGORITHM])
        user_id = payload.get("sub")
        token_type = payload.get("type")

        if not user_id or token_type != "refresh":
            return JSONResponse(
                status_code=401,
                content=create_error_response("INVALID_TOKEN", "Invalid refresh token")
            )

        # Create new access token
        access_token = create_access_token(data={"sub": user_id})

        # Get user
        user = None
        candidate_data = await database.get_candidate(user_id)
        if candidate_data:
            user = Candidate.model_validate(candidate_data)
        else:
            employer_data = await database.get_employer(user_id)
            if employer_data:
                user = Employer.model_validate(employer_data)

        if not user:
            return JSONResponse(
                status_code=404,
                content=create_error_response("USER_NOT_FOUND", "User not found")
            )

        auth_response = AuthResponse(
            accessToken=access_token,
            refreshToken=refreshToken,  # Keep same refresh token
            user=user,
            expiresAt=int((datetime.now(UTC) + timedelta(hours=24)).timestamp())
        )

        return create_success_response(auth_response.model_dump(by_alias=True))

    except jwt.PyJWTError:
        return JSONResponse(
            status_code=401,
            content=create_error_response("INVALID_TOKEN", "Invalid refresh token")
        )
    except Exception as e:
        logger.error(f"❌ Token refresh error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("REFRESH_ERROR", str(e))
        )
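
# Note: this endpoint decodes the refresh token directly and never consults the
# "blacklisted_token:" keys written by /auth/logout, so a revoked refresh token
# would still be accepted here. A minimal guard (sketch, reusing the existing
# key scheme) would be:
#
#   redis = redis_manager.get_client()
#   if await redis.exists(f"blacklisted_token:{refreshToken}"):
#       return JSONResponse(
#           status_code=401,
#           content=create_error_response("INVALID_TOKEN", "Token has been revoked"),
#       )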


# ============================
# Candidate Endpoints
# ============================
@api_router.post("/candidates/ai")
async def create_candidate_ai(
    background_tasks: BackgroundTasks,
    user_message: ChatMessageUser = Body(...),
    admin: Candidate = Depends(get_current_admin),
    database: RedisDatabase = Depends(get_database)
):
    """Create a new candidate using AI-generated data"""
    try:
        generate_agent = agents.get_or_create_agent(
            agent_type=ChatContextType.GENERATE_PERSONA,
            prometheus_collector=prometheus_collector)

        if not generate_agent:
            logger.warning("⚠️ Unable to create AI generation agent.")
            return JSONResponse(
                status_code=400,
                content=create_error_response("AGENT_NOT_FOUND", "Unable to create AI generation agent")
            )

        persona_message = None
        resume_message = None
        state = 0  # 0 -- create persona, 1 -- create resume
        async for generated_message in generate_agent.generate(
            llm=llm_manager.get_llm(),
            model=defines.model,
            session_id=user_message.session_id,
            prompt=user_message.content,
        ):
            if isinstance(generated_message, ChatMessageError):
                error_message: ChatMessageError = generated_message
                logger.error(f"❌ AI generation error: {error_message.content}")
                return JSONResponse(
                    status_code=500,
                    content=create_error_response("AI_GENERATION_ERROR", error_message.content)
                )
            if isinstance(generated_message, ChatMessageRagSearch):
                raise ValueError("AI generation returned a RAG search message instead of a persona")

            # The agent emits two DONE messages: first the persona, then the resume
            if generated_message.status == ApiStatusType.DONE and state == 0:
                persona_message = generated_message
                state = 1  # Switch to resume generation
            elif generated_message.status == ApiStatusType.DONE and state == 1:
                resume_message = generated_message

        if not persona_message:
            logger.error("❌ AI generation failed: no persona message generated")
            return JSONResponse(
                status_code=500,
                content=create_error_response("AI_GENERATION_FAILED", "Failed to generate AI candidate data")
            )

        try:
            current_time = datetime.now(timezone.utc)
            candidate_data = json.loads(persona_message.content)
            candidate_data.update({
                "user_type": "candidate",
                "created_at": current_time.isoformat(),
                "updated_at": current_time.isoformat(),
                "status": "active",  # Directly active for AI-generated candidates
                "is_admin": False,  # Default to non-admin
                "is_AI": True,  # Mark as AI-generated
            })
            candidate = CandidateAI.model_validate(candidate_data)
        except ValidationError as e:
            logger.error("❌ AI candidate data validation failed")
            for line in backstory_traceback.format_exc().splitlines():
                logger.error(line)
            logger.error(json.dumps(persona_message.content, indent=2))
            for error in e.errors():
                logger.error(f"Field: {error['loc'][0]}, Error: {error['msg']}")
            return JSONResponse(
                status_code=400,
                content=create_error_response("AI_VALIDATION_FAILED", "AI-generated data validation failed")
            )
        except Exception:
            # Log the error and return a validation error response
            for line in backstory_traceback.format_exc().splitlines():
                logger.error(line)
            logger.error(json.dumps(persona_message.content, indent=2))
            return JSONResponse(
                status_code=400,
                content=create_error_response("AI_VALIDATION_FAILED", "AI-generated data validation failed")
            )

        logger.info(f"🤖 AI-generated candidate {candidate.username} created with email {candidate.email}")
        candidate_data = candidate.model_dump(by_alias=False, exclude_unset=False)
        # Store in database
        await database.set_candidate(candidate.id, candidate_data)

        user_auth_data = {
            "id": candidate.id,
            "type": "candidate",
            "email": candidate.email,
            "username": candidate.username
        }

        await database.set_user(candidate.email, user_auth_data)
        await database.set_user(candidate.username, user_auth_data)
        await database.set_user_by_id(candidate.id, user_auth_data)

        document_content = None
        resume_text = None
        if resume_message:
            document_id = str(uuid.uuid4())
            document_type = DocumentType.MARKDOWN
            resume_text = resume_message.content
            document_content = resume_text.encode('utf-8')
            document_filename = "resume.md"

            document_data = Document(
                id=document_id,
                filename=document_filename,
                originalName=document_filename,
                type=document_type,
                size=len(document_content),
                uploadDate=datetime.now(UTC),
                ownerId=candidate.id
            )
            file_path = os.path.join(defines.user_dir, candidate.username, "rag-content", document_filename)
            # Ensure the directory exists
            rag_content_dir = pathlib.Path(defines.user_dir) / candidate.username / "rag-content"
            rag_content_dir.mkdir(parents=True, exist_ok=True)
            try:
                with open(file_path, "wb") as f:
                    f.write(document_content)

                logger.info(f"📁 File saved to disk: {file_path}")

            except Exception as e:
                logger.error(f"❌ Failed to save file to disk: {e}")
                return JSONResponse(
                    status_code=500,
                    content=create_error_response("FILE_SAVE_ERROR", "Failed to save resume file to disk")
                )

            # Store document metadata in database
            await database.set_document(document_id, document_data.model_dump())
            await database.add_document_to_candidate(candidate.id, document_id)
            logger.info(f"📄 Document metadata saved for candidate {candidate.id}: {document_id}")

        logger.info(f"✅ AI-generated candidate created: {candidate_data['email']}, resume is {len(document_content) if document_content else 0} bytes")

        return create_success_response({
            "message": "AI-generated candidate created successfully",
            "candidate": candidate_data,
            # Return the resume as text; raw bytes are not JSON-serializable
            "resume": resume_text,
        })

    except Exception as e:
        logger.error(backstory_traceback.format_exc())
        logger.error(f"❌ AI Candidate creation error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("AI_CREATION_FAILED", "Failed to create AI-generated candidate")
        )


@api_router.post("/candidates")
async def create_candidate_with_verification(
    request: CreateCandidateRequest,
    background_tasks: BackgroundTasks,
    database: RedisDatabase = Depends(get_database)
):
    """Create a new candidate with email verification"""
    try:
        # Initialize authentication manager
        auth_manager = AuthenticationManager(database)

        # Check if user already exists
        user_exists, conflict_field = await auth_manager.check_user_exists(
            request.email,
            request.username
        )

        if user_exists and conflict_field:
            logger.warning(f"⚠️ Attempted to create user with existing {conflict_field}: {getattr(request, conflict_field)}")
            return JSONResponse(
                status_code=409,
                content=create_error_response(
                    "USER_EXISTS",
                    f"A user with this {conflict_field} already exists"
                )
            )

        # Generate candidate data (but don't activate yet)
        candidate_id = str(uuid.uuid4())
        current_time = datetime.now(timezone.utc)
        # The first candidate registered on the system becomes an admin
        all_candidates = await database.get_all_candidates()
        is_admin = len(all_candidates) == 0

        candidate_data = {
            "id": candidate_id,
            "userType": "candidate",
            "email": request.email,
            "username": request.username,
            "firstName": request.first_name,
            "lastName": request.last_name,
            "fullName": f"{request.first_name} {request.last_name}",
            "phone": request.phone,
            "createdAt": current_time.isoformat(),
            "updatedAt": current_time.isoformat(),
            "status": "pending",  # Not active until email verified
            "isAdmin": is_admin,
        }

        # Generate verification token
        verification_token = secrets.token_urlsafe(32)

        # Store verification token with user data
        await database.store_email_verification_token(
            request.email,
            verification_token,
            "candidate",
            {
                "candidate_data": candidate_data,
                "password": request.password,  # Store temporarily for verification
                "username": request.username
            }
        )

        # Send verification email in background
        background_tasks.add_task(
            email_service.send_verification_email,
            request.email,
            verification_token,
            f"{request.first_name} {request.last_name}"
        )

        logger.info(f"✅ Candidate registration initiated for: {request.email}")

        return create_success_response({
            "message": f"Registration successful! Please check your email to verify your account. {'As the first user on this system, you have admin privileges.' if is_admin else ''}",
            "email": request.email,
            "verificationRequired": True
        })

    except Exception as e:
        logger.error(f"❌ Candidate creation error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CREATION_FAILED", "Failed to create candidate account")
        )


@api_router.post("/employers")
async def create_employer_with_verification(
    request: CreateEmployerRequest,
    background_tasks: BackgroundTasks,
    database: RedisDatabase = Depends(get_database)
):
    """Create a new employer with email verification"""
    try:
        # Similar to candidate creation but for employer
        auth_manager = AuthenticationManager(database)

        user_exists, conflict_field = await auth_manager.check_user_exists(
            request.email,
            request.username
        )

        if user_exists and conflict_field:
            return JSONResponse(
                status_code=409,
                content=create_error_response(
                    "USER_EXISTS",
                    f"A user with this {conflict_field} already exists"
                )
            )

        employer_id = str(uuid.uuid4())
        current_time = datetime.now(timezone.utc)

        employer_data = {
            "id": employer_id,
            "email": request.email,
            "companyName": request.company_name,
            "industry": request.industry,
            "companySize": request.company_size,
            "companyDescription": request.company_description,
            "websiteUrl": request.website_url,
            "phone": request.phone,
            "createdAt": current_time.isoformat(),
            "updatedAt": current_time.isoformat(),
            "status": "pending",  # Not active until verified
            "userType": "employer",
            "location": {
                "city": "",
                "country": "",
                "remote": False
            },
            "socialLinks": []
        }

        verification_token = secrets.token_urlsafe(32)

        await database.store_email_verification_token(
            request.email,
            verification_token,
            "employer",
            {
                "employer_data": employer_data,
                "password": request.password,
                "username": request.username
            }
        )

        background_tasks.add_task(
            email_service.send_verification_email,
            request.email,
            verification_token,
            request.company_name
        )

        logger.info(f"✅ Employer registration initiated for: {request.email}")

        return create_success_response({
            "message": "Registration successful! Please check your email to verify your account.",
            "email": request.email,
            "verificationRequired": True
        })

    except Exception as e:
        logger.error(f"❌ Employer creation error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CREATION_FAILED", "Failed to create employer account")
        )


@api_router.post("/auth/verify-email")
async def verify_email(
    request: EmailVerificationRequest,
    database: RedisDatabase = Depends(get_database)
):
    """Verify email address and activate account"""
    try:
        # Get verification data
        verification_data = await database.get_email_verification_token(request.token)

        if not verification_data:
            logger.warning(f"⚠️ Invalid verification token: {request.token}")
            return JSONResponse(
                status_code=400,
                content=create_error_response("INVALID_TOKEN", "Invalid or expired verification token")
            )

        if verification_data.get("verified"):
            logger.warning(f"⚠️ Attempt to verify already verified email: {verification_data['email']}")
            return JSONResponse(
                status_code=400,
                content=create_error_response("ALREADY_VERIFIED", "Email already verified")
            )

        # Check expiration
        expires_at = datetime.fromisoformat(verification_data["expires_at"])
        if datetime.now(timezone.utc) > expires_at:
            logger.warning(f"⚠️ Verification token expired for: {verification_data['email']}")
            return JSONResponse(
                status_code=400,
                content=create_error_response("TOKEN_EXPIRED", "Verification token has expired")
            )

        # Extract user data
        user_type = verification_data["user_type"]
        user_data_container = verification_data["user_data"]

        if user_type == "candidate":
            candidate_data = user_data_container["candidate_data"]
            password = user_data_container["password"]
            username = user_data_container["username"]

            # Activate candidate
            candidate_data["status"] = "active"
            candidate = Candidate.model_validate(candidate_data)

            # Create authentication record
            auth_manager = AuthenticationManager(database)
            await auth_manager.create_user_authentication(candidate.id, password)

            # Store in database
            await database.set_candidate(candidate.id, candidate.model_dump())

            # Add user lookup records
            user_auth_data = {
                "id": candidate.id,
                "type": "candidate",
                "email": candidate.email,
                "username": username
            }

            await database.set_user(candidate.email, user_auth_data)
            await database.set_user(username, user_auth_data)
            await database.set_user_by_id(candidate.id, user_auth_data)

        elif user_type == "employer":
            employer_data = user_data_container["employer_data"]
            password = user_data_container["password"]
            username = user_data_container["username"]

            # Activate employer
            employer_data["status"] = "active"
            employer = Employer.model_validate(employer_data)

            # Create authentication record
            auth_manager = AuthenticationManager(database)
            await auth_manager.create_user_authentication(employer.id, password)

            # Store in database
            await database.set_employer(employer.id, employer.model_dump())

            # Add user lookup records
            user_auth_data = {
                "id": employer.id,
                "type": "employer",
                "email": employer.email,
                "username": username
            }

            await database.set_user(employer.email, user_auth_data)
            await database.set_user(username, user_auth_data)
            await database.set_user_by_id(employer.id, user_auth_data)

        # Mark as verified
        await database.mark_email_verified(request.token)

        logger.info(f"✅ Email verified and account activated for: {verification_data['email']}")

        return create_success_response({
            "message": "Email verified successfully! Your account is now active.",
            "accountActivated": True,
            "userType": user_type
        })

    except Exception as e:
        logger.error(f"❌ Email verification error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("VERIFICATION_FAILED", "Failed to verify email")
        )


@api_router.post("/auth/resend-verification")
async def resend_verification_email(
    request: ResendVerificationRequest,
    background_tasks: BackgroundTasks,
    database: RedisDatabase = Depends(get_database)
):
    """Resend verification email with comprehensive rate limiting and validation"""
    try:
        email_lower = request.email.lower().strip()

        # Initialize rate limiter
        rate_limiter = VerificationEmailRateLimiter(database)

        # Check rate limiting
        can_send, reason = await rate_limiter.can_send_verification_email(email_lower)
        if not can_send:
            logger.warning(f"⚠️ Verification email rate limit exceeded for {email_lower}: {reason}")
            return JSONResponse(
                status_code=429,
                content=create_error_response("RATE_LIMITED", reason)
            )

        # Clean up expired tokens first
        await database.cleanup_expired_verification_tokens()

        # Check if user already exists and is verified
        user_data = await database.get_user(email_lower)
        if user_data:
            # User exists and is verified - don't reveal this for security
            logger.info(f"🔍 Resend verification requested for already verified user: {email_lower}")
            await rate_limiter.record_email_sent(email_lower)  # Record attempt to prevent abuse
            return create_success_response({
                "message": "If your email is in our system and pending verification, a new verification email has been sent."
            })

        # Look for pending verification token
        verification_data = await database.find_verification_token_by_email(email_lower)

        if not verification_data:
            # No pending verification found - don't reveal this for security
            logger.info(f"🔍 Resend verification requested for non-existent pending verification: {email_lower}")
            await rate_limiter.record_email_sent(email_lower)  # Record attempt to prevent abuse
            return create_success_response({
                "message": "If your email is in our system and pending verification, a new verification email has been sent."
            })

        # Check if verification token has expired
        expires_at = datetime.fromisoformat(verification_data["expires_at"])
        current_time = datetime.now(timezone.utc)

        if current_time > expires_at:
            # Token expired - clean it up and inform user
            await database.redis.delete(f"email_verification:{verification_data['token']}")
            logger.info(f"🧹 Cleaned up expired verification token for {email_lower}")
            return JSONResponse(
                status_code=400,
                content=create_error_response(
                    "TOKEN_EXPIRED",
                    "Your verification link has expired. Please register again to create a new account."
                )
            )

        # Generate new verification token (invalidate old one)
        old_token = verification_data["token"]
        new_token = secrets.token_urlsafe(32)

        # Update verification data with new token and reset attempts
        verification_data.update({
            "token": new_token,
            "expires_at": (current_time + timedelta(hours=24)).isoformat(),
            "resent_at": current_time.isoformat(),
            "resend_count": verification_data.get("resend_count", 0) + 1
        })

        # Store new token and remove old one
        await database.redis.delete(f"email_verification:{old_token}")
        await database.store_email_verification_token(
            email_lower,
            new_token,
            verification_data["user_type"],
            verification_data["user_data"]
        )

        # Get user name for email
        user_data_container = verification_data["user_data"]
        user_type = verification_data["user_type"]

        if user_type == "candidate":
            candidate_data = user_data_container["candidate_data"]
            user_name = candidate_data.get("fullName", "User")
        elif user_type == "employer":
            employer_data = user_data_container["employer_data"]
            user_name = employer_data.get("companyName", "User")
        else:
            user_name = "User"

        # Record email attempt
        await rate_limiter.record_email_sent(email_lower)

        # Send new verification email in background
        background_tasks.add_task(
            email_service.send_verification_email,
            email_lower,
            new_token,
            user_name,
            user_type
        )

        # Log security event
        await database.log_security_event(
            verification_data["user_data"].get("candidate_data", {}).get("id") or
            verification_data["user_data"].get("employer_data", {}).get("id") or "unknown",
            "verification_resend",
            {
                "email": email_lower,
                "user_type": user_type,
                "resend_count": verification_data.get("resend_count", 1),
                "old_token_invalidated": old_token[:8] + "...",  # Log partial token for debugging
                "ip_address": "unknown"  # You can extract this from the request if needed
            }
        )

        logger.info(f"✅ Verification email resent to {email_lower} (attempt #{verification_data.get('resend_count', 1)})")

        return create_success_response({
            "message": "A new verification email has been sent to your email address. Please check your inbox and spam folder.",
            "resendCount": verification_data.get("resend_count", 1)
        })

    except ValueError as ve:
        logger.warning(f"⚠️ Invalid resend verification request: {ve}")
        return JSONResponse(
            status_code=400,
            content=create_error_response("VALIDATION_ERROR", str(ve))
        )
    except Exception as e:
        logger.error(f"❌ Resend verification email error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("RESEND_FAILED", "An error occurred while processing your request. Please try again later.")
        )
|
|
|
|
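# Note on storage (as used above): verification tokens live in Redis under
# "email_verification:<token>" with a 24-hour expiry window, and a resend
# rotates the token - the old key is deleted so only the most recently
# emailed link remains valid.
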
@api_router.post("/auth/mfa/request")
|
|
async def request_mfa(
|
|
request: MFARequest,
|
|
background_tasks: BackgroundTasks,
|
|
http_request: Request,
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Request MFA for login from new device"""
|
|
try:
|
|
# Verify credentials first
|
|
auth_manager = AuthenticationManager(database)
|
|
is_valid, user_data, error_message = await auth_manager.verify_user_credentials(
|
|
request.email,
|
|
request.password
|
|
)
|
|
|
|
if not is_valid or not user_data:
|
|
return JSONResponse(
|
|
status_code=401,
|
|
content=create_error_response("AUTH_FAILED", "Invalid credentials")
|
|
)
|
|
|
|
# Check if device is trusted
|
|
device_manager = DeviceManager(database)
|
|
device_info = device_manager.parse_device_info(http_request)
|
|
|
|
is_trusted = await device_manager.is_trusted_device(user_data["id"], request.device_id)
|
|
|
|
if is_trusted:
|
|
# Device is trusted, proceed with normal login
|
|
await device_manager.update_device_last_used(user_data["id"], request.device_id)
|
|
|
|
return create_success_response({
|
|
"mfaRequired": False,
|
|
"message": "Device is trusted, proceed with login"
|
|
})
|
|
|
|
# Generate MFA code
|
|
mfa_code = f"{secrets.randbelow(1000000):06d}" # 6-digit code
|
|
|
|
# Store MFA code
|
|
# Get user name for email
|
|
user_name = "User"
|
|
email = None
|
|
if user_data["type"] == "candidate":
|
|
candidate_data = await database.get_candidate(user_data["id"])
|
|
if candidate_data:
|
|
user_name = candidate_data.get("fullName", "User")
|
|
email = candidate_data.get("email", None)
|
|
elif user_data["type"] == "employer":
|
|
employer_data = await database.get_employer(user_data["id"])
|
|
if employer_data:
|
|
user_name = employer_data.get("companyName", "User")
|
|
email = employer_data.get("email", None)
|
|
|
|
if not email:
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("EMAIL_NOT_FOUND", "User email not found for MFA")
|
|
)
|
|
|
|
# Store MFA code
|
|
await database.store_mfa_code(email, mfa_code, request.device_id)
|
|
logger.info(f"🔐 MFA code generated for {email} on device {request.device_id}")
|
|
|
|
# Send MFA code via email
|
|
background_tasks.add_task(
|
|
email_service.send_mfa_email,
|
|
email,
|
|
mfa_code,
|
|
request.device_name,
|
|
user_name
|
|
)
|
|
|
|
logger.info(f"🔐 MFA requested for {request.email} from new device {request.device_name}")
|
|
|
|
mfa_data = MFAData(
|
|
message="New device detected. We've sent a security code to your email address.",
|
|
codeSent=mfa_code,
|
|
email=request.email,
|
|
deviceId=request.device_id,
|
|
deviceName=request.device_name,
|
|
)
|
|
mfa_response = MFARequestResponse(
|
|
mfaRequired=True,
|
|
mfaData=mfa_data
|
|
)
|
|
return create_success_response(mfa_response)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ MFA request error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("MFA_REQUEST_FAILED", "Failed to process MFA request")
|
|
)
|
|
|
|
@api_router.post("/auth/login")
|
|
async def login(
|
|
request: LoginRequest,
|
|
http_request: Request,
|
|
background_tasks: BackgroundTasks,
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""login with automatic MFA email sending for new devices"""
|
|
try:
|
|
# Initialize managers
|
|
auth_manager = AuthenticationManager(database)
|
|
device_manager = DeviceManager(database)
|
|
|
|
# Parse device information
|
|
device_info = device_manager.parse_device_info(http_request)
|
|
device_id = device_info["device_id"]
|
|
|
|
# Verify credentials first
|
|
is_valid, user_data, error_message = await auth_manager.verify_user_credentials(
|
|
request.login,
|
|
request.password
|
|
)
|
|
|
|
if not is_valid or not user_data:
|
|
logger.warning(f"⚠️ Failed login attempt for: {request.login}")
|
|
return JSONResponse(
|
|
status_code=401,
|
|
content=create_error_response("AUTH_FAILED", error_message or "Invalid credentials")
|
|
)
|
|
|
|
# Check if device is trusted
|
|
is_trusted = await device_manager.is_trusted_device(user_data["id"], device_id)
|
|
|
|
if not is_trusted:
|
|
# New device detected - automatically send MFA email
|
|
logger.info(f"🔐 New device detected for {request.login}, sending MFA email")
|
|
|
|
# Generate MFA code
|
|
mfa_code = f"{secrets.randbelow(1000000):06d}" # 6-digit code
|
|
|
|
# Get user name and details for email
|
|
user_name = "User"
|
|
email = None
|
|
if user_data["type"] == "candidate":
|
|
candidate_data = await database.get_candidate(user_data["id"])
|
|
if candidate_data:
|
|
user_name = candidate_data.get("full_name", "User")
|
|
email = candidate_data.get("email", None)
|
|
elif user_data["type"] == "employer":
|
|
employer_data = await database.get_employer(user_data["id"])
|
|
if employer_data:
|
|
user_name = employer_data.get("company_name", "User")
|
|
email = employer_data.get("email", None)
|
|
|
|
if not email:
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("EMAIL_NOT_FOUND", "User email not found for MFA")
|
|
)
|
|
|
|
# Store MFA code
|
|
await database.store_mfa_code(email, mfa_code, device_id)
|
|
|
|
# Ensure email is lowercase
|
|
# Get IP address for security info
|
|
ip_address = http_request.client.host if http_request.client else "Unknown"
|
|
|
|
# Send MFA code via email in background
|
|
background_tasks.add_task(
|
|
email_service.send_mfa_email,
|
|
email,
|
|
mfa_code,
|
|
device_info["device_name"],
|
|
user_name,
|
|
ip_address
|
|
)
|
|
|
|
# Log security event
|
|
await database.log_security_event(
|
|
user_data["id"],
|
|
"mfa_request",
|
|
{
|
|
"device_id": device_id,
|
|
"device_name": device_info["device_name"],
|
|
"ip_address": ip_address,
|
|
"user_agent": device_info.get("user_agent", ""),
|
|
"auto_sent": True
|
|
}
|
|
)
|
|
|
|
logger.info(f"🔐 MFA code automatically sent to {request.login} for device {device_info['device_name']}")
|
|
|
|
mfa_response = MFARequestResponse(
|
|
mfaRequired=True,
|
|
mfaData=MFAData(
|
|
message="New device detected. We've sent a security code to your email address.",
|
|
email=email,
|
|
deviceId=device_id,
|
|
deviceName=device_info["device_name"],
|
|
codeSent=mfa_code
|
|
)
|
|
)
|
|
return create_success_response(mfa_response.model_dump(by_alias=True))
|
|
|
|
# Trusted device - proceed with normal login
|
|
await device_manager.update_device_last_used(user_data["id"], device_id)
|
|
await auth_manager.update_last_login(user_data["id"])
|
|
|
|
# Create tokens
|
|
access_token = create_access_token(data={"sub": user_data["id"]})
|
|
refresh_token = create_access_token(
|
|
data={"sub": user_data["id"], "type": "refresh"},
|
|
expires_delta=timedelta(days=SecurityConfig.REFRESH_TOKEN_EXPIRY_DAYS)
|
|
)
|
|
|
|
# Get user object
|
|
user = None
|
|
if user_data["type"] == "candidate":
|
|
candidate_data = await database.get_candidate(user_data["id"])
|
|
if candidate_data:
|
|
user = Candidate.model_validate(candidate_data)
|
|
elif user_data["type"] == "employer":
|
|
employer_data = await database.get_employer(user_data["id"])
|
|
if employer_data:
|
|
user = Employer.model_validate(employer_data)
|
|
|
|
if not user:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("USER_NOT_FOUND", "User profile not found")
|
|
)
|
|
|
|
# Log successful login from trusted device
|
|
await database.log_security_event(
|
|
user_data["id"],
|
|
"login",
|
|
{
|
|
"device_id": device_id,
|
|
"device_name": device_info["device_name"],
|
|
"ip_address": http_request.client.host if http_request.client else "Unknown",
|
|
"trusted_device": True
|
|
}
|
|
)
|
|
|
|
# Create response
|
|
auth_response = AuthResponse(
|
|
accessToken=access_token,
|
|
refreshToken=refresh_token,
|
|
user=user,
|
|
expiresAt=int((datetime.now(timezone.utc) + timedelta(hours=SecurityConfig.TOKEN_EXPIRY_HOURS)).timestamp())
|
|
)
|
|
|
|
logger.info(f"🔑 User {request.login} logged in successfully from trusted device")
|
|
|
|
return create_success_response(auth_response.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Login error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("LOGIN_ERROR", "An error occurred during login")
|
|
)
|
|
|
|
|
|
@api_router.post("/auth/mfa/verify")
|
|
async def verify_mfa(
|
|
request: MFAVerifyRequest,
|
|
http_request: Request,
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Verify MFA code and complete login with error handling"""
|
|
try:
|
|
# Get MFA data
|
|
mfa_data = await database.get_mfa_code(request.email, request.device_id)
|
|
|
|
if not mfa_data:
|
|
logger.warning(f"⚠️ No MFA session found for {request.email} on device {request.device_id}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NO_MFA_SESSION", "No active MFA session found. Please try logging in again.")
|
|
)
|
|
|
|
if mfa_data.get("verified"):
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("ALREADY_VERIFIED", "This MFA code has already been used. Please login again.")
|
|
)
|
|
|
|
# Check expiration
|
|
expires_at = datetime.fromisoformat(mfa_data["expires_at"])
|
|
if datetime.now(timezone.utc) > expires_at:
|
|
# Clean up expired MFA session
|
|
await database.redis.delete(f"mfa_code:{request.email.lower()}:{request.device_id}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("MFA_EXPIRED", "MFA code has expired. Please try logging in again.")
|
|
)
|
|
|
|
# Check attempts
|
|
current_attempts = mfa_data.get("attempts", 0)
|
|
if current_attempts >= 5:
|
|
# Clean up after too many attempts
|
|
await database.redis.delete(f"mfa_code:{request.email.lower()}:{request.device_id}")
|
|
return JSONResponse(
|
|
status_code=429,
|
|
content=create_error_response("TOO_MANY_ATTEMPTS", "Too many incorrect attempts. Please try logging in again.")
|
|
)
|
|
|
|
# Verify code
|
|
if mfa_data["code"] != request.code:
|
|
await database.increment_mfa_attempts(request.email, request.device_id)
|
|
remaining_attempts = 5 - (current_attempts + 1)
|
|
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response(
|
|
"INVALID_CODE",
|
|
f"Invalid MFA code. {remaining_attempts} attempts remaining."
|
|
)
|
|
)
|
|
|
|
# Mark as verified
|
|
await database.mark_mfa_verified(request.email, request.device_id)
|
|
|
|
# Get user data
|
|
user_data = await database.get_user(request.email)
|
|
if not user_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("USER_NOT_FOUND", "User not found")
|
|
)
|
|
|
|
# Add device to trusted devices if requested
|
|
if request.remember_device:
|
|
device_manager = DeviceManager(database)
|
|
device_info = device_manager.parse_device_info(http_request)
|
|
await device_manager.add_trusted_device(
|
|
user_data["id"],
|
|
request.device_id,
|
|
device_info
|
|
)
|
|
logger.info(f"🔒 Device {request.device_id} added to trusted devices for user {user_data['id']}")
|
|
|
|
# Update last login
|
|
auth_manager = AuthenticationManager(database)
|
|
await auth_manager.update_last_login(user_data["id"])
|
|
|
|
# Create tokens
|
|
access_token = create_access_token(data={"sub": user_data["id"]})
|
|
refresh_token = create_access_token(
|
|
data={"sub": user_data["id"], "type": "refresh"},
|
|
expires_delta=timedelta(days=SecurityConfig.REFRESH_TOKEN_EXPIRY_DAYS)
|
|
)
|
|
|
|
# Get user object
|
|
user = None
|
|
if user_data["type"] == "candidate":
|
|
candidate_data = await database.get_candidate(user_data["id"])
|
|
if candidate_data:
|
|
user = Candidate.model_validate(candidate_data)
|
|
elif user_data["type"] == "employer":
|
|
employer_data = await database.get_employer(user_data["id"])
|
|
if employer_data:
|
|
user = Employer.model_validate(employer_data)
|
|
|
|
if not user:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("USER_NOT_FOUND", "User profile not found")
|
|
)
|
|
|
|
# Log successful MFA verification and login
|
|
await database.log_security_event(
|
|
user_data["id"],
|
|
"mfa_verify_success",
|
|
{
|
|
"device_id": request.device_id,
|
|
"ip_address": http_request.client.host if http_request.client else "Unknown",
|
|
"device_remembered": request.remember_device,
|
|
"attempts_used": current_attempts + 1
|
|
}
|
|
)
|
|
|
|
await database.log_security_event(
|
|
user_data["id"],
|
|
"login",
|
|
{
|
|
"device_id": request.device_id,
|
|
"ip_address": http_request.client.host if http_request.client else "Unknown",
|
|
"mfa_verified": True,
|
|
"new_device": True
|
|
}
|
|
)
|
|
|
|
# Clean up MFA session
|
|
await database.redis.delete(f"mfa_code:{request.email.lower()}:{request.device_id}")
|
|
|
|
# Create response
|
|
auth_response = AuthResponse(
|
|
accessToken=access_token,
|
|
refreshToken=refresh_token,
|
|
user=user,
|
|
expiresAt=int((datetime.now(timezone.utc) + timedelta(hours=SecurityConfig.TOKEN_EXPIRY_HOURS)).timestamp())
|
|
)
|
|
|
|
logger.info(f"✅ MFA verified and login completed for {request.email}")
|
|
|
|
return create_success_response(auth_response.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ MFA verification error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("MFA_VERIFICATION_FAILED", "Failed to verify MFA")
|
|
)
|
|
|
|
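# Typical MFA handshake, as implemented above: POST /auth/login returns
# mfaRequired=True for an unrecognized device and emails a 6-digit code;
# the client then calls POST /auth/mfa/verify with the same email/device_id
# plus the code (optionally remember_device=True to trust the device) and
# receives the usual access/refresh token pair. Paths are relative to this
# router's mount point.
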
class DebugStreamingResponse(StreamingResponse):
    async def stream_response(self, send):
        logger.debug("=== DEBUG STREAMING RESPONSE ===")
        logger.debug(f"Body iterator: {self.body_iterator}")
        logger.debug(f"Media type: {self.media_type}")
        logger.debug(f"Charset: {self.charset}")

        chunk_count = 0
        async for chunk in self.body_iterator:
            chunk_count += 1
            logger.debug(f"Chunk {chunk_count}: type={type(chunk)}, repr={repr(chunk)[:200]}")

            if not isinstance(chunk, (str, bytes)):
                logger.error(f"PROBLEM FOUND! Chunk {chunk_count} is type {type(chunk)}, not str/bytes")
                logger.error(f"Chunk content: {chunk}")
                if hasattr(chunk, '__dict__'):
                    logger.error(f"Chunk attributes: {chunk.__dict__}")

                # Try to help with conversion
                if hasattr(chunk, 'model_dump_json'):
                    logger.error("Chunk appears to be a Pydantic model - should call .model_dump_json()")
                elif hasattr(chunk, 'json'):
                    logger.error("Chunk appears to be a Pydantic model - should call .json()")

                raise AttributeError(f"'{type(chunk).__name__}' object has no attribute 'encode'")

            if isinstance(chunk, str):
                chunk = chunk.encode(self.charset)

            await send({
                "type": "http.response.body",
                "body": chunk,
                "more_body": True,
            })

        await send({"type": "http.response.body", "body": b"", "more_body": False})

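# Debugging aid: when a streaming endpoint dies with
# "'X' object has no attribute 'encode'", temporarily construct
# DebugStreamingResponse(...) in place of StreamingResponse(...) with the
# same arguments. It logs each chunk's type and flags non-str/bytes chunks
# (usually a Pydantic model that was never serialized) before re-raising.
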
@api_router.post("/candidates/documents/upload")
|
|
async def upload_candidate_document(
|
|
file: UploadFile = File(...),
|
|
options_data: str = Form(..., alias="options"),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
try:
|
|
# Parse the JSON string and create DocumentOptions object
|
|
options_dict = json.loads(options_data)
|
|
options : DocumentOptions = DocumentOptions.model_validate(**options_dict)
|
|
except (json.JSONDecodeError, ValidationError) as e:
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Invalid options format. Please provide valid JSON."
|
|
).model_dump(mode='json', by_alias=True))]),
|
|
media_type="text/event-stream"
|
|
)
|
|
|
|
# Check file size (limit to 10MB)
|
|
max_size = 10 * 1024 * 1024 # 10MB
|
|
file_content = await file.read()
|
|
if len(file_content) > max_size:
|
|
logger.info(f"⚠️ File too large: {file.filename} ({len(file_content)} bytes)")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="File size exceeds 10MB limit"
|
|
).model_dump(mode='json', by_alias=True))]),
|
|
media_type="text/event-stream"
|
|
)
|
|
if len(file_content) == 0:
|
|
logger.info(f"⚠️ File is empty: {file.filename}")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="File is empty"
|
|
).model_dump(mode='json', by_alias=True))]),
|
|
media_type="text/event-stream"
|
|
)
|
|
|
|
"""Upload a document for the current candidate"""
|
|
async def upload_stream_generator(file_content):
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
logger.warning(f"⚠️ Unauthorized upload attempt by user type: {current_user.user_type}")
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Only candidates can upload documents"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
candidate: Candidate = current_user
|
|
file.filename = re.sub(r'^.*/', '', file.filename) if file.filename else '' # Sanitize filename
|
|
if not file.filename or file.filename.strip() == "":
|
|
logger.warning("⚠️ File upload attempt with missing filename")
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="File must have a valid filename"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
logger.info(f"📁 Received file upload: filename='{file.filename}', content_type='{file.content_type}', size='{len(file_content)} bytes'")
|
|
|
|
directory = "rag-content" if options.include_in_RAG else "files"
|
|
directory = "jobs" if options.is_job_document else directory
|
|
|
|
# Ensure the file does not already exist either in 'files' or in 'rag-content'
|
|
dir_path = os.path.join(defines.user_dir, candidate.username, directory)
|
|
if not os.path.exists(dir_path):
|
|
os.makedirs(dir_path, exist_ok=True)
|
|
file_path = os.path.join(dir_path, file.filename)
|
|
if os.path.exists(file_path):
|
|
if not options.overwrite:
|
|
logger.warning(f"⚠️ File already exists: {file_path}")
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"File with this name already exists in the '{directory}' directory"
|
|
)
|
|
yield error_message
|
|
return
|
|
else:
|
|
logger.info(f"🔄 Overwriting existing file: {file_path}")
|
|
status_message = ChatMessageStatus(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Overwriting existing file: {file.filename}",
|
|
activity=ApiActivityType.INFO
|
|
)
|
|
yield status_message
|
|
|
|
# Validate file type
|
|
allowed_types = ['.txt', '.md', '.docx', '.pdf', '.png', '.jpg', '.jpeg', '.gif']
|
|
file_extension = pathlib.Path(file.filename).suffix.lower() if file.filename else ""
|
|
|
|
if file_extension not in allowed_types:
|
|
logger.warning(f"⚠️ Invalid file type: {file_extension} for file {file.filename}")
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"File type {file_extension} not supported. Allowed types: {', '.join(allowed_types)}"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
# Create document metadata
|
|
document_id = str(uuid.uuid4())
|
|
document_type = get_document_type_from_filename(file.filename or "unknown.txt")
|
|
|
|
document_data = Document(
|
|
id=document_id,
|
|
filename=file.filename or f"document_{document_id}",
|
|
originalName=file.filename or f"document_{document_id}",
|
|
type=document_type,
|
|
size=len(file_content),
|
|
uploadDate=datetime.now(UTC),
|
|
options=options,
|
|
ownerId=candidate.id
|
|
)
|
|
|
|
# Save file to disk
|
|
directory = os.path.join(defines.user_dir, candidate.username, directory)
|
|
file_path = os.path.join(directory, file.filename)
|
|
|
|
try:
|
|
with open(file_path, "wb") as f:
|
|
f.write(file_content)
|
|
|
|
logger.info(f"📁 File saved to disk: {file_path}")
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Failed to save file to disk: {e}")
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Failed to save file to disk",
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
converted = False
|
|
if document_type != DocumentType.MARKDOWN and document_type != DocumentType.TXT:
|
|
p = pathlib.Path(file_path)
|
|
p_as_md = p.with_suffix(".md")
|
|
# If file_path.md doesn't exist or file_path is newer than file_path.md,
|
|
# fire off markitdown
|
|
if (not p_as_md.exists()) or (
|
|
p.stat().st_mtime > p_as_md.stat().st_mtime
|
|
):
|
|
status_message = ChatMessageStatus(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Converting content from {document_type}...",
|
|
activity=ApiActivityType.CONVERTING
|
|
)
|
|
yield status_message
|
|
try:
|
|
from markitdown import MarkItDown # type: ignore
|
|
md = MarkItDown(enable_plugins=False) # Set to True to enable plugins
|
|
result = md.convert(file_path, output_format="markdown")
|
|
p_as_md.write_text(result.text_content)
|
|
file_content = result.text_content
|
|
converted = True
|
|
logger.info(f"✅ Converted {file.filename} to Markdown format: {p_as_md}")
|
|
file_path = p_as_md
|
|
except Exception as e:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Failed to convert {file.filename} to Markdown.",
|
|
)
|
|
yield error_message
|
|
logger.error(f"❌ Error converting {file_path} to Markdown: {e}")
|
|
return
|
|
|
|
# Store document metadata in database
|
|
await database.set_document(document_id, document_data.model_dump())
|
|
await database.add_document_to_candidate(candidate.id, document_id)
|
|
logger.info(f"📄 Document uploaded: {file.filename} for candidate {candidate.username}")
|
|
chat_message = DocumentMessage(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
type=ApiMessageType.JSON,
|
|
status=ApiStatusType.DONE,
|
|
document=document_data,
|
|
converted=converted,
|
|
content=file_content,
|
|
)
|
|
yield chat_message
|
|
try:
|
|
async def to_json(method):
|
|
try:
|
|
async for message in method:
|
|
json_data = message.model_dump(mode='json', by_alias=True)
|
|
json_str = json.dumps(json_data)
|
|
yield f"data: {json_str}\n\n".encode("utf-8")
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"Error in to_json conversion: {e}")
|
|
return
|
|
|
|
return StreamingResponse(
|
|
to_json(upload_stream_generator(file_content)),
|
|
media_type="text/event-stream",
|
|
headers={
|
|
"Cache-Control": "no-cache, no-store, must-revalidate",
|
|
"Connection": "keep-alive",
|
|
"X-Accel-Buffering": "no", # Nginx
|
|
"X-Content-Type-Options": "nosniff",
|
|
"Access-Control-Allow-Origin": "*", # Adjust for your CORS needs
|
|
"Transfer-Encoding": "chunked",
|
|
},
|
|
)
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Document upload error: {e}")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Failed to upload document"
|
|
).model_dump(mode='json', by_alias=True))]),
|
|
media_type="text/event-stream"
|
|
)
|
|
|
|
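# Illustrative client call for the upload endpoint above (a sketch: the host
# and "/api" prefix depend on where api_router is mounted, and the field
# names inside 'options' must match DocumentOptions' JSON aliases - the ones
# shown here are assumptions):
#
#   curl -N -H "Authorization: Bearer $TOKEN" \
#        -F "file=@resume.pdf" \
#        -F 'options={"includeInRAG": true, "overwrite": false}' \
#        http://localhost:8000/api/candidates/documents/upload
#
# The response is a text/event-stream of status/error messages ending with a
# DocumentMessage payload.
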
async def create_job_from_content(database: RedisDatabase, current_user: Candidate, content: str):
    """Stream job-requirements extraction from raw document content.

    Yields ChatMessageStatus/ChatMessageError updates while the candidate's
    AI agent runs, ending with a JobRequirementsMessage on success.
    """
    status_message = ChatMessageStatus(
        sessionId=MOCK_UUID,  # No session ID for document uploads
        content=f"Initiating connection with {current_user.first_name}'s AI agent...",
        activity=ApiActivityType.INFO
    )
    yield status_message
    await asyncio.sleep(0)  # Let the status message propagate

    async with entities.get_candidate_entity(candidate=current_user) as candidate_entity:
        chat_agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.JOB_REQUIREMENTS)
        if not chat_agent:
            error_message = ChatMessageError(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content="No agent found for job requirements chat type"
            )
            yield error_message
            return
        message = None
        status_message = ChatMessageStatus(
            sessionId=MOCK_UUID,  # No session ID for document uploads
            content="Analyzing document for company and requirement details...",
            activity=ApiActivityType.SEARCHING
        )
        yield status_message
        await asyncio.sleep(0)

        # Drain the agent's stream; only the final message matters here
        async for message in chat_agent.generate(
            llm=llm_manager.get_llm(),
            model=defines.model,
            session_id=MOCK_UUID,
            prompt=content
        ):
            pass
        if not message or not isinstance(message, JobRequirementsMessage):
            error_message = ChatMessageError(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content="Failed to process job description file"
            )
            yield error_message
            return

        logger.info(f"✅ Successfully saved job requirements for job {message.id}")
        yield message
        return

@api_router.post("/candidates/profile/upload")
|
|
async def upload_candidate_profile(
|
|
file: UploadFile = File(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Upload a document for the current candidate"""
|
|
try:
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
logger.warning(f"⚠️ Unauthorized upload attempt by user type: {current_user.user_type}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can upload their profile")
|
|
)
|
|
|
|
candidate: Candidate = current_user
|
|
# Validate file type
|
|
allowed_types = ['.png', '.jpg', '.jpeg', '.gif']
|
|
file_extension = pathlib.Path(file.filename).suffix.lower() if file.filename else ""
|
|
|
|
if file_extension not in allowed_types:
|
|
logger.warning(f"⚠️ Invalid file type: {file_extension} for file {file.filename}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response(
|
|
"INVALID_FILE_TYPE",
|
|
f"File type {file_extension} not supported. Allowed types: {', '.join(allowed_types)}"
|
|
)
|
|
)
|
|
|
|
# Check file size (limit to 2MB)
|
|
max_size = 2 * 1024 * 1024 # 2MB
|
|
file_content = await file.read()
|
|
if len(file_content) > max_size:
|
|
logger.info(f"⚠️ File too large: {file.filename} ({len(file_content)} bytes)")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("FILE_TOO_LARGE", "File size exceeds 10MB limit")
|
|
)
|
|
|
|
# Save file to disk as "profile.<extension>"
|
|
_, extension = os.path.splitext(file.filename or "")
|
|
file_path = os.path.join(defines.user_dir, candidate.username, f"profile{extension}")
|
|
|
|
try:
|
|
with open(file_path, "wb") as f:
|
|
f.write(file_content)
|
|
|
|
logger.info(f"📁 File saved to disk: {file_path}")
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Failed to save file to disk: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FILE_SAVE_ERROR", "Failed to save file to disk")
|
|
)
|
|
|
|
updates = {
|
|
"updated_at": datetime.now(UTC).isoformat(),
|
|
"profile_image": f"profile{extension}"
|
|
}
|
|
candidate_dict = candidate.model_dump()
|
|
candidate_dict.update(updates)
|
|
updated_candidate = Candidate.model_validate(candidate_dict)
|
|
await database.set_candidate(candidate.id, updated_candidate.model_dump())
|
|
logger.info(f"📄 Profile image uploaded: {updated_candidate.profile_image} for candidate {candidate.id}")
|
|
|
|
return create_success_response(True)
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Document upload error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("UPLOAD_ERROR", "Failed to upload document")
|
|
)
|
|
|
|
@api_router.get("/candidates/profile/{username}")
|
|
async def get_candidate_profile_image(
|
|
username: str = Path(..., description="Username of the candidate"),
|
|
# current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get profile image of a candidate by username"""
|
|
try:
|
|
all_candidates_data = await database.get_all_candidates()
|
|
candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]
|
|
|
|
# Normalize username to lowercase for case-insensitive search
|
|
query_lower = username.lower()
|
|
|
|
# Filter by search query
|
|
candidates_list = [
|
|
c for c in candidates_list
|
|
if (query_lower == c.email.lower() or
|
|
query_lower == c.username.lower())
|
|
]
|
|
|
|
if not len(candidates_list):
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Candidate not found")
|
|
)
|
|
|
|
candidate = Candidate.model_validate(candidates_list[0])
|
|
if not candidate.profile_image:
|
|
logger.warning(f"⚠️ Candidate {candidate.username} has no profile image set")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Profile image not found")
|
|
)
|
|
file_path = os.path.join(defines.user_dir, candidate.username, candidate.profile_image)
|
|
file_path = pathlib.Path(file_path)
|
|
if not file_path.exists():
|
|
logger.error(f"❌ Profile image file not found on disk: {file_path}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("FILE_NOT_FOUND", "Profile image file not found on disk")
|
|
)
|
|
return FileResponse(
|
|
file_path,
|
|
media_type=f"image/{file_path.suffix[1:]}", # Get extension without dot
|
|
filename=candidate.profile_image
|
|
)
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Get candidate profile image failed: {str(e)}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FETCH_ERROR", "Failed to retrieve profile image")
|
|
)
|
|
|
|
@api_router.get("/candidates/documents")
|
|
async def get_candidate_documents(
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get all documents for the current candidate"""
|
|
try:
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
logger.warning(f"⚠️ Unauthorized access attempt by user type: {current_user.user_type}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can access documents")
|
|
)
|
|
|
|
candidate: Candidate = current_user
|
|
|
|
# Get documents from database
|
|
documents_data = await database.get_candidate_documents(candidate.id)
|
|
documents = [Document.model_validate(doc_data) for doc_data in documents_data]
|
|
|
|
# Sort by upload date (newest first)
|
|
documents.sort(key=lambda x: x.upload_date, reverse=True)
|
|
|
|
response_data = DocumentListResponse(
|
|
documents=documents,
|
|
total=len(documents)
|
|
)
|
|
|
|
return create_success_response(response_data.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Get candidate documents error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FETCH_ERROR", "Failed to retrieve documents")
|
|
)
|
|
|
|
@api_router.get("/candidates/documents/{document_id}/content")
|
|
async def get_document_content(
|
|
document_id: str = Path(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get document content by ID"""
|
|
try:
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can access documents")
|
|
)
|
|
|
|
candidate: Candidate = current_user
|
|
|
|
# Get document metadata
|
|
document_data = await database.get_document(document_id)
|
|
if not document_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Document not found")
|
|
)
|
|
|
|
document = Document.model_validate(document_data)
|
|
|
|
# Verify document belongs to current candidate
|
|
if document.owner_id != candidate.id:
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot access another candidate's document")
|
|
)
|
|
|
|
file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_RAG else "files", document.originalName)
|
|
file_path = pathlib.Path(file_path)
|
|
if not document.type in [DocumentType.TXT, DocumentType.MARKDOWN]:
|
|
file_path = file_path.with_suffix('.md')
|
|
|
|
if not file_path.exists():
|
|
logger.error(f"❌ Document file not found on disk: {file_path}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("FILE_NOT_FOUND", "Document file not found on disk")
|
|
)
|
|
|
|
try:
|
|
with open(file_path, "r", encoding="utf-8") as f:
|
|
content = f.read()
|
|
|
|
response = DocumentContentResponse(
|
|
documentId=document_id,
|
|
filename=document.filename,
|
|
type=document.type,
|
|
content=content,
|
|
size=document.size
|
|
)
|
|
return create_success_response(response.model_dump(by_alias=True));
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Failed to read document file: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("READ_ERROR", "Failed to read document content")
|
|
)
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Get document content error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FETCH_ERROR", "Failed to retrieve document content")
|
|
)
|
|
|
|
@api_router.patch("/candidates/documents/{document_id}")
|
|
async def update_document(
|
|
document_id: str = Path(...),
|
|
updates: DocumentUpdateRequest = Body(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Update document metadata (filename, RAG status)"""
|
|
try:
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can update documents")
|
|
)
|
|
|
|
candidate: Candidate = current_user
|
|
|
|
# Get document metadata
|
|
document_data = await database.get_document(document_id)
|
|
if not document_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Document not found")
|
|
)
|
|
|
|
document = Document.model_validate(document_data)
|
|
|
|
# Verify document belongs to current candidate
|
|
if document.owner_id != candidate.id:
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot update another candidate's document")
|
|
)
|
|
update_options = updates.options if updates.options else DocumentOptions()
|
|
if document.options.include_in_RAG != update_options.include_in_RAG:
|
|
# If RAG status is changing, we need to handle file movement
|
|
rag_dir = os.path.join(defines.user_dir, candidate.username, "rag-content")
|
|
file_dir = os.path.join(defines.user_dir, candidate.username, "files")
|
|
os.makedirs(rag_dir, exist_ok=True)
|
|
os.makedirs(file_dir, exist_ok=True)
|
|
rag_path = os.path.join(rag_dir, document.originalName)
|
|
file_path = os.path.join(file_dir, document.originalName)
|
|
|
|
if update_options.include_in_RAG:
|
|
src = pathlib.Path(file_path)
|
|
dst = pathlib.Path(rag_path)
|
|
# Move to RAG directory
|
|
src.rename(dst)
|
|
logger.info(f"📁 Moved file to RAG directory")
|
|
if document.type != DocumentType.MARKDOWN and document.type != DocumentType.TXT:
|
|
src = pathlib.Path(file_path)
|
|
src_as_md = src.with_suffix(".md")
|
|
if src_as_md.exists():
|
|
dst = pathlib.Path(rag_path).with_suffix(".md")
|
|
src_as_md.rename(dst)
|
|
else:
|
|
src = pathlib.Path(rag_path)
|
|
dst = pathlib.Path(file_path)
|
|
# Move to regular files directory
|
|
src.rename(dst)
|
|
logger.info(f"📁 Moved file to regular files directory")
|
|
if document.type != DocumentType.MARKDOWN and document.type != DocumentType.TXT:
|
|
src_as_md = src.with_suffix(".md")
|
|
if src_as_md.exists():
|
|
dst = pathlib.Path(file_path).with_suffix(".md")
|
|
src_as_md.rename(dst)
|
|
|
|
# Apply updates
|
|
update_dict = {}
|
|
if updates.filename is not None:
|
|
update_dict["filename"] = updates.filename.strip()
|
|
if update_options.include_in_RAG is not None:
|
|
update_dict["include_in_RAG"] = update_options.include_in_RAG
|
|
|
|
if not update_dict:
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("NO_UPDATES", "No valid updates provided")
|
|
)
|
|
|
|
# Add timestamp
|
|
update_dict["updatedAt"] = datetime.now(UTC).isoformat()
|
|
|
|
# Update in database
|
|
updated_data = await database.update_document(document_id, update_dict)
|
|
if not updated_data:
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("UPDATE_FAILED", "Failed to update document")
|
|
)
|
|
|
|
updated_document = Document.model_validate(updated_data)
|
|
|
|
logger.info(f"📄 Document updated: {document_id} for candidate {candidate.username}")
|
|
|
|
return create_success_response(updated_document.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Update document error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("UPDATE_ERROR", "Failed to update document")
|
|
)
|
|
|
|
@api_router.delete("/candidates/documents/{document_id}")
|
|
async def delete_document(
|
|
document_id: str = Path(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Delete a document and its file"""
|
|
try:
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
logger.warning(f"⚠️ Unauthorized delete attempt by user type: {current_user.user_type}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can delete documents")
|
|
)
|
|
|
|
candidate: Candidate = current_user
|
|
|
|
# Get document metadata
|
|
document_data = await database.get_document(document_id)
|
|
if not document_data:
|
|
logger.warning(f"⚠️ Document not found for deletion: {document_id}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Document not found")
|
|
)
|
|
|
|
document = Document.model_validate(document_data)
|
|
|
|
# Verify document belongs to current candidate
|
|
if document.owner_id != candidate.id:
|
|
logger.warning(f"⚠️ Unauthorized delete attempt on document {document_id} by candidate {candidate.username}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot delete another candidate's document")
|
|
)
|
|
|
|
# Delete file from disk
|
|
file_path = os.path.join(defines.user_dir, candidate.username, "rag-content" if document.options.include_in_RAG else "files", document.originalName)
|
|
file_path = pathlib.Path(file_path)
|
|
|
|
try:
|
|
if file_path.exists():
|
|
file_path.unlink()
|
|
logger.info(f"🗑️ File deleted from disk: {file_path}")
|
|
else:
|
|
logger.warning(f"⚠️ File not found on disk during deletion: {file_path}")
|
|
|
|
# Delete side-car file if it exists
|
|
if document.type != DocumentType.MARKDOWN and document.type != DocumentType.TXT:
|
|
p = pathlib.Path(file_path)
|
|
p_as_md = p.with_suffix(".md")
|
|
if p_as_md.exists():
|
|
p_as_md.unlink()
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Failed to delete file from disk: {e}")
|
|
# Continue with metadata deletion even if file deletion fails
|
|
|
|
# Remove from database
|
|
await database.remove_document_from_candidate(candidate.id, document_id)
|
|
await database.delete_document(document_id)
|
|
|
|
logger.info(f"🗑️ Document deleted: {document_id} for candidate {candidate.username}")
|
|
|
|
return create_success_response({
|
|
"message": "Document deleted successfully",
|
|
"documentId": document_id
|
|
})
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Delete document error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("DELETE_ERROR", "Failed to delete document")
|
|
)
|
|
|
|
@api_router.get("/candidates/documents/search")
|
|
async def search_candidate_documents(
|
|
query: str = Query(..., min_length=1),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Search candidate documents by filename"""
|
|
try:
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can search documents")
|
|
)
|
|
|
|
candidate: Candidate = current_user
|
|
|
|
# Search documents
|
|
documents_data = await database.search_candidate_documents(candidate.id, query)
|
|
documents = [Document.model_validate(doc_data) for doc_data in documents_data]
|
|
|
|
# Sort by upload date (newest first)
|
|
documents.sort(key=lambda x: x.upload_date, reverse=True)
|
|
|
|
response_data = DocumentListResponse(
|
|
documents=documents,
|
|
total=len(documents)
|
|
)
|
|
|
|
return create_success_response(response_data.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Search documents error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("SEARCH_ERROR", "Failed to search documents")
|
|
)
|
|
|
|
class RAGDocumentRequest(BaseModel):
    """Request model for RAG document content"""
    id: str


@api_router.post("/candidates/rag-content")
async def post_candidate_vector_content(
    rag_document: RAGDocumentRequest = Body(...),
    current_user = Depends(get_current_user)
):
    """Fetch RAG content and metadata for a single document chunk by id"""
    try:
        if current_user.user_type != "candidate":
            logger.warning(f"⚠️ Unauthorized access attempt by user type: {current_user.user_type}")
            return JSONResponse(
                status_code=403,
                content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint")
            )
        candidate: Candidate = current_user

        async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
            collection = candidate_entity.umap_collection
            if not collection:
                logger.warning(f"⚠️ No UMAP collection found for candidate {candidate.username}")
                return JSONResponse(
                    {"error": "No UMAP collection found"}, status_code=404
                )

            for index, id in enumerate(collection.ids):
                if id == rag_document.id:
                    metadata = collection.metadatas[index].copy()
                    rag_metadata = RagContentMetadata.model_validate(metadata)
                    content = candidate_entity.file_watcher.prepare_metadata(metadata)
                    if content:
                        rag_response = RagContentResponse(id=id, content=content, metadata=rag_metadata)
                        logger.info(f"✅ Fetched RAG content for document id {id} for candidate {candidate.username}")
                    else:
                        logger.warning(f"⚠️ No content found for document id {id} for candidate {candidate.username}")
                        return JSONResponse(f"No content found for document id {rag_document.id}.", status_code=404)
                    return create_success_response(rag_response.model_dump(by_alias=True))

            logger.warning(f"⚠️ Document id {rag_document.id} not found in UMAP collection for candidate {candidate.username}")
            return JSONResponse(f"Document id {rag_document.id} not found.", status_code=404)
    except Exception as e:
        logger.error(backstory_traceback.format_exc())
        logger.error(f"❌ Post candidate content error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("FETCH_ERROR", str(e))
        )

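# The request body for /candidates/rag-content is a JSON object carrying the
# chunk id, e.g. {"id": "<chunk-id>"}; valid ids are the entries of the "ids"
# array returned by the /candidates/rag-vectors endpoint below.
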
@api_router.post("/candidates/rag-vectors")
|
|
async def post_candidate_vectors(
|
|
dimensions: int = Body(...),
|
|
current_user = Depends(get_current_user)
|
|
):
|
|
try:
|
|
if current_user.user_type != "candidate":
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint")
|
|
)
|
|
candidate : Candidate = current_user
|
|
|
|
async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
|
|
collection = candidate_entity.umap_collection
|
|
if not collection:
|
|
results = {
|
|
"ids": [],
|
|
"metadatas": [],
|
|
"documents": [],
|
|
"embeddings": [],
|
|
"size": 0
|
|
}
|
|
return create_success_response(results)
|
|
if dimensions == 2:
|
|
umap_embedding = candidate_entity.file_watcher.umap_embedding_2d
|
|
else:
|
|
umap_embedding = candidate_entity.file_watcher.umap_embedding_3d
|
|
|
|
if len(umap_embedding) == 0:
|
|
results = {
|
|
"ids": [],
|
|
"metadatas": [],
|
|
"documents": [],
|
|
"embeddings": [],
|
|
"size": 0
|
|
}
|
|
return create_success_response(results)
|
|
|
|
result = {
|
|
"ids": collection.ids,
|
|
"metadatas": collection.metadatas,
|
|
"documents": collection.documents,
|
|
"embeddings": umap_embedding.tolist(),
|
|
"size": candidate_entity.file_watcher.collection.count()
|
|
}
|
|
|
|
return create_success_response(result)
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Post candidate vectors error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FETCH_ERROR", str(e))
|
|
)
|
|
|
|
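# Note (my understanding of FastAPI's body handling, worth verifying):
# because `dimensions` is the only body parameter and is not embedded,
# FastAPI expects the raw JSON body to be the bare number, e.g. `2` or `3`,
# rather than `{"dimensions": 2}`. Declaring it as
# Body(..., embed=True) would switch to the object form.
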
@api_router.delete("/candidates/{candidate_id}")
|
|
async def delete_candidate(
|
|
candidate_id: str = Path(...),
|
|
admin_user = Depends(get_current_admin),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Delete a candidate"""
|
|
try:
|
|
# Check if admin user
|
|
if not admin_user.is_admin:
|
|
logger.warning(f"⚠️ Unauthorized delete attempt by user {admin_user.id}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only admins can delete candidates")
|
|
)
|
|
|
|
# Get candidate data
|
|
candidate_data = await database.get_candidate(candidate_id)
|
|
if not candidate_data:
|
|
logger.warning(f"⚠️ Candidate not found for deletion: {candidate_id}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Candidate not found")
|
|
)
|
|
|
|
await entities.entity_manager.remove_entity(candidate_id)
|
|
|
|
# Delete candidate from database
|
|
await database.delete_candidate(candidate_id)
|
|
|
|
# Optionally delete files and documents associated with the candidate
|
|
await database.delete_all_candidate_documents(candidate_id)
|
|
|
|
file_path = os.path.join(defines.user_dir, candidate_data["username"])
|
|
if os.path.exists(file_path):
|
|
try:
|
|
shutil.rmtree(file_path)
|
|
logger.info(f"🗑️ Deleted candidate files directory: {file_path}")
|
|
except Exception as e:
|
|
logger.error(f"❌ Failed to delete candidate files directory: {e}")
|
|
|
|
logger.info(f"🗑️ Candidate deleted: {candidate_id} by admin {admin_user.id}")
|
|
|
|
return create_success_response({
|
|
"message": "Candidate deleted successfully",
|
|
"candidateId": candidate_id
|
|
})
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Delete candidate error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("DELETE_ERROR", "Failed to delete candidate")
|
|
)
|
|
|
|
@api_router.patch("/candidates/{candidate_id}")
|
|
async def update_candidate(
|
|
candidate_id: str = Path(...),
|
|
updates: Dict[str, Any] = Body(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Update a candidate"""
|
|
try:
|
|
candidate_data = await database.get_candidate(candidate_id)
|
|
if not candidate_data:
|
|
logger.warning(f"⚠️ Candidate not found for update: {candidate_id}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Candidate not found")
|
|
)
|
|
|
|
is_AI = candidate_data.get("is_AI", False)
|
|
candidate = CandidateAI.model_validate(candidate_data) if is_AI else Candidate.model_validate(candidate_data)
|
|
|
|
# Check authorization (user can only update their own profile)
|
|
if current_user.is_admin is False and candidate.id != current_user.id:
|
|
logger.warning(f"⚠️ Unauthorized update attempt by user {current_user.id} on candidate {candidate_id}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot update another user's profile")
|
|
)
|
|
|
|
# Apply updates
|
|
updates["updatedAt"] = datetime.now(UTC).isoformat()
|
|
logger.info(f"🔄 Updating candidate {candidate_id} with data: {updates}")
|
|
candidate_dict = candidate.model_dump()
|
|
candidate_dict.update(updates)
|
|
updated_candidate = CandidateAI.model_validate(candidate_dict) if is_AI else Candidate.model_validate(candidate_dict)
|
|
await database.set_candidate(candidate_id, updated_candidate.model_dump())
|
|
|
|
return create_success_response(updated_candidate.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Update candidate error: {e}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("UPDATE_FAILED", str(e))
|
|
)
|
|
|
|
@api_router.get("/candidates")
|
|
async def get_candidates(
|
|
page: int = Query(1, ge=1),
|
|
limit: int = Query(20, ge=1, le=100),
|
|
sortBy: Optional[str] = Query(None, alias="sortBy"),
|
|
sortOrder: str = Query("desc", pattern="^(asc|desc)$", alias="sortOrder"),
|
|
filters: Optional[str] = Query(None),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get paginated list of candidates"""
|
|
try:
|
|
# Parse filters if provided
|
|
filter_dict = None
|
|
if filters:
|
|
filter_dict = json.loads(filters)
|
|
|
|
# Get all candidates from Redis
|
|
all_candidates_data = await database.get_all_candidates()
|
|
candidates_list = [Candidate.model_validate(data) if not data.get("is_AI") else CandidateAI.model_validate(data) for data in all_candidates_data.values()]
|
|
|
|
paginated_candidates, total = filter_and_paginate(
|
|
candidates_list, page, limit, sortBy, sortOrder, filter_dict
|
|
)
|
|
|
|
paginated_response = create_paginated_response(
|
|
[c.model_dump(by_alias=True) for c in paginated_candidates],
|
|
page, limit, total
|
|
)
|
|
|
|
return create_success_response(paginated_response)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Get candidates error: {e}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("FETCH_FAILED", str(e))
|
|
)
|
|
|
|
@api_router.get("/candidates/search")
|
|
async def search_candidates(
|
|
query: str = Query(...),
|
|
filters: Optional[str] = Query(None),
|
|
page: int = Query(1, ge=1),
|
|
limit: int = Query(20, ge=1, le=100),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Search candidates"""
|
|
try:
|
|
# Parse filters
|
|
filter_dict = {}
|
|
if filters:
|
|
filter_dict = json.loads(filters)
|
|
|
|
# Get all candidates from Redis
|
|
all_candidates_data = await database.get_all_candidates()
|
|
candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]
|
|
|
|
# Filter by search query
|
|
if query:
|
|
query_lower = query.lower()
|
|
candidates_list = [
|
|
c for c in candidates_list
|
|
if (query_lower in c.first_name.lower() or
|
|
query_lower in c.last_name.lower() or
|
|
query_lower in c.email.lower() or
|
|
query_lower in c.username.lower() or
|
|
any(query_lower in skill.name.lower() for skill in c.skills or []))
|
|
]
|
|
|
|
paginated_candidates, total = filter_and_paginate(
|
|
candidates_list, page, limit, filters=filter_dict
|
|
)
|
|
|
|
paginated_response = create_paginated_response(
|
|
[c.model_dump(by_alias=True) for c in paginated_candidates],
|
|
page, limit, total
|
|
)
|
|
|
|
return create_success_response(paginated_response)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Search candidates error: {e}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("SEARCH_FAILED", str(e))
|
|
)
|
|
|
|
# ============================
# Password Reset Endpoints
# ============================
class PasswordResetRequest(BaseModel):
    email: EmailStr

class PasswordResetConfirm(BaseModel):
    token: str
    new_password: str

    @field_validator('new_password')
    @classmethod
    def validate_password_strength(cls, v):
        # Delegates to the module-level validate_password_strength helper
        is_valid, issues = validate_password_strength(v)
        if not is_valid:
            raise ValueError('; '.join(issues))
        return v

@api_router.post("/auth/password-reset/request")
async def request_password_reset(
    request: PasswordResetRequest,
    database: RedisDatabase = Depends(get_database)
):
    """Request password reset"""
    try:
        # Check if user exists
        user_data = await database.get_user(request.email)
        if not user_data:
            # Don't reveal whether the email exists or not
            return create_success_response({"message": "If the email exists, a reset link will be sent"})

        auth_manager = AuthenticationManager(database)

        # Generate reset token
        reset_token = auth_manager.password_security.generate_secure_token()
        reset_expiry = datetime.now(timezone.utc) + timedelta(hours=1)  # 1 hour expiry

        # Update authentication record
        auth_record = await database.get_authentication(user_data["id"])
        if auth_record:
            auth_record["resetPasswordToken"] = reset_token
            auth_record["resetPasswordExpiry"] = reset_expiry.isoformat()
            await database.set_authentication(user_data["id"], auth_record)

        # TODO: Send email with reset token
        logger.info(f"🔐 Password reset requested for: {request.email}")

        return create_success_response({"message": "If the email exists, a reset link will be sent"})

    except Exception as e:
        logger.error(f"❌ Password reset request error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("RESET_ERROR", "An error occurred processing the request")
        )

@api_router.post("/auth/password-reset/confirm")
async def confirm_password_reset(
    request: PasswordResetConfirm,
    database: RedisDatabase = Depends(get_database)
):
    """Confirm password reset with token"""
    try:
        # TODO: Find the user by reset token. This requires a token -> user
        # lookup, which the Redis database structure does not support yet;
        # until that exists, this endpoint is a placeholder.
        return create_success_response({"message": "Password reset successfully"})

    except Exception as e:
        logger.error(f"❌ Password reset confirm error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("RESET_ERROR", "An error occurred resetting the password")
        )

# ============================
# Job Endpoints
# ============================

@api_router.post("/jobs")
async def create_candidate_job(
    job_data: Dict[str, Any] = Body(...),
    current_user = Depends(get_current_user),
    database: RedisDatabase = Depends(get_database)
):
    """Create a new job"""
    is_employer = isinstance(current_user, Employer)

    try:
        if is_employer:
            job = JobFull.model_validate(job_data)
        else:
            job = Job.model_validate(job_data)

        # Add required fields
        job.id = str(uuid.uuid4())
        job.owner_id = current_user.id
        job.owner = current_user

        await database.set_job(job.id, job.model_dump())

        return create_success_response(job.model_dump(by_alias=True))

    except Exception as e:
        logger.error(f"❌ Job creation error: {e}")
        return JSONResponse(
            status_code=400,
            content=create_error_response("CREATION_FAILED", str(e))
        )

@api_router.post("/jobs/from-content")
|
|
async def create_job_from_description(
|
|
content: str = Body(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Upload a document for the current candidate"""
|
|
async def content_stream_generator(content):
|
|
# Verify user is a candidate
|
|
if current_user.user_type != "candidate":
|
|
logger.warning(f"⚠️ Unauthorized upload attempt by user type: {current_user.user_type}")
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Only candidates can upload documents"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
logger.info(f"📁 Received file content: size='{len(content)} bytes'")
|
|
|
|
async for message in create_job_from_content(database=database, current_user=current_user, content=content):
|
|
yield message
|
|
return
|
|
|
|
try:
|
|
async def to_json(method):
|
|
try:
|
|
async for message in method:
|
|
json_data = message.model_dump(mode='json', by_alias=True)
|
|
json_str = json.dumps(json_data)
|
|
yield f"data: {json_str}\n\n".encode("utf-8")
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"Error in to_json conversion: {e}")
|
|
return
|
|
|
|
return StreamingResponse(
|
|
to_json(content_stream_generator(content)),
|
|
media_type="text/event-stream",
|
|
headers={
|
|
"Cache-Control": "no-cache, no-store, must-revalidate",
|
|
"Connection": "keep-alive",
|
|
"X-Accel-Buffering": "no", # Nginx
|
|
"X-Content-Type-Options": "nosniff",
|
|
"Access-Control-Allow-Origin": "*", # Adjust for your CORS needs
|
|
"Transfer-Encoding": "chunked",
|
|
},
|
|
)
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Document upload error: {e}")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Failed to upload document"
|
|
).model_dump(by_alias=True)).encode("utf-8")]),
|
|
media_type="text/event-stream"
|
|
)
|
|
|
|
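# A minimal sketch of consuming the SSE stream above from a client. The httpx
# dependency, base_url, and the /api/1.0 prefix are assumptions for
# illustration (the prefix matches the paths used in the endpoint rate limits
# configured later in this module).
async def _example_consume_job_stream(base_url: str, token: str, content: str):
    import httpx  # assumption: available in the client environment
    async with httpx.AsyncClient(base_url=base_url) as client:
        async with client.stream(
            "POST",
            "/api/1.0/jobs/from-content",
            json=content,
            headers={"Authorization": f"Bearer {token}"},
        ) as response:
            async for line in response.aiter_lines():
                # Each event arrives as a single "data: {...}" line
                if line.startswith("data: "):
                    message = json.loads(line[len("data: "):])
                    print(message.get("status"), message.get("content"))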
@api_router.post("/jobs/upload")
|
|
async def create_job_from_file(
|
|
file: UploadFile = File(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Upload a job document for the current candidate and create a Job"""
|
|
# Check file size (limit to 10MB)
|
|
max_size = 10 * 1024 * 1024 # 10MB
|
|
file_content = await file.read()
|
|
if len(file_content) > max_size:
|
|
logger.info(f"⚠️ File too large: {file.filename} ({len(file_content)} bytes)")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="File size exceeds 10MB limit"
|
|
).model_dump(by_alias=True)).encode("utf-8")]),
|
|
media_type="text/event-stream"
|
|
)
|
|
if len(file_content) == 0:
|
|
logger.info(f"⚠️ File is empty: {file.filename}")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="File is empty"
|
|
).model_dump(by_alias=True)).encode("utf-8")]),
|
|
media_type="text/event-stream"
|
|
)
|
|
|
|
"""Upload a document for the current candidate"""
|
|
    async def upload_stream_generator(file_content):
        # Verify user is a candidate
        if current_user.user_type != "candidate":
            logger.warning(f"⚠️ Unauthorized upload attempt by user type: {current_user.user_type}")
            error_message = ChatMessageError(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content="Only candidates can upload documents"
            )
            yield error_message
            return

        file.filename = re.sub(r'^.*/', '', file.filename) if file.filename else ''  # Sanitize filename
        if not file.filename or file.filename.strip() == "":
            logger.warning("⚠️ File upload attempt with missing filename")
            error_message = ChatMessageError(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content="File must have a valid filename"
            )
            yield error_message
            return

        logger.info(f"📁 Received file upload: filename='{file.filename}', content_type='{file.content_type}', size='{len(file_content)} bytes'")

        # Validate file type
        allowed_types = ['.txt', '.md', '.docx', '.pdf', '.png', '.jpg', '.jpeg', '.gif']
        file_extension = pathlib.Path(file.filename).suffix.lower() if file.filename else ""

        if file_extension not in allowed_types:
            logger.warning(f"⚠️ Invalid file type: {file_extension} for file {file.filename}")
            error_message = ChatMessageError(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content=f"File type {file_extension} not supported. Allowed types: {', '.join(allowed_types)}"
            )
            yield error_message
            return

        document_type = get_document_type_from_filename(file.filename or "unknown.txt")

        if document_type not in (DocumentType.MARKDOWN, DocumentType.TXT):
            status_message = ChatMessageStatus(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content=f"Converting content from {document_type}...",
                activity=ApiActivityType.CONVERTING
            )
            yield status_message
            try:
                md = MarkItDown(enable_plugins=False)  # Set to True to enable plugins
                stream = io.BytesIO(file_content)
                stream_info = StreamInfo(
                    extension=file_extension,  # e.g., ".pdf"
                    url=file.filename  # optional, helps with logging and guessing
                )
                result = md.convert_stream(stream, stream_info=stream_info, output_format="markdown")
                file_content = result.text_content
                logger.info(f"✅ Converted {file.filename} to Markdown format")
            except Exception as e:
                error_message = ChatMessageError(
                    sessionId=MOCK_UUID,  # No session ID for document uploads
                    content=f"Failed to convert {file.filename} to Markdown.",
                )
                yield error_message
                logger.error(f"❌ Error converting {file.filename} to Markdown: {e}")
                return
        async for message in create_job_from_content(database=database, current_user=current_user, content=file_content):
            yield message
        return
    try:
        async def to_json(method):
            try:
                async for message in method:
                    json_data = message.model_dump(mode='json', by_alias=True)
                    json_str = json.dumps(json_data)
                    yield f"data: {json_str}\n\n".encode("utf-8")
            except Exception as e:
                logger.error(backstory_traceback.format_exc())
                logger.error(f"Error in to_json conversion: {e}")
                return

        return StreamingResponse(
            to_json(upload_stream_generator(file_content)),
            media_type="text/event-stream",
            headers={
                "Cache-Control": "no-cache, no-store, must-revalidate",
                "Connection": "keep-alive",
                "X-Accel-Buffering": "no",  # Nginx
                "X-Content-Type-Options": "nosniff",
                "Access-Control-Allow-Origin": "*",  # Adjust for your CORS needs
                "Transfer-Encoding": "chunked",
            },
        )
    except Exception as e:
        logger.error(backstory_traceback.format_exc())
        logger.error(f"❌ Document upload error: {e}")
        return StreamingResponse(
            iter([json.dumps(ChatMessageError(
                sessionId=MOCK_UUID,  # No session ID for document uploads
                content="Failed to upload document"
            ).model_dump(mode='json', by_alias=True)).encode("utf-8")]),
            media_type="text/event-stream"
        )
@api_router.get("/jobs/{job_id}")
|
|
async def get_job(
|
|
job_id: str = Path(...),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get a job by ID"""
|
|
try:
|
|
job_data = await database.get_job(job_id)
|
|
if not job_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Job not found")
|
|
)
|
|
|
|
# Increment view count
|
|
job_data["views"] = job_data.get("views", 0) + 1
|
|
await database.set_job(job_id, job_data)
|
|
|
|
job = Job.model_validate(job_data)
|
|
return create_success_response(job.model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Get job error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FETCH_ERROR", str(e))
|
|
)
|
|
|
|
@api_router.get("/jobs")
|
|
async def get_jobs(
|
|
page: int = Query(1, ge=1),
|
|
limit: int = Query(20, ge=1, le=100),
|
|
sortBy: Optional[str] = Query(None, alias="sortBy"),
|
|
sortOrder: str = Query("desc", pattern="^(asc|desc)$", alias="sortOrder"),
|
|
filters: Optional[str] = Query(None),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get paginated list of jobs"""
|
|
try:
|
|
filter_dict = None
|
|
if filters:
|
|
filter_dict = json.loads(filters)
|
|
|
|
# Get all jobs from Redis
|
|
all_jobs_data = await database.get_all_jobs()
|
|
jobs_list = []
|
|
for job in all_jobs_data.values():
|
|
if job.get("user_type") == "employer":
|
|
jobs_list.append(JobFull.model_validate(job))
|
|
else:
|
|
jobs_list.append(Job.model_validate(job))
|
|
|
|
paginated_jobs, total = filter_and_paginate(
|
|
jobs_list, page, limit, sortBy, sortOrder, filter_dict
|
|
)
|
|
|
|
paginated_response = create_paginated_response(
|
|
[j.model_dump(by_alias=True) for j in paginated_jobs],
|
|
page, limit, total
|
|
)
|
|
|
|
return create_success_response(paginated_response)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Get jobs error: {e}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("FETCH_FAILED", str(e))
|
|
)
|
|
|
|
@api_router.get("/jobs/search")
|
|
async def search_jobs(
|
|
query: str = Query(...),
|
|
filters: Optional[str] = Query(None),
|
|
page: int = Query(1, ge=1),
|
|
limit: int = Query(20, ge=1, le=100),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Search jobs"""
|
|
try:
|
|
filter_dict = {}
|
|
if filters:
|
|
filter_dict = json.loads(filters)
|
|
|
|
# Get all jobs from Redis
|
|
all_jobs_data = await database.get_all_jobs()
|
|
jobs_list = [Job.model_validate(data) for data in all_jobs_data.values() if data.get("is_active", True)]
|
|
|
|
if query:
|
|
query_lower = query.lower()
|
|
jobs_list = [
|
|
j for j in jobs_list
|
|
if ((j.title and query_lower in j.title.lower()) or
|
|
(j.description and query_lower in j.description.lower()) or
|
|
any(query_lower in skill.lower() for skill in getattr(j, "skills", []) or []))
|
|
]
|
|
|
|
paginated_jobs, total = filter_and_paginate(
|
|
jobs_list, page, limit, filters=filter_dict
|
|
)
|
|
|
|
paginated_response = create_paginated_response(
|
|
[j.model_dump(by_alias=True) for j in paginated_jobs],
|
|
page, limit, total
|
|
)
|
|
|
|
return create_success_response(paginated_response)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Search jobs error: {e}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("SEARCH_FAILED", str(e))
|
|
)
|
|
|
|
|
|
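# Example request (illustrative values): `filters` is a JSON-encoded object
# that filter_and_paginate applies against each job's fields, e.g.
#
#   GET /api/1.0/jobs?page=1&limit=20&sortBy=createdAt&sortOrder=desc&filters={"isActive":true}
#
# The accepted filter keys depend on the Job/JobFull model fields.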
@api_router.delete("/jobs/{job_id}")
|
|
async def delete_job(
|
|
job_id: str = Path(...),
|
|
admin_user = Depends(get_current_admin),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Delete a Job"""
|
|
try:
|
|
# Check if admin user
|
|
if not admin_user.is_admin:
|
|
logger.warning(f"⚠️ Unauthorized delete attempt by user {admin_user.id}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only admins can delete")
|
|
)
|
|
|
|
# Get candidate data
|
|
job_data = await database.get_job(job_id)
|
|
if not job_data:
|
|
logger.warning(f"⚠️ Candidate not found for deletion: {job_id}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Job not found")
|
|
)
|
|
|
|
# Delete job from database
|
|
await database.delete_job(job_id)
|
|
|
|
logger.info(f"🗑️ Job deleted: {job_id} by admin {admin_user.id}")
|
|
|
|
return create_success_response({
|
|
"message": "Job deleted successfully",
|
|
"jobId": job_id
|
|
})
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Delete job error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("DELETE_ERROR", "Failed to delete job")
|
|
)
|
|
|
|
# ============================
# Chat Endpoints
# ============================
# Chat Session Endpoints with Username Association
@api_router.get("/chat/statistics")
async def get_chat_statistics(
    current_user = Depends(get_current_user),
    database: RedisDatabase = Depends(get_database)
):
    """Get chat statistics (admin/analytics endpoint)"""
    try:
        stats = await database.get_chat_statistics()
        return create_success_response(stats)
    except Exception as e:
        logger.error(f"❌ Get chat statistics error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("STATS_ERROR", str(e))
        )
@api_router.post("/candidates/rag-search")
|
|
async def post_candidate_rag_search(
|
|
query: str = Body(...),
|
|
current_user = Depends(get_current_user)
|
|
):
|
|
"""Get chat activity summary for a candidate"""
|
|
try:
|
|
if current_user.user_type != "candidate":
|
|
logger.warning(f"⚠️ Unauthorized RAG search attempt by user {current_user.id}")
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Only candidates can access this endpoint")
|
|
)
|
|
|
|
candidate : Candidate = current_user
|
|
chat_type = ChatContextType.RAG_SEARCH
|
|
# Get RAG search data
|
|
async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
|
|
# Entity automatically released when done
|
|
chat_agent = candidate_entity.get_or_create_agent(agent_type=chat_type)
|
|
if not chat_agent:
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("AGENT_NOT_FOUND", "No agent found for this chat type")
|
|
)
|
|
|
|
user_message = ChatMessageUser(senderId=candidate.id, sessionId=MOCK_UUID, content=query, timestamp=datetime.now(UTC))
|
|
rag_message : Any = None
|
|
async for generated_message in chat_agent.generate(
|
|
llm=llm_manager.get_llm(),
|
|
model=defines.model,
|
|
session_id=user_message.session_id,
|
|
prompt=user_message.content,
|
|
):
|
|
rag_message = generated_message
|
|
|
|
if not rag_message:
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("NO_RESPONSE", "No response generated for the RAG search")
|
|
)
|
|
final_message : ChatMessageRagSearch = rag_message
|
|
return create_success_response(final_message.content[0].model_dump(by_alias=True))
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Get candidate chat summary error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("SUMMARY_ERROR", str(e))
|
|
)
|
|
|
|
# reference can be candidateId, username, or email
@api_router.get("/users/{reference}")
async def get_user(
    reference: str = Path(...),
    database: RedisDatabase = Depends(get_database)
):
    """Get a user by ID, username, or email (candidates first, then guests)"""
    try:
        # Normalize reference to lowercase for case-insensitive search
        query_lower = reference.lower()

        all_candidate_data = await database.get_all_candidates()
        if not all_candidate_data:
            logger.warning("⚠️ No users found in database")
            return JSONResponse(
                status_code=404,
                content=create_error_response("NOT_FOUND", "No users found")
            )

        user_data = None
        for user in all_candidate_data.values():
            if (user.get("id", "").lower() == query_lower or
                user.get("username", "").lower() == query_lower or
                user.get("email", "").lower() == query_lower):
                user_data = user
                break

        if not user_data:
            all_guest_data = await database.get_all_guests()
            if not all_guest_data:
                logger.warning("⚠️ No guests found in database")
                return JSONResponse(
                    status_code=404,
                    content=create_error_response("NOT_FOUND", "No users found")
                )
            for user in all_guest_data.values():
                if (user.get("id", "").lower() == query_lower or
                    user.get("username", "").lower() == query_lower or
                    user.get("email", "").lower() == query_lower):
                    user_data = user
                    break

        if not user_data:
            logger.warning(f"⚠️ No user or guest found for reference: {reference}")
            return JSONResponse(
                status_code=404,
                content=create_error_response("NOT_FOUND", "User not found")
            )

        user = BaseUserWithType.model_validate(user_data)

        return create_success_response(user.model_dump(by_alias=True))

    except Exception as e:
        logger.error(f"❌ Get user error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("FETCH_ERROR", str(e))
        )
# reference can be candidateId, username, or email
@api_router.get("/candidates/{reference}")
async def get_candidate(
    reference: str = Path(...),
    database: RedisDatabase = Depends(get_database)
):
    """Get a candidate by ID, username, or email"""
    try:
        # Normalize reference to lowercase for case-insensitive search
        query_lower = reference.lower()

        all_candidates_data = await database.get_all_candidates()
        if not all_candidates_data:
            logger.warning("⚠️ No candidates found in database")
            return JSONResponse(
                status_code=404,
                content=create_error_response("NOT_FOUND", "No candidates found")
            )

        candidate_data = None
        for candidate in all_candidates_data.values():
            if (candidate.get("id", "").lower() == query_lower or
                candidate.get("username", "").lower() == query_lower or
                candidate.get("email", "").lower() == query_lower):
                candidate_data = candidate
                break

        if not candidate_data:
            logger.warning(f"⚠️ Candidate not found for reference: {reference}")
            return JSONResponse(
                status_code=404,
                content=create_error_response("NOT_FOUND", "Candidate not found")
            )

        candidate = Candidate.model_validate(candidate_data) if not candidate_data.get("is_AI") else CandidateAI.model_validate(candidate_data)

        return create_success_response(candidate.model_dump(by_alias=True))

    except Exception as e:
        logger.error(f"❌ Get candidate error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("FETCH_ERROR", str(e))
        )
@api_router.get("/candidates/{username}/chat-summary")
|
|
async def get_candidate_chat_summary(
|
|
username: str = Path(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get chat activity summary for a candidate"""
|
|
try:
|
|
# Find candidate by username
|
|
candidate_data = await database.find_candidate_by_username(username)
|
|
if not candidate_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("CANDIDATE_NOT_FOUND", f"Candidate with username '{username}' not found")
|
|
)
|
|
|
|
summary = await database.get_candidate_chat_summary(candidate_data["id"])
|
|
summary["candidate"] = {
|
|
"username": candidate_data.get("username"),
|
|
"fullName": candidate_data.get("fullName"),
|
|
"email": candidate_data.get("email")
|
|
}
|
|
|
|
return create_success_response(summary)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Get candidate chat summary error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("SUMMARY_ERROR", str(e))
|
|
)
|
|
|
|
@api_router.post("/chat/sessions/{session_id}/archive")
|
|
async def archive_chat_session(
|
|
session_id: str = Path(...),
|
|
current_user = Depends(get_current_user),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Archive a chat session"""
|
|
try:
|
|
session_data = await database.get_chat_session(session_id)
|
|
if not session_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Chat session not found")
|
|
)
|
|
|
|
# Check if user owns this session or is admin
|
|
if session_data.get("userId") != current_user.id:
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot archive another user's session")
|
|
)
|
|
|
|
await database.archive_chat_session(session_id)
|
|
|
|
return create_success_response({
|
|
"message": "Chat session archived successfully",
|
|
"sessionId": session_id
|
|
})
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Archive chat session error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("ARCHIVE_ERROR", str(e))
|
|
)
|
|
|
|
# ============================
# Chat Endpoints
# ============================

@api_router.post("/chat/sessions")
async def create_chat_session(
    session_data: Dict[str, Any] = Body(...),
    current_user: BaseUserWithType = Depends(get_current_user_or_guest),
    database: RedisDatabase = Depends(get_database)
):
    """Create a new chat session with optional candidate username association"""
    try:
        # Extract username if provided
        username = session_data.get("username")
        candidate_id = None
        candidate_data = None

        # If username is provided, look up the candidate
        if username:
            logger.info(f"🔍 Looking up candidate with username: {username}")

            # Get all candidates and find by username
            all_candidates_data = await database.get_all_candidates()
            candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]

            # Find candidate by username (case-insensitive)
            matching_candidates = [
                c for c in candidates_list
                if c.username.lower() == username.lower()
            ]

            if not matching_candidates:
                return JSONResponse(
                    status_code=404,
                    content=create_error_response("CANDIDATE_NOT_FOUND", f"Candidate with username '{username}' not found")
                )

            candidate_data = matching_candidates[0]
            candidate_id = candidate_data.id
            logger.info(f"✅ Found candidate: {candidate_data.full_name} (ID: {candidate_id})")

        # Add required fields
        session_id = str(uuid.uuid4())
        session_data["id"] = session_id
        session_data["userId"] = current_user.id
        session_data["createdAt"] = datetime.now(UTC).isoformat()
        session_data["lastActivity"] = datetime.now(UTC).isoformat()

        # Set up context with candidate association if username was provided
        context = session_data.get("context", {})
        if candidate_id and candidate_data:
            context["relatedEntityId"] = candidate_id
            context["relatedEntityType"] = "candidate"

            # Add candidate info to additional context for AI reference
            additional_context = context.get("additionalContext", {})
            additional_context["candidateInfo"] = {
                "id": candidate_data.id,
                "name": candidate_data.full_name,
                "email": candidate_data.email,
                "username": candidate_data.username,
                "skills": [skill.name for skill in candidate_data.skills] if candidate_data.skills else [],
                "experience": len(candidate_data.experience) if candidate_data.experience else 0,
                "location": candidate_data.location.city if candidate_data.location else "Unknown"
            }
            context["additionalContext"] = additional_context

            # Set a descriptive title if not provided
            if not session_data.get("title"):
                session_data["title"] = f"Chat about {candidate_data.full_name}"

        session_data["context"] = context

        # Create chat session
        chat_session = ChatSession.model_validate(session_data)
        await database.set_chat_session(chat_session.id, chat_session.model_dump())

        logger.info(f"✅ Chat session created: {chat_session.id} for user {current_user.id}" +
                    (f" about candidate {candidate_data.full_name}" if candidate_data else ""))

        return create_success_response(chat_session.model_dump(by_alias=True))

    except Exception as e:
        logger.error(backstory_traceback.format_exc())
        logger.error(f"❌ Chat session creation error: {e}")
        logger.info(json.dumps(session_data, indent=2))
        return JSONResponse(
            status_code=400,
            content=create_error_response("CREATION_FAILED", str(e))
        )
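# Example request body (illustrative): the username association is optional,
# and context.type must be one of the ChatContextType values.
#
#   {
#       "username": "jdoe",
#       "title": "Interview prep",
#       "context": {"type": "<a ChatContextType value>"}
#   }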
@api_router.post("/chat/sessions/{session_id}/messages/stream")
|
|
async def post_chat_session_message_stream(
|
|
user_message: ChatMessageUser = Body(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Post a message to a chat session and stream the response with persistence"""
|
|
try:
|
|
chat_session_data = await database.get_chat_session(user_message.session_id)
|
|
if not chat_session_data:
|
|
logger.info("🔗 Chat session not found for session ID: " + user_message.session_id)
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Chat session not found")
|
|
)
|
|
chat_session = ChatSession.model_validate(chat_session_data)
|
|
chat_type = chat_session.context.type
|
|
candidate_info = chat_session.context.additional_context.get("candidateInfo", {}) if chat_session.context and chat_session.context.additional_context else None
|
|
|
|
# Get candidate info if this chat is about a specific candidate
|
|
if candidate_info:
|
|
logger.info(f"🔗 Chat session {user_message.session_id} about candidate {candidate_info['name']} accessed by user {current_user.id}")
|
|
else:
|
|
logger.info(f"🔗 Chat session {user_message.session_id} type {chat_type} accessed by user {current_user.id}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("CANDIDATE_REQUIRED", "This chat session requires a candidate association")
|
|
)
|
|
|
|
candidate_data = await database.get_candidate(candidate_info["id"]) if candidate_info else None
|
|
candidate : Candidate | None = Candidate.model_validate(candidate_data) if candidate_data else None
|
|
if not candidate:
|
|
logger.info(f"🔗 Candidate not found for chat session {user_message.session_id} with ID {candidate_info['id']}")
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("CANDIDATE_NOT_FOUND", "Candidate not found for this chat session")
|
|
)
|
|
logger.info(f"🔗 User {current_user.id} posting message to chat session {user_message.session_id} with query length: {len(user_message.content)}")
|
|
|
|
async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
|
|
# Entity automatically released when done
|
|
chat_agent = candidate_entity.get_or_create_agent(agent_type=chat_type)
|
|
if not chat_agent:
|
|
logger.info(f"🔗 No chat agent found for session {user_message.session_id} with type {chat_type}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("AGENT_NOT_FOUND", "No agent found for this chat type")
|
|
)
|
|
|
|
# Persist user message to database
|
|
await database.add_chat_message(user_message.session_id, user_message.model_dump())
|
|
logger.info(f"💬 User message saved to database for session {user_message.session_id}")
|
|
|
|
# Update session last activity
|
|
chat_session_data["lastActivity"] = datetime.now(UTC).isoformat()
|
|
await database.set_chat_session(user_message.session_id, chat_session_data)
|
|
|
|
return await stream_agent_response(
|
|
chat_agent=chat_agent,
|
|
user_message=user_message,
|
|
database=database,
|
|
chat_session_data=chat_session_data,
|
|
)
|
|
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Chat message streaming error")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("STREAMING_ERROR", "")
|
|
)
|
|
|
|
@api_router.get("/chat/sessions/{session_id}/messages")
|
|
async def get_chat_session_messages(
|
|
session_id: str = Path(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
page: int = Query(1, ge=1),
|
|
limit: int = Query(50, ge=1, le=100), # Increased default for chat messages
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Get persisted chat messages for a session"""
|
|
try:
|
|
chat_session_data = await database.get_chat_session(session_id)
|
|
if not chat_session_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Chat session not found")
|
|
)
|
|
|
|
# Get messages from database
|
|
chat_messages = await database.get_chat_messages(session_id)
|
|
|
|
# Convert to ChatMessage objects and sort by timestamp
|
|
messages_list = []
|
|
for msg_data in chat_messages:
|
|
try:
|
|
message = ChatMessage.model_validate(msg_data)
|
|
messages_list.append(message)
|
|
except Exception as e:
|
|
logger.warning(f"⚠️ Failed to validate message: {e}")
|
|
continue
|
|
|
|
# Sort by timestamp (oldest first for chat history)
|
|
messages_list.sort(key=lambda x: x.timestamp)
|
|
|
|
# Apply pagination
|
|
total = len(messages_list)
|
|
start = (page - 1) * limit
|
|
end = start + limit
|
|
paginated_messages = messages_list[start:end]
|
|
|
|
paginated_response = create_paginated_response(
|
|
[m.model_dump(by_alias=True) for m in paginated_messages],
|
|
page, limit, total
|
|
)
|
|
|
|
return create_success_response(paginated_response)
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Get chat messages error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("FETCH_ERROR", str(e))
|
|
)
|
|
|
|
@api_router.patch("/chat/sessions/{session_id}")
|
|
async def update_chat_session(
|
|
session_id: str = Path(...),
|
|
updates: Dict[str, Any] = Body(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Update a chat session's properties"""
|
|
try:
|
|
# Get the existing session
|
|
session_data = await database.get_chat_session(session_id)
|
|
if not session_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Chat session not found")
|
|
)
|
|
|
|
session = ChatSession.model_validate(session_data)
|
|
|
|
# Check authorization - user can only update their own sessions
|
|
if session.user_id != current_user.id:
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot update another user's chat session")
|
|
)
|
|
|
|
# Validate and apply updates
|
|
allowed_fields = {"title", "context", "isArchived", "systemPrompt"}
|
|
filtered_updates = {k: v for k, v in updates.items() if k in allowed_fields}
|
|
|
|
if not filtered_updates:
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("INVALID_UPDATES", "No valid fields provided for update")
|
|
)
|
|
|
|
# Apply updates to session data
|
|
session_dict = session.model_dump()
|
|
|
|
# Handle special field mappings (camelCase to snake_case)
|
|
if "isArchived" in filtered_updates:
|
|
session_dict["is_archived"] = filtered_updates["isArchived"]
|
|
if "systemPrompt" in filtered_updates:
|
|
session_dict["system_prompt"] = filtered_updates["systemPrompt"]
|
|
if "title" in filtered_updates:
|
|
session_dict["title"] = filtered_updates["title"]
|
|
if "context" in filtered_updates:
|
|
# Merge context updates with existing context
|
|
existing_context = session_dict.get("context", {})
|
|
context_updates = filtered_updates["context"]
|
|
|
|
# Update specific context fields while preserving others
|
|
for context_key, context_value in context_updates.items():
|
|
if context_key == "additionalContext":
|
|
# Merge additional context
|
|
existing_additional = existing_context.get("additional_context", {})
|
|
existing_additional.update(context_value)
|
|
existing_context["additional_context"] = existing_additional
|
|
else:
|
|
# Convert camelCase to snake_case for context fields
|
|
snake_key = context_key
|
|
if context_key == "relatedEntityId":
|
|
snake_key = "related_entity_id"
|
|
elif context_key == "relatedEntityType":
|
|
snake_key = "related_entity_type"
|
|
elif context_key == "aiParameters":
|
|
snake_key = "ai_parameters"
|
|
|
|
existing_context[snake_key] = context_value
|
|
|
|
session_dict["context"] = existing_context
|
|
|
|
# Update last activity timestamp
|
|
session_dict["last_activity"] = datetime.now(UTC).isoformat()
|
|
|
|
# Validate the updated session
|
|
updated_session = ChatSession.model_validate(session_dict)
|
|
|
|
# Save to database
|
|
await database.set_chat_session(session_id, updated_session.model_dump())
|
|
|
|
logger.info(f"✅ Chat session {session_id} updated by user {current_user.id}")
|
|
|
|
return create_success_response(updated_session.model_dump(by_alias=True))
|
|
|
|
except ValueError as ve:
|
|
logger.warning(f"⚠️ Validation error updating chat session: {ve}")
|
|
return JSONResponse(
|
|
status_code=400,
|
|
content=create_error_response("VALIDATION_ERROR", str(ve))
|
|
)
|
|
except Exception as e:
|
|
logger.error(f"❌ Update chat session error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("UPDATE_ERROR", str(e))
|
|
)
|
|
|
|
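# Example PATCH body (illustrative): only title, context, isArchived, and
# systemPrompt are accepted; any other keys are silently dropped.
#
#   {
#       "title": "Renamed session",
#       "isArchived": true,
#       "context": {"additionalContext": {"notes": "follow up next week"}}
#   }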
@api_router.delete("/chat/sessions/{session_id}")
|
|
async def delete_chat_session(
|
|
session_id: str = Path(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Delete a chat session and all its messages"""
|
|
try:
|
|
# Get the session to verify it exists and check ownership
|
|
session_data = await database.get_chat_session(session_id)
|
|
if not session_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Chat session not found")
|
|
)
|
|
|
|
session = ChatSession.model_validate(session_data)
|
|
|
|
# Check authorization - user can only delete their own sessions
|
|
if session.user_id != current_user.id:
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot delete another user's chat session")
|
|
)
|
|
|
|
# Delete all messages associated with this session
|
|
try:
|
|
await database.delete_chat_messages(session_id)
|
|
chat_messages = await database.get_chat_messages(session_id)
|
|
message_count = len(chat_messages)
|
|
logger.info(f"🗑️ Deleted {message_count} messages from session {session_id}")
|
|
|
|
except Exception as e:
|
|
logger.warning(f"⚠️ Error deleting messages for session {session_id}: {e}")
|
|
# Continue with session deletion even if message deletion fails
|
|
|
|
# Delete the session itself
|
|
await database.delete_chat_session(session_id)
|
|
|
|
logger.info(f"🗑️ Chat session {session_id} deleted by user {current_user.id}")
|
|
|
|
return create_success_response({
|
|
"success": True,
|
|
"message": "Chat session deleted successfully",
|
|
"sessionId": session_id
|
|
})
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Delete chat session error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("DELETE_ERROR", str(e))
|
|
)
|
|
|
|
@api_router.patch("/chat/sessions/{session_id}/reset")
|
|
async def reset_chat_session(
|
|
session_id: str = Path(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
):
|
|
"""Delete a chat session and all its messages"""
|
|
try:
|
|
# Get the session to verify it exists and check ownership
|
|
session_data = await database.get_chat_session(session_id)
|
|
if not session_data:
|
|
return JSONResponse(
|
|
status_code=404,
|
|
content=create_error_response("NOT_FOUND", "Chat session not found")
|
|
)
|
|
|
|
session = ChatSession.model_validate(session_data)
|
|
|
|
# Check authorization - user can only delete their own sessions
|
|
if session.user_id != current_user.id:
|
|
return JSONResponse(
|
|
status_code=403,
|
|
content=create_error_response("FORBIDDEN", "Cannot reset another user's chat session")
|
|
)
|
|
|
|
# Delete all messages associated with this session
|
|
try:
|
|
await database.delete_chat_messages(session_id)
|
|
chat_messages = await database.get_chat_messages(session_id)
|
|
message_count = len(chat_messages)
|
|
logger.info(f"🗑️ Deleted {message_count} messages from session {session_id}")
|
|
|
|
except Exception as e:
|
|
logger.warning(f"⚠️ Error deleting messages for session {session_id}: {e}")
|
|
# Continue with session deletion even if message deletion fails
|
|
|
|
|
|
logger.info(f"🗑️ Chat session {session_id} reset by user {current_user.id}")
|
|
|
|
return create_success_response({
|
|
"success": True,
|
|
"message": "Chat session reset successfully",
|
|
"sessionId": session_id
|
|
})
|
|
|
|
except Exception as e:
|
|
logger.error(f"❌ Reset chat session error: {e}")
|
|
return JSONResponse(
|
|
status_code=500,
|
|
content=create_error_response("RESET_ERROR", str(e))
|
|
)
|
|
|
|
|
|
# ============================
# Rate Limited Decorator
# ============================

def rate_limited(
    guest_per_minute: int = 10,
    user_per_minute: int = 60,
    admin_per_minute: int = 120,
    endpoint_specific: bool = True
):
    """
    Decorator to easily apply rate limiting to endpoints

    Args:
        guest_per_minute: Rate limit for guest users
        user_per_minute: Rate limit for authenticated users
        admin_per_minute: Rate limit for admin users
        endpoint_specific: Whether to apply endpoint-specific limits (reserved; not yet used)

    Usage (the route decorator must be outermost so that the rate-limited
    wrapper is what gets registered with the router):
        @api_router.post("/my-endpoint")
        @rate_limited(guest_per_minute=5, user_per_minute=30)
        async def my_endpoint(
            request: Request,
            current_user = Depends(get_current_user_or_guest),
            database: RedisDatabase = Depends(get_database)
        ):
            return {"message": "Rate limited endpoint"}
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Locate the injected dependencies among the keyword arguments
            request = None
            current_user = None
            rate_limiter = None

            for param_name, param_value in kwargs.items():
                if isinstance(param_value, Request):
                    request = param_value
                elif hasattr(param_value, 'user_type'):  # User-like object
                    current_user = param_value
                elif isinstance(param_value, RateLimiter):
                    rate_limiter = param_value

            # If not found in kwargs, fall back to constructing one
            if not rate_limiter:
                # Create rate limiter instance (this should ideally come from DI)
                database = db_manager.get_database()
                rate_limiter = RateLimiter(database)

            # Apply rate limiting if we have the required components
            if request and current_user and rate_limiter:
                await apply_custom_rate_limiting(
                    request, current_user, rate_limiter,
                    guest_per_minute, user_per_minute, admin_per_minute
                )

            # Call the original function
            return await func(*args, **kwargs)

        return wrapper
    return decorator
async def apply_custom_rate_limiting(
    request: Request,
    current_user,
    rate_limiter: RateLimiter,
    guest_per_minute: int,
    user_per_minute: int,
    admin_per_minute: int
):
    """Apply custom rate limiting with specified limits"""
    try:
        # Determine user info
        user_id = current_user.id
        user_type = current_user.user_type.value if hasattr(current_user.user_type, 'value') else str(current_user.user_type)
        is_admin = getattr(current_user, 'is_admin', False)

        # Determine appropriate limit
        if is_admin:
            requests_per_minute = admin_per_minute
        elif user_type == "guest":
            requests_per_minute = guest_per_minute
        else:
            requests_per_minute = user_per_minute

        # Create custom rate limit key
        current_time = datetime.now(UTC)
        custom_key = f"custom_rate_limit:{request.url.path}:{user_type}:{user_id}:minute:{current_time.strftime('%Y%m%d%H%M')}"

        # Check current usage
        current_count = int(await rate_limiter.redis.get(custom_key) or 0)

        if current_count >= requests_per_minute:
            logger.warning(f"🚫 Custom rate limit exceeded for {user_type} {user_id}: {current_count}/{requests_per_minute}")
            raise HTTPException(
                status_code=429,
                detail={
                    "error": "Rate limit exceeded",
                    "message": f"Custom rate limit exceeded: {current_count}/{requests_per_minute} requests per minute",
                    "retryAfter": 60 - current_time.second,
                    "userType": user_type,
                    "endpoint": request.url.path
                },
                headers={"Retry-After": str(60 - current_time.second)}
            )

        # Increment counter
        pipe = rate_limiter.redis.pipeline()
        pipe.incr(custom_key)
        pipe.expire(custom_key, 120)  # 2 minutes TTL
        await pipe.execute()

        logger.debug(f"✅ Custom rate limit check passed for {user_type} {user_id}: {current_count + 1}/{requests_per_minute}")

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Custom rate limiting error: {e}")
        # Fail open
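# Worked example of the fixed-window check above: a "candidate" user with
# user_per_minute=60 hitting /api/1.0/jobs at 12:05:42 UTC on 2025-01-01
# (time chosen for illustration) increments
#
#   custom_rate_limit:/api/1.0/jobs:candidate:<user_id>:minute:202501011205
#
# The 61st request in that window is rejected with
# Retry-After = 60 - 42 = 18 seconds, i.e. until the minute rolls over.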
# ============================
# Alternative: FastAPI Dependency-Based Rate Limiting
# ============================

def create_rate_limit_dependency(
    guest_per_minute: int = 10,
    user_per_minute: int = 60,
    admin_per_minute: int = 120
):
    """
    Create a FastAPI dependency for rate limiting

    Usage:
        rate_limit_5_30 = create_rate_limit_dependency(guest_per_minute=5, user_per_minute=30)

        @api_router.post("/my-endpoint")
        async def my_endpoint(
            rate_check = Depends(rate_limit_5_30),
            current_user = Depends(get_current_user_or_guest),
            database: RedisDatabase = Depends(get_database)
        ):
            return {"message": "Rate limited endpoint"}
    """
    async def rate_limit_dependency(
        request: Request,
        current_user = Depends(get_current_user_or_guest),
        rate_limiter: RateLimiter = Depends(get_rate_limiter)
    ):
        await apply_custom_rate_limiting(
            request, current_user, rate_limiter,
            guest_per_minute, user_per_minute, admin_per_minute
        )
        return True

    return rate_limit_dependency
# ============================
# Rate Limiting Utilities
# ============================

class EndpointRateLimiter:
    """Utility class for endpoint-specific rate limiting"""

    def __init__(self, rate_limiter: RateLimiter):
        self.rate_limiter = rate_limiter
        self.custom_limits = {}

    def set_endpoint_limits(self, endpoint: str, limits: dict):
        """Set custom limits for an endpoint (supports '*' wildcards)"""
        self.custom_limits[endpoint] = limits

    async def check_endpoint_limit(self, request: Request, current_user) -> bool:
        """Check if request exceeds endpoint-specific limits"""
        import fnmatch

        endpoint = request.url.path

        # Match the request path against the configured patterns; a plain dict
        # lookup would never match wildcard entries such as
        # "/api/1.0/chat/sessions/*/messages/stream"
        limits = None
        for pattern, configured in self.custom_limits.items():
            if fnmatch.fnmatch(endpoint, pattern):
                limits = configured
                break

        if limits is None:
            return True  # No custom limits set

        user_type = current_user.user_type.value if hasattr(current_user.user_type, 'value') else str(current_user.user_type)

        if getattr(current_user, 'is_admin', False):
            user_type = "admin"

        limit = limits.get(user_type, limits.get("default", 60))

        current_time = datetime.now(UTC)
        key = f"endpoint_limit:{endpoint}:{user_type}:{current_user.id}:minute:{current_time.strftime('%Y%m%d%H%M')}"

        current_count = int(await self.rate_limiter.redis.get(key) or 0)

        if current_count >= limit:
            raise HTTPException(
                status_code=429,
                detail=f"Endpoint rate limit exceeded: {current_count}/{limit} for {endpoint}"
            )

        # Increment counter
        await self.rate_limiter.redis.incr(key)
        await self.rate_limiter.redis.expire(key, 120)

        return True

# Global endpoint rate limiter instance
endpoint_rate_limiter = None

def get_endpoint_rate_limiter(rate_limiter: RateLimiter = Depends(get_rate_limiter)) -> EndpointRateLimiter:
    """Get endpoint rate limiter instance"""
    global endpoint_rate_limiter
    if endpoint_rate_limiter is None:
        endpoint_rate_limiter = EndpointRateLimiter(rate_limiter)

        # Configure endpoint-specific limits
        endpoint_rate_limiter.set_endpoint_limits("/api/1.0/chat/sessions/*/messages/stream", {
            "guest": 5, "candidate": 30, "employer": 30, "admin": 100
        })
        endpoint_rate_limiter.set_endpoint_limits("/api/1.0/candidates/documents/upload", {
            "guest": 2, "candidate": 10, "employer": 10, "admin": 50
        })
        endpoint_rate_limiter.set_endpoint_limits("/api/1.0/jobs", {
            "guest": 1, "candidate": 5, "employer": 20, "admin": 50
        })

    return endpoint_rate_limiter
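# Example wiring (illustrative; the endpoint itself is hypothetical):
#
#   @api_router.post("/my-endpoint")
#   async def my_endpoint(
#       request: Request,
#       current_user = Depends(get_current_user_or_guest),
#       limiter: EndpointRateLimiter = Depends(get_endpoint_rate_limiter),
#   ):
#       await limiter.check_endpoint_limit(request, current_user)
#       return {"message": "ok"}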
@api_router.post("/candidates/{candidate_id}/skill-match")
|
|
async def get_candidate_skill_match(
|
|
candidate_id: str = Path(...),
|
|
requirement: str = Body(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
) -> StreamingResponse:
|
|
|
|
"""Get skill match for a candidate against a requirement with caching"""
|
|
async def message_stream_generator():
|
|
candidate_data = await database.get_candidate(candidate_id)
|
|
if not candidate_data:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Candidate with ID '{candidate_id}' not found"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
candidate = Candidate.model_validate(candidate_data)
|
|
|
|
# Create cache key for this specific candidate + requirement combination
|
|
requirement_hash = hashlib.md5(requirement.encode()).hexdigest()[:8]
|
|
cache_key = f"skill_match:{candidate.id}:{requirement_hash}"
|
|
|
|
# Get cached assessment if it exists
|
|
assessment : SkillAssessment | None = await database.get_cached_skill_match(cache_key)
|
|
|
|
# Determine if we need to regenerate the assessment
|
|
cached_date = None
|
|
if assessment:
|
|
# Get the latest RAG data update time for the current user
|
|
user_rag_update_time = await database.get_user_rag_update_time(current_user.id)
|
|
|
|
updated = assessment.updated_at if "updated_at" in assessment else assessment.created_at
|
|
# Check if cached result is still valid
|
|
# Regenerate if user's RAG data was updated after cache date
|
|
if user_rag_update_time and user_rag_update_time >= updated:
|
|
logger.info(f"🔄 Out-of-date cached entry for {candidate.username} skill {assessment.skill}")
|
|
assessment = None
|
|
else:
|
|
cached_date = updated
|
|
else:
|
|
logger.info(f"💾 No cached skill match data: {cache_key}, {candidate.id}, {requirement}")
|
|
|
|
if assessment:
|
|
logger.info(f"✅ Found cached skill match for candidate {candidate.username} against requirement: {requirement}")
|
|
logger.info(f"💾 Cached skill match data: {assessment.evidence_strength}")
|
|
|
|
# Return cached assessment
|
|
skill_message = ChatMessageSkillAssessment(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Cached skill match found for {candidate.username}",
|
|
skill_assessment=assessment
|
|
)
|
|
yield skill_message
|
|
return
|
|
|
|
logger.info(f"🔍 Generating skill match for candidate {candidate.username} against requirement: {requirement}")
|
|
|
|
async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
|
|
agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.SKILL_MATCH)
|
|
if not agent:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"No skill match agent found for this candidate"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
# Generate new skill match
|
|
final_message = None
|
|
async for generated_message in agent.generate(
|
|
llm=llm_manager.get_llm(),
|
|
model=defines.model,
|
|
session_id=MOCK_UUID,
|
|
prompt=requirement,
|
|
):
|
|
if generated_message.status == ApiStatusType.ERROR:
|
|
logger.error(f"❌ AI generation error: {generated_message.content}")
|
|
yield f"data: {json.dumps({'status': 'error'})}\n\n"
|
|
return
|
|
|
|
# If the message is not done, convert it to a ChatMessageBase to remove
|
|
# metadata and other unnecessary fields for streaming
|
|
if generated_message.status != ApiStatusType.DONE:
|
|
if not isinstance(generated_message, ChatMessageStreaming) and not isinstance(generated_message, ChatMessageStatus):
|
|
raise TypeError(
|
|
f"Expected ChatMessageStreaming or ChatMessageStatus, got {type(generated_message)}"
|
|
)
|
|
yield generated_message# Convert to ChatMessageBase for streaming
|
|
|
|
# Store reference to the complete AI message
|
|
if generated_message.status == ApiStatusType.DONE:
|
|
final_message = generated_message
|
|
break
|
|
|
|
if final_message is None:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"No skill match found for the given requirement"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
if not isinstance(final_message, ChatMessageSkillAssessment):
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Skill match response is not valid"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
skill_match : ChatMessageSkillAssessment = final_message
|
|
assessment = skill_match.skill_assessment
|
|
if not assessment:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Skill assessment could not be generated"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
await database.cache_skill_match(cache_key, assessment)
|
|
logger.info(f"💾 Cached new skill match for candidate {candidate.id}")
|
|
logger.info(f"✅ Skill match found for candidate {candidate.id}: {assessment.evidence_strength}")
|
|
yield skill_match
|
|
return
|
|
|
|
try:
|
|
async def to_json(method):
|
|
try:
|
|
async for message in method:
|
|
json_data = message.model_dump(mode='json', by_alias=True)
|
|
json_str = json.dumps(json_data)
|
|
yield f"data: {json_str}\n\n".encode("utf-8")
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"Error in to_json conversion: {e}")
|
|
return
|
|
|
|
return StreamingResponse(
|
|
to_json(message_stream_generator()),
|
|
media_type="text/event-stream",
|
|
headers={
|
|
"Cache-Control": "no-cache, no-store, must-revalidate",
|
|
"Connection": "keep-alive",
|
|
"X-Accel-Buffering": "no", # Nginx
|
|
"X-Content-Type-Options": "nosniff",
|
|
"Access-Control-Allow-Origin": "*", # Adjust for your CORS needs
|
|
"Transfer-Encoding": "chunked",
|
|
},
|
|
)
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Document upload error: {e}")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Failed to generate skill assessment"
|
|
).model_dump(mode='json', by_alias=True))]),
|
|
media_type="text/event-stream"
|
|
)
|
|
|
|
@api_router.post("/candidates/job-score")
|
|
async def get_candidate_job_score(
|
|
job_requirements: JobRequirements = Body(...),
|
|
skills: List[SkillAssessment] = Body(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
) -> StreamingResponse:
|
|
# Initialize counters
|
|
required_skills_total = 0
|
|
required_skills_matched = 0
|
|
preferred_skills_total = 0
|
|
preferred_skills_matched = 0
|
|
|
|
# Count required technical skills
|
|
tech_required = job_requirements.technical_skills.required
|
|
required_skills_total += len(tech_required)
|
|
|
|
# Count preferred technical skills
|
|
tech_preferred = job_requirements.technical_skills.preferred
|
|
preferred_skills_total += len(tech_preferred)
|
|
|
|
# Count required experience
|
|
exp_required = job_requirements.experience_requirements.required
|
|
required_skills_total += len(exp_required)
|
|
|
|
# Count preferred experience
|
|
exp_preferred = job_requirements.experience_requirements.preferred
|
|
preferred_skills_total += len(exp_preferred)
|
|
|
|
# Education requirements count toward required
|
|
edu_required = job_requirements.education or []
|
|
required_skills_total += len(edu_required)
|
|
|
|
# Soft skills count toward preferred
|
|
soft_skills = job_requirements.soft_skills or []
|
|
preferred_skills_total += len(soft_skills)
|
|
|
|
# Industry knowledge counts toward preferred
|
|
certifications = job_requirements.certifications or []
|
|
preferred_skills_total += len(certifications)
|
|
|
|
preferred_attributes = job_requirements.preferred_attributes or []
|
|
preferred_skills_total += len(preferred_attributes)
|
|
|
|
# Check matches in assessment results
|
|
for assessment in skills:
|
|
evidence_found = assessment.evidence_found
|
|
evidence_strength = assessment.evidence_strength
|
|
|
|
# Consider STRONG and MODERATE evidence as matches
|
|
is_match = evidence_found and evidence_strength in ["STRONG", "MODERATE"]
|
|
|
|
if not is_match:
|
|
continue
|
|
|
|
# Loop through each of the job requirements categories
|
|
# and see if the skill matches the required or preferred skills
|
|
if assessment.skill in tech_required:
|
|
required_skills_matched += 1
|
|
elif assessment.skill in tech_preferred:
|
|
preferred_skills_matched += 1
|
|
elif assessment.skill in exp_required:
|
|
required_skills_matched += 1
|
|
elif assessment.skill in exp_preferred:
|
|
preferred_skills_matched += 1
|
|
elif assessment.skill in edu_required:
|
|
required_skills_matched += 1
|
|
elif assessment.skill in soft_skills:
|
|
preferred_skills_matched += 1
|
|
elif assessment.skill in certifications:
|
|
preferred_skills_matched += 1
|
|
elif assessment.skill in preferred_attributes:
|
|
preferred_skills_matched += 1
|
|
# If no skills were found, return empty statistics
|
|
if required_skills_total == 0 and preferred_skills_total == 0:
|
|
return create_success_response({
|
|
"required_skills": {
|
|
"total": 0,
|
|
"matched": 0,
|
|
"percentage": 0.0,
|
|
},
|
|
"preferred_skills": {
|
|
"total": 0,
|
|
"matched": 0,
|
|
"percentage": 0.0,
|
|
},
|
|
"overall_match": {
|
|
"total": 0,
|
|
"matched": 0,
|
|
"percentage": 0.0,
|
|
},
|
|
})
|
|
|
|
# Calculate percentages
|
|
required_match_percent = (
|
|
(required_skills_matched / required_skills_total * 100)
|
|
if required_skills_total > 0
|
|
else 0
|
|
)
|
|
preferred_match_percent = (
|
|
(preferred_skills_matched / preferred_skills_total * 100)
|
|
if preferred_skills_total > 0
|
|
else 0
|
|
)
|
|
overall_total = required_skills_total + preferred_skills_total
|
|
overall_matched = required_skills_matched + preferred_skills_matched
|
|
overall_match_percent = (
|
|
(overall_matched / overall_total * 100) if overall_total > 0 else 0
|
|
)
|
|
|
|
return create_success_response({
|
|
"required_skills": {
|
|
"total": required_skills_total,
|
|
"matched": required_skills_matched,
|
|
"percentage": round(required_match_percent, 1),
|
|
},
|
|
"preferred_skills": {
|
|
"total": preferred_skills_total,
|
|
"matched": preferred_skills_matched,
|
|
"percentage": round(preferred_match_percent, 1),
|
|
},
|
|
"overall_match": {
|
|
"total": overall_total,
|
|
"matched": overall_matched,
|
|
"percentage": round(overall_match_percent, 1),
|
|
},
|
|
})
|
|
|
|
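# Worked example of the scoring above: 4 required skills with 3 matched plus
# 2 preferred skills with 1 matched yields
#
#   required:  3/4 -> 75.0%
#   preferred: 1/2 -> 50.0%
#   overall:   4/6 -> 66.7%
#
# Only assessments with evidence_found and STRONG/MODERATE evidence_strength
# count as matches.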
@api_router.post("/candidates/{candidate_id}/generate-resume")
|
|
async def generate_resume(
|
|
candidate_id: str = Path(...),
|
|
skills: List[SkillAssessment] = Body(...),
|
|
current_user = Depends(get_current_user_or_guest),
|
|
database: RedisDatabase = Depends(get_database)
|
|
) -> StreamingResponse:
|
|
|
|
"""Get skill match for a candidate against a requirement with caching"""
|
|
async def message_stream_generator():
|
|
candidate_data = await database.get_candidate(candidate_id)
|
|
if not candidate_data:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Candidate with ID '{candidate_id}' not found"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
candidate = Candidate.model_validate(candidate_data)
|
|
|
|
logger.info(f"🔍 Generating resume for candidate {candidate.username}")
|
|
|
|
async with entities.get_candidate_entity(candidate=candidate) as candidate_entity:
|
|
agent = candidate_entity.get_or_create_agent(agent_type=ChatContextType.GENERATE_RESUME)
|
|
if not agent:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"No skill match agent found for this candidate"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
# Generate new skill match
|
|
final_message = None
|
|
async for generated_message in agent.generate_resume(
|
|
llm=llm_manager.get_llm(),
|
|
model=defines.model,
|
|
session_id=MOCK_UUID,
|
|
skills=skills,
|
|
):
|
|
if generated_message.status == ApiStatusType.ERROR:
|
|
logger.error(f"❌ AI generation error: {generated_message.content}")
|
|
yield f"data: {json.dumps({'status': 'error'})}\n\n"
|
|
return
|
|
|
|
# If the message is not done, convert it to a ChatMessageBase to remove
|
|
# metadata and other unnecessary fields for streaming
|
|
if generated_message.status != ApiStatusType.DONE:
|
|
if not isinstance(generated_message, ChatMessageStreaming) and not isinstance(generated_message, ChatMessageStatus):
|
|
raise TypeError(
|
|
f"Expected ChatMessageStreaming or ChatMessageStatus, got {type(generated_message)}"
|
|
)
|
|
yield generated_message# Convert to ChatMessageBase for streaming
|
|
|
|
# Store reference to the complete AI message
|
|
if generated_message.status == ApiStatusType.DONE:
|
|
final_message = generated_message
|
|
break
|
|
|
|
if final_message is None:
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"No skill match found for the given requirement"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
if not isinstance(final_message, ChatMessageResume):
|
|
error_message = ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content=f"Skill match response is not valid"
|
|
)
|
|
yield error_message
|
|
return
|
|
|
|
resume : ChatMessageResume = final_message
|
|
yield resume
|
|
return
|
|
|
|
try:
|
|
async def to_json(method):
|
|
try:
|
|
async for message in method:
|
|
json_data = message.model_dump(mode='json', by_alias=True)
|
|
json_str = json.dumps(json_data)
|
|
yield f"data: {json_str}\n\n".encode("utf-8")
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"Error in to_json conversion: {e}")
|
|
return
|
|
|
|
return StreamingResponse(
|
|
to_json(message_stream_generator()),
|
|
media_type="text/event-stream",
|
|
headers={
|
|
"Cache-Control": "no-cache, no-store, must-revalidate",
|
|
"Connection": "keep-alive",
|
|
"X-Accel-Buffering": "no", # Nginx
|
|
"X-Content-Type-Options": "nosniff",
|
|
"Access-Control-Allow-Origin": "*", # Adjust for your CORS needs
|
|
"Transfer-Encoding": "chunked",
|
|
},
|
|
)
|
|
except Exception as e:
|
|
logger.error(backstory_traceback.format_exc())
|
|
logger.error(f"❌ Document upload error: {e}")
|
|
return StreamingResponse(
|
|
iter([json.dumps(ChatMessageError(
|
|
sessionId=MOCK_UUID, # No session ID for document uploads
|
|
content="Failed to generate skill assessment"
|
|
).model_dump(mode='json', by_alias=True))]),
|
|
media_type="text/event-stream"
|
|
)
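
# Illustrative client usage for the SSE stream above (host, prefix, token and
# request body are assumptions; SkillAssessment fields depend on the model):
#   curl -N -X POST \
#     -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" \
#     -d '[{"skill": "Python", "level": "expert"}]' \
#     "https://<host>:<port>/api/1.0/candidates/<candidate_id>/generate-resume"
# Each frame is "data: {...}\n\n" holding one serialized message: streaming
# chunks and status updates first, then the final resume message.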

# The route decorator must be outermost so the rate-limited wrapper is what
# gets registered with the router
@api_router.get("/candidates/{username}/chat-sessions")
@rate_limited(guest_per_minute=5, user_per_minute=30, admin_per_minute=100)
async def get_candidate_chat_sessions(
    username: str = Path(...),
    current_user = Depends(get_current_user_or_guest),
    page: int = Query(1, ge=1),
    limit: int = Query(20, ge=1, le=100),
    database: RedisDatabase = Depends(get_database)
):
    """Get all chat sessions related to a specific candidate"""
    try:
        logger.info(f"🔍 Fetching chat sessions for candidate with username: {username}")
        # Find candidate by username
        all_candidates_data = await database.get_all_candidates()
        candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]

        matching_candidates = [
            c for c in candidates_list
            if c.username.lower() == username.lower()
        ]

        if not matching_candidates:
            return JSONResponse(
                status_code=404,
                content=create_error_response("CANDIDATE_NOT_FOUND", f"Candidate with username '{username}' not found")
            )

        candidate = matching_candidates[0]

        # Get all chat sessions
        all_sessions_data = await database.get_all_chat_sessions()
        sessions_list = []

        for index, session_data in enumerate(all_sessions_data.values()):
            try:
                session = ChatSession.model_validate(session_data)
                if session.user_id != current_user.id:
                    # Users can only access their own sessions
                    logger.info(f"🔗 Skipping session {session.id} - not owned by user {current_user.id} (created by {session.user_id})")
                    continue
                # Check if this session is related to the candidate
                context = session.context
                if (context and
                    context.related_entity_type == "candidate" and
                    context.related_entity_id == candidate.id):
                    sessions_list.append(session)
            except Exception as e:
                logger.error(backstory_traceback.format_exc())
                logger.error(f"❌ Failed to validate session ({index}): {e}")
                logger.error(f"❌ Session data: {session_data}")
                continue

        # Sort by last activity (most recent first)
        sessions_list.sort(key=lambda x: x.last_activity, reverse=True)

        # Apply pagination
        total = len(sessions_list)
        start = (page - 1) * limit
        end = start + limit
        paginated_sessions = sessions_list[start:end]

        paginated_response = create_paginated_response(
            [s.model_dump(by_alias=True) for s in paginated_sessions],
            page, limit, total
        )

        return create_success_response({
            "candidate": {
                "id": candidate.id,
                "username": candidate.username,
                "fullName": candidate.full_name,
                "email": candidate.email
            },
            "sessions": paginated_response
        })

    except Exception as e:
        logger.error(f"❌ Get candidate chat sessions error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("FETCH_ERROR", str(e))
        )
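
# Illustrative request/response for the endpoint above (the pagination
# envelope shape is assumed from create_paginated_response):
#   GET /api/1.0/candidates/jdoe/chat-sessions?page=1&limit=20
#   -> {"candidate": {"id": "...", "username": "jdoe", ...},
#       "sessions": {"data": [...], "page": 1, "limit": 20, "total": 42}}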

# ============================
# Admin Endpoints
# ============================
# @api_router.get("/admin/verification-stats")
async def get_verification_statistics(
    current_user = Depends(get_current_admin),
    database: RedisDatabase = Depends(get_database)
):
    """Get verification statistics (admin only)"""
    try:
        if not current_user.is_admin:
            raise HTTPException(status_code=403, detail="Admin access required")

        stats = {
            "pending_verifications": await database.get_pending_verifications_count(),
            "expired_tokens_cleaned": await database.cleanup_expired_verification_tokens()
        }

        return create_success_response(stats)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error getting verification stats: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("STATS_ERROR", str(e))
        )

@api_router.post("/admin/cleanup-verifications")
async def cleanup_verification_tokens(
    current_user = Depends(get_current_admin),
    database: RedisDatabase = Depends(get_database)
):
    """Manually trigger cleanup of expired verification tokens (admin only)"""
    try:
        if not current_user.is_admin:
            raise HTTPException(status_code=403, detail="Admin access required")

        cleaned_count = await database.cleanup_expired_verification_tokens()

        logger.info(f"🧹 Manual cleanup completed by admin {current_user.id}: {cleaned_count} tokens cleaned")

        return create_success_response({
            "message": f"Cleanup completed. Removed {cleaned_count} expired verification tokens.",
            "cleaned_count": cleaned_count
        })

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error in manual cleanup: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CLEANUP_ERROR", str(e))
        )

@api_router.get("/admin/pending-verifications")
async def get_pending_verifications(
    current_user = Depends(get_current_admin),
    page: int = Query(1, ge=1),
    limit: int = Query(20, ge=1, le=100),
    database: RedisDatabase = Depends(get_database)
):
    """Get list of pending email verifications (admin only)"""
    try:
        if not current_user.is_admin:
            raise HTTPException(status_code=403, detail="Admin access required")

        pattern = "email_verification:*"
        cursor = 0
        pending_verifications = []
        current_time = datetime.now(timezone.utc)

        while True:
            cursor, keys = await database.redis.scan(cursor, match=pattern, count=100)

            for key in keys:
                token_data = await database.redis.get(key)
                if token_data:
                    verification_info = json.loads(token_data)
                    if not verification_info.get("verified", False):
                        expires_at = datetime.fromisoformat(verification_info.get("expires_at", ""))

                        pending_verifications.append({
                            "email": verification_info.get("email"),
                            "user_type": verification_info.get("user_type"),
                            "created_at": verification_info.get("created_at"),
                            "expires_at": verification_info.get("expires_at"),
                            "is_expired": current_time > expires_at,
                            "resend_count": verification_info.get("resend_count", 0)
                        })

            if cursor == 0:
                break

        # Sort by creation date (newest first)
        pending_verifications.sort(key=lambda x: x["created_at"], reverse=True)

        # Apply pagination
        total = len(pending_verifications)
        start = (page - 1) * limit
        end = start + limit
        paginated_verifications = pending_verifications[start:end]

        paginated_response = create_paginated_response(
            paginated_verifications,
            page, limit, total
        )

        return create_success_response(paginated_response)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"❌ Error getting pending verifications: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("FETCH_ERROR", str(e))
        )
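
# Illustrative Redis layout assumed by the scan above: each pending signup is
# stored at "email_verification:<token>" as a JSON string, e.g.
#   {"email": "user@example.com", "user_type": "candidate",
#    "created_at": "2025-01-01T00:00:00+00:00",
#    "expires_at": "2025-01-02T00:00:00+00:00",
#    "verified": false, "resend_count": 0}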

@api_router.get("/admin/rate-limits/info")
async def get_user_rate_limit_status(
    current_user = Depends(get_current_user_or_guest),
    rate_limiter: RateLimiter = Depends(get_rate_limiter),
    database: RedisDatabase = Depends(get_database)
):
    """Get rate limit status for the current user"""
    try:
        # Get user to determine type
        user_data = await database.get_user_by_id(current_user.id)
        if not user_data:
            return JSONResponse(
                status_code=404,
                content=create_error_response("USER_NOT_FOUND", "User not found")
            )

        user_type = user_data.get("type", "unknown")
        is_admin = False

        if user_type == "candidate":
            candidate_data = await database.get_candidate(current_user.id)
            if candidate_data:
                is_admin = candidate_data.get("is_admin", False)
        elif user_type == "employer":
            employer_data = await database.get_employer(current_user.id)
            if employer_data:
                is_admin = employer_data.get("is_admin", False)

        rate_status = await rate_limiter.get_user_rate_limit_status(current_user.id, user_type, is_admin)

        return create_success_response(rate_status)

    except Exception as e:
        logger.error(f"❌ Get rate limit status error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("STATUS_ERROR", str(e))
        )

@api_router.get("/admin/rate-limits/{user_id}")
async def get_anyone_rate_limit_status(
    user_id: str = Path(...),
    admin_user = Depends(get_current_admin),
    rate_limiter: RateLimiter = Depends(get_rate_limiter),
    database: RedisDatabase = Depends(get_database)
):
    """Get rate limit status for any user (admin only)"""
    try:
        # Get user to determine type
        user_data = await database.get_user_by_id(user_id)
        if not user_data:
            return JSONResponse(
                status_code=404,
                content=create_error_response("USER_NOT_FOUND", "User not found")
            )

        user_type = user_data.get("type", "unknown")
        is_admin = False

        if user_type == "candidate":
            candidate_data = await database.get_candidate(user_id)
            if candidate_data:
                is_admin = candidate_data.get("is_admin", False)
        elif user_type == "employer":
            employer_data = await database.get_employer(user_id)
            if employer_data:
                is_admin = employer_data.get("is_admin", False)

        rate_status = await rate_limiter.get_user_rate_limit_status(user_id, user_type, is_admin)

        return create_success_response(rate_status)

    except Exception as e:
        logger.error(f"❌ Get rate limit status error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("STATUS_ERROR", str(e))
        )

@api_router.post("/admin/rate-limits/{user_id}/reset")
async def reset_user_rate_limits(
    user_id: str = Path(...),
    admin_user = Depends(get_current_admin),
    rate_limiter: RateLimiter = Depends(get_rate_limiter),
    database: RedisDatabase = Depends(get_database)
):
    """Reset rate limits for a user (admin only)"""
    try:
        # Get user to determine type
        user_data = await database.get_user_by_id(user_id)
        if not user_data:
            return JSONResponse(
                status_code=404,
                content=create_error_response("USER_NOT_FOUND", "User not found")
            )

        user_type = user_data.get("type", "unknown")
        success = await rate_limiter.reset_user_rate_limits(user_id, user_type)

        if success:
            logger.info(f"🔄 Rate limits reset for {user_type} {user_id} by admin {admin_user.id}")
            return create_success_response({
                "message": f"Rate limits reset for {user_type} {user_id}",
                "resetBy": admin_user.id
            })
        else:
            return JSONResponse(
                status_code=500,
                content=create_error_response("RESET_FAILED", "Failed to reset rate limits")
            )

    except Exception as e:
        logger.error(f"❌ Reset rate limits error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("RESET_ERROR", str(e))
        )
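
# Illustrative admin call for the reset endpoint above (host, prefix, and
# token are assumptions):
#   curl -X POST -H "Authorization: Bearer $ADMIN_TOKEN" \
#     "https://<host>:<port>/api/1.0/admin/rate-limits/<user_id>/reset"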

# ============================
# Debugging Endpoints
# ============================
@api_router.get("/debug/guest/{guest_id}")
async def debug_guest_session(
    guest_id: str = Path(...),
    admin_user = Depends(get_current_admin),
    database: RedisDatabase = Depends(get_database)
):
    """Debug guest session issues (admin only)"""
    try:
        # Check primary storage
        primary_data = await database.redis.hget("guests", guest_id)
        primary_exists = primary_data is not None

        # Check backup storage
        backup_data = await database.redis.get(f"guest_backup:{guest_id}")
        backup_exists = backup_data is not None

        # Check user lookup
        user_lookup = await database.get_user_by_id(guest_id)

        # Get TTL info ("guests" is a hash, so the TTL applies to the whole key)
        primary_ttl = await database.redis.ttl("guests")
        backup_ttl = await database.redis.ttl(f"guest_backup:{guest_id}")

        debug_info = {
            "guest_id": guest_id,
            "primary_storage": {
                "exists": primary_exists,
                "data": json.loads(primary_data) if primary_data else None,
                "ttl": primary_ttl
            },
            "backup_storage": {
                "exists": backup_exists,
                "data": json.loads(backup_data) if backup_data else None,
                "ttl": backup_ttl
            },
            "user_lookup": user_lookup,
            "timestamp": datetime.now(UTC).isoformat()
        }

        return create_success_response(debug_info)

    except Exception as e:
        logger.error(f"❌ Debug guest session error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("DEBUG_ERROR", str(e))
        )

# ============================
# Health Check and Info Endpoints
# ============================
async def get_redis() -> redis.Redis:
    """Dependency to get Redis client"""
    return redis_manager.get_client()

@app.get("/health")
async def health_check():
    """Health check endpoint"""
    try:
        database = db_manager.get_database()
        if not redis_manager.redis:
            raise RuntimeError("Redis client not initialized")

        # Test Redis connection
        await redis_manager.redis.ping()

        # Get database stats
        stats = await database.get_stats()

        # Redis info
        redis_info = await redis_manager.redis.info()

        return {
            "status": "healthy",
            "timestamp": datetime.now(UTC).isoformat(),
            "database": {
                "status": "connected",
                "stats": stats
            },
            "redis": {
                "version": redis_info.get("redis_version", "unknown"),
                "uptime": redis_info.get("uptime_in_seconds", 0),
                "memory_used": redis_info.get("used_memory_human", "unknown")
            },
            "application": {
                "active_requests": db_manager._active_requests,
                "shutting_down": db_manager.is_shutting_down
            }
        }

    except RuntimeError as e:
        return {"status": "shutting_down", "message": str(e)}
    except Exception as e:
        logger.error(f"❌ Health check failed: {e}")
        return {"status": "error", "message": str(e)}
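
# Illustrative healthy response from the endpoint above (values vary by
# deployment):
#   {"status": "healthy", "timestamp": "2025-01-01T00:00:00+00:00",
#    "database": {"status": "connected", "stats": {...}},
#    "redis": {"version": "7.2.4", "uptime": 86400, "memory_used": "1.2M"},
#    "application": {"active_requests": 0, "shutting_down": false}}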

@api_router.get("/redis/stats")
async def redis_stats(redis_client: redis.Redis = Depends(get_redis)):
    """Get basic Redis server statistics"""
    try:
        info = await redis_client.info()
        return {
            "connected_clients": info.get("connected_clients"),
            "used_memory_human": info.get("used_memory_human"),
            "total_commands_processed": info.get("total_commands_processed"),
            "keyspace_hits": info.get("keyspace_hits"),
            "keyspace_misses": info.get("keyspace_misses"),
            "uptime_in_seconds": info.get("uptime_in_seconds")
        }
    except Exception as e:
        raise HTTPException(status_code=503, detail=f"Redis stats unavailable: {e}")

@api_router.get("/system-info")
async def get_system_info(request: Request):
    """Get system information"""
    from system_info import system_info
    system = system_info()

    return create_success_response(system.model_dump(mode='json'))

@api_router.get("/")
async def api_info():
    """API information endpoint"""
    return {
        "message": "Backstory API",
        "version": "1.0.0",
        "prefix": defines.api_prefix,
        "documentation": f"{defines.api_prefix}/docs",
        "health": f"{defines.api_prefix}/health"
    }

# ============================
# Application Lifespan and Background Tasks
# ============================

# Global background task manager
background_task_manager: Optional[BackgroundTaskManager] = None

@asynccontextmanager
async def enhanced_lifespan(app: FastAPI):
    # Startup
    global background_task_manager

    logger.info("🚀 Starting Backstory API with enhanced background tasks")
    logger.info(f"📝 API Documentation available at: http://{defines.host}:{defines.port}{defines.api_prefix}/docs")
    logger.info(f"🔗 API endpoints prefixed with: {defines.api_prefix}")
    if os.path.exists(defines.static_content):
        logger.info(f"📁 Serving static files from: {defines.static_content}")

    try:
        # Initialize database
        await db_manager.initialize()
        entities.entity_manager.initialize(prometheus_collector, database=db_manager.get_database())

        # Initialize background task manager
        background_task_manager = BackgroundTaskManager(db_manager)
        background_task_manager.start()

        signal.signal(signal.SIGTERM, signal_handler)
        signal.signal(signal.SIGINT, signal_handler)

        logger.info("🚀 Application startup completed with background tasks")

        yield  # Application is running

    except Exception as e:
        logger.error(f"❌ Failed to start application: {e}")
        raise

    finally:
        # Shutdown
        logger.info("Application shutdown requested")

        # Stop background tasks first
        if background_task_manager:
            background_task_manager.stop()

        await db_manager.graceful_shutdown()
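
# Note: enhanced_lifespan only takes effect if it is passed to the FastAPI
# constructor, e.g. FastAPI(lifespan=enhanced_lifespan). The app instance is
# created earlier in this module, so that wiring is assumed to happen there.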

# ============================
# Manual Task Execution Endpoints (Admin Only)
# ============================

@api_router.post("/admin/tasks/cleanup-guests")
async def manual_guest_cleanup(
    inactive_hours: int = Body(24, embed=True),
    admin_user = Depends(get_current_admin)
):
    """Manually trigger guest cleanup (admin only)"""
    try:
        global background_task_manager

        if not background_task_manager:
            return JSONResponse(
                status_code=500,
                content=create_error_response("TASK_MANAGER_NOT_AVAILABLE", "Background task manager not available")
            )

        cleaned_count = await background_task_manager.cleanup_inactive_guests(inactive_hours)

        logger.info(f"🧹 Manual guest cleanup triggered by admin {admin_user.id}: {cleaned_count} guests cleaned")

        return create_success_response({
            "message": f"Guest cleanup completed. Removed {cleaned_count} inactive sessions.",
            "cleaned_count": cleaned_count,
            "triggered_by": admin_user.id
        })

    except Exception as e:
        logger.error(f"❌ Manual guest cleanup error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CLEANUP_ERROR", str(e))
        )

@api_router.post("/admin/tasks/cleanup-tokens")
async def manual_token_cleanup(
    admin_user = Depends(get_current_admin)
):
    """Manually trigger verification token cleanup (admin only)"""
    try:
        global background_task_manager

        if not background_task_manager:
            return JSONResponse(
                status_code=500,
                content=create_error_response("TASK_MANAGER_NOT_AVAILABLE", "Background task manager not available")
            )

        cleaned_count = await background_task_manager.cleanup_expired_verification_tokens()

        logger.info(f"🧹 Manual token cleanup triggered by admin {admin_user.id}: {cleaned_count} tokens cleaned")

        return create_success_response({
            "message": f"Token cleanup completed. Removed {cleaned_count} expired tokens.",
            "cleaned_count": cleaned_count,
            "triggered_by": admin_user.id
        })

    except Exception as e:
        logger.error(f"❌ Manual token cleanup error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CLEANUP_ERROR", str(e))
        )

@api_router.post("/admin/tasks/cleanup-rate-limits")
async def manual_rate_limit_cleanup(
    days_old: int = Body(7, embed=True),
    admin_user = Depends(get_current_admin)
):
    """Manually trigger rate limit data cleanup (admin only)"""
    try:
        global background_task_manager

        if not background_task_manager:
            return JSONResponse(
                status_code=500,
                content=create_error_response("TASK_MANAGER_NOT_AVAILABLE", "Background task manager not available")
            )

        cleaned_count = await background_task_manager.cleanup_old_rate_limit_data(days_old)

        logger.info(f"🧹 Manual rate limit cleanup triggered by admin {admin_user.id}: {cleaned_count} keys cleaned")

        return create_success_response({
            "message": f"Rate limit cleanup completed. Removed {cleaned_count} old keys.",
            "cleaned_count": cleaned_count,
            "triggered_by": admin_user.id
        })

    except Exception as e:
        logger.error(f"❌ Manual rate limit cleanup error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("CLEANUP_ERROR", str(e))
        )

@api_router.get("/admin/tasks/status")
async def get_background_task_status(
    admin_user = Depends(get_current_admin)
):
    """Get background task manager status (admin only)"""
    try:
        global background_task_manager

        if not background_task_manager:
            return create_success_response({
                "running": False,
                "message": "Background task manager not initialized"
            })

        # Get next scheduled run times
        next_runs = []
        for job in schedule.jobs:
            next_runs.append({
                "job": str(job.job_func),
                "next_run": job.next_run.isoformat() if job.next_run else None
            })

        return create_success_response({
            "running": background_task_manager.running,
            "scheduler_thread_alive": background_task_manager.scheduler_thread.is_alive() if background_task_manager.scheduler_thread else False,
            "scheduled_jobs": len(schedule.jobs),
            "next_runs": next_runs
        })

    except Exception as e:
        logger.error(f"❌ Get task status error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("STATUS_ERROR", str(e))
        )

# ============================
# Task Monitoring and Metrics
# ============================

class TaskMetrics:
    """Collect metrics for background tasks"""

    def __init__(self):
        self.task_runs = {}
        self.task_durations = {}
        self.task_errors = {}

    def record_task_run(self, task_name: str, duration: float, success: bool = True):
        """Record a task execution"""
        if task_name not in self.task_runs:
            self.task_runs[task_name] = 0
            self.task_durations[task_name] = []
            self.task_errors[task_name] = 0

        self.task_runs[task_name] += 1
        self.task_durations[task_name].append(duration)

        if not success:
            self.task_errors[task_name] += 1

        # Keep only the last 100 durations to prevent memory growth
        if len(self.task_durations[task_name]) > 100:
            self.task_durations[task_name] = self.task_durations[task_name][-100:]

    def get_metrics(self) -> dict:
        """Get task metrics summary"""
        metrics = {}

        for task_name in self.task_runs:
            durations = self.task_durations[task_name]
            avg_duration = sum(durations) / len(durations) if durations else 0

            metrics[task_name] = {
                "total_runs": self.task_runs[task_name],
                "total_errors": self.task_errors[task_name],
                "success_rate": (self.task_runs[task_name] - self.task_errors[task_name]) / self.task_runs[task_name] if self.task_runs[task_name] > 0 else 0,
                "average_duration": avg_duration,
                "last_runs": durations[-10:] if durations else []
            }

        return metrics

# Global task metrics
task_metrics = TaskMetrics()
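
# Illustrative recording pattern (the call site is an assumption; nothing in
# this module wires the background task manager to this collector):
#   start = time.monotonic()
#   try:
#       await background_task_manager.cleanup_inactive_guests(24)
#       task_metrics.record_task_run("cleanup_guests", time.monotonic() - start)
#   except Exception:
#       task_metrics.record_task_run("cleanup_guests", time.monotonic() - start, success=False)
#       raise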

@api_router.get("/admin/tasks/metrics")
async def get_task_metrics(
    admin_user = Depends(get_current_admin)
):
    """Get background task metrics (admin only)"""
    try:
        global task_metrics
        metrics = task_metrics.get_metrics()

        return create_success_response({
            "metrics": metrics,
            "timestamp": datetime.now(UTC).isoformat()
        })

    except Exception as e:
        logger.error(f"❌ Get task metrics error: {e}")
        return JSONResponse(
            status_code=500,
            content=create_error_response("METRICS_ERROR", str(e))
        )

# ============================
# Include Router in App
# ============================

# Include the API router
app.include_router(api_router)

# ============================
# Debug logging
# ============================
logger.info(f"Debug mode is {'enabled' if defines.debug else 'disabled'}")

@app.middleware("http")
async def log_requests(request: Request, call_next):
    try:
        if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path):
            logger.info(f"📝 Request {request.method}: {request.url.path}, Remote: {request.client.host if request.client else ''}")
        response = await call_next(request)
        if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path):
            if response.status_code < 200 or response.status_code >= 300:
                logger.warning(f"⚠️ Response {request.method} {response.status_code}: Path: {request.url.path}")
        return response
    except Exception as e:
        logger.error(backstory_traceback.format_exc())
        logger.error(f"❌ Error processing request: {str(e)}, Path: {request.url.path}, Method: {request.method}")
        return JSONResponse(status_code=400, content={"detail": "Invalid HTTP request"})

# ============================
# Request tracking middleware
# ============================
@app.middleware("http")
async def track_requests(request: Request, call_next):
    """Middleware to track active requests during shutdown"""
    if db_manager.is_shutting_down:
        return JSONResponse(status_code=503, content={"error": "Application is shutting down"})

    db_manager.increment_requests()
    try:
        response = await call_next(request)
        return response
    finally:
        db_manager.decrement_requests()
# ============================
# FastAPI Metrics
# ============================
prometheus_collector = CollectorRegistry()

# Keep the Instrumentator instance alive
instrumentator = Instrumentator(
    should_group_status_codes=True,
    should_ignore_untemplated=True,
    should_group_untemplated=True,
    excluded_handlers=[f"{defines.api_prefix}/metrics"],
    registry=prometheus_collector
)

# Instrument the FastAPI app
instrumentator.instrument(app)

# Expose the /metrics endpoint
logger.info(f"Exposing Prometheus metrics at {defines.api_prefix}/metrics")
instrumentator.expose(app, endpoint=f"{defines.api_prefix}/metrics")
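
# Illustrative Prometheus scrape config for the endpoint exposed above (job
# name and target are assumptions; adjust scheme and port to your deployment):
#   scrape_configs:
#     - job_name: "backstory"
#       metrics_path: "/api/1.0/metrics"
#       static_configs:
#         - targets: ["<host>:<port>"]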

# ============================
# Static File Serving
# ============================

# Serve static files (for frontend build)
# This should be last to not interfere with API routes
if os.path.exists(defines.static_content):
    app.mount("/", StaticFiles(directory=defines.static_content, html=True), name="static")
else:
    logger.info(f"⚠️ Static directory '{defines.static_content}' not found. Static file serving disabled.")

    # Root endpoint, registered only when no static files are mounted so it
    # is actually reachable at "/"
    @app.get("/", include_in_schema=False)
    async def root():
        """Root endpoint with API information (when no static files)"""
        return {
            "message": "Backstory API",
            "version": "1.0.0",
            "api_prefix": defines.api_prefix,
            "documentation": f"{defines.api_prefix}/docs",
            "health": f"{defines.api_prefix}/health"
        }

async def periodic_verification_cleanup():
    """Background task to periodically clean up expired verification tokens"""
    try:
        database = db_manager.get_database()
        cleaned_count = await database.cleanup_expired_verification_tokens()

        if cleaned_count > 0:
            logger.info(f"🧹 Periodic cleanup: removed {cleaned_count} expired verification tokens")

    except Exception as e:
        logger.error(f"❌ Error in periodic verification cleanup: {e}")

if __name__ == "__main__":
    host = defines.host
    port = defines.port
    if ssl_enabled:
        logger.info(f"Starting web server at https://{host}:{port}")
        uvicorn.run(
            app="main:app",
            host=host,
            port=port,
            log_config=None,
            ssl_keyfile=defines.key_path,
            ssl_certfile=defines.cert_path,
            reload=True,
        )
    else:
        logger.info(f"Starting web server at http://{host}:{port}")
        uvicorn.run(app="main:app", host=host, port=port, log_config=None)