Transitioning to Redis

parent 7311027811
commit 71c8cb0ac8

18 Dockerfile
@@ -179,11 +179,27 @@ FROM llm-base AS backstory

#COPY /src/requirements.txt /opt/backstory/src/requirements.txt
#RUN pip install -r /opt/backstory/src/requirements.txt
RUN pip install 'markitdown[all]' pydantic
RUN pip install 'markitdown[all]' pydantic 'pydantic[email]'

# Prometheus
RUN pip install prometheus-client prometheus-fastapi-instrumentator

# Redis
RUN pip install "redis[hiredis]>=4.5.0"

# New backend implementation
RUN pip install fastapi uvicorn "python-jose[cryptography]" bcrypt python-multipart

# Automatic type conversion pydantic -> typescript
RUN pip install pydantic typing-inspect jinja2
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        nodejs \
        npm \
    && npm install -g typescript \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/{apt,dpkg,cache,log}

SHELL [ "/bin/bash", "-c" ]

RUN { \
@@ -5,17 +5,20 @@ services:
      dockerfile: Dockerfile
      target: backstory
    container_name: backstory
    image: backstory
    #image: backstory
    restart: "always"
    env_file:
      - .env
    environment:
      - PRODUCTION=0
      - MODEL_NAME=${MODEL_NAME:-qwen2.5:7b}
      - REDIS_URL=redis://redis:6379
      - REDIS_DB=0
    devices:
      - /dev/dri:/dev/dri
    depends_on:
      - ollama
      - redis
    networks:
      - internal
    ports:
@@ -26,6 +29,7 @@ services:
      - ./dev-keys:/opt/backstory/keys:ro # Developer keys
      - ./users:/opt/backstory/users:rw # Live mount of user data
      - ./src:/opt/backstory/src:rw # Live mount server src
      - ./frontend/src/types:/opt/backstory/frontend/src/types # Live mount of types for pydantic->ts
    cap_add: # used for running ze-monitor within container
      - CAP_DAC_READ_SEARCH # Bypass all filesystem read access checks
      - CAP_PERFMON # Access to perf_events (vs. overloaded CAP_SYS_ADMIN)
@@ -36,7 +40,7 @@ services:
      context: .
      dockerfile: Dockerfile
      target: backstory
    image: backstory
    #image: backstory
    container_name: backstory-prod
    restart: "always"
    env_file:
@@ -44,6 +48,8 @@ services:
    environment:
      - PRODUCTION=1
      - MODEL_NAME=${MODEL_NAME:-qwen2.5:7b}
      - REDIS_URL=redis://redis:6379
      - REDIS_DB=1
    devices:
      - /dev/dri:/dev/dri
    depends_on:
@@ -64,13 +70,46 @@ services:
      - CAP_PERFMON # Access to perf_events (vs. overloaded CAP_SYS_ADMIN)
      - CAP_SYS_PTRACE # PTRACE_MODE_READ_REALCREDS ptrace access mode check

  redis:
    image: redis:7-alpine
    container_name: redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
      - ./redis.conf:/usr/local/etc/redis/redis.conf
    command: redis-server /usr/local/etc/redis/redis.conf
    networks:
      - internal
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Optional: Redis Commander for GUI management
  redis-commander:
    image: rediscommander/redis-commander:latest
    container_name: backstory-redis-commander
    ports:
      - "8081:8081"
    environment:
      - REDIS_HOSTS=local:redis:6379
    networks:
      - internal
    depends_on:
      - redis
    profiles:
      - tools # Only start with --profile tools

  frontend:
    build:
      context: .
      dockerfile: Dockerfile
      target: frontend
    container_name: frontend
    image: frontend
    #image: frontend
    restart: "always"
    env_file:
      - .env
@@ -86,7 +125,7 @@ services:
      context: .
      dockerfile: Dockerfile
      target: ollama
    image: ollama
    #image: ollama
    container_name: ollama
    restart: "always"
    env_file:
@@ -113,7 +152,7 @@ services:
      context: .
      dockerfile: Dockerfile
      target: jupyter
    image: jupyter
    #image: jupyter
    container_name: jupyter
    restart: "always"
    env_file:
@@ -137,7 +176,7 @@ services:
      context: .
      dockerfile: Dockerfile
      target: miniircd
    image: miniircd
    #image: miniircd
    container_name: miniircd
    restart: "no"
    env_file:
@@ -198,3 +237,7 @@ networks:
  internal:
    driver: bridge

volumes:
  redis_data:
    driver: local
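The compose changes above only wire Redis into the stack; this commit view does not show how the backend consumes `REDIS_URL` and `REDIS_DB`. As a hedged illustration only, one way the Python backend could build its client from those variables using the `redis[hiredis]` package installed in the Dockerfile (the module and function names here are hypothetical, not taken from the repository):

```python
# redis_client.py (hypothetical helper, not part of this commit)
import os

import redis.asyncio as redis  # provided by the "redis[hiredis]" install above


def create_redis_client() -> redis.Redis:
    """Build a Redis client from the environment set in docker-compose."""
    url = os.environ.get("REDIS_URL", "redis://redis:6379")
    db = int(os.environ.get("REDIS_DB", "0"))  # dev compose uses 0, prod uses 1
    # decode_responses=True returns str instead of bytes for convenience.
    return redis.from_url(url, db=db, decode_responses=True)


async def healthcheck(client: redis.Redis) -> bool:
    """Mirror the compose healthcheck (redis-cli ping) from application code."""
    return await client.ping()
```

Keeping the dev and prod services on separate Redis databases (0 and 1) lets both stacks share the single `redis` container declared above without key collisions.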
@@ -21,6 +21,13 @@ module.exports = {
      buffer: false,
      proxyTimeout: 3600000,
      onProxyRes: function(proxyRes, req, res) {
        proxyRes.headers['cache-control'] = 'no-cache';

        if (req.url.includes('/docs') ||
            req.url.includes('/redoc') ||
            req.url.includes('/openapi.json')) {
          return; // Let original headers pass through
        }
        // Remove any header that might cause buffering
        proxyRes.headers['transfer-encoding'] = 'chunked';
        delete proxyRes.headers['content-length'];
22 frontend/package-lock.json (generated)
@@ -20,13 +20,17 @@
        "@testing-library/react": "^16.2.0",
        "@testing-library/user-event": "^13.5.0",
        "@types/jest": "^27.5.2",
        "@types/lodash": "^4.17.17",
        "@types/luxon": "^3.6.2",
        "@types/node": "^16.18.126",
        "@types/react": "^19.0.12",
        "@types/react-dom": "^19.0.4",
        "@uiw/react-json-view": "^2.0.0-alpha.31",
        "@uiw/react-markdown-editor": "^6.1.4",
        "jsonrepair": "^3.12.0",
        "lodash": "^4.17.21",
        "lucide-react": "^0.511.0",
        "luxon": "^3.6.1",
        "markdown-it": "^14.1.0",
        "mermaid": "^11.6.0",
        "mui-markdown": "^2.0.1",
@@ -5737,11 +5741,21 @@
      "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==",
      "dev": true
    },
    "node_modules/@types/lodash": {
      "version": "4.17.17",
      "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.17.tgz",
      "integrity": "sha512-RRVJ+J3J+WmyOTqnz3PiBLA501eKwXl2noseKOrNo/6+XEHjTAxO4xHvxQB6QuNm+s4WRbn6rSiap8+EA+ykFQ=="
    },
    "node_modules/@types/long": {
      "version": "4.0.2",
      "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
      "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA=="
    },
    "node_modules/@types/luxon": {
      "version": "3.6.2",
      "resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.6.2.tgz",
      "integrity": "sha512-R/BdP7OxEMc44l2Ex5lSXHoIXTB2JLNa3y2QISIbr58U/YcsffyQrYW//hZSdrfxrjRZj3GcUoxMPGdO8gSYuw=="
    },
    "node_modules/@types/mapbox__point-geometry": {
      "version": "0.1.4",
      "resolved": "https://registry.npmjs.org/@types/mapbox__point-geometry/-/mapbox__point-geometry-0.1.4.tgz",
@@ -15626,6 +15640,14 @@
        "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
      }
    },
    "node_modules/luxon": {
      "version": "3.6.1",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.6.1.tgz",
      "integrity": "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ==",
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/lz-string": {
      "version": "1.5.0",
      "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz",
@@ -15,13 +15,17 @@
    "@testing-library/react": "^16.2.0",
    "@testing-library/user-event": "^13.5.0",
    "@types/jest": "^27.5.2",
    "@types/lodash": "^4.17.17",
    "@types/luxon": "^3.6.2",
    "@types/node": "^16.18.126",
    "@types/react": "^19.0.12",
    "@types/react-dom": "^19.0.4",
    "@uiw/react-json-view": "^2.0.0-alpha.31",
    "@uiw/react-markdown-editor": "^6.1.4",
    "jsonrepair": "^3.12.0",
    "lodash": "^4.17.21",
    "lucide-react": "^0.511.0",
    "luxon": "^3.6.1",
    "markdown-it": "^14.1.0",
    "mermaid": "^11.6.0",
    "mui-markdown": "^2.0.1",
492 frontend/public/docs/type-safety.md (new file)
@@ -0,0 +1,492 @@
# Type Safety Setup and Configuration

This document describes how to set up and maintain type consistency between the Python Pydantic backend and the TypeScript frontend.

## Files Overview

### 1. TypeScript Types (`frontend/src/types/types.ts`)
- Complete TypeScript type definitions for all entities
- Includes enums, interfaces, and utility types
- Used by React components and API calls

### 2. Pydantic Models (`src/models.py`)
- Python data models with validation
- Backend API request/response validation
- Database schema definitions

### 3. Type Generation Tool (`src/generate_types.py`)
- Automated TypeScript generation from Pydantic models (see the sketch below)
- Keeps types in sync
- Watch mode for development
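`src/generate_types.py` itself is not reproduced in this commit view. As a rough, hypothetical sketch of the approach such a generator takes (assuming Pydantic v2; this is not the repository's implementation), it walks each model's fields and emits a matching TypeScript interface:

```python
# Hypothetical illustration of a Pydantic -> TypeScript generator;
# not the actual src/generate_types.py from this repository.
from datetime import datetime
from typing import Union, get_args, get_origin

from pydantic import BaseModel

TS_PRIMITIVES = {str: "string", int: "number", float: "number", bool: "boolean", datetime: "Date"}


def ts_type(annotation) -> str:
    """Map a Python annotation onto a rough TypeScript equivalent."""
    if get_origin(annotation) is Union:  # Optional[X] is Union[X, None]
        args = [a for a in get_args(annotation) if a is not type(None)]
        return " | ".join(ts_type(a) for a in args) + " | null"
    if get_origin(annotation) is list:
        return f"{ts_type(get_args(annotation)[0])}[]"
    return TS_PRIMITIVES.get(annotation, getattr(annotation, "__name__", "any"))


def to_interface(model: type[BaseModel]) -> str:
    """Render one Pydantic model as a TypeScript interface."""
    lines = [f"export interface {model.__name__} {{"]
    for name, field in model.model_fields.items():  # Pydantic v2 field metadata
        optional = "" if field.is_required() else "?"
        lines.append(f"  {name}{optional}: {ts_type(field.annotation)};")
    lines.append("}")
    return "\n".join(lines)
```

A real generator also has to handle enums, nested models, and the camelCase/snake_case rename discussed later in this document, but the field-walking core looks like the above.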
## Setup Instructions

### 1. Generate TypeScript Types

Run the type generation tool:

```bash
# One-time generation
docker compose exec backstory shell "python src/generate_types.py --source src/models.py --output frontend/src/types/types.ts"

# Watch mode for development
docker compose exec backstory shell "python src/generate_types.py --source src/models.py --output frontend/src/types/types.ts --watch"
```

### 2. API Client Setup

Create an API client that uses the types:

```typescript
// api/client.ts
import * as Types from '../types/types';
import { formatApiRequest, parseApiResponse } from '../types/conversion';

class ApiClient {
  private baseUrl: string;

  constructor(baseUrl: string) {
    this.baseUrl = baseUrl;
  }

  async createCandidate(candidate: Types.Candidate): Promise<Types.Candidate> {
    const response = await fetch(`${this.baseUrl}/candidates`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(formatApiRequest(candidate))
    });

    const data = await response.json();
    const apiResponse = parseApiResponse<Types.Candidate>(data);

    if (!apiResponse.success) {
      throw new Error(apiResponse.error?.message || 'API request failed');
    }

    return apiResponse.data!;
  }

  async getCandidates(request: Types.PaginatedRequest): Promise<Types.PaginatedResponse<Types.Candidate>> {
    const params = new URLSearchParams(formatApiRequest(request));
    const response = await fetch(`${this.baseUrl}/candidates?${params}`);
    const data = await response.json();

    return parseApiResponse<Types.PaginatedResponse<Types.Candidate>>(data).data!;
  }
}

export default ApiClient;
```

### 3. Backend API Setup

Use Pydantic models in your FastAPI/Flask routes:

```python
# api/routes.py (FastAPI example)
from fastapi import FastAPI, HTTPException
from typing import List
from models import Candidate, PaginatedRequest, PaginatedResponse, ApiResponse

app = FastAPI()

@app.post("/candidates", response_model=ApiResponse[Candidate])
async def create_candidate(candidate: Candidate):
    try:
        # Validate and save candidate
        saved_candidate = await save_candidate(candidate)
        return ApiResponse(success=True, data=saved_candidate)
    except Exception as e:
        return ApiResponse(success=False, error={"code": "CREATION_FAILED", "message": str(e)})

@app.get("/candidates", response_model=ApiResponse[PaginatedResponse[Candidate]])
async def get_candidates(request: PaginatedRequest):
    try:
        candidates = await fetch_candidates(request)
        return ApiResponse(success=True, data=candidates)
    except Exception as e:
        return ApiResponse(success=False, error={"code": "FETCH_FAILED", "message": str(e)})
```

## Development Workflow

### 1. Making Changes

When you modify data structures:

1. **Update Pydantic models first** in `models.py`
2. **Regenerate TypeScript types** using the generation tool
3. **Update API endpoints** to use new models
4. **Update frontend components** to use new types
5. **Run validation** to ensure consistency

### 2. Type Generation Automation

For automatic type generation in development, add to your package.json:

```json
{
  "scripts": {
    "dev": "concurrently \"npm run dev:frontend\" \"npm run dev:types\"",
    "dev:frontend": "react-scripts start",
    "dev:types": "python ../backend/pydantic_to_typescript.py --input ../backend/models.py --output src/types/types.ts --watch",
    "generate-types": "python ../backend/pydantic_to_typescript.py --input ../backend/models.py --output src/types/types.ts"
  }
}
```

### 3. Validation Strategy

Use validation at multiple layers:

```typescript
// Component validation example
import React, { useState } from 'react';
import * as Types from '../types/types';
import { validateData } from '../types/validation';

const UserForm: React.FC = () => {
  const [formData, setFormData] = useState<Partial<Types.Candidate>>({});
  const [errors, setErrors] = useState<ValidationError[]>([]);

  const handleSubmit = async () => {
    const validation = validateData<Types.Candidate>(formData, 'Candidate');

    if (!validation.isValid) {
      setErrors(validation.errors);
      return;
    }

    try {
      await apiClient.createCandidate(validation.data!);
      // Success handling
    } catch (error) {
      // Error handling
    }
  };

  return (
    // Form JSX with error display
  );
};
```

## Testing and Validation

### 1. Type Consistency Tests

Create tests to ensure types stay in sync:

```typescript
// tests/type-consistency.test.ts
import * as Types from '../types/types';
import { validateData } from '../types/validation';
import { candidateFromPydantic, candidateToPydantic } from '../types/conversion';

describe('Type Consistency', () => {
  test('candidate conversion roundtrip', () => {
    const originalCandidate: Types.Candidate = {
      id: '123e4567-e89b-12d3-a456-426614174000',
      email: 'test@example.com',
      userType: 'candidate',
      firstName: 'John',
      lastName: 'Doe',
      skills: [],
      experience: [],
      education: [],
      preferredJobTypes: ['full-time'],
      location: { city: 'Austin', country: 'USA' },
      languages: [],
      certifications: [],
      createdAt: new Date(),
      updatedAt: new Date(),
      status: 'active'
    };

    // Convert to Python format and back
    const pythonFormat = candidateToPydantic(originalCandidate);
    const backToTypeScript = candidateFromPydantic(pythonFormat);

    expect(backToTypeScript).toEqual(originalCandidate);
  });

  test('validation consistency', () => {
    const invalidCandidate = {
      id: 'invalid-uuid',
      email: 'not-an-email',
      userType: 'invalid-type'
    };

    const validation = validateData<Types.Candidate>(invalidCandidate, 'Candidate');
    expect(validation.isValid).toBe(false);
    expect(validation.errors.length).toBeGreaterThan(0);
  });
});
```

### 2. API Integration Tests

Test the full API integration:

```python
# tests/test_api_integration.py
import pytest
from fastapi.testclient import TestClient
from api.routes import app
from models import Candidate

client = TestClient(app)

def test_candidate_creation():
    candidate_data = {
        "email": "test@example.com",
        "user_type": "candidate",
        "first_name": "John",
        "last_name": "Doe",
        "skills": [],
        "experience": [],
        "education": [],
        "preferred_job_types": ["full-time"],
        "location": {"city": "Austin", "country": "USA"},
        "languages": [],
        "certifications": []
    }

    response = client.post("/candidates", json=candidate_data)
    assert response.status_code == 200

    data = response.json()
    assert data["success"] is True
    assert data["data"]["email"] == candidate_data["email"]

def test_type_validation():
    invalid_data = {
        "email": "not-an-email",
        "user_type": "invalid"
    }

    response = client.post("/candidates", json=invalid_data)
    assert response.status_code == 422  # Validation error
```

## Best Practices

### 1. Field Naming Conventions

- **TypeScript**: Use `camelCase` for consistency with JavaScript conventions
- **Python**: Use `snake_case` for consistency with Python conventions
- **API**: Always use the conversion utilities to transform between formats (a backend-side alternative is sketched below)
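The conversion utilities above live on the TypeScript side. A possible backend-side alternative, shown only as a sketch and not something this repository currently does, is to let Pydantic v2 emit and accept camelCase directly through an alias generator:

```python
# Alternative approach (not used by this commit): let Pydantic translate
# snake_case <-> camelCase at the API boundary.
from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class CamelModel(BaseModel):
    """Base class whose JSON representation uses camelCase field names."""
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)


class Candidate(CamelModel):  # illustrative, not the repository's Candidate
    first_name: str
    last_name: str
    preferred_job_types: list[str] = []


candidate = Candidate.model_validate({"firstName": "John", "lastName": "Doe"})
print(candidate.model_dump(by_alias=True))
# {'firstName': 'John', 'lastName': 'Doe', 'preferredJobTypes': []}
```

The trade-off is that renaming then happens in one place (the models) instead of in frontend conversion helpers, at the cost of aliases appearing throughout the generated OpenAPI schema.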
### 2. Date Handling

```typescript
// Always use Date objects in TypeScript
const user: Types.BaseUser = {
  // ...
  createdAt: new Date(),
  updatedAt: new Date()
};

// Convert to ISO string for API
const apiData = formatApiRequest(user);
// apiData.created_at will be "2024-01-01T00:00:00.000Z"
```

```python
# Use datetime objects in Python
from datetime import datetime
from models import BaseUser

user = BaseUser(
    # ...
    created_at=datetime.utcnow(),
    updated_at=datetime.utcnow()
)
```

### 3. Optional Fields

Handle optional fields consistently:

```typescript
// TypeScript - use optional chaining
const user: Types.Candidate = getUser();
const profileImage = user.profileImage ?? '/default-avatar.png';
```

```python
# Python - use Optional type hints
from typing import Optional
from pydantic import BaseModel

class User(BaseModel):
    profile_image: Optional[str] = None
```

### 4. Enum Synchronization

Keep enums in sync between TypeScript and Python:

```typescript
// TypeScript
export type UserStatus = 'active' | 'inactive' | 'pending' | 'banned';
```

```python
# Python
from enum import Enum

class UserStatus(str, Enum):
    ACTIVE = "active"
    INACTIVE = "inactive"
    PENDING = "pending"
    BANNED = "banned"
```

## CI/CD Integration

### 1. Type Generation in CI

Add type generation to your CI pipeline:

```yaml
# .github/workflows/ci.yml
name: CI/CD Pipeline

on: [push, pull_request]

jobs:
  type-sync:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v3
        with:
          python-version: '3.9'

      - name: Install Python dependencies
        run: pip install pydantic typing-inspect jinja2

      - name: Generate TypeScript types
        run: python backend/pydantic_to_typescript.py --input backend/models.py --output frontend/src/types/types.ts

      - name: Check for type changes
        run: |
          if git diff --exit-code frontend/src/types/types.ts; then
            echo "Types are in sync"
          else
            echo "Types are out of sync!"
            exit 1
          fi

  test:
    needs: type-sync
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # ... rest of test steps
```

### 2. Pre-commit Hooks

Set up pre-commit hooks to ensure types stay in sync:

```yaml
# .pre-commit-config.yaml
repos:
  - repo: local
    hooks:
      - id: generate-types
        name: Generate TypeScript types
        entry: python backend/pydantic_to_typescript.py --input backend/models.py --output frontend/src/types/types.ts
        language: system
        files: backend/models.py
        pass_filenames: false

      - id: validate-types
        name: Validate type consistency
        entry: npm run test:types
        language: system
        files: 'frontend/src/types/.*\.(ts|js)'
        pass_filenames: false
```

## Troubleshooting

### Common Issues

1. **Date Serialization Errors**
   - Ensure dates are converted to ISO strings before API calls
   - Use the conversion utilities consistently

2. **Field Name Mismatches**
   - Always use the conversion utilities
   - Check that Pydantic field aliases match TypeScript property names

3. **Type Generation Failures**
   - Ensure all dependencies are installed
   - Check that the input Python file is valid
   - Verify that all Pydantic models extend BaseModel

4. **Validation Inconsistencies**
   - Keep validation rules in sync between TypeScript and Python
   - Use the same enum values and constraints

### Debugging Tools

1. **Type Validation Debugging**

```typescript
// Add debug logging
const validation = validateData<Types.Candidate>(data, 'Candidate');
if (!validation.isValid) {
  console.log('Validation errors:', validation.errors);
  validation.errors.forEach(error => {
    console.log(`Field: ${error.field}, Message: ${error.message}, Value:`, error.value);
  });
}
```

2. **Conversion Debugging**

```typescript
// Log conversion results
const originalData = { firstName: 'John', lastName: 'Doe' };
const converted = toSnakeCase(originalData);
console.log('Original:', originalData);
console.log('Converted:', converted);
const backConverted = toCamelCase(converted);
console.log('Back converted:', backConverted);
```

## Performance Considerations

### 1. Type Generation
- Run type generation only when models change (a hash-based sketch follows below)
- Use file watching in development
- Cache generated types in production builds
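One way to honor "run only when models change" without a file watcher is to key generation off a content hash of `models.py`. The wrapper below is a minimal, hypothetical sketch that reuses the generator invocation from the Setup section; the stamp-file name is made up for illustration:

```python
# Hypothetical wrapper: regenerate TypeScript types only if models.py changed.
import hashlib
import pathlib
import subprocess

MODELS = pathlib.Path("src/models.py")
STAMP = pathlib.Path(".types.models.sha256")  # cache of the last-seen hash


def regenerate_if_needed() -> bool:
    digest = hashlib.sha256(MODELS.read_bytes()).hexdigest()
    if STAMP.exists() and STAMP.read_text() == digest:
        return False  # models unchanged; keep the cached types
    subprocess.run(
        ["python", "src/generate_types.py",
         "--source", "src/models.py",
         "--output", "frontend/src/types/types.ts"],
        check=True,
    )
    STAMP.write_text(digest)
    return True


if __name__ == "__main__":
    print("regenerated" if regenerate_if_needed() else "up to date")
```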
### 2. Validation
- Use validation strategically (form submission, API boundaries)
- Consider lazy validation for large datasets
- Cache validation results when appropriate

### 3. Conversion
- Minimize conversions in hot paths
- Consider keeping data in the appropriate format for the layer
- Use object pooling for frequently converted objects

## Maintenance

### Regular Tasks

1. **Weekly**: Review type generation logs for any issues
2. **Monthly**: Update dependencies and test compatibility
3. **Quarterly**: Review and optimize validation rules
4. **Annually**: Evaluate new tools and migration paths

### Monitoring

Set up monitoring for (a metrics sketch follows this list):
- Type generation failures
- Validation error rates
- API request/response format mismatches
- Performance impacts of type operations
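Since the Dockerfile in this commit already installs `prometheus-client`, these signals can be exported as metrics; the metric and helper names below are illustrative only, not part of the repository:

```python
# Hypothetical metrics for the monitoring bullets above, exported with the
# prometheus-client package installed in the Dockerfile.
from prometheus_client import Counter, Histogram

TYPE_GENERATION_FAILURES = Counter(
    "type_generation_failures_total",
    "Number of failed Pydantic -> TypeScript generation runs",
)
VALIDATION_ERRORS = Counter(
    "api_validation_errors_total",
    "Requests rejected by Pydantic validation",
    ["endpoint"],
)
CONVERSION_SECONDS = Histogram(
    "type_conversion_seconds",
    "Time spent converting between snake_case and camelCase payloads",
)


def record_validation_error(endpoint: str) -> None:
    VALIDATION_ERRORS.labels(endpoint=endpoint).inc()
```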

This setup ensures type safety and consistency across your full-stack application while maintaining developer productivity and code quality.
@ -1,261 +0,0 @@
|
||||
.App {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
div {
|
||||
box-sizing: border-box;
|
||||
overflow-wrap: break-word;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.gl-container #scene {
|
||||
top: 0px !important;
|
||||
left: 0px !important;
|
||||
}
|
||||
|
||||
pre {
|
||||
max-width: 100%;
|
||||
max-height: 100%;
|
||||
overflow: auto;
|
||||
white-space: pre-wrap;
|
||||
box-sizing: border-box;
|
||||
border: 3px solid #E0E0E0;
|
||||
}
|
||||
|
||||
button {
|
||||
overflow-wrap: initial;
|
||||
word-break: initial;
|
||||
}
|
||||
|
||||
.TabPanel {
|
||||
display: flex;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.MuiToolbar-root .MuiBox-root {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.MuiTabs-root .MuiTabs-indicator {
|
||||
background-color: orange;
|
||||
}
|
||||
|
||||
.SystemInfo {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 5px;
|
||||
padding: 5px;
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.SystemInfoItem {
|
||||
display: flex; /* Grid for individual items */
|
||||
flex-direction: row;
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.SystemInfoItem > div:first-child {
|
||||
display: flex;
|
||||
justify-self: end; /* Align the first column content to the right */
|
||||
width: 10rem;
|
||||
}
|
||||
|
||||
.SystemInfoItem > div:last-child {
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
justify-self: end; /* Align the first column content to the right */
|
||||
}
|
||||
|
||||
.DocBox {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex-grow: 1;
|
||||
max-width: 2048px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.Controls {
|
||||
display: flex;
|
||||
background-color: #F5F5F5;
|
||||
border: 1px solid #E0E0E0;
|
||||
overflow-y: auto;
|
||||
padding: 10px;
|
||||
flex-direction: column;
|
||||
margin-left: 10px;
|
||||
box-sizing: border-box;
|
||||
overflow-x: visible;
|
||||
min-width: 10rem;
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.MessageContent div > p:first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.MenuCard.MuiCard-root {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-width: 10rem;
|
||||
flex-grow: 1;
|
||||
background-color: #1A2536; /* Midnight Blue */
|
||||
color: #D3CDBF; /* Warm Gray */
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.MenuCard.MuiCard-root button {
|
||||
min-height: 64px;
|
||||
}
|
||||
/* Prevent toolbar from shrinking vertically when media < 600px */
|
||||
.MuiToolbar-root {
|
||||
min-height: 72px !important;
|
||||
padding-left: 16px !important;
|
||||
padding-right: 16px !important;
|
||||
}
|
||||
|
||||
.ChatBox {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex-grow: 1;
|
||||
max-width: 1024px;
|
||||
width: 100%;
|
||||
margin: 0 auto;
|
||||
background-color: #D3CDBF;
|
||||
}
|
||||
|
||||
.user-message.MuiCard-root {
|
||||
background-color: #DCF8C6;
|
||||
border: 1px solid #B2E0A7;
|
||||
color: #333333;
|
||||
margin-bottom: 0.75rem;
|
||||
margin-left: 1rem;
|
||||
border-radius: 0.25rem;
|
||||
min-width: 80%;
|
||||
max-width: 80%;
|
||||
justify-self: right;
|
||||
display: flex;
|
||||
white-space: pre-wrap;
|
||||
overflow-wrap: break-word;
|
||||
word-break: break-word;
|
||||
flex-direction: column;
|
||||
align-items: self-end;
|
||||
align-self: end;
|
||||
flex-grow: 0;
|
||||
}
|
||||
|
||||
.About.MuiCard-root,
|
||||
.assistant-message.MuiCard-root {
|
||||
border: 1px solid #E0E0E0;
|
||||
background-color: #FFFFFF;
|
||||
color: #333333;
|
||||
margin-bottom: 0.75rem;
|
||||
margin-right: 1rem;
|
||||
min-width: 70%;
|
||||
border-radius: 0.25rem;
|
||||
justify-self: left;
|
||||
display: flex;
|
||||
white-space: pre-wrap;
|
||||
overflow-wrap: break-word;
|
||||
word-break: break-word;
|
||||
flex-direction: column;
|
||||
flex-grow: 0;
|
||||
padding: 16px 0;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
|
||||
.About.MuiCard-root {
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
width: 100%;
|
||||
margin-left: 0;
|
||||
margin-right: 0;
|
||||
}
|
||||
|
||||
.About .MuiCardContent-root,
|
||||
.assistant-message .MuiCardContent-root {
|
||||
padding: 0 16px !important;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.About span,
|
||||
.assistant-message span {
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.user-message .MuiCardContent-root:last-child,
|
||||
.assistant-message .MuiCardContent-root:last-child,
|
||||
.About .MuiCardContent-root:last-child {
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.users > div {
|
||||
padding: 0.25rem;
|
||||
}
|
||||
|
||||
.user-active {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.metadata {
|
||||
border: 1px solid #E0E0E0;
|
||||
font-size: 0.75rem;
|
||||
padding: 0.125rem;
|
||||
}
|
||||
|
||||
/* Reduce general whitespace in markdown content */
|
||||
* p.MuiTypography-root {
|
||||
margin-top: 0.5rem;
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
/* Reduce space between headings and content */
|
||||
* h1.MuiTypography-root,
|
||||
* h2.MuiTypography-root,
|
||||
* h3.MuiTypography-root,
|
||||
* h4.MuiTypography-root,
|
||||
* h5.MuiTypography-root,
|
||||
* h6.MuiTypography-root {
|
||||
margin-top: 1rem;
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
/* Reduce space in lists */
|
||||
* ul.MuiTypography-root,
|
||||
* ol.MuiTypography-root {
|
||||
margin-top: 0.5rem;
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
* li.MuiTypography-root {
|
||||
margin-bottom: 0.25rem;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
* .MuiTypography-root li {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
padding: 0;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
/* Reduce space around code blocks */
|
||||
* .MuiTypography-root pre {
|
||||
border: 1px solid #F5F5F5;
|
||||
border-radius: 0.5rem;
|
||||
padding: 0.5rem 0.75rem;
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.PromptStats .MuiTableCell-root {
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
#SystemPromptInput {
|
||||
font-size: 0.9rem;
|
||||
line-height: 1.25rem;
|
||||
}
|
@ -1,330 +0,0 @@
|
||||
import React, { useEffect, useState, useRef, useMemo } from 'react';
|
||||
import { useNavigate, useLocation } from 'react-router-dom';
|
||||
import useMediaQuery from '@mui/material/useMediaQuery';
|
||||
import Card from '@mui/material/Card';
|
||||
import { styled } from '@mui/material/styles';
|
||||
import Avatar from '@mui/material/Avatar';
|
||||
import Tabs from '@mui/material/Tabs';
|
||||
import Tab from '@mui/material/Tab';
|
||||
import Tooltip from '@mui/material/Tooltip';
|
||||
import AppBar from '@mui/material/AppBar';
|
||||
import Drawer from '@mui/material/Drawer';
|
||||
import Toolbar from '@mui/material/Toolbar';
|
||||
import SettingsIcon from '@mui/icons-material/Settings';
|
||||
import IconButton from '@mui/material/IconButton';
|
||||
import Box from '@mui/material/Box';
|
||||
import CssBaseline from '@mui/material/CssBaseline';
|
||||
import MenuIcon from '@mui/icons-material/Menu';
|
||||
|
||||
import { ConversationHandle } from '../Components/Conversation';
|
||||
import { Query } from '../Components/ChatQuery';
|
||||
import { Scrollable } from '../Components/Scrollable';
|
||||
import { BackstoryPage, BackstoryTabProps } from '../Components/BackstoryTab';
|
||||
|
||||
import { HomePage } from '../Pages/HomePage';
|
||||
import { LoadingPage } from '../Pages/LoadingPage';
|
||||
import { ResumeBuilderPage } from '../Pages/ResumeBuilderPage';
|
||||
import { VectorVisualizerPage } from '../Pages/VectorVisualizerPage';
|
||||
import { AboutPage } from '../Pages/AboutPage';
|
||||
import { ControlsPage } from '../Pages/ControlsPage';
|
||||
import { SetSnackType } from '../Components/Snack';
|
||||
|
||||
import './Main.css';
|
||||
|
||||
import '@fontsource/roboto/300.css';
|
||||
import '@fontsource/roboto/400.css';
|
||||
import '@fontsource/roboto/500.css';
|
||||
import '@fontsource/roboto/700.css';
|
||||
|
||||
interface MainProps {
|
||||
sessionId: string,
|
||||
setSnack: SetSnackType
|
||||
}
|
||||
const Main = (props: MainProps) => {
|
||||
const { sessionId } = props;
|
||||
const navigate = useNavigate();
|
||||
const location = useLocation();
|
||||
const [menuOpen, setMenuOpen] = useState(false);
|
||||
const [isMenuClosing, setIsMenuClosing] = useState(false);
|
||||
const [activeTab, setActiveTab] = useState<number>(0);
|
||||
const [tab, setTab] = useState<any>(undefined);
|
||||
const isDesktop = useMediaQuery('(min-width:650px)');
|
||||
const prevIsDesktopRef = useRef<boolean>(isDesktop);
|
||||
const chatRef = useRef<ConversationHandle>(null);
|
||||
const [subRoute, setSubRoute] = useState<string>("");
|
||||
const backstoryProps = useMemo(() => {
|
||||
const handleSubmitChatQuery = (query: Query) => {
|
||||
console.log(`handleSubmitChatQuery:`, query, chatRef.current ? ' sending' : 'no handler');
|
||||
chatRef.current?.submitQuery(query);
|
||||
setActiveTab(0);
|
||||
};
|
||||
return { ...props, route: subRoute, setRoute: setSubRoute, submitQuery: handleSubmitChatQuery };
|
||||
}, [props, setActiveTab, subRoute]);
|
||||
|
||||
useEffect(() => {
|
||||
if (prevIsDesktopRef.current === isDesktop)
|
||||
return;
|
||||
|
||||
if (menuOpen) {
|
||||
setMenuOpen(false);
|
||||
}
|
||||
|
||||
prevIsDesktopRef.current = isDesktop;
|
||||
}, [isDesktop, setMenuOpen, menuOpen])
|
||||
|
||||
const tabs: BackstoryTabProps[] = useMemo(() => {
|
||||
const homeTab: BackstoryTabProps = {
|
||||
label: "",
|
||||
path: "",
|
||||
tabProps: {
|
||||
label: "Backstory",
|
||||
sx: { flexGrow: 1, fontSize: '1rem' },
|
||||
icon:
|
||||
<Avatar sx={{
|
||||
width: 24,
|
||||
height: 24
|
||||
}}
|
||||
variant="rounded"
|
||||
alt="Backstory logo"
|
||||
src="/logo192.png" />,
|
||||
iconPosition: "start"
|
||||
},
|
||||
children: <HomePage ref={chatRef} {...backstoryProps} />
|
||||
};
|
||||
|
||||
const loadingTab: BackstoryTabProps = {
|
||||
...homeTab,
|
||||
children: <LoadingPage {...backstoryProps} />
|
||||
};
|
||||
|
||||
const resumeBuilderTab: BackstoryTabProps = {
|
||||
label: "Resume Builder",
|
||||
path: "resume-builder",
|
||||
children: <ResumeBuilderPage {...backstoryProps} />
|
||||
};
|
||||
|
||||
const contextVisualizerTab: BackstoryTabProps = {
|
||||
label: "Context Visualizer",
|
||||
path: "context-visualizer",
|
||||
children: <VectorVisualizerPage sx={{ p: 1 }} {...backstoryProps} />
|
||||
};
|
||||
|
||||
const aboutTab = {
|
||||
label: "About",
|
||||
path: "about",
|
||||
children: <AboutPage {...backstoryProps} />
|
||||
};
|
||||
|
||||
const controlsTab: BackstoryTabProps = {
|
||||
path: "controls",
|
||||
tabProps: {
|
||||
sx: { flexShrink: 1, flexGrow: 0, fontSize: '1rem' },
|
||||
icon: <SettingsIcon />
|
||||
},
|
||||
children: (
|
||||
<Scrollable
|
||||
autoscroll={false}
|
||||
sx={{
|
||||
maxWidth: "1024px",
|
||||
height: "calc(100vh - 72px)",
|
||||
flexDirection: "column",
|
||||
margin: "0 auto",
|
||||
p: 1,
|
||||
}}
|
||||
>
|
||||
<ControlsPage {...backstoryProps} />
|
||||
</Scrollable>
|
||||
)
|
||||
};
|
||||
|
||||
if (sessionId === undefined || !sessionId.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/)) {
|
||||
return [loadingTab];
|
||||
} else {
|
||||
return [
|
||||
homeTab,
|
||||
resumeBuilderTab,
|
||||
contextVisualizerTab,
|
||||
aboutTab,
|
||||
controlsTab,
|
||||
];
|
||||
}
|
||||
}, [backstoryProps, sessionId]);
|
||||
|
||||
const handleMenuClose = () => {
|
||||
setIsMenuClosing(true);
|
||||
setMenuOpen(false);
|
||||
};
|
||||
|
||||
const handleMenuTransitionEnd = () => {
|
||||
setIsMenuClosing(false);
|
||||
};
|
||||
|
||||
const handleMenuToggle = () => {
|
||||
if (!isMenuClosing) {
|
||||
setMenuOpen(!menuOpen);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (tab === undefined || tab === tabs[activeTab]) {
|
||||
return;
|
||||
}
|
||||
setTab(tabs[activeTab]);
|
||||
setSubRoute("");
|
||||
}, [tabs, activeTab, tab]);
|
||||
|
||||
const handleTabChange = (event: React.SyntheticEvent, newValue: number) => {
|
||||
if (newValue > tabs.length) {
|
||||
console.log(`Invalid tab requested: ${newValue}`);
|
||||
return;
|
||||
}
|
||||
setActiveTab(newValue);
|
||||
handleMenuClose();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (sessionId === undefined || !sessionId.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/)) {
|
||||
return;
|
||||
}
|
||||
const pathParts = window.location.pathname.split('/').filter(Boolean);
|
||||
const currentPath = pathParts.length < 2 ? '' : pathParts[0];
|
||||
let currentSubRoute = pathParts.length > 2 ? pathParts.slice(1, -1).join('/') : '';
|
||||
let tabIndex = tabs.findIndex((tab) => tab.path === currentPath);
|
||||
if (tabIndex === -1) {
|
||||
console.log(`Invalid path "${currentPath}" -- redirecting to default`);
|
||||
tabIndex = 0
|
||||
currentSubRoute = ""
|
||||
}
|
||||
|
||||
setActiveTab(tabIndex);
|
||||
setTab(tabs[tabIndex]);
|
||||
setSubRoute(currentSubRoute);
|
||||
console.log(`Initial load set to tab ${tabs[tabIndex].path} subRoute: ${currentSubRoute}`);
|
||||
}, [tabs, sessionId]);
|
||||
|
||||
useEffect(() => {
|
||||
if (tab === undefined || sessionId === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
let path = tab.path ? `/${tab.path}` : '';
|
||||
if (subRoute) path += `/${subRoute}`;
|
||||
path += `/${sessionId}`;
|
||||
|
||||
if (path !== location.pathname) {
|
||||
console.log(`Pusing state ${path}`)
|
||||
navigate(path, { replace: true });
|
||||
}
|
||||
}, [tab, subRoute, sessionId, navigate, location.pathname]);
|
||||
|
||||
/* toolbar height is 64px + 8px margin-top */
|
||||
const Offset = styled('div')(() => ({ minHeight: '72px', height: '72px' }));
|
||||
|
||||
return (
|
||||
<Box className="App"
|
||||
sx={{ display: 'flex', flexDirection: 'column' }}>
|
||||
<CssBaseline />
|
||||
<AppBar
|
||||
position="fixed"
|
||||
sx={{
|
||||
zIndex: (theme) => theme.zIndex.drawer + 1,
|
||||
maxWidth: "100vw"
|
||||
}}
|
||||
>
|
||||
<Toolbar>
|
||||
<Box sx={{ display: "flex", flexGrow: 1, flexDirection: "row" }}>
|
||||
{!isDesktop &&
|
||||
<Box sx={{ display: "flex", flexGrow: 1, flexDirection: "row" }}>
|
||||
<IconButton
|
||||
sx={{ display: "flex", margin: 'auto 0px' }}
|
||||
size="large"
|
||||
edge="start"
|
||||
color="inherit"
|
||||
onClick={handleMenuToggle}
|
||||
>
|
||||
<Tooltip title="Navigation">
|
||||
<MenuIcon />
|
||||
</Tooltip>
|
||||
</IconButton>
|
||||
<Tooltip title="Backstory">
|
||||
<Box
|
||||
sx={{ m: 1, gap: 1, display: "flex", flexDirection: "row", alignItems: "center", fontWeight: "bold", fontSize: "1.0rem", cursor: "pointer" }}
|
||||
onClick={() => { setActiveTab(0); setMenuOpen(false); }}
|
||||
>
|
||||
<Avatar sx={{
|
||||
width: 24,
|
||||
height: 24
|
||||
}}
|
||||
variant="rounded"
|
||||
alt="Backstory logo"
|
||||
src="/logo192.png" />
|
||||
BACKSTORY
|
||||
</Box>
|
||||
</Tooltip>
|
||||
</Box>
|
||||
}
|
||||
|
||||
{menuOpen === false && isDesktop &&
|
||||
<Tabs sx={{ display: "flex", flexGrow: 1 }}
|
||||
value={activeTab}
|
||||
indicatorColor="secondary"
|
||||
textColor="inherit"
|
||||
variant="fullWidth"
|
||||
allowScrollButtonsMobile
|
||||
onChange={handleTabChange}
|
||||
aria-label="Backstory navigation">
|
||||
{tabs.map((tab, index) => <Tab key={index} value={index} label={tab.label} {...tab.tabProps} />)}
|
||||
</Tabs>
|
||||
}
|
||||
</Box>
|
||||
</Toolbar>
|
||||
|
||||
</AppBar>
|
||||
|
||||
<Offset />
|
||||
|
||||
<Box sx={{ display: "flex", flexGrow: 1, flexDirection: "column" }} >
|
||||
<Drawer
|
||||
container={window.document.body}
|
||||
variant="temporary"
|
||||
open={menuOpen}
|
||||
onTransitionEnd={handleMenuTransitionEnd}
|
||||
onClose={handleMenuClose}
|
||||
sx={{
|
||||
display: 'block',
|
||||
'& .MuiDrawer-paper': { boxSizing: 'border-box' },
|
||||
}}
|
||||
slotProps={{
|
||||
root: {
|
||||
keepMounted: true, // Better open performance on mobile.
|
||||
},
|
||||
}}
|
||||
>
|
||||
<Toolbar />
|
||||
<Card className="MenuCard">
|
||||
<Tabs sx={{ display: "flex", flexGrow: 1 }}
|
||||
orientation="vertical"
|
||||
value={activeTab}
|
||||
indicatorColor="secondary"
|
||||
textColor="inherit"
|
||||
variant="scrollable"
|
||||
allowScrollButtonsMobile
|
||||
onChange={handleTabChange}
|
||||
aria-label="Backstory navigation">
|
||||
{tabs.map((tab, index) => <Tab key={index} value={index} label={tab.label} {...tab.tabProps} />)}
|
||||
</Tabs>
|
||||
</Card>
|
||||
</Drawer>
|
||||
{
|
||||
tabs.map((tab: any, i: number) =>
|
||||
<BackstoryPage key={i} active={i === activeTab} path={tab.path}>{tab.children}</BackstoryPage>
|
||||
)
|
||||
}
|
||||
</Box>
|
||||
</Box >
|
||||
);
|
||||
};
|
||||
|
||||
export {
|
||||
Main
|
||||
}
|
@@ -1,98 +0,0 @@
import { useEffect, useState, useRef } from "react";
import { useNavigate, useLocation } from "react-router-dom";
import { connectionBase } from '../Global';
import { SetSnackType } from '../Components/Snack';

const getSessionId = async (userId?: string) => {
    const endpoint = userId
        ? `/api/context/u/${encodeURIComponent(userId)}`
        : `/api/context`;

    const response = await fetch(connectionBase + endpoint, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
    });

    if (!response.ok) {
        throw Error("Server is temporarily down.");
    }

    const newSession = (await response.json()).id;
    console.log(`Session created: ${newSession}`);

    return newSession;
};

interface SessionWrapperProps {
    setSnack: SetSnackType;
    children: React.ReactNode;
}

const SessionWrapper = ({ setSnack, children }: SessionWrapperProps) => {
    const navigate = useNavigate();
    const location = useLocation();
    const [sessionId, setSessionId] = useState<string | undefined>(undefined);
    const fetchingRef = useRef(false);
    const [retry, setRetry] = useState<number>(0);

    useEffect(() => {
        console.log(`SessionWrapper: ${location.pathname}`);

        const ensureSessionId = async () => {
            const parts = location.pathname.split("/").filter(Boolean);
            const pattern = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89ab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/i;

            // Case: path starts with "u/{USERID}"
            if (parts.length >= 2 && parts[0] === "u") {
                const userId = parts[1];

                // Case: "u/{USERID}" - fetch session for this user
                const activeSession = await getSessionId(userId);
                setSessionId(activeSession);

                // Append session to path
                const newPath = [...parts, activeSession].join("/");
                navigate(`/${activeSession}`, { replace: true });
                return;
            }

            // Default case (original behavior)
            const hasSession = parts.length !== 0 && pattern.test(parts[parts.length - 1]);

            if (!hasSession) {
                let activeSession = sessionId;
                if (!activeSession) {
                    activeSession = await getSessionId();
                    setSessionId(activeSession);
                }

                const newPath = [...parts, activeSession].join("/");
                navigate(`/${newPath}`, { replace: true });
            }
        };

        if (!fetchingRef.current) {
            fetchingRef.current = true;
            ensureSessionId()
                .catch((e) => {
                    console.error(e);
                    setSnack("Backstory is temporarily unavailable. Retrying in 5 seconds.", "warning");
                    setTimeout(() => {
                        fetchingRef.current = false;
                        setRetry(retry => retry + 1);
                    }, 5000);
                })
                .finally(() => {
                    if (fetchingRef.current) {
                        fetchingRef.current = false;
                    }
                });
        }
    }, [location.pathname, navigate, setSnack, sessionId, retry]);

    return <>{children}</>;
};

export { SessionWrapper };
@@ -1,7 +1,7 @@
import React, { ReactElement, JSXElementConstructor } from 'react';
import Box from '@mui/material/Box';
import { SxProps, Theme } from '@mui/material';
import { ChatSubmitQueryInterface } from './ChatQuery';
import { ChatSubmitQueryInterface } from '../NewApp/Components/ChatQuery';
import { SetSnackType } from './Snack';

interface BackstoryElementProps {
@@ -1,14 +0,0 @@
.Conversation {
    display: flex;
    background-color: #F5F5F5;
    border: 1px solid #E0E0E0;
    flex-grow: 1;
    padding: 10px;
    flex-direction: column;
    font-size: 0.9rem;
    width: 100%;
    margin: 0 auto;
    overflow-y: auto;
    height: calc(100vh - 72px);
}
@ -1,625 +0,0 @@
|
||||
import React, { useState, useImperativeHandle, forwardRef, useEffect, useRef, useCallback } from 'react';
|
||||
import Typography from '@mui/material/Typography';
|
||||
import Tooltip from '@mui/material/Tooltip';
|
||||
import IconButton from '@mui/material/IconButton';
|
||||
import Button from '@mui/material/Button';
|
||||
import Box from '@mui/material/Box';
|
||||
import SendIcon from '@mui/icons-material/Send';
|
||||
import CancelIcon from '@mui/icons-material/Cancel';
|
||||
import { SxProps, Theme } from '@mui/material';
|
||||
import PropagateLoader from "react-spinners/PropagateLoader";
|
||||
|
||||
import { Message, MessageList, BackstoryMessage } from './Message';
|
||||
import { ContextStatus } from './ContextStatus';
|
||||
import { Scrollable } from './Scrollable';
|
||||
import { DeleteConfirmation } from './DeleteConfirmation';
|
||||
import { Query } from './ChatQuery';
|
||||
import './Conversation.css';
|
||||
import { BackstoryTextField, BackstoryTextFieldRef } from './BackstoryTextField';
|
||||
import { BackstoryElementProps } from './BackstoryTab';
|
||||
import { connectionBase } from '../Global';
|
||||
|
||||
const loadingMessage: BackstoryMessage = { "role": "status", "content": "Establishing connection with server..." };
|
||||
|
||||
type ConversationMode = 'chat' | 'job_description' | 'resume' | 'fact_check';
|
||||
|
||||
interface ConversationHandle {
|
||||
submitQuery: (query: Query) => void;
|
||||
fetchHistory: () => void;
|
||||
}
|
||||
|
||||
interface ConversationProps extends BackstoryElementProps {
|
||||
className?: string, // Override default className
|
||||
type: ConversationMode, // Type of Conversation chat
|
||||
placeholder?: string, // Prompt to display in TextField input
|
||||
actionLabel?: string, // Label to put on the primary button
|
||||
resetAction?: () => void, // Callback when Reset is pressed
|
||||
resetLabel?: string, // Label to put on Reset button
|
||||
defaultPrompts?: React.ReactElement[], // Set of Elements to display after the TextField
|
||||
defaultQuery?: string, // Default text to populate the TextField input
|
||||
preamble?: MessageList, // Messages to display at start of Conversation until Action has been invoked
|
||||
hidePreamble?: boolean, // Whether to hide the preamble after an Action has been invoked
|
||||
hideDefaultPrompts?: boolean, // Whether to hide the defaultPrompts after an Action has been invoked
|
||||
messageFilter?: ((messages: MessageList) => MessageList) | undefined, // Filter callback to determine which Messages to display in Conversation
|
||||
messages?: MessageList, //
|
||||
sx?: SxProps<Theme>,
|
||||
onResponse?: ((message: BackstoryMessage) => void) | undefined, // Event called when a query completes (provides messages)
|
||||
};
|
||||
|
||||
const Conversation = forwardRef<ConversationHandle, ConversationProps>((props: ConversationProps, ref) => {
|
||||
const {
|
||||
sessionId,
|
||||
actionLabel,
|
||||
className,
|
||||
defaultPrompts,
|
||||
defaultQuery,
|
||||
hideDefaultPrompts,
|
||||
hidePreamble,
|
||||
messageFilter,
|
||||
messages,
|
||||
onResponse,
|
||||
placeholder,
|
||||
preamble,
|
||||
resetAction,
|
||||
resetLabel,
|
||||
setSnack,
|
||||
submitQuery,
|
||||
sx,
|
||||
type,
|
||||
} = props;
|
||||
const [contextUsedPercentage, setContextUsedPercentage] = useState<number>(0);
|
||||
const [processing, setProcessing] = useState<boolean>(false);
|
||||
const [countdown, setCountdown] = useState<number>(0);
|
||||
const [conversation, setConversation] = useState<MessageList>([]);
|
||||
const [filteredConversation, setFilteredConversation] = useState<MessageList>([]);
|
||||
const [processingMessage, setProcessingMessage] = useState<BackstoryMessage | undefined>(undefined);
|
||||
const [streamingMessage, setStreamingMessage] = useState<BackstoryMessage | undefined>(undefined);
|
||||
const timerRef = useRef<any>(null);
|
||||
const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
|
||||
const [contextWarningShown, setContextWarningShown] = useState<boolean>(false);
|
||||
const [noInteractions, setNoInteractions] = useState<boolean>(true);
|
||||
const conversationRef = useRef<MessageList>([]);
|
||||
const viewableElementRef = useRef<HTMLDivElement>(null);
|
||||
const backstoryTextRef = useRef<BackstoryTextFieldRef>(null);
|
||||
const stopRef = useRef(false);
|
||||
|
||||
// Keep the ref updated whenever items changes
|
||||
useEffect(() => {
|
||||
conversationRef.current = conversation;
|
||||
}, [conversation]);
|
||||
|
||||
// Update the context status
|
||||
const updateContextStatus = useCallback(() => {
|
||||
const fetchContextStatus = async () => {
|
||||
try {
|
||||
const response = await fetch(connectionBase + `/api/context-status/${sessionId}/${type}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
setContextStatus(data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Error getting context status:', error);
|
||||
setSnack("Unable to obtain context status.", "error");
|
||||
}
|
||||
};
|
||||
fetchContextStatus();
|
||||
}, [setContextStatus, setSnack, sessionId, type]);
|
||||
|
||||
/* Transform the 'Conversation' by filtering via callback, then adding
|
||||
* preamble and messages based on whether the conversation
|
||||
* has any elements yet */
|
||||
useEffect(() => {
|
||||
let filtered = [];
|
||||
if (messageFilter === undefined) {
|
||||
filtered = conversation;
|
||||
// console.log('No message filter provided. Using all messages.', filtered);
|
||||
} else {
|
||||
//console.log('Filtering conversation...')
|
||||
filtered = messageFilter(conversation); /* Do not copy conversation or useEffect will loop forever */
|
||||
//console.log(`${conversation.length - filtered.length} messages filtered out.`);
|
||||
}
|
||||
if (filtered.length === 0) {
|
||||
setFilteredConversation([
|
||||
...(preamble || []),
|
||||
...(messages || []),
|
||||
]);
|
||||
} else {
|
||||
setFilteredConversation([
|
||||
...(hidePreamble ? [] : (preamble || [])),
|
||||
...(messages || []),
|
||||
...filtered,
|
||||
]);
|
||||
};
|
||||
}, [conversation, setFilteredConversation, messageFilter, preamble, messages, hidePreamble]);
|
||||
|
||||
const fetchHistory = useCallback(async () => {
|
||||
let retries = 5;
|
||||
while (--retries > 0) {
|
||||
try {
|
||||
const response = await fetch(connectionBase + `/api/history/${sessionId}/${type}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const { messages } = await response.json();
|
||||
|
||||
if (messages === undefined || messages.length === 0) {
|
||||
console.log(`History returned for ${type} from server with 0 entries`)
|
||||
setConversation([])
|
||||
setNoInteractions(true);
|
||||
} else {
|
||||
console.log(`History returned for ${type} from server with ${messages.length} entries:`, messages)
|
||||
|
||||
const backstoryMessages: BackstoryMessage[] = messages;
|
||||
|
||||
setConversation(backstoryMessages.flatMap((backstoryMessage: BackstoryMessage) => {
|
||||
if (backstoryMessage.status === "partial") {
|
||||
return [{
|
||||
...backstoryMessage,
|
||||
role: "assistant",
|
||||
content: backstoryMessage.response || "",
|
||||
expanded: false,
|
||||
expandable: true,
|
||||
}]
|
||||
}
|
||||
return [{
|
||||
role: 'user',
|
||||
content: backstoryMessage.prompt || "",
|
||||
}, {
|
||||
...backstoryMessage,
|
||||
role: ['done'].includes(backstoryMessage.status || "") ? "assistant" : backstoryMessage.status,
|
||||
content: backstoryMessage.response || "",
|
||||
}] as MessageList;
|
||||
}));
|
||||
setNoInteractions(false);
|
||||
}
|
||||
setProcessingMessage(undefined);
|
||||
setStreamingMessage(undefined);
|
||||
updateContextStatus();
|
||||
return;
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error generating session ID:', error);
|
||||
setProcessingMessage({ role: "error", content: `Unable to obtain history from server. Retrying in 3 seconds (${retries} remain.)` });
|
||||
setTimeout(() => {
|
||||
setProcessingMessage(undefined);
|
||||
}, 3000);
|
||||
await new Promise(resolve => setTimeout(resolve, 3000));
|
||||
setSnack("Unable to obtain chat history.", "error");
|
||||
}
|
||||
};
|
||||
}, [setConversation, updateContextStatus, setSnack, type, sessionId]);
|
||||
|
||||
// Set the initial chat history to "loading" or the welcome message if loaded.
|
||||
useEffect(() => {
|
||||
if (sessionId === undefined) {
|
||||
setProcessingMessage(loadingMessage);
|
||||
return;
|
||||
}
|
||||
|
||||
fetchHistory();
|
||||
}, [fetchHistory, sessionId, setProcessing]);
|
||||
|
||||
const startCountdown = (seconds: number) => {
|
||||
if (timerRef.current) clearInterval(timerRef.current);
|
||||
setCountdown(seconds);
|
||||
timerRef.current = setInterval(() => {
|
||||
setCountdown((prev) => {
|
||||
if (prev <= 1) {
|
||||
clearInterval(timerRef.current);
|
||||
timerRef.current = null;
|
||||
return 0;
|
||||
}
|
||||
return prev - 1;
|
||||
});
|
||||
}, 1000);
|
||||
};
|
||||
|
||||
const stopCountdown = () => {
|
||||
if (timerRef.current) {
|
||||
clearInterval(timerRef.current);
|
||||
timerRef.current = null;
|
||||
setCountdown(0);
|
||||
}
|
||||
};
|
||||
|
||||
const handleEnter = (value: string) => {
|
||||
const query: Query = {
|
||||
prompt: value
|
||||
}
|
||||
sendQuery(query);
|
||||
};
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
submitQuery: (query: Query) => {
|
||||
sendQuery(query);
|
||||
},
|
||||
fetchHistory: () => { return fetchHistory(); }
|
||||
}));
|
||||
|
||||
// If context status changes, show a warning if necessary. If it drops
|
||||
// back below the threshold, clear the warning trigger
|
||||
useEffect(() => {
|
||||
const context_used_percentage = Math.round(100 * contextStatus.context_used / contextStatus.max_context);
|
||||
if (context_used_percentage >= 90 && !contextWarningShown) {
|
||||
setSnack(`${context_used_percentage}% of context used. You may wish to start a new chat.`, "warning");
|
||||
setContextWarningShown(true);
|
||||
}
|
||||
if (context_used_percentage < 90 && contextWarningShown) {
|
||||
setContextWarningShown(false);
|
||||
}
|
||||
setContextUsedPercentage(context_used_percentage)
|
||||
}, [contextStatus, setContextWarningShown, contextWarningShown, setContextUsedPercentage, setSnack]);
|
||||
|
||||
const reset = async () => {
try {
const response = await fetch(connectionBase + `/api/reset/${sessionId}/${type}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ reset: ['history'] })
});

if (!response.ok) {
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
}

if (!response.body) {
throw new Error('Response body is null');
}

setProcessingMessage(undefined);
setStreamingMessage(undefined);
setConversation([]);
setNoInteractions(true);

} catch (e) {
setSnack("Error resetting history", "error");
console.error('Error resetting history:', e);
}
};

const cancelQuery = () => {
console.log("Stop query");
stopRef.current = true;
};

const sendQuery = async (query: Query) => {
query.prompt = query.prompt.trim();

// If the request was empty, a default request was provided,
// and there is no prompt for the user, send the default request.
if (!query.prompt && defaultQuery && !prompt) {
query.prompt = defaultQuery.trim();
}

// Do not send an empty request.
if (!query.prompt) {
return;
}

stopRef.current = false;

setNoInteractions(false);

setConversation([
...conversationRef.current,
{
role: 'user',
origin: type,
content: query.prompt,
disableCopy: true
}
]);

// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));

try {
setProcessing(true);

// Add initial processing message
setProcessingMessage(
{ role: 'status', content: 'Submitting request...', disableCopy: true }
);

// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));

let data: any = query;
if (type === "job_description") {
data = {
prompt: "",
agent_options: {
job_description: query.prompt,
}
}
}

const response = await fetch(connectionBase + `/api/${type}/${sessionId}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify(data)
});

setSnack(`Query sent.`, "info");

if (!response.ok) {
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
}

if (!response.body) {
throw new Error('Response body is null');
}

let streaming_response = "";
// Set up stream processing with explicit chunking
const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = '';

const process_line = async (line: string) => {
let update = JSON.parse(line);

switch (update.status) {
case 'done':
case 'partial':
if (update.status === 'done') stopCountdown();
if (update.status === 'done') setStreamingMessage(undefined);
if (update.status === 'done') setProcessingMessage(undefined);
const backstoryMessage: BackstoryMessage = update;
setConversation([
...conversationRef.current, {
...backstoryMessage,
role: 'assistant',
origin: type,
prompt: ['done', 'partial'].includes(update.status) ? update.prompt : '',
content: backstoryMessage.response || "",
expanded: update.status === "done" ? true : false,
expandable: update.status === "done" ? false : true,
}] as MessageList);
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));

const metadata = update.metadata;
if (metadata) {
updateContextStatus();
}

if (onResponse) {
onResponse(update);
}
break;
case 'error':
// Show error
setConversation([
...conversationRef.current, {
...update,
role: 'error',
origin: type,
content: update.response || "",
}] as MessageList);

setProcessing(false);
stopCountdown();

// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
break;
default:
// Force an immediate state update based on the message type
// Update processing message with immediate re-render
if (update.status === "streaming") {
streaming_response += update.chunk;
setStreamingMessage({ role: update.status, content: streaming_response, disableCopy: true });
} else {
setProcessingMessage({ role: update.status, content: update.response, disableCopy: true });
/* Reset stream on non streaming message */
streaming_response = "";
}
startCountdown(Math.ceil(update.remaining_time));
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
break;
}
}

while (!stopRef.current) {
const { done, value } = await reader.read();
if (done) {
break;
}
const chunk = decoder.decode(value, { stream: true });

// Process each complete line immediately
buffer += chunk;
let lines = buffer.split('\n');
buffer = lines.pop() || ''; // Keep incomplete line in buffer
for (const line of lines) {
if (!line.trim()) continue;
try {
await process_line(line);
} catch (e) {
setSnack("Error processing query", "error");
console.error(e);
}
}
}

// Process any remaining buffer content
if (buffer.trim()) {
try {
await process_line(buffer);
} catch (e) {
setSnack("Error processing query", "error");
console.error(e);
}
}

if (stopRef.current) {
await reader.cancel();
setProcessingMessage(undefined);
setStreamingMessage(undefined);
setSnack("Processing cancelled", "warning");
}
stopCountdown();
setProcessing(false);
stopRef.current = false;
} catch (error) {
console.error('Fetch error:', error);
setSnack("Unable to process query", "error");
setProcessingMessage({ role: 'error', content: "Unable to process query", disableCopy: true });
setTimeout(() => {
setProcessingMessage(undefined);
}, 5000);
stopRef.current = false;
setProcessing(false);
stopCountdown();
return;
}
};

return (
// <Scrollable
// className={`${className || ""} Conversation`}
// autoscroll
// textFieldRef={viewableElementRef}
// fallbackThreshold={0.5}
// sx={{
// p: 1,
// mt: 0,
// ...sx
// }}
// >
<Box sx={{ p: 1, mt: 0, overflow: "hidden", ...sx }}>
{
filteredConversation.map((message, index) =>
<Message key={index} expanded={message.expanded === undefined ? true : message.expanded} {...{ sendQuery, message, connectionBase, sessionId, setSnack, submitQuery }} />
)
}
{
processingMessage !== undefined &&
<Message {...{ sendQuery, connectionBase, sessionId, setSnack, message: processingMessage, submitQuery }} />
}
{
streamingMessage !== undefined &&
<Message {...{ sendQuery, connectionBase, sessionId, setSnack, message: streamingMessage, submitQuery }} />
}
<Box sx={{
display: "flex",
flexDirection: "column",
alignItems: "center",
justifyContent: "center",
m: 1,
}}>
<PropagateLoader
size="10px"
loading={processing}
aria-label="Loading Spinner"
data-testid="loader"
/>
{processing === true && countdown > 0 && (
<Box
sx={{
pt: 1,
fontSize: "0.7rem",
color: "darkgrey"
}}
>Response will be stopped in: {countdown}s</Box>
)}
</Box>
<Box className="Query" sx={{ display: "flex", flexDirection: "column", p: 1, flexGrow: 1 }}>
{placeholder &&
<Box sx={{ display: "flex", flexGrow: 1, p: 0, m: 0, flexDirection: "column" }}
ref={viewableElementRef}>
<BackstoryTextField
ref={backstoryTextRef}
disabled={processing}
onEnter={handleEnter}
placeholder={placeholder}
/>
</Box>
}

<Box key="jobActions" sx={{ display: "flex", justifyContent: "center", flexDirection: "row" }}>
<DeleteConfirmation
label={resetLabel || "all data"}
disabled={sessionId === undefined || processingMessage !== undefined || noInteractions}
onDelete={() => { reset(); resetAction && resetAction(); }} />
<Tooltip title={actionLabel || "Send"}>
<span style={{ display: "flex", flexGrow: 1 }}>
<Button
sx={{ m: 1, gap: 1, flexGrow: 1 }}
variant="contained"
disabled={sessionId === undefined || processingMessage !== undefined}
onClick={() => { sendQuery({ prompt: (backstoryTextRef.current && backstoryTextRef.current.getAndResetValue()) || "" }); }}>
{actionLabel}<SendIcon />
</Button>
</span>
</Tooltip>
<Tooltip title="Cancel">
<span style={{ display: "flex" }}> { /* This span is used to wrap the IconButton to ensure Tooltip works even when disabled */}
<IconButton
aria-label="cancel"
onClick={() => { cancelQuery(); }}
sx={{ display: "flex", margin: 'auto 0px' }}
size="large"
edge="start"
disabled={stopRef.current || sessionId === undefined || processing === false}
>
<CancelIcon />
</IconButton>
</span>
</Tooltip>
</Box>
</Box>
{(noInteractions || !hideDefaultPrompts) && defaultPrompts !== undefined && defaultPrompts.length &&
<Box sx={{ display: "flex", flexDirection: "column" }}>
{
defaultPrompts.map((element, index) => {
return (<Box key={index}>{element}</Box>);
})
}
</Box>
}
<Box sx={{ ml: "0.25rem", fontSize: "0.6rem", color: "darkgrey", display: "flex", flexShrink: 1, flexDirection: "row", gap: 1, mb: "auto", mt: 1 }}>
Context used: {contextUsedPercentage}% {contextStatus.context_used}/{contextStatus.max_context}
{
contextUsedPercentage >= 90 ? <Typography sx={{ fontSize: "0.6rem", color: "red" }}>WARNING: Context almost exhausted. You should start a new chat.</Typography>
: (contextUsedPercentage >= 50 ? <Typography sx={{ fontSize: "0.6rem", color: "orange" }}>NOTE: Context is getting long. Queries will be slower, and the LLM may stop issuing tool calls.</Typography>
: <></>)
}
</Box>
<Box sx={{ display: "flex", flexGrow: 1 }}></Box>
</Box>
);
});

export type {
ConversationProps,
ConversationHandle,
};

export {
Conversation
};
@ -5,7 +5,7 @@ import { ThemeProvider } from '@mui/material/styles';
import { backstoryTheme } from '../BackstoryTheme';

import { SeverityType } from '../Components/Snack';
import { Query } from '../Components/ChatQuery';
import { Query } from '../types/types';
import { ConversationHandle } from './Components/Conversation';
import { UserProvider } from './Components/UserContext';
import { BetaPage } from './Pages/BetaPage';

@ -15,7 +15,8 @@ import {Header} from './Header';
import { Scrollable } from '../../Components/Scrollable';
import { Footer } from './Footer';
import { Snack, SetSnackType } from '../../Components/Snack';
import { useUser, UserInfo } from './UserContext';
import { useUser } from './UserContext';
import { User } from '../../types/types';
import { getBackstoryDynamicRoutes } from './BackstoryRoutes';
import { LoadingComponent } from "../Components/LoadingComponent";

@ -66,17 +67,19 @@ const EmployerNavItems: NavigationLinkType[] = [
];

// Navigation links based on user type
const getNavigationLinks = (user: UserInfo | null): NavigationLinkType[] => {
const getNavigationLinks = (user: User | null): NavigationLinkType[] => {
if (!user) {
return DefaultNavItems;
}

if (user.type === 'candidate' && user.isAuthenticated) {
return CandidateNavItems;
switch (user.userType) {
case 'UserType.CANDIDATE':
return CandidateNavItems;
case 'UserType.EMPLOYER':
return EmployerNavItems;
default:
return DefaultNavItems;
}

// Employer navigation
return EmployerNavItems;
};

interface BackstoryPageContainerProps {

@ -5,10 +5,10 @@ import { Box, Typography, Container, Paper } from '@mui/material';
|
||||
|
||||
import { BackstoryPageProps } from '../../Components/BackstoryTab';
|
||||
import { ConversationHandle } from './Conversation';
|
||||
import { UserInfo } from './UserContext';
|
||||
import { User } from '../../types/types';
|
||||
|
||||
import { ChatPage } from '../Pages/ChatPage';
|
||||
import { ResumeBuilderPage } from '../../Pages/ResumeBuilderPage';
|
||||
import { ResumeBuilderPage } from '../Pages/ResumeBuilderPage';
|
||||
import { DocsPage } from '../Pages/DocsPage';
|
||||
import { CreateProfilePage } from '../Pages/CreateProfilePage';
|
||||
import { VectorVisualizerPage } from 'Pages/VectorVisualizerPage';
|
||||
@ -37,7 +37,7 @@ const LoginPage = () => (<BetaPage><Typography variant="h4">Login page...</Typog
|
||||
interface BackstoryDynamicRoutesProps extends BackstoryPageProps {
|
||||
chatRef: Ref<ConversationHandle>
|
||||
}
|
||||
const getBackstoryDynamicRoutes = (props : BackstoryDynamicRoutesProps, user?: UserInfo | null) : ReactNode => {
|
||||
const getBackstoryDynamicRoutes = (props: BackstoryDynamicRoutesProps, user?: User | null): ReactNode => {
|
||||
const { sessionId, setSnack, submitQuery, chatRef } = props;
|
||||
let index=0
|
||||
const routes = [
|
||||
@ -59,14 +59,9 @@ const getBackstoryDynamicRoutes = (props : BackstoryDynamicRoutesProps, user?: U
|
||||
routes.push(<Route key={`${index++}`} path="*" element={<BetaPage />} />);
|
||||
} else {
|
||||
|
||||
if (!user.isAuthenticated) {
|
||||
routes.push(<Route key={`${index++}`} path="/register" element={(<BetaPage><CreateProfilePage /></BetaPage>)} />);
|
||||
routes.push(<Route key={`${index++}`} path="/login" element={<LoginPage />} />);
|
||||
} else {
|
||||
routes.push(<Route key={`${index++}`} path="/logout" element={<LogoutPage />} />);
|
||||
}
|
||||
routes.push(<Route key={`${index++}`} path="/logout" element={<LogoutPage />} />);
|
||||
|
||||
if (user.type === "candidate" && user.isAuthenticated) {
|
||||
if (user.userType === "UserType.CANDIDATE") {
|
||||
routes.splice(-1, 0, ...[
|
||||
<Route key={`${index++}`} path="/profile" element={<ProfilePage />} />,
|
||||
<Route key={`${index++}`} path="/backstory" element={<BackstoryPage />} />,
|
||||
@ -75,7 +70,7 @@ const getBackstoryDynamicRoutes = (props : BackstoryDynamicRoutesProps, user?: U
|
||||
]);
|
||||
}
|
||||
|
||||
if (user.type === "employer") {
|
||||
if (user.userType === "UserType.EMPLOYER") {
|
||||
routes.splice(-1, 0, ...[
|
||||
<Route key={`${index++}`} path="/search" element={<SearchPage />} />,
|
||||
<Route key={`${index++}`} path="/saved" element={<SavedPage />} />,
|
||||
|
@ -9,14 +9,15 @@ import {
|
||||
} from '@mui/material';
|
||||
import { useMediaQuery } from '@mui/material';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import { UserInfo, useUser } from "./UserContext";
|
||||
import { useUser } from "./UserContext";
|
||||
import { Candidate } from '../../types/types';
|
||||
import { CopyBubble } from "../../Components/CopyBubble";
|
||||
|
||||
interface CandidateInfoProps {
|
||||
sessionId: string;
|
||||
user?: UserInfo;
|
||||
sx?: SxProps;
|
||||
action?: string;
|
||||
sessionId: string;
|
||||
user?: Candidate;
|
||||
sx?: SxProps;
|
||||
action?: string;
|
||||
};
|
||||
|
||||
const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps) => {
|
||||
@ -36,7 +37,7 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
|
||||
if (size < 1000000) return `${(size / 1000).toFixed(1)}K RAG elements`;
|
||||
return `${(size / 1000000).toFixed(1)}M RAG elements`;
|
||||
};
|
||||
const candidate = props.user || user;
|
||||
const candidate: Candidate | null = props.user || (user as Candidate);
|
||||
|
||||
if (!candidate) {
|
||||
return <Box>No user loaded.</Box>;
|
||||
@ -66,8 +67,8 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
|
||||
maxWidth: "80px"
|
||||
}}>
|
||||
<Avatar
|
||||
src={candidate.has_profile ? `/api/u/${candidate.username}/profile/${sessionId}?timestamp=${Date.now()}` : ''}
|
||||
alt={`${candidate.full_name}'s profile`}
|
||||
src={candidate.hasProfile ? `/api/u/${candidate.username}/profile/${sessionId}?timestamp=${Date.now()}` : ''}
|
||||
alt={`${candidate.fullName}'s profile`}
|
||||
sx={{
|
||||
alignSelf: "flex-start",
|
||||
width: 80,
|
||||
@ -100,7 +101,7 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
|
||||
fontWeight: 'bold',
|
||||
whiteSpace: 'nowrap'
|
||||
}}>
|
||||
{candidate.full_name}
|
||||
{candidate.fullName}
|
||||
</Typography>
|
||||
</Box>
|
||||
<Box sx={{ fontSize: "0.75rem", alignItems: "center" }} >
|
||||
@ -111,14 +112,14 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
|
||||
</Box>
|
||||
</Box>
|
||||
|
||||
{candidate.rag_content_size !== undefined && candidate.rag_content_size > 0 &&
|
||||
{/* {candidate.rag_content_size !== undefined && candidate.rag_content_size > 0 &&
|
||||
<Chip
|
||||
onClick={(event: React.MouseEvent<HTMLDivElement>) => { navigate('/knowledge-explorer'); event.stopPropagation() }}
|
||||
label={formatRagSize(candidate.rag_content_size)}
|
||||
color="primary"
|
||||
size="small"
|
||||
sx={{ ml: 2 }}
|
||||
/>}
|
||||
/>} */}
|
||||
</Box>
|
||||
|
||||
<Typography variant="body1" color="text.secondary">
|
||||
@ -128,10 +129,10 @@ const CandidateInfo: React.FC<CandidateInfoProps> = (props: CandidateInfoProps)
|
||||
<Divider sx={{ my: 2 }} />
|
||||
|
||||
{ candidate.location && <Typography variant="body2" sx={{ mb: 1 }}>
|
||||
<strong>Location:</strong> {candidate.location}
|
||||
<strong>Location:</strong> {candidate.location.city}, {candidate.location.state || candidate.location.country}
|
||||
</Typography> }
|
||||
{ candidate.email && <Typography variant="body2" sx={{ mb: 1 }}>
|
||||
<strong>Email:</strong> {candidate.email}
|
||||
<strong>Email:</strong> {candidate.email}
|
||||
</Typography> }
|
||||
{ candidate.phone && <Typography variant="body2">
|
||||
<strong>Phone:</strong> {candidate.phone}
|
||||
|
@ -1,19 +1,7 @@
|
||||
import Box from '@mui/material/Box';
|
||||
import Button from '@mui/material/Button';
|
||||
|
||||
/* backstory/src/utils/message.py */
|
||||
type Tunables = {
|
||||
enable_rag?: boolean,
|
||||
enable_tools?: boolean,
|
||||
enable_context?: boolean,
|
||||
};
|
||||
|
||||
/* backstory/src/server.py */
|
||||
type Query = {
|
||||
prompt: string,
|
||||
tunables?: Tunables,
|
||||
agent_options?: {},
|
||||
};
|
||||
import { Query } from "../../types/types";
|
||||
|
||||
type ChatSubmitQueryInterface = (query: Query) => void;
|
||||
|
||||
@ -42,9 +30,7 @@ const ChatQuery = (props : ChatQueryInterface) => {
|
||||
|
||||
export type {
|
||||
ChatQueryInterface,
|
||||
Query,
|
||||
ChatSubmitQueryInterface,
|
||||
Tunables,
|
||||
};
|
||||
|
||||
export {
|
@ -11,7 +11,7 @@ import PropagateLoader from "react-spinners/PropagateLoader";
|
||||
|
||||
import { Message, MessageList, BackstoryMessage, MessageRoles } from '../../Components/Message';
|
||||
import { DeleteConfirmation } from '../../Components/DeleteConfirmation';
|
||||
import { Query } from '../../Components/ChatQuery';
|
||||
import { Query } from '../../types/types';
|
||||
import { BackstoryTextField, BackstoryTextFieldRef } from '../../Components/BackstoryTextField';
|
||||
import { BackstoryElementProps } from '../../Components/BackstoryTab';
|
||||
import { connectionBase } from '../../Global';
|
||||
|
@ -9,11 +9,11 @@ import CancelIcon from '@mui/icons-material/Cancel';
|
||||
import SendIcon from '@mui/icons-material/Send';
|
||||
import PropagateLoader from 'react-spinners/PropagateLoader';
|
||||
import { CandidateInfo } from '../Components/CandidateInfo';
|
||||
import { Query } from '../../Components/ChatQuery'
|
||||
import { Query } from '../../types/types'
|
||||
import { Quote } from 'NewApp/Components/Quote';
|
||||
import { streamQueryResponse, StreamQueryController } from '../Components/streamQueryResponse';
|
||||
import { connectionBase } from 'Global';
|
||||
import { UserInfo } from '../Components/UserContext';
|
||||
import { User } from '../../types/types';
|
||||
import { BackstoryElementProps } from 'Components/BackstoryTab';
|
||||
import { BackstoryTextField, BackstoryTextFieldRef } from 'Components/BackstoryTextField';
|
||||
import { jsonrepair } from 'jsonrepair';
|
||||
@ -53,7 +53,7 @@ const GenerateImage = (props: GenerateImageProps) => {
|
||||
controllerRef.current = streamQueryResponse({
|
||||
query: {
|
||||
prompt: prompt,
|
||||
agent_options: {
|
||||
agentOptions: {
|
||||
username: user?.username,
|
||||
}
|
||||
},
|
||||
|
@ -34,7 +34,8 @@ import ContentCopyIcon from '@mui/icons-material/ContentCopy';
|
||||
import { NavigationLinkType } from './BackstoryLayout';
|
||||
import { Beta } from './Beta';
|
||||
import './Header.css';
|
||||
import { useUser, UserInfo } from './UserContext';
|
||||
import { useUser } from './UserContext';
|
||||
import { Candidate, Employer } from '../../types/types';
|
||||
import { SetSnackType } from '../../Components/Snack';
|
||||
import { CopyBubble } from '../../Components/CopyBubble';
|
||||
|
||||
@ -96,7 +97,8 @@ interface HeaderProps {
|
||||
|
||||
const Header: React.FC<HeaderProps> = (props: HeaderProps) => {
|
||||
const { user } = useUser();
|
||||
|
||||
const candidate: Candidate | null = (user && user.userType === "UserType.CANDIDATE") ? user as Candidate : null;
|
||||
const employer: Employer | null = (user && user.userType === "UserType.EMPLOYER") ? user as Employer : null;
|
||||
const {
|
||||
transparent = false,
|
||||
className,
|
||||
@ -230,7 +232,7 @@ const Header: React.FC<HeaderProps> = (props: HeaderProps) => {
|
||||
))}
|
||||
</Tabs>
|
||||
<Divider />
|
||||
{(!user || !user.isAuthenticated) && (showLogin === undefined || showLogin !== false) && (
|
||||
{!user && (showLogin === undefined || showLogin !== false) && (
|
||||
<Box sx={{ p: 2, display: 'flex', flexDirection: 'column', gap: 1 }}>
|
||||
<Button
|
||||
variant="contained"
|
||||
@ -260,7 +262,7 @@ const Header: React.FC<HeaderProps> = (props: HeaderProps) => {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
if (!user || !user.isAuthenticated) {
|
||||
if (!user) {
|
||||
return (
|
||||
<>
|
||||
<Button
|
||||
@ -299,10 +301,10 @@ const Header: React.FC<HeaderProps> = (props: HeaderProps) => {
|
||||
height: 32,
|
||||
bgcolor: theme.palette.secondary.main,
|
||||
}}>
|
||||
{user?.full_name.charAt(0).toUpperCase()}
|
||||
{user?.username.charAt(0).toUpperCase()}
|
||||
</Avatar>
|
||||
<Box sx={{ display: { xs: 'none', sm: 'block' } }}>
|
||||
{user?.full_name}
|
||||
{user?.username}
|
||||
</Box>
|
||||
<ExpandMore fontSize="small" />
|
||||
</UserButton>
|
||||
|
@ -2,7 +2,7 @@ import React from 'react';
|
||||
import { MuiMarkdown } from 'mui-markdown';
|
||||
import { SxProps, useTheme } from '@mui/material/styles';
|
||||
import { Link } from '@mui/material';
|
||||
import { ChatQuery } from '../../Components/ChatQuery';
|
||||
import { ChatQuery } from './ChatQuery';
|
||||
import Box from '@mui/material/Box';
|
||||
import JsonView from '@uiw/react-json-view';
|
||||
import { vscodeTheme } from '@uiw/react-json-view/vscode';
|
||||
|
@ -1,7 +1,8 @@
|
||||
import React, { createContext, useContext, useEffect, useState } from "react";
|
||||
import { Tunables } from '../../Components/ChatQuery';
|
||||
import { Tunables } from '../../types/types';
|
||||
import { SetSnackType } from '../../Components/Snack';
|
||||
import { connectionBase } from '../../Global';
|
||||
import { User } from '../../types/types';
|
||||
|
||||
// Define the UserInfo interface for type safety
|
||||
interface UserQuestion {
|
||||
@ -9,31 +10,9 @@ interface UserQuestion {
|
||||
tunables?: Tunables;
|
||||
};
|
||||
|
||||
interface UserInfo {
|
||||
type: 'candidate' | 'employer' | 'guest';
|
||||
description: string;
|
||||
rag_content_size: number;
|
||||
username: string;
|
||||
first_name: string;
|
||||
last_name: string;
|
||||
full_name: string;
|
||||
contact_info: Record<string, string>;
|
||||
questions: UserQuestion[],
|
||||
isAuthenticated: boolean,
|
||||
has_profile: boolean,
|
||||
title: string;
|
||||
location: string;
|
||||
email: string;
|
||||
phone: string;
|
||||
// Fields used in AI generated personas
|
||||
age?: number,
|
||||
ethnicity?: string,
|
||||
gender?: string,
|
||||
};
|
||||
|
||||
type UserContextType = {
|
||||
user: UserInfo | null;
|
||||
setUser: (user: UserInfo | null) => void;
|
||||
user: User | null;
|
||||
setUser: (user: User | null) => void;
|
||||
};
|
||||
|
||||
const UserContext = createContext<UserContextType | undefined>(undefined);
|
||||
@ -51,14 +30,14 @@ interface UserProviderProps {
|
||||
};
|
||||
const UserProvider: React.FC<UserProviderProps> = (props: UserProviderProps) => {
|
||||
const { sessionId, children, setSnack } = props;
|
||||
const [user, setUser] = useState<UserInfo | null>(null);
|
||||
const [user, setUser] = useState<User | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!sessionId || user) {
|
||||
return;
|
||||
}
|
||||
|
||||
const fetchUserFromSession = async (): Promise<UserInfo | null> => {
|
||||
const fetchUserFromSession = async (): Promise<User | null> => {
|
||||
try {
|
||||
let response;
|
||||
response = await fetch(`${connectionBase}/api/user/${sessionId}`, {
|
||||
@ -68,12 +47,9 @@ const UserProvider: React.FC<UserProviderProps> = (props: UserProviderProps) =>
|
||||
if (!response.ok) {
|
||||
throw new Error('Session not found');
|
||||
}
|
||||
const user: UserInfo = {
|
||||
const user: User = {
|
||||
...(await response.json()),
|
||||
type: "guest",
|
||||
isAuthenticated: false,
|
||||
logout: () => { },
|
||||
}
|
||||
};
|
||||
console.log("Loaded user:", user);
|
||||
setUser(user);
|
||||
} catch (err) {
|
||||
@ -96,10 +72,6 @@ const UserProvider: React.FC<UserProviderProps> = (props: UserProviderProps) =>
|
||||
);
|
||||
};
|
||||
|
||||
export type {
|
||||
UserInfo
|
||||
};
|
||||
|
||||
export {
|
||||
UserProvider,
|
||||
useUser
|
||||
|
@ -1,5 +1,5 @@
|
||||
import { BackstoryMessage } from '../../Components/Message';
|
||||
import { Query } from '../../Components/ChatQuery';
|
||||
import { Query } from '../../types/types';
|
||||
import { jsonrepair } from 'jsonrepair';
|
||||
|
||||
type StreamQueryOptions = {
|
||||
|
@ -1,25 +0,0 @@
|
||||
import React from 'react';
|
||||
import {
|
||||
Typography,
|
||||
} from '@mui/material';
|
||||
import { BetaPage } from './BetaPage';
|
||||
|
||||
const MyIncompletePage = () => {
|
||||
return (
|
||||
<BetaPage
|
||||
title="Analytics Dashboard"
|
||||
subtitle="Our powerful analytics tools are coming soon"
|
||||
returnLabel="Back to Home"
|
||||
returnPath="/home"
|
||||
>
|
||||
<Typography variant="body1">
|
||||
We're building a comprehensive analytics dashboard that will provide real-time insights
|
||||
into your business performance. The expected completion date is June 15, 2025.
|
||||
</Typography>
|
||||
|
||||
<Typography variant="body1" sx={{ mt: 2 }}>
|
||||
Features will include custom reports, data visualization, and export capabilities.
|
||||
</Typography>
|
||||
</BetaPage>
|
||||
);
|
||||
};
|
@ -6,18 +6,18 @@ import Box from '@mui/material/Box';
|
||||
import { BackstoryPageProps } from '../../Components/BackstoryTab';
|
||||
import { CandidateInfo } from 'NewApp/Components/CandidateInfo';
|
||||
import { connectionBase } from '../../Global';
|
||||
import { UserInfo } from "../Components/UserContext";
|
||||
import { Candidate } from "../../types/types";
|
||||
|
||||
const CandidateListingPage = (props: BackstoryPageProps) => {
|
||||
const navigate = useNavigate();
|
||||
const { sessionId, setSnack } = props;
|
||||
const [users, setUsers] = useState<UserInfo[] | undefined>(undefined);
|
||||
const [candidates, setCandidates] = useState<Candidate[] | undefined>(undefined);
|
||||
|
||||
useEffect(() => {
|
||||
if (users !== undefined) {
|
||||
if (candidates !== undefined) {
|
||||
return;
|
||||
}
|
||||
const fetchUsers = async () => {
|
||||
const fetchCandidates = async () => {
|
||||
try {
|
||||
let response;
|
||||
response = await fetch(`${connectionBase}/api/u/${sessionId}`, {
|
||||
@ -26,30 +26,26 @@ const CandidateListingPage = (props: BackstoryPageProps) => {
|
||||
if (!response.ok) {
|
||||
throw new Error('Session not found');
|
||||
}
|
||||
const users: UserInfo[] = await response.json();
|
||||
users.forEach(u => {
|
||||
u.type = 'guest';
|
||||
u.isAuthenticated = false;
|
||||
});
|
||||
users.sort((a, b) => {
|
||||
let result = a.last_name.localeCompare(b.last_name);
|
||||
const candidates: Candidate[] = await response.json();
|
||||
candidates.sort((a, b) => {
|
||||
let result = a.lastName.localeCompare(b.lastName);
|
||||
if (result === 0) {
|
||||
result = a.first_name.localeCompare(b.first_name);
|
||||
result = a.firstName.localeCompare(b.firstName);
|
||||
}
|
||||
if (result === 0) {
|
||||
result = a.username.localeCompare(b.username);
|
||||
}
|
||||
return result;
|
||||
});
|
||||
console.log(users);
|
||||
setUsers(users);
|
||||
console.log(candidates);
|
||||
setCandidates(candidates);
|
||||
} catch (err) {
|
||||
setSnack("" + err);
|
||||
}
|
||||
};
|
||||
|
||||
fetchUsers();
|
||||
}, [users, sessionId, setSnack]);
|
||||
fetchCandidates();
|
||||
}, [candidates, sessionId, setSnack]);
|
||||
|
||||
return (
|
||||
<Box sx={{display: "flex", flexDirection: "column"}}>
|
||||
@ -63,7 +59,7 @@ const CandidateListingPage = (props: BackstoryPageProps) => {
|
||||
</Button>
|
||||
</Box>
|
||||
<Box sx={{ display: "flex", gap: 1, flexWrap: "wrap"}}>
|
||||
{users?.map((u, i) =>
|
||||
{candidates?.map((u, i) =>
|
||||
<Box key={`${u.username}`}
|
||||
onClick={(event: React.MouseEvent<HTMLDivElement>) : void => {
|
||||
navigate(`/u/${u.username}`)
|
||||
|
@ -7,12 +7,13 @@ import MuiMarkdown from 'mui-markdown';
|
||||
|
||||
import { BackstoryPageProps } from '../../Components/BackstoryTab';
|
||||
import { Conversation, ConversationHandle } from '../Components/Conversation';
|
||||
import { ChatQuery, Tunables } from '../../Components/ChatQuery';
|
||||
import { ChatQuery } from '../Components/ChatQuery';
|
||||
import { MessageList } from '../../Components/Message';
|
||||
import { CandidateInfo } from 'NewApp/Components/CandidateInfo';
|
||||
import { connectionBase } from '../../Global';
|
||||
import { LoadingComponent } from 'NewApp/Components/LoadingComponent';
|
||||
import { useUser } from "../Components/UserContext";
|
||||
import { Candidate, Tunables } from "../../types/types";
|
||||
import { Navigate } from 'react-router-dom';
|
||||
|
||||
const ChatPage = forwardRef<ConversationHandle, BackstoryPageProps>((props: BackstoryPageProps, ref) => {
|
||||
@ -22,26 +23,27 @@ const ChatPage = forwardRef<ConversationHandle, BackstoryPageProps>((props: Back
|
||||
const isMobile = useMediaQuery(theme.breakpoints.down('md'));
|
||||
const [questions, setQuestions] = useState<React.ReactElement[]>([]);
|
||||
const { user } = useUser();
|
||||
const candidate: Candidate | null = (user && user.userType === "UserType.CANDIDATE") ? user as Candidate : null;
|
||||
|
||||
useEffect(() => {
|
||||
if (!user) {
|
||||
if (!candidate) {
|
||||
return;
|
||||
}
|
||||
|
||||
setQuestions([
|
||||
<Box sx={{ display: "flex", flexDirection: isMobile ? "column" : "row" }}>
|
||||
{user.questions.map(({ question, tunables }, i: number) =>
|
||||
{candidate.questions?.map(({ question, tunables }, i: number) =>
|
||||
<ChatQuery key={i} query={{ prompt: question, tunables: tunables }} submitQuery={submitQuery} />
|
||||
)}
|
||||
</Box>,
|
||||
<Box sx={{ p: 1 }}>
|
||||
<MuiMarkdown>
|
||||
{`As with all LLM interactions, the results may not be 100% accurate. Please contact **${user.full_name}** if you have any questions.`}
|
||||
{`As with all LLM interactions, the results may not be 100% accurate. Please contact **${candidate.fullName}** if you have any questions.`}
|
||||
</MuiMarkdown>
|
||||
</Box>]);
|
||||
}, [user, isMobile, submitQuery]);
|
||||
}, [candidate, isMobile, submitQuery]);
|
||||
|
||||
if (!user) {
|
||||
if (!candidate) {
|
||||
return (<></>);
|
||||
}
|
||||
return (
|
||||
@ -52,7 +54,7 @@ const ChatPage = forwardRef<ConversationHandle, BackstoryPageProps>((props: Back
|
||||
{...{
|
||||
multiline: true,
|
||||
type: "chat",
|
||||
placeholder: `What would you like to know about ${user?.first_name}?`,
|
||||
placeholder: `What would you like to know about ${candidate?.firstName}?`,
|
||||
resetLabel: "chat",
|
||||
sessionId,
|
||||
setSnack,
|
||||
|
@ -1,5 +1,5 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { useNavigate, useLocation, useParams } from 'react-router-dom';
|
||||
import { useNavigate, useLocation, useParams, Navigate } from 'react-router-dom';
|
||||
import {
|
||||
Box,
|
||||
Drawer,
|
||||
@ -17,12 +17,13 @@ import {
|
||||
Card,
|
||||
CardContent,
|
||||
CardActionArea,
|
||||
Divider,
|
||||
useTheme,
|
||||
useMediaQuery
|
||||
} from '@mui/material';
|
||||
import MenuIcon from '@mui/icons-material/Menu';
|
||||
import PersonIcon from '@mui/icons-material/Person';
|
||||
import CloseIcon from '@mui/icons-material/Close';
|
||||
import ArrowBackIcon from '@mui/icons-material/ArrowBack';
|
||||
import DescriptionIcon from '@mui/icons-material/Description';
|
||||
import CodeIcon from '@mui/icons-material/Code';
|
||||
import LayersIcon from '@mui/icons-material/Layers';
|
||||
@ -33,32 +34,11 @@ import ViewQuiltIcon from '@mui/icons-material/ViewQuilt';
|
||||
|
||||
import { Document } from '../Components/Document';
|
||||
import { BackstoryPageProps } from '../../Components/BackstoryTab';
|
||||
import { BackstoryUIOverviewPage } from './BackstoryUIOverviewPage';
|
||||
import { BackstoryAppAnalysisPage } from './BackstoryAppAnalysisPage';
|
||||
import { BackstoryThemeVisualizerPage } from './BackstoryThemeVisualizerPage';
|
||||
import { MockupPage } from './MockupPage';
|
||||
|
||||
// Get appropriate icon for document type
|
||||
const getDocumentIcon = (title: string) => {
|
||||
switch (title) {
|
||||
case 'Docs':
|
||||
return <DescriptionIcon />;
|
||||
case 'BETA':
|
||||
return <CodeIcon />;
|
||||
case 'Resume Generation Architecture':
|
||||
case 'Application Architecture':
|
||||
return <LayersIcon />;
|
||||
case 'UI Overview':
|
||||
case 'UI Mockup':
|
||||
return <DashboardIcon />;
|
||||
case 'Theme Visualizer':
|
||||
return <PaletteIcon />;
|
||||
case 'App Analysis':
|
||||
return <AnalyticsIcon />;
|
||||
default:
|
||||
return <ViewQuiltIcon />;
|
||||
}
|
||||
};
|
||||
import { BackstoryUIOverviewPage } from './documents/BackstoryUIOverviewPage';
|
||||
import { BackstoryAppAnalysisPage } from './documents/BackstoryAppAnalysisPage';
|
||||
import { BackstoryThemeVisualizerPage } from './documents/BackstoryThemeVisualizerPage';
|
||||
import { UserManagement } from './documents/UserManagement';
|
||||
import { MockupPage } from './documents/MockupPage';
|
||||
|
||||
// Sidebar navigation component using MUI components
|
||||
const Sidebar: React.FC<{
|
||||
@ -67,6 +47,8 @@ const Sidebar: React.FC<{
|
||||
onClose?: () => void;
|
||||
isMobile: boolean;
|
||||
}> = ({ currentPage, onDocumentSelect, onClose, isMobile }) => {
|
||||
const navigate = useNavigate();
|
||||
|
||||
// Document definitions
|
||||
|
||||
const handleItemClick = (route: string) => {
|
||||
@ -109,7 +91,7 @@ const Sidebar: React.FC<{
|
||||
{documents.map((doc, index) => (
|
||||
<ListItem key={index} disablePadding>
|
||||
<ListItemButton
|
||||
onClick={() => handleItemClick(doc.route)}
|
||||
onClick={() => doc.route ? handleItemClick(doc.route) : navigate('/')}
|
||||
selected={currentPage === doc.route}
|
||||
sx={{
|
||||
borderRadius: 1,
|
||||
@ -139,22 +121,34 @@ const Sidebar: React.FC<{
|
||||
);
|
||||
};
|
||||
|
||||
const getDocumentIcon = (title: string): React.ReactNode => {
|
||||
const item = documents.find(d => d.title.toLocaleLowerCase() === title.toLocaleLowerCase());
|
||||
if (!item) {
|
||||
throw Error(`${title} does not exist in documents`);
|
||||
}
|
||||
return item.icon || <ViewQuiltIcon />;
|
||||
}
|
||||
|
||||
type DocType = {
|
||||
title: string;
|
||||
route: string;
|
||||
route: string | null;
|
||||
description: string;
|
||||
icon?: React.ReactNode;
|
||||
};
|
||||
|
||||
const documents : DocType[] = [
|
||||
{ title: "About", route: "about", description: "General information about the application and its purpose" },
|
||||
{ title: "BETA", route: "beta", description: "Details about the current beta version and upcoming features" },
|
||||
{ title: "Resume Generation Architecture", route: "resume-generation", description: "Technical overview of how resumes are processed and generated" },
|
||||
{ title: "Application Architecture", route: "about-app", description: "System design and technical stack information" },
|
||||
{ title: "UI Overview", route: "ui-overview", description: "Guide to the user interface components and interactions" },
|
||||
{ title: "Theme Visualizer", route: "theme-visualizer", description: "Explore and customize application themes and visual styles" },
|
||||
{ title: "App Analysis", route: "app-analysis", description: "Statistics and performance metrics of the application" },
|
||||
{ title: "UI Mockup", route: "ui-mockup", description: "Visual previews of interfaces and layout concepts" },
|
||||
{ title: "Backstory", route: null, description: "Backstory", icon: <ArrowBackIcon /> },
|
||||
{ title: "About", route: "about", description: "General information about the application and its purpose", icon: <DescriptionIcon /> },
|
||||
{ title: "BETA", route: "beta", description: "Details about the current beta version and upcoming features", icon: <CodeIcon /> },
|
||||
{ title: "Resume Generation Architecture", route: "resume-generation", description: "Technical overview of how resumes are processed and generated", icon: <LayersIcon /> },
|
||||
{ title: "Application Architecture", route: "about-app", description: "System design and technical stack information", icon: <LayersIcon /> },
|
||||
{ title: "UI Overview", route: "ui-overview", description: "Guide to the user interface components and interactions", icon: <DashboardIcon /> },
|
||||
{ title: "UI Mockup", route: "ui-mockup", description: "Visual previews of interfaces and layout concepts", icon: <DashboardIcon /> },
|
||||
{ title: "Theme Visualizer", route: "theme-visualizer", description: "Explore and customize application themes and visual styles", icon: <PaletteIcon /> },
|
||||
{ title: "App Analysis", route: "app-analysis", description: "Statistics and performance metrics of the application", icon: <AnalyticsIcon /> },
|
||||
{ title: 'Text Mockups', route: "backstory-ui-mockups", description: "Early text mockups of many of the interaction points." },
|
||||
{ title: 'User Management', route: "user-management", description: "User management.", icon: <PersonIcon /> },
|
||||
{ title: 'Type Safety', route: "type-safety", description: "Overview of front/back-end type synchronization.", icon: <CodeIcon /> },
|
||||
];
|
||||
|
||||
const documentFromRoute = (route: string) : DocType | null => {
|
||||
@ -207,6 +201,10 @@ const DocsPage = (props: BackstoryPageProps) => {
|
||||
console.log("Document expanded:", { docName, open, location });
|
||||
if (open) {
|
||||
const parts = location.pathname.split('/');
|
||||
if (docName === "backstory") {
|
||||
navigate('/');
|
||||
return;
|
||||
}
|
||||
if (parts.length > 2) {
|
||||
const basePath = parts.slice(0, -1).join('/');
|
||||
navigate(`${basePath}/${docName}`);
|
||||
@ -228,13 +226,12 @@ const DocsPage = (props: BackstoryPageProps) => {
|
||||
const closeDrawer = () => {
|
||||
setDrawerOpen(false);
|
||||
};
|
||||
|
||||
|
||||
interface DocViewProps {
|
||||
page: string
|
||||
};
|
||||
const DocView = (props: DocViewProps) => {
|
||||
const { page } = props;
|
||||
const { page = 'about' } = props;
|
||||
const title = documentTitleFromRoute(page);
|
||||
const icon = getDocumentIcon(title);
|
||||
|
||||
@ -245,12 +242,12 @@ const DocsPage = (props: BackstoryPageProps) => {
|
||||
{icon}
|
||||
{title}
|
||||
</Box>
|
||||
<Document
|
||||
{page && <Document
|
||||
filepath={`/docs/${page}.md`}
|
||||
sessionId={sessionId}
|
||||
submitQuery={submitQuery}
|
||||
setSnack={setSnack}
|
||||
/>
|
||||
/>}
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
@ -267,6 +264,8 @@ const DocsPage = (props: BackstoryPageProps) => {
|
||||
return (<BackstoryAppAnalysisPage />);
|
||||
case 'ui-mockup':
|
||||
return (<MockupPage />);
|
||||
case 'user-management':
|
||||
return (<UserManagement />);
|
||||
default:
|
||||
if (documentFromRoute(page)) {
|
||||
return <DocView page={page}/>
|
||||
@ -282,10 +281,11 @@ const DocsPage = (props: BackstoryPageProps) => {
|
||||
</Typography>
|
||||
|
||||
<Grid container spacing={2}>
|
||||
{documents.map((doc, index) => (
|
||||
<Grid size={{ xs: 12, sm: 6, md: 4 }} key={index}>
|
||||
{documents.map((doc, index) => {
|
||||
if (doc.route === null) return;
|
||||
return (<Grid size={{ xs: 12, sm: 6, md: 4 }} key={index}>
|
||||
<Card>
|
||||
<CardActionArea onClick={() => onDocumentExpand(doc.route, true)}>
|
||||
<CardActionArea onClick={() => doc.route ? onDocumentExpand(doc.route, true) : navigate('/')}>
|
||||
<CardContent>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', mb: 1 }}>
|
||||
<Box sx={{ color: 'primary.main', mr: 1.5 }}>
|
||||
@ -300,7 +300,8 @@ const DocsPage = (props: BackstoryPageProps) => {
|
||||
</CardActionArea>
|
||||
</Card>
|
||||
</Grid>
|
||||
))}
|
||||
)
|
||||
})}
|
||||
</Grid>
|
||||
</Paper>
|
||||
);
|
||||
|
@ -9,11 +9,11 @@ import CancelIcon from '@mui/icons-material/Cancel';
|
||||
import SendIcon from '@mui/icons-material/Send';
|
||||
import PropagateLoader from 'react-spinners/PropagateLoader';
|
||||
import { CandidateInfo } from '../Components/CandidateInfo';
|
||||
import { Query } from '../../Components/ChatQuery'
|
||||
import { Query } from '../../types/types'
|
||||
import { Quote } from 'NewApp/Components/Quote';
|
||||
import { streamQueryResponse, StreamQueryController } from '../Components/streamQueryResponse';
|
||||
import { connectionBase } from 'Global';
|
||||
import { UserInfo } from '../Components/UserContext';
|
||||
import { Candidate } from '../../types/types';
|
||||
import { BackstoryElementProps } from 'Components/BackstoryTab';
|
||||
import { BackstoryTextField, BackstoryTextFieldRef } from 'Components/BackstoryTextField';
|
||||
import { jsonrepair } from 'jsonrepair';
|
||||
@ -21,29 +21,34 @@ import { StyledMarkdown } from 'NewApp/Components/StyledMarkdown';
|
||||
import { Scrollable } from 'Components/Scrollable';
|
||||
import { Pulse } from 'NewApp/Components/Pulse';
|
||||
|
||||
const emptyUser : UserInfo = {
|
||||
type: 'candidate',
|
||||
const emptyUser: Candidate = {
|
||||
description: "[blank]",
|
||||
rag_content_size: 0,
|
||||
username: "[blank]",
|
||||
first_name: "[blank]",
|
||||
last_name: "[blank]",
|
||||
full_name: "[blank] [blank]",
|
||||
contact_info: {},
|
||||
firstName: "[blank]",
|
||||
lastName: "[blank]",
|
||||
fullName: "[blank] [blank]",
|
||||
questions: [],
|
||||
isAuthenticated: false,
|
||||
has_profile: false,
|
||||
title: '[blank]',
|
||||
location: '[blank]',
|
||||
location: {
|
||||
city: '[blank]',
|
||||
country: '[blank]'
|
||||
},
|
||||
email: '[blank]',
|
||||
phone: '[blank]',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
status: "pending",
|
||||
skills: [],
|
||||
experience: [],
|
||||
education: [],
|
||||
preferredJobTypes: [],
|
||||
languages: [],
|
||||
certifications: []
|
||||
};
|
||||
|
||||
const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
const {sessionId, setSnack, submitQuery} = props;
|
||||
const [streaming, setStreaming] = useState<string>('');
|
||||
const [processing, setProcessing] = useState<boolean>(false);
|
||||
const [user, setUser] = useState<UserInfo | null>(null);
|
||||
const [user, setUser] = useState<Candidate | null>(null);
|
||||
const [prompt, setPrompt] = useState<string>('');
|
||||
const [resume, setResume] = useState<string>('');
|
||||
const [canGenImage, setCanGenImage] = useState<boolean>(false);
|
||||
@ -83,8 +88,8 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
switch (currentState) {
|
||||
case 0: /* Generating persona */
|
||||
let partialUser = JSON.parse(jsonrepair((msg.response || '').trim()));
|
||||
if (!partialUser.full_name) {
|
||||
partialUser.full_name = `${partialUser.first_name} ${partialUser.last_name}`;
|
||||
if (!partialUser.fullName) {
|
||||
partialUser.fullName = `${partialUser.firstName} ${partialUser.lastName}`;
|
||||
}
|
||||
console.log("Setting final user data:", partialUser);
|
||||
setUser({ ...partialUser });
|
||||
@ -156,8 +161,8 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
|
||||
// Effect to trigger profile generation when user data is ready
|
||||
useEffect(() => {
|
||||
console.log("useEffect triggered - shouldGenerateProfile:", shouldGenerateProfile, "user:", user?.username, user?.first_name);
|
||||
if (shouldGenerateProfile && user?.username !== "[blank]" && user?.first_name !== "[blank]") {
|
||||
console.log("useEffect triggered - shouldGenerateProfile:", shouldGenerateProfile, "user:", user?.username, user?.firstName);
|
||||
if (shouldGenerateProfile && user?.username !== "[blank]" && user?.firstName !== "[blank]") {
|
||||
console.log("Triggering profile generation with updated user data:", user);
|
||||
if (controllerRef.current) {
|
||||
console.log("Controller already active, skipping profile generation");
|
||||
@ -165,7 +170,7 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
}
|
||||
|
||||
// Don't generate if we still have blank user data
|
||||
if (user?.username === "[blank]" || user?.first_name === "[blank]") {
|
||||
if (user?.username === "[blank]" || user?.firstName === "[blank]") {
|
||||
console.log("Cannot generate profile: user data not ready");
|
||||
return;
|
||||
}
|
||||
@ -180,7 +185,7 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
controllerRef.current = streamQueryResponse({
|
||||
query: {
|
||||
prompt: imagePrompt,
|
||||
agent_options: {
|
||||
agentOptions: {
|
||||
username: user?.username,
|
||||
filename: "profile.png"
|
||||
}
|
||||
@ -201,7 +206,7 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
setShouldGenerateProfile(false);
|
||||
setUser({
|
||||
...(user ? user : emptyUser),
|
||||
has_profile: true
|
||||
hasProfile: true
|
||||
});
|
||||
}
|
||||
break;
|
||||
@ -249,8 +254,8 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
switch (state) {
|
||||
case 0: /* Generating persona */
|
||||
const partialUser = {...emptyUser, ...JSON.parse(jsonrepair(`${streaming.trim()}...`))};
|
||||
if (!partialUser.full_name) {
|
||||
partialUser.full_name = `${partialUser.first_name} ${partialUser.last_name}`;
|
||||
if (!partialUser.fullName) {
|
||||
partialUser.fullName = `${partialUser.firstName} ${partialUser.lastName}`;
|
||||
}
|
||||
setUser(partialUser);
|
||||
break;
|
||||
@ -315,8 +320,8 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
}}>
|
||||
<Box sx={{ display: "flex", position: "relative", width: "min-content", height: "min-content" }}>
|
||||
<Avatar
|
||||
src={user?.has_profile ? `/api/u/${user.username}/profile/${sessionId}` : ''}
|
||||
alt={`${user?.full_name}'s profile`}
|
||||
src={user?.hasProfile ? `/api/u/${user.username}/profile/${sessionId}` : ''}
|
||||
alt={`${user?.fullName}'s profile`}
|
||||
sx={{
|
||||
width: 80,
|
||||
height: 80,
|
||||
@ -326,7 +331,7 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
{processing && <Pulse sx={{ position: "relative", left: "-80px", top: "0px", mr: "-80px" }} timestamp={timestamp} />}
|
||||
</Box>
|
||||
|
||||
<Tooltip title={`${user?.has_profile ? 'Re-': ''}Generate Picture`}>
|
||||
<Tooltip title={`${user?.hasProfile ? 'Re-' : ''}Generate Picture`}>
|
||||
<span style={{ display: "flex", flexGrow: 1 }}>
|
||||
<Button
|
||||
sx={{ m: 1, gap: 1, justifySelf: "flex-start", alignSelf: "center", flexGrow: 0, maxHeight: "min-content" }}
|
||||
@ -335,7 +340,7 @@ const GenerateCandidate = (props: BackstoryElementProps) => {
|
||||
sessionId === undefined || processing || !canGenImage
|
||||
}
|
||||
onClick={() => { setShouldGenerateProfile(true); }}>
|
||||
{user?.has_profile ? 'Re-': ''}Generate Picture<SendIcon />
|
||||
{user?.hasProfile ? 'Re-' : ''}Generate Picture<SendIcon />
|
||||
</Button>
|
||||
</span>
|
||||
</Tooltip>
|
||||
|
@ -6,10 +6,11 @@ import {
|
||||
} from '@mui/material';
|
||||
import { SxProps } from '@mui/material';
|
||||
|
||||
import { ChatQuery, Query } from '../Components/ChatQuery';
|
||||
import { MessageList, BackstoryMessage } from '../Components/Message';
|
||||
import { ChatQuery } from '../Components/ChatQuery';
|
||||
import { MessageList, BackstoryMessage } from '../../Components/Message';
|
||||
import { Conversation } from '../Components/Conversation';
|
||||
import { BackstoryPageProps } from '../Components/BackstoryTab';
|
||||
import { BackstoryPageProps } from '../../Components/BackstoryTab';
|
||||
import { Query } from "../../types/types";
|
||||
|
||||
import './ResumeBuilderPage.css';
|
||||
|
||||
@ -199,8 +200,8 @@ const ResumeBuilderPage: React.FC<BackstoryPageProps> = (props: BackstoryPagePro
|
||||
console.log('renderJobDescriptionView');
|
||||
const jobDescriptionQuestions = [
|
||||
<Box sx={{ display: "flex", flexDirection: "column" }}>
|
||||
<ChatQuery query={{ prompt: "What are the key skills necessary for this position?", tunables: { enable_tools: false } }} submitQuery={handleJobQuery} />
|
||||
<ChatQuery query={{ prompt: "How much should this position pay (accounting for inflation)?", tunables: { enable_tools: false } }} submitQuery={handleJobQuery} />
|
||||
<ChatQuery query={{ prompt: "What are the key skills necessary for this position?", tunables: { enableTools: false } }} submitQuery={handleJobQuery} />
|
||||
<ChatQuery query={{ prompt: "How much should this position pay (accounting for inflation)?", tunables: { enableTools: false } }} submitQuery={handleJobQuery} />
|
||||
</Box>,
|
||||
];
|
||||
|
||||
@ -271,8 +272,8 @@ See [About > Resume Generation Architecture](/about/resume-generation) for more
|
||||
const renderResumeView = useCallback((sx?: SxProps) => {
|
||||
const resumeQuestions = [
|
||||
<Box sx={{ display: "flex", flexDirection: "column" }}>
|
||||
<ChatQuery query={{ prompt: "Is this resume a good fit for the provided job description?", tunables: { enable_tools: false } }} submitQuery={handleResumeQuery} />
|
||||
<ChatQuery query={{ prompt: "Provide a more concise resume.", tunables: { enable_tools: false } }} submitQuery={handleResumeQuery} />
|
||||
<ChatQuery query={{ prompt: "Is this resume a good fit for the provided job description?", tunables: { enableTools: false } }} submitQuery={handleResumeQuery} />
|
||||
<ChatQuery query={{ prompt: "Provide a more concise resume.", tunables: { enableTools: false } }} submitQuery={handleResumeQuery} />
|
||||
</Box>,
|
||||
];
|
||||
|
||||
@ -320,7 +321,7 @@ See [About > Resume Generation Architecture](/about/resume-generation) for more
|
||||
const renderFactCheckView = useCallback((sx?: SxProps) => {
|
||||
const factsQuestions = [
|
||||
<Box sx={{ display: "flex", flexDirection: "column" }}>
|
||||
<ChatQuery query={{ prompt: "Rewrite the resume to address any discrepancies.", tunables: { enable_tools: false } }} submitQuery={handleFactsQuery} />
|
||||
<ChatQuery query={{ prompt: "Rewrite the resume to address any discrepancies.", tunables: { enableTools: false } }} submitQuery={handleFactsQuery} />
|
||||
</Box>,
|
||||
];
|
||||
|
@ -1,5 +1,5 @@
|
||||
import React from 'react';
|
||||
import { backstoryTheme } from '../BackstoryTheme';
|
||||
import { backstoryTheme } from '../../BackstoryTheme';
|
||||
import { Box, Typography, Paper, Container } from '@mui/material';
|
||||
|
||||
// This component provides a visual demonstration of the theme colors
|
509
frontend/src/NewApp/Pages/documents/UserManagement.tsx
Normal file
509
frontend/src/NewApp/Pages/documents/UserManagement.tsx
Normal file
@ -0,0 +1,509 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import {
|
||||
Box,
|
||||
Typography,
|
||||
Paper,
|
||||
Tabs,
|
||||
Tab,
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableContainer,
|
||||
TableHead,
|
||||
TableRow,
|
||||
Avatar,
|
||||
Chip,
|
||||
Button,
|
||||
Dialog,
|
||||
DialogTitle,
|
||||
DialogContent,
|
||||
DialogActions,
|
||||
TextField,
|
||||
MenuItem,
|
||||
Select,
|
||||
FormControl,
|
||||
InputLabel,
|
||||
Grid
|
||||
} from '@mui/material';
|
||||
import { Person, Business, AssignmentInd } from '@mui/icons-material';
|
||||
|
||||
// Interfaces from the data model
|
||||
interface BaseUser {
|
||||
id: string;
|
||||
email: string;
|
||||
createdAt: Date;
|
||||
lastLogin: Date;
|
||||
profileImage?: string;
|
||||
isActive: boolean;
|
||||
}
|
||||
|
||||
interface Candidate extends BaseUser {
|
||||
type: 'candidate';
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
skills: { id: string; name: string; level: string }[];
|
||||
location: { city: string; country: string; remote?: boolean };
|
||||
}
|
||||
|
||||
interface Employer extends BaseUser {
|
||||
type: 'employer';
|
||||
companyName: string;
|
||||
industry: string;
|
||||
companySize: string;
|
||||
location: { city: string; country: string };
|
||||
companyLogo?: string;
|
||||
}
|
||||
|
||||
type User = Candidate | Employer;
|
||||
|
||||
// Mock data
|
||||
const mockUsers: User[] = [
|
||||
{
|
||||
id: '1',
|
||||
email: 'john.doe@example.com',
|
||||
createdAt: new Date('2023-08-15'),
|
||||
lastLogin: new Date('2023-10-22'),
|
||||
isActive: true,
|
||||
type: 'candidate',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
skills: [
|
||||
{ id: 's1', name: 'React', level: 'advanced' },
|
||||
{ id: 's2', name: 'TypeScript', level: 'intermediate' }
|
||||
],
|
||||
location: { city: 'Austin', country: 'USA' }
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
email: 'sarah.smith@example.com',
|
||||
createdAt: new Date('2023-09-10'),
|
||||
lastLogin: new Date('2023-10-24'),
|
||||
isActive: true,
|
||||
type: 'candidate',
|
||||
firstName: 'Sarah',
|
||||
lastName: 'Smith',
|
||||
skills: [
|
||||
{ id: 's3', name: 'Python', level: 'expert' },
|
||||
{ id: 's4', name: 'Data Science', level: 'advanced' }
|
||||
],
|
||||
location: { city: 'Seattle', country: 'USA', remote: true }
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
email: 'tech@acme.com',
|
||||
createdAt: new Date('2023-07-05'),
|
||||
lastLogin: new Date('2023-10-23'),
|
||||
isActive: true,
|
||||
type: 'employer',
|
||||
companyName: 'Acme Tech',
|
||||
industry: 'Software',
|
||||
companySize: '50-200',
|
||||
location: { city: 'San Francisco', country: 'USA' }
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
email: 'careers@globex.com',
|
||||
createdAt: new Date('2023-08-20'),
|
||||
lastLogin: new Date('2023-10-20'),
|
||||
isActive: false,
|
||||
type: 'employer',
|
||||
companyName: 'Globex Corporation',
|
||||
industry: 'Manufacturing',
|
||||
companySize: '1000+',
|
||||
location: { city: 'Chicago', country: 'USA' }
|
||||
}
|
||||
];
|
||||
|
||||
// Component for User Management
|
||||
const UserManagement: React.FC = () => {
|
||||
const [tabValue, setTabValue] = useState(0);
|
||||
const [users, setUsers] = useState<User[]>(mockUsers);
|
||||
const [openDialog, setOpenDialog] = useState(false);
|
||||
const [selectedUser, setSelectedUser] = useState<User | null>(null);
|
||||
const [aiConfigOpen, setAiConfigOpen] = useState(false);
|
||||
|
||||
// Handle tab change
|
||||
const handleTabChange = (event: React.SyntheticEvent, newValue: number) => {
|
||||
setTabValue(newValue);
|
||||
};
|
||||
|
||||
// Filter users based on tab value
|
||||
const filteredUsers = users.filter(user => {
|
||||
if (tabValue === 0) return true;
|
||||
if (tabValue === 1) return user.type === 'candidate';
|
||||
if (tabValue === 2) return user.type === 'employer';
|
||||
return false;
|
||||
});
|
||||
|
||||
// Handle open user detail dialog
|
||||
const handleOpenUserDetails = (user: User) => {
|
||||
setSelectedUser(user);
|
||||
setOpenDialog(true);
|
||||
};
|
||||
|
||||
// Handle close user detail dialog
|
||||
const handleCloseDialog = () => {
|
||||
setOpenDialog(false);
|
||||
setSelectedUser(null);
|
||||
};
|
||||
|
||||
// Handle open AI configuration dialog
|
||||
const handleOpenAiConfig = (user: User) => {
|
||||
setSelectedUser(user);
|
||||
setAiConfigOpen(true);
|
||||
};
|
||||
|
||||
// Handle close AI configuration dialog
|
||||
const handleCloseAiConfig = () => {
|
||||
setAiConfigOpen(false);
|
||||
};
|
||||
|
||||
// Helper function to get user's name for display
|
||||
const getUserDisplayName = (user: User) => {
|
||||
if (user.type === 'candidate') {
|
||||
return `${user.firstName} ${user.lastName}`;
|
||||
} else {
|
||||
return user.companyName;
|
||||
}
|
||||
};
|
||||
|
||||
// Helper function to format date
|
||||
const formatDate = (date: Date) => {
|
||||
return new Date(date).toLocaleDateString();
|
||||
};
|
||||
|
||||
return (
|
||||
<Box sx={{ width: '100%', p: 3 }}>
|
||||
<Paper sx={{ width: '100%', mb: 2 }}>
|
||||
<Tabs
|
||||
value={tabValue}
|
||||
onChange={handleTabChange}
|
||||
indicatorColor="primary"
|
||||
textColor="primary"
|
||||
centered
|
||||
>
|
||||
<Tab icon={<AssignmentInd />} label="All Users" />
|
||||
<Tab icon={<Person />} label="Candidates" />
|
||||
<Tab icon={<Business />} label="Employers" />
|
||||
</Tabs>
|
||||
|
||||
<TableContainer>
|
||||
<Table>
|
||||
<TableHead>
|
||||
<TableRow>
|
||||
<TableCell>User</TableCell>
|
||||
<TableCell>Type</TableCell>
|
||||
{/* <TableCell>Location</TableCell> */}
|
||||
{/* <TableCell>Created</TableCell> */}
|
||||
<TableCell>Last Login</TableCell>
|
||||
<TableCell>Status</TableCell>
|
||||
<TableCell>Actions</TableCell>
|
||||
</TableRow>
|
||||
</TableHead>
|
||||
<TableBody>
|
||||
{filteredUsers.map((user) => (
|
||||
<TableRow key={user.id} sx={{ "& > td": { whiteSpace: "nowrap"}}}>
|
||||
<TableCell>
|
||||
<Box sx={{ display: 'flex', alignItems: 'flex-start', flexDirection: "column" }}>
|
||||
<Typography>{getUserDisplayName(user)}</Typography>
|
||||
</Box>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Chip
|
||||
label={user.type === 'candidate' ? 'Candidate' : 'Employer'}
|
||||
color={user.type === 'candidate' ? 'primary' : 'secondary'}
|
||||
size="small"
|
||||
/>
|
||||
</TableCell>
|
||||
{/* <TableCell>
|
||||
{user.location.city}, {user.location.country}
|
||||
{user.type === 'candidate' && user.location.remote &&
|
||||
<Chip label="Remote" size="small" sx={{ ml: 1 }} />
|
||||
}
|
||||
</TableCell> */}
|
||||
{/* <TableCell>{formatDate(user.createdAt)}</TableCell> */}
|
||||
<TableCell>{formatDate(user.lastLogin)}</TableCell>
|
||||
<TableCell>
|
||||
<Chip
|
||||
label={user.isActive ? 'Active' : 'Inactive'}
|
||||
color={user.isActive ? 'success' : 'error'}
|
||||
size="small"
|
||||
/>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<Button
|
||||
size="small"
|
||||
variant="outlined"
|
||||
onClick={() => handleOpenUserDetails(user)}
|
||||
sx={{ mr: 1 }}
|
||||
>
|
||||
Details
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
variant="outlined"
|
||||
color="secondary"
|
||||
onClick={() => handleOpenAiConfig(user)}
|
||||
>
|
||||
AI Config
|
||||
</Button>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</TableContainer>
|
||||
</Paper>
|
||||
|
||||
{/* User Details Dialog */}
|
||||
<Dialog open={openDialog} onClose={handleCloseDialog} maxWidth="md" fullWidth>
|
||||
{selectedUser && (
|
||||
<>
|
||||
<DialogTitle>
|
||||
{selectedUser.type === 'candidate' ? 'Candidate Details' : 'Employer Details'}
|
||||
</DialogTitle>
|
||||
<DialogContent dividers>
|
||||
{selectedUser.type === 'candidate' ? (
|
||||
<Grid container spacing={2}>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<Typography variant="subtitle1">Personal Information</Typography>
|
||||
<TextField
|
||||
label="First Name"
|
||||
value={selectedUser.firstName}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
<TextField
|
||||
label="Last Name"
|
||||
value={selectedUser.lastName}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
<TextField
|
||||
label="Email"
|
||||
value={selectedUser.email}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
</Grid>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<Typography variant="subtitle1">Skills</Typography>
|
||||
<Box sx={{ mt: 2 }}>
|
||||
{selectedUser.skills.map((skill) => (
|
||||
<Chip
|
||||
key={skill.id}
|
||||
label={`${skill.name} (${skill.level})`}
|
||||
sx={{ m: 0.5 }}
|
||||
/>
|
||||
))}
|
||||
</Box>
|
||||
</Grid>
|
||||
</Grid>
|
||||
) : (
|
||||
<Grid container spacing={2}>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<Typography variant="subtitle1">Company Information</Typography>
|
||||
<TextField
|
||||
label="Company Name"
|
||||
value={selectedUser.companyName}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
<TextField
|
||||
label="Industry"
|
||||
value={selectedUser.industry}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
<TextField
|
||||
label="Company Size"
|
||||
value={selectedUser.companySize}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
</Grid>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<Typography variant="subtitle1">Contact Information</Typography>
|
||||
<TextField
|
||||
label="Email"
|
||||
value={selectedUser.email}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
<TextField
|
||||
label="Location"
|
||||
value={`${selectedUser.location.city}, ${selectedUser.location.country}`}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ readOnly: true }}
|
||||
/>
|
||||
</Grid>
|
||||
</Grid>
|
||||
)}
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={handleCloseDialog}>Close</Button>
|
||||
</DialogActions>
|
||||
</>
|
||||
)}
|
||||
</Dialog>
|
||||
|
||||
{/* AI Config Dialog */}
|
||||
<Dialog open={aiConfigOpen} onClose={handleCloseAiConfig} maxWidth="md" fullWidth>
|
||||
{selectedUser && (
|
||||
<>
|
||||
<DialogTitle>
|
||||
AI Configuration for {getUserDisplayName(selectedUser)}
|
||||
</DialogTitle>
|
||||
<DialogContent dividers>
|
||||
<Typography variant="subtitle1" gutterBottom>
|
||||
RAG Database Configuration
|
||||
</Typography>
|
||||
|
||||
<FormControl fullWidth margin="normal">
|
||||
<InputLabel id="embedding-model-label">Embedding Model</InputLabel>
|
||||
<Select
|
||||
labelId="embedding-model-label"
|
||||
label="Embedding Model"
|
||||
defaultValue="openai-ada-002"
|
||||
>
|
||||
<MenuItem value="openai-ada-002">OpenAI Ada 002</MenuItem>
|
||||
<MenuItem value="bert-base">BERT Base</MenuItem>
|
||||
<MenuItem value="sentence-t5">Sentence T5</MenuItem>
|
||||
</Select>
|
||||
</FormControl>
|
||||
|
||||
<FormControl fullWidth margin="normal">
|
||||
<InputLabel id="vector-store-label">Vector Store</InputLabel>
|
||||
<Select
|
||||
labelId="vector-store-label"
|
||||
label="Vector Store"
|
||||
defaultValue="pinecone"
|
||||
>
|
||||
<MenuItem value="pinecone">Pinecone</MenuItem>
|
||||
<MenuItem value="qdrant">Qdrant</MenuItem>
|
||||
<MenuItem value="faiss">FAISS</MenuItem>
|
||||
</Select>
|
||||
</FormControl>
|
||||
|
||||
<Typography variant="subtitle1" gutterBottom sx={{ mt: 2 }}>
|
||||
AI Model Parameters
|
||||
</Typography>
|
||||
|
||||
<Grid container spacing={2}>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<FormControl fullWidth margin="normal">
|
||||
<InputLabel id="model-label">AI Model</InputLabel>
|
||||
<Select
|
||||
labelId="model-label"
|
||||
label="AI Model"
|
||||
defaultValue="gpt-4"
|
||||
>
|
||||
<MenuItem value="gpt-4">GPT-4</MenuItem>
|
||||
<MenuItem value="claude-3">Claude 3</MenuItem>
|
||||
<MenuItem value="custom">Custom</MenuItem>
|
||||
</Select>
|
||||
</FormControl>
|
||||
</Grid>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<TextField
|
||||
label="Temperature"
|
||||
type="number"
|
||||
defaultValue={0.7}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ inputProps: { min: 0, max: 1, step: 0.1 } }}
|
||||
/>
|
||||
</Grid>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<TextField
|
||||
label="Max Tokens"
|
||||
type="number"
|
||||
defaultValue={2000}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
/>
|
||||
</Grid>
|
||||
<Grid size={{xs: 12, md: 6}}>
|
||||
<TextField
|
||||
label="Top P"
|
||||
type="number"
|
||||
defaultValue={0.95}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
InputProps={{ inputProps: { min: 0, max: 1, step: 0.05 } }}
|
||||
/>
|
||||
</Grid>
|
||||
</Grid>
|
||||
|
||||
<TextField
|
||||
label="System Prompt"
|
||||
multiline
|
||||
rows={4}
|
||||
fullWidth
|
||||
margin="normal"
|
||||
defaultValue={`You are an AI assistant helping ${selectedUser.type === 'candidate' ? 'job candidates find relevant positions' : 'employers find qualified candidates'}. Be professional, helpful, and concise in your responses.`}
|
||||
/>
|
||||
|
||||
<Typography variant="subtitle1" gutterBottom sx={{ mt: 2 }}>
|
||||
Data Sources
|
||||
</Typography>
|
||||
|
||||
<TableContainer component={Paper} sx={{ mt: 1 }}>
|
||||
<Table size="small">
|
||||
<TableHead>
|
||||
<TableRow>
|
||||
<TableCell>Source Name</TableCell>
|
||||
<TableCell>Type</TableCell>
|
||||
<TableCell>Status</TableCell>
|
||||
<TableCell>Last Refreshed</TableCell>
|
||||
</TableRow>
|
||||
</TableHead>
|
||||
<TableBody>
|
||||
<TableRow>
|
||||
<TableCell>Profile Data</TableCell>
|
||||
<TableCell>Internal</TableCell>
|
||||
<TableCell>
|
||||
<Chip label="Active" color="success" size="small" />
|
||||
</TableCell>
|
||||
<TableCell>Auto</TableCell>
|
||||
</TableRow>
|
||||
<TableRow>
|
||||
<TableCell>Company Documents</TableCell>
|
||||
<TableCell>Document</TableCell>
|
||||
<TableCell>
|
||||
<Chip label="Active" color="success" size="small" />
|
||||
</TableCell>
|
||||
<TableCell>10/20/2024</TableCell>
|
||||
</TableRow>
|
||||
<TableRow>
|
||||
<TableCell>Industry News</TableCell>
|
||||
<TableCell>Web Crawler</TableCell>
|
||||
<TableCell>
|
||||
<Chip label="Active" color="success" size="small" />
|
||||
</TableCell>
|
||||
<TableCell>Daily</TableCell>
|
||||
</TableRow>
|
||||
</TableBody>
|
||||
</Table>
|
||||
</TableContainer>
|
||||
</DialogContent>
|
||||
<DialogActions>
|
||||
<Button onClick={handleCloseAiConfig}>Cancel</Button>
|
||||
<Button variant="contained" color="primary">Save Configuration</Button>
|
||||
</DialogActions>
|
||||
</>
|
||||
)}
|
||||
</Dialog>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export { UserManagement };
|
551
frontend/src/NewApp/TestApp.tsx
Normal file
551
frontend/src/NewApp/TestApp.tsx
Normal file
@ -0,0 +1,551 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import {
|
||||
Box,
|
||||
Container,
|
||||
Paper,
|
||||
TextField,
|
||||
Button,
|
||||
Typography,
|
||||
Grid,
|
||||
Alert,
|
||||
CircularProgress,
|
||||
Tabs,
|
||||
Tab,
|
||||
AppBar,
|
||||
Toolbar,
|
||||
Card,
|
||||
CardContent,
|
||||
Divider,
|
||||
Avatar
|
||||
} from '@mui/material';
|
||||
import { Person, PersonAdd, AccountCircle, ExitToApp } from '@mui/icons-material';
|
||||
|
||||
// Import conversion utilities
|
||||
import {
|
||||
formatApiRequest,
|
||||
parseApiResponse,
|
||||
handleApiResponse,
|
||||
extractApiData,
|
||||
isSuccessResponse,
|
||||
debugConversion,
|
||||
type ApiResponse
|
||||
} from '../types/conversion';
|
||||
|
||||
import {
|
||||
AuthResponse, BaseUser, Guest
|
||||
} from '../types/types'
|
||||
|
||||
|
||||
interface LoginRequest {
|
||||
login: string;
|
||||
password: string;
|
||||
}
|
||||
|
||||
interface RegisterRequest {
|
||||
username: string;
|
||||
email: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
password: string;
|
||||
phone?: string;
|
||||
}
|
||||
|
||||
const API_BASE_URL = 'https://backstory-beta.ketrenos.com/api/1.0';
|
||||
|
||||
const BackstoryTestApp: React.FC = () => {
|
||||
const [currentUser, setCurrentUser] = useState<BaseUser | null>(null);
|
||||
const [guestSession, setGuestSession] = useState<Guest | null>(null);
|
||||
const [tabValue, setTabValue] = useState(0);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [success, setSuccess] = useState<string | null>(null);
|
||||
|
||||
// Login form state
|
||||
const [loginForm, setLoginForm] = useState<LoginRequest>({
|
||||
login: '',
|
||||
password: ''
|
||||
});
|
||||
|
||||
// Register form state
|
||||
const [registerForm, setRegisterForm] = useState<RegisterRequest>({
|
||||
username: '',
|
||||
email: '',
|
||||
firstName: '',
|
||||
lastName: '',
|
||||
password: '',
|
||||
phone: ''
|
||||
});
|
||||
|
||||
// Create guest session on component mount
|
||||
useEffect(() => {
|
||||
createGuestSession();
|
||||
checkExistingAuth();
|
||||
}, []);
|
||||
|
||||
const createGuestSession = () => {
|
||||
const sessionId = `guest_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
const guest: Guest = {
|
||||
sessionId,
|
||||
createdAt: new Date(),
|
||||
lastActivity: new Date(),
|
||||
ipAddress: 'unknown',
|
||||
userAgent: navigator.userAgent
|
||||
};
|
||||
setGuestSession(guest);
|
||||
debugConversion(guest, 'Guest Session');
|
||||
};
|
||||
|
||||
const checkExistingAuth = () => {
|
||||
const token = localStorage.getItem('accessToken');
|
||||
const userData = localStorage.getItem('userData');
|
||||
if (token && userData) {
|
||||
try {
|
||||
const user = JSON.parse(userData);
|
||||
// Convert dates back to Date objects if they're stored as strings
|
||||
if (user.createdAt && typeof user.createdAt === 'string') {
|
||||
user.createdAt = new Date(user.createdAt);
|
||||
}
|
||||
if (user.updatedAt && typeof user.updatedAt === 'string') {
|
||||
user.updatedAt = new Date(user.updatedAt);
|
||||
}
|
||||
if (user.lastLogin && typeof user.lastLogin === 'string') {
|
||||
user.lastLogin = new Date(user.lastLogin);
|
||||
}
|
||||
setCurrentUser(user);
|
||||
} catch (e) {
|
||||
localStorage.removeItem('accessToken');
|
||||
localStorage.removeItem('refreshToken');
|
||||
localStorage.removeItem('userData');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleLogin = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
setSuccess(null);
|
||||
|
||||
try {
|
||||
// Format request data for API (camelCase to snake_case)
|
||||
const requestData = formatApiRequest({
|
||||
login: loginForm.login,
|
||||
password: loginForm.password
|
||||
});
|
||||
|
||||
debugConversion(requestData, 'Login Request');
|
||||
|
||||
const response = await fetch(`${API_BASE_URL}/auth/login`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestData)
|
||||
});
|
||||
|
||||
// Use conversion utility to handle response
|
||||
const authResponse = await handleApiResponse<AuthResponse>(response);
|
||||
|
||||
debugConversion(authResponse, 'Login Response');
|
||||
|
||||
// Store tokens in localStorage
|
||||
localStorage.setItem('accessToken', authResponse.accessToken);
|
||||
localStorage.setItem('refreshToken', authResponse.refreshToken);
|
||||
localStorage.setItem('userData', JSON.stringify(authResponse.user));
|
||||
|
||||
setCurrentUser(authResponse.user);
|
||||
setSuccess('Login successful!');
|
||||
|
||||
// Clear form
|
||||
setLoginForm({ login: '', password: '' });
|
||||
|
||||
} catch (err) {
|
||||
console.error('Login error:', err);
|
||||
setError(err instanceof Error ? err.message : 'Login failed');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRegister = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
setSuccess(null);
|
||||
|
||||
try {
|
||||
const candidateData = {
|
||||
username: registerForm.username,
|
||||
email: registerForm.email,
|
||||
firstName: registerForm.firstName,
|
||||
lastName: registerForm.lastName,
|
||||
fullName: `${registerForm.firstName} ${registerForm.lastName}`,
|
||||
phone: registerForm.phone || undefined,
|
||||
userType: 'candidate',
|
||||
status: 'active',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
skills: [],
|
||||
experience: [],
|
||||
education: [],
|
||||
preferredJobTypes: [],
|
||||
languages: [],
|
||||
certifications: [],
|
||||
location: {
|
||||
city: '',
|
||||
country: '',
|
||||
remote: true
|
||||
}
|
||||
};
|
||||
|
||||
// Format request data for API (camelCase to snake_case, dates to ISO strings)
|
||||
const requestData = formatApiRequest(candidateData);
|
||||
|
||||
debugConversion(requestData, 'Registration Request');
|
||||
|
||||
const response = await fetch(`${API_BASE_URL}/candidates`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestData)
|
||||
});
|
||||
|
||||
// Use conversion utility to handle response
|
||||
const result = await handleApiResponse<any>(response);
|
||||
|
||||
debugConversion(result, 'Registration Response');
|
||||
|
||||
setSuccess('Registration successful! You can now login.');
|
||||
|
||||
// Clear form and switch to login tab
|
||||
setRegisterForm({
|
||||
username: '',
|
||||
email: '',
|
||||
firstName: '',
|
||||
lastName: '',
|
||||
password: '',
|
||||
phone: ''
|
||||
});
|
||||
setTabValue(0);
|
||||
|
||||
} catch (err) {
|
||||
console.error('Registration error:', err);
|
||||
setError(err instanceof Error ? err.message : 'Registration failed');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleLogout = () => {
|
||||
localStorage.removeItem('accessToken');
|
||||
localStorage.removeItem('refreshToken');
|
||||
localStorage.removeItem('userData');
|
||||
setCurrentUser(null);
|
||||
setSuccess('Logged out successfully');
|
||||
createGuestSession();
|
||||
};
|
||||
|
||||
const handleTabChange = (event: React.SyntheticEvent, newValue: number) => {
|
||||
setTabValue(newValue);
|
||||
setError(null);
|
||||
setSuccess(null);
|
||||
};
|
||||
|
||||
// API helper function for authenticated requests
|
||||
const makeAuthenticatedRequest = async (url: string, options: RequestInit = {}) => {
|
||||
const token = localStorage.getItem('accessToken');
|
||||
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { 'Authorization': `Bearer ${token}` }),
|
||||
...options.headers,
|
||||
};
|
||||
|
||||
const response = await fetch(url, {
|
||||
...options,
|
||||
headers,
|
||||
});
|
||||
|
||||
return handleApiResponse(response);
|
||||
};
|
||||
|
||||
// If user is logged in, show their profile
|
||||
if (currentUser) {
|
||||
return (
|
||||
<Box sx={{ flexGrow: 1 }}>
|
||||
<AppBar position="static">
|
||||
<Toolbar>
|
||||
<AccountCircle sx={{ mr: 2 }} />
|
||||
<Typography variant="h6" component="div" sx={{ flexGrow: 1 }}>
|
||||
Welcome, {currentUser.username}
|
||||
</Typography>
|
||||
<Button
|
||||
color="inherit"
|
||||
onClick={handleLogout}
|
||||
startIcon={<ExitToApp />}
|
||||
>
|
||||
Logout
|
||||
</Button>
|
||||
</Toolbar>
|
||||
</AppBar>
|
||||
|
||||
<Container maxWidth="md" sx={{ mt: 4 }}>
|
||||
<Card elevation={3}>
|
||||
<CardContent>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', mb: 3 }}>
|
||||
<Avatar sx={{ mr: 2, bgcolor: 'primary.main' }}>
|
||||
<AccountCircle />
|
||||
</Avatar>
|
||||
<Typography variant="h4" component="h1">
|
||||
User Profile
|
||||
</Typography>
|
||||
</Box>
|
||||
|
||||
<Divider sx={{ mb: 3 }} />
|
||||
|
||||
<Grid container spacing={3}>
|
||||
<Grid size={{ xs: 12, md: 6 }}>
|
||||
<Typography variant="body1" sx={{ mb: 1 }}>
|
||||
<strong>Username:</strong> {currentUser.username}
|
||||
</Typography>
|
||||
</Grid>
|
||||
<Grid size={{ xs: 12, md: 6 }}>
|
||||
<Typography variant="body1" sx={{ mb: 1 }}>
|
||||
<strong>Email:</strong> {currentUser.email}
|
||||
</Typography>
|
||||
</Grid>
|
||||
<Grid size={{ xs: 12, md: 6 }}>
|
||||
<Typography variant="body1" sx={{ mb: 1 }}>
|
||||
<strong>Status:</strong> {currentUser.status}
|
||||
</Typography>
|
||||
</Grid>
|
||||
<Grid size={{ xs: 12, md: 6 }}>
|
||||
<Typography variant="body1" sx={{ mb: 1 }}>
|
||||
<strong>Phone:</strong> {currentUser.phone || 'Not provided'}
|
||||
</Typography>
|
||||
</Grid>
|
||||
<Grid size={{ xs: 12, md: 6 }}>
|
||||
<Typography variant="body1" sx={{ mb: 1 }}>
|
||||
<strong>Last Login:</strong> {
|
||||
currentUser.lastLogin
|
||||
? currentUser.lastLogin.toLocaleString()
|
||||
: 'N/A'
|
||||
}
|
||||
</Typography>
|
||||
</Grid>
|
||||
<Grid size={{ xs: 12, md: 6 }}>
|
||||
<Typography variant="body1" sx={{ mb: 1 }}>
|
||||
<strong>Member Since:</strong> {currentUser.createdAt.toLocaleDateString()}
|
||||
</Typography>
|
||||
</Grid>
|
||||
</Grid>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Container>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
const validateInput = (value: string) => {
|
||||
if (!value) return 'This field is required';
|
||||
|
||||
// Username: alphanumeric, 3-20 characters, no @
|
||||
const usernameRegex = /^[a-zA-Z0-9]{3,20}$/;
|
||||
// Email: basic email format
|
||||
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
||||
|
||||
if (usernameRegex.test(value)) return '';
|
||||
if (emailRegex.test(value)) return '';
|
||||
return 'Enter a valid username (3-20 alphanumeric characters) or email';
|
||||
};
|
||||
|
||||
const handleLoginChange = (event: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const { value } = event.target;
|
||||
setLoginForm({ ...loginForm, login: value });
|
||||
setError(validateInput(value));
|
||||
};
|
||||
|
||||
return (
|
||||
<Container maxWidth="sm" sx={{ mt: 4 }}>
|
||||
<Paper elevation={3} sx={{ p: 4 }}>
|
||||
<Typography variant="h4" component="h1" gutterBottom align="center" color="primary">
|
||||
Backstory Platform
|
||||
</Typography>
|
||||
|
||||
{guestSession && (
|
||||
<Card sx={{ mb: 3, bgcolor: 'grey.50' }} elevation={1}>
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom color="primary">
|
||||
Guest Session Active
|
||||
</Typography>
|
||||
<Typography variant="body2" color="text.secondary" sx={{ mb: 0.5 }}>
|
||||
Session ID: {guestSession.sessionId}
|
||||
</Typography>
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
Created: {guestSession.createdAt.toLocaleString()}
|
||||
</Typography>
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
<Box sx={{ borderBottom: 1, borderColor: 'divider', mb: 3 }}>
|
||||
<Tabs value={tabValue} onChange={handleTabChange} centered>
|
||||
<Tab icon={<Person />} label="Login" />
|
||||
<Tab icon={<PersonAdd />} label="Register" />
|
||||
</Tabs>
|
||||
</Box>
|
||||
|
||||
{error && (
|
||||
<Alert severity="error" sx={{ mb: 2 }}>
|
||||
{error}
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{success && (
|
||||
<Alert severity="success" sx={{ mb: 2 }}>
|
||||
{success}
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{tabValue === 0 && (
|
||||
<Box component="form" onSubmit={handleLogin}>
|
||||
<Typography variant="h5" gutterBottom>
|
||||
Sign In
|
||||
</Typography>
|
||||
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Username or Email"
|
||||
type="text"
|
||||
value={loginForm.login}
|
||||
onChange={handleLoginChange}
|
||||
margin="normal"
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
placeholder="Enter username or email"
|
||||
/>
|
||||
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Password"
|
||||
type="password"
|
||||
value={loginForm.password}
|
||||
onChange={(e) => setLoginForm({ ...loginForm, password: e.target.value })}
|
||||
margin="normal"
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
autoComplete='current-password'
|
||||
/>
|
||||
|
||||
<Button
|
||||
type="submit"
|
||||
fullWidth
|
||||
variant="contained"
|
||||
sx={{ mt: 3, mb: 2 }}
|
||||
disabled={loading}
|
||||
startIcon={loading ? <CircularProgress size={20} color="inherit" /> : <Person />}
|
||||
>
|
||||
{loading ? 'Signing In...' : 'Sign In'}
|
||||
</Button>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{tabValue === 1 && (
|
||||
<Box component="form" onSubmit={handleRegister}>
|
||||
<Typography variant="h5" gutterBottom>
|
||||
Create Account
|
||||
</Typography>
|
||||
|
||||
<Grid container spacing={2} sx={{ mb: 2 }}>
|
||||
<Grid size={{ xs: 12, sm: 6 }}>
|
||||
<TextField
|
||||
fullWidth
|
||||
label="First Name"
|
||||
value={registerForm.firstName}
|
||||
onChange={(e) => setRegisterForm({ ...registerForm, firstName: e.target.value })}
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
/>
|
||||
</Grid>
|
||||
|
||||
<Grid size={{ xs: 12, sm: 6 }}>
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Last Name"
|
||||
value={registerForm.lastName}
|
||||
onChange={(e) => setRegisterForm({ ...registerForm, lastName: e.target.value })}
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
/>
|
||||
</Grid>
|
||||
</Grid>
|
||||
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Username"
|
||||
value={registerForm.username}
|
||||
onChange={(e) => setRegisterForm({ ...registerForm, username: e.target.value })}
|
||||
margin="normal"
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
/>
|
||||
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Email"
|
||||
type="email"
|
||||
value={registerForm.email}
|
||||
onChange={(e) => setRegisterForm({ ...registerForm, email: e.target.value })}
|
||||
margin="normal"
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
/>
|
||||
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Phone (Optional)"
|
||||
type="tel"
|
||||
value={registerForm.phone}
|
||||
onChange={(e) => setRegisterForm({ ...registerForm, phone: e.target.value })}
|
||||
margin="normal"
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
/>
|
||||
|
||||
<TextField
|
||||
fullWidth
|
||||
label="Password"
|
||||
type="password"
|
||||
value={registerForm.password}
|
||||
onChange={(e) => setRegisterForm({ ...registerForm, password: e.target.value })}
|
||||
margin="normal"
|
||||
required
|
||||
disabled={loading}
|
||||
variant="outlined"
|
||||
/>
|
||||
|
||||
<Button
|
||||
type="submit"
|
||||
fullWidth
|
||||
variant="contained"
|
||||
sx={{ mt: 3, mb: 2 }}
|
||||
disabled={loading}
|
||||
startIcon={loading ? <CircularProgress size={20} color="inherit" /> : <PersonAdd />}
|
||||
>
|
||||
{loading ? 'Creating Account...' : 'Create Account'}
|
||||
</Button>
|
||||
</Box>
|
||||
)}
|
||||
</Paper>
|
||||
</Container>
|
||||
);
|
||||
};
|
||||
|
||||
export { BackstoryTestApp };
|
@ -1,6 +1,7 @@
|
||||
import React, { useEffect } from "react";
|
||||
import { Navigate, useParams, useNavigate, useLocation } from "react-router-dom";
|
||||
import { useUser, UserInfo } from "../Components/UserContext";
|
||||
import { useUser } from "../Components/UserContext";
|
||||
import { User } from "../../types/types";
|
||||
import { Box } from "@mui/material";
|
||||
import { connectionBase } from "../../Global";
|
||||
import { SetSnackType } from '../../Components/Snack';
|
||||
@ -23,7 +24,7 @@ const UserRoute: React.FC<UserRouteProps> = (props: UserRouteProps) => {
|
||||
return;
|
||||
}
|
||||
|
||||
const fetchUser = async (username: string): Promise<UserInfo | null> => {
|
||||
const fetchUser = async (username: string): Promise<User | null> => {
|
||||
try {
|
||||
let response;
|
||||
response = await fetch(`${connectionBase}/api/u/${username}/${sessionId}`, {
|
||||
@ -33,12 +34,7 @@ const UserRoute: React.FC<UserRouteProps> = (props: UserRouteProps) => {
|
||||
if (!response.ok) {
|
||||
throw new Error('Session not found');
|
||||
}
|
||||
const user: UserInfo = {
|
||||
...(await response.json()),
|
||||
type: "guest",
|
||||
isAuthenticated: false,
|
||||
logout: () => { },
|
||||
}
|
||||
const user: User = await response.json();
|
||||
console.log("Loaded user:", user);
|
||||
setUser(user);
|
||||
navigate('/chat');
|
||||
|
@ -1,116 +0,0 @@
|
||||
import React, { forwardRef, useEffect, useState } from 'react';
|
||||
import useMediaQuery from '@mui/material/useMediaQuery';
|
||||
import Box from '@mui/material/Box';
|
||||
import { useTheme } from '@mui/material/styles';
|
||||
import MuiMarkdown from 'mui-markdown';
|
||||
|
||||
import { BackstoryPageProps } from '../Components//BackstoryTab';
|
||||
import { Conversation, ConversationHandle } from '../Components/Conversation';
|
||||
import { ChatQuery, Tunables } from '../Components/ChatQuery';
|
||||
import { MessageList } from '../Components/Message';
|
||||
|
||||
import { connectionBase } from '../Global';
|
||||
|
||||
type UserData = {
|
||||
user_name: string;
|
||||
first_name: string;
|
||||
last_name: string;
|
||||
full_name: string;
|
||||
contact_info: Record<string, string>;
|
||||
questions: [{
|
||||
question: string;
|
||||
tunables?: Tunables
|
||||
}]
|
||||
};
|
||||
|
||||
const HomePage = forwardRef<ConversationHandle, BackstoryPageProps>((props: BackstoryPageProps, ref) => {
|
||||
const { sessionId, setSnack, submitQuery } = props;
|
||||
const theme = useTheme();
|
||||
const isMobile = useMediaQuery(theme.breakpoints.down('md'));
|
||||
const [preamble, setPreamble] = useState<MessageList>([]);
|
||||
const [questions, setQuestions] = useState<React.ReactElement[]>([]);
|
||||
const [user, setUser] = useState<UserData | undefined>(undefined)
|
||||
|
||||
useEffect(() => {
|
||||
if (user === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
setPreamble([{
|
||||
role: 'content',
|
||||
title: 'Welcome to Backstory',
|
||||
disableCopy: true,
|
||||
content: `
|
||||
Backstory is a RAG enabled expert system with access to real-time data running
|
||||
self-hosted (no cloud) versions of industry leading Large and Small Language
|
||||
Models (LLM/SLMs). It provides potential employees the opportunity to ask
|
||||
questions about a job candidate, as well as to allow the job candidate to
|
||||
generate resumes based on their personal data.
|
||||
|
||||
This instance has been launched for ${user.full_name}.
|
||||
|
||||
What would you like to know about ${user.first_name}?
|
||||
`,
|
||||
}]);
|
||||
|
||||
setQuestions([
|
||||
<Box sx={{ display: "flex", flexDirection: isMobile ? "column" : "row" }}>
|
||||
{user.questions.map(({ question, tunables }, i: number) =>
|
||||
<ChatQuery key={i} query={{ prompt: question, tunables: tunables }} submitQuery={submitQuery} />
|
||||
)}
|
||||
</Box>,
|
||||
<Box sx={{ p: 1 }}>
|
||||
<MuiMarkdown>
|
||||
{`As with all LLM interactions, the results may not be 100% accurate. Please contact **${user.full_name}** if you have any questions.`}
|
||||
</MuiMarkdown>
|
||||
</Box>]);
|
||||
}, [user, isMobile, submitQuery]);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchUserData = async () => {
|
||||
try {
|
||||
const response = await fetch(connectionBase + `/api/user/${sessionId}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
setUser(data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Error getting user info:', error);
|
||||
setSnack("Unable to obtain user information.", "error");
|
||||
}
|
||||
};
|
||||
fetchUserData();
|
||||
}, [setSnack, sessionId]);
|
||||
|
||||
if (sessionId === undefined || user === undefined) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
return <Conversation
|
||||
ref={ref}
|
||||
{...{
|
||||
multiline: true,
|
||||
type: "chat",
|
||||
placeholder: "What would you like to know about James?",
|
||||
resetLabel: "chat",
|
||||
sessionId,
|
||||
setSnack,
|
||||
preamble: preamble,
|
||||
defaultPrompts: questions,
|
||||
submitQuery,
|
||||
}}
|
||||
/>;
|
||||
});
|
||||
|
||||
export {
|
||||
HomePage
|
||||
};
|
@ -4,9 +4,9 @@ import { ThemeProvider } from '@mui/material/styles';
|
||||
import { backstoryTheme } from './BackstoryTheme';
|
||||
import { BrowserRouter as Router } from "react-router-dom";
|
||||
import { BackstoryApp } from './NewApp/BackstoryApp';
|
||||
import { BackstoryTestApp } from 'NewApp/TestApp';
|
||||
|
||||
import './index.css';
|
||||
import { ViewHeadline } from '@mui/icons-material';
|
||||
import Box from '@mui/material/Box';
|
||||
|
||||
const root = ReactDOM.createRoot(
|
||||
document.getElementById('root') as HTMLElement
|
||||
@ -16,7 +16,8 @@ root.render(
|
||||
<React.StrictMode>
|
||||
<ThemeProvider theme={backstoryTheme}>
|
||||
<Router>
|
||||
<BackstoryApp />
|
||||
{/* <BackstoryApp /> */}
|
||||
<BackstoryTestApp />
|
||||
</Router>
|
||||
</ThemeProvider>
|
||||
</React.StrictMode>
|
||||
|
576
frontend/src/types/api-client.ts
Normal file
576
frontend/src/types/api-client.ts
Normal file
@ -0,0 +1,576 @@
|
||||
/**
|
||||
* API Client Example
|
||||
*
|
||||
* This demonstrates how to use the generated types with the conversion utilities
|
||||
* for seamless frontend-backend communication.
|
||||
*/
|
||||
|
||||
// Import generated types (from running generate_types.py)
|
||||
import * as Types from './types';
|
||||
import {
|
||||
formatApiRequest,
|
||||
parseApiResponse,
|
||||
parsePaginatedResponse,
|
||||
handleApiResponse,
|
||||
handlePaginatedApiResponse,
|
||||
createPaginatedRequest,
|
||||
toUrlParams,
|
||||
extractApiData,
|
||||
ApiResponse,
|
||||
PaginatedResponse,
|
||||
PaginatedRequest
|
||||
} from './conversion';
|
||||
|
||||
export class ApiClient {
|
||||
private baseUrl: string;
|
||||
private defaultHeaders: Record<string, string>;
|
||||
|
||||
constructor(baseUrl: string, authToken?: string) {
|
||||
this.baseUrl = baseUrl.replace(/\/$/, ''); // Remove trailing slash
|
||||
this.defaultHeaders = {
|
||||
'Content-Type': 'application/json',
|
||||
...(authToken && { 'Authorization': `Bearer ${authToken}` })
|
||||
};
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Authentication Methods
|
||||
// ============================
|
||||
|
||||
async login(email: string, password: string): Promise<Types.AuthResponse> {
|
||||
const response = await fetch(`${this.baseUrl}/auth/login`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest({ email, password }))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.AuthResponse>(response);
|
||||
}
|
||||
|
||||
async refreshToken(refreshToken: string): Promise<Types.AuthResponse> {
|
||||
const response = await fetch(`${this.baseUrl}/auth/refresh`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest({ refreshToken }))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.AuthResponse>(response);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Candidate Methods
|
||||
// ============================
|
||||
|
||||
async createCandidate(candidate: Omit<Types.Candidate, 'id' | 'createdAt' | 'updatedAt'>): Promise<Types.Candidate> {
|
||||
const response = await fetch(`${this.baseUrl}/candidates`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(candidate))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Candidate>(response);
|
||||
}
|
||||
|
||||
async getCandidate(id: string): Promise<Types.Candidate> {
|
||||
const response = await fetch(`${this.baseUrl}/candidates/${id}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Candidate>(response);
|
||||
}
|
||||
|
||||
async updateCandidate(id: string, updates: Partial<Types.Candidate>): Promise<Types.Candidate> {
|
||||
const response = await fetch(`${this.baseUrl}/candidates/${id}`, {
|
||||
method: 'PATCH',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(updates))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Candidate>(response);
|
||||
}
|
||||
|
||||
async getCandidates(request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.Candidate>> {
|
||||
const paginatedRequest = createPaginatedRequest(request);
|
||||
const params = toUrlParams(formatApiRequest(paginatedRequest));
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/candidates?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.Candidate>(response);
|
||||
}
|
||||
|
||||
async searchCandidates(query: string, filters?: Record<string, any>): Promise<PaginatedResponse<Types.Candidate>> {
|
||||
const searchRequest = {
|
||||
query,
|
||||
filters,
|
||||
page: 1,
|
||||
limit: 20
|
||||
};
|
||||
|
||||
const params = toUrlParams(formatApiRequest(searchRequest));
|
||||
const response = await fetch(`${this.baseUrl}/candidates/search?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.Candidate>(response);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Employer Methods
|
||||
// ============================
|
||||
|
||||
async createEmployer(employer: Omit<Types.Employer, 'id' | 'createdAt' | 'updatedAt'>): Promise<Types.Employer> {
|
||||
const response = await fetch(`${this.baseUrl}/employers`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(employer))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Employer>(response);
|
||||
}
|
||||
|
||||
async getEmployer(id: string): Promise<Types.Employer> {
|
||||
const response = await fetch(`${this.baseUrl}/employers/${id}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Employer>(response);
|
||||
}
|
||||
|
||||
async updateEmployer(id: string, updates: Partial<Types.Employer>): Promise<Types.Employer> {
|
||||
const response = await fetch(`${this.baseUrl}/employers/${id}`, {
|
||||
method: 'PATCH',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(updates))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Employer>(response);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Job Methods
|
||||
// ============================
|
||||
|
||||
async createJob(job: Omit<Types.Job, 'id' | 'datePosted' | 'views' | 'applicationCount'>): Promise<Types.Job> {
|
||||
const response = await fetch(`${this.baseUrl}/jobs`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(job))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Job>(response);
|
||||
}
|
||||
|
||||
async getJob(id: string): Promise<Types.Job> {
|
||||
const response = await fetch(`${this.baseUrl}/jobs/${id}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.Job>(response);
|
||||
}
|
||||
|
||||
async getJobs(request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.Job>> {
|
||||
const paginatedRequest = createPaginatedRequest(request);
|
||||
const params = toUrlParams(formatApiRequest(paginatedRequest));
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/jobs?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.Job>(response);
|
||||
}
|
||||
|
||||
async getJobsByEmployer(employerId: string, request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.Job>> {
|
||||
const paginatedRequest = createPaginatedRequest(request);
|
||||
const params = toUrlParams(formatApiRequest(paginatedRequest));
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/employers/${employerId}/jobs?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.Job>(response);
|
||||
}
|
||||
|
||||
async searchJobs(query: string, filters?: Record<string, any>): Promise<PaginatedResponse<Types.Job>> {
|
||||
const searchRequest = {
|
||||
query,
|
||||
filters,
|
||||
page: 1,
|
||||
limit: 20
|
||||
};
|
||||
|
||||
const params = toUrlParams(formatApiRequest(searchRequest));
|
||||
const response = await fetch(`${this.baseUrl}/jobs/search?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.Job>(response);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Job Application Methods
|
||||
// ============================
|
||||
|
||||
async applyToJob(application: Omit<Types.JobApplication, 'id' | 'appliedDate' | 'updatedDate' | 'status'>): Promise<Types.JobApplication> {
|
||||
const response = await fetch(`${this.baseUrl}/job-applications`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(application))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.JobApplication>(response);
|
||||
}
|
||||
|
||||
async getJobApplication(id: string): Promise<Types.JobApplication> {
|
||||
const response = await fetch(`${this.baseUrl}/job-applications/${id}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.JobApplication>(response);
|
||||
}
|
||||
|
||||
async getJobApplications(request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.JobApplication>> {
|
||||
const paginatedRequest = createPaginatedRequest(request);
|
||||
const params = toUrlParams(formatApiRequest(paginatedRequest));
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/job-applications?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.JobApplication>(response);
|
||||
}
|
||||
|
||||
async updateApplicationStatus(id: string, status: Types.ApplicationStatus): Promise<Types.JobApplication> {
|
||||
const response = await fetch(`${this.baseUrl}/job-applications/${id}/status`, {
|
||||
method: 'PATCH',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest({ status }))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.JobApplication>(response);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Chat Methods
|
||||
// ============================
|
||||
|
||||
async createChatSession(context: Types.ChatContext): Promise<Types.ChatSession> {
|
||||
const response = await fetch(`${this.baseUrl}/chat/sessions`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest({ context }))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.ChatSession>(response);
|
||||
}
|
||||
|
||||
async getChatSession(id: string): Promise<Types.ChatSession> {
|
||||
const response = await fetch(`${this.baseUrl}/chat/sessions/${id}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.ChatSession>(response);
|
||||
}
|
||||
|
||||
async sendMessage(sessionId: string, content: string): Promise<Types.ChatMessage> {
|
||||
const response = await fetch(`${this.baseUrl}/chat/sessions/${sessionId}/messages`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest({ content }))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.ChatMessage>(response);
|
||||
}
|
||||
|
||||
async getChatMessages(sessionId: string, request: Partial<PaginatedRequest> = {}): Promise<PaginatedResponse<Types.ChatMessage>> {
|
||||
const paginatedRequest = createPaginatedRequest(request);
|
||||
const params = toUrlParams(formatApiRequest(paginatedRequest));
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/chat/sessions/${sessionId}/messages?${params}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handlePaginatedApiResponse<Types.ChatMessage>(response);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// AI Configuration Methods
|
||||
// ============================
|
||||
|
||||
async createAIParameters(params: Omit<Types.AIParameters, 'id' | 'createdAt' | 'updatedAt'>): Promise<Types.AIParameters> {
|
||||
const response = await fetch(`${this.baseUrl}/ai/parameters`, {
|
||||
method: 'POST',
|
||||
headers: this.defaultHeaders,
|
||||
body: JSON.stringify(formatApiRequest(params))
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.AIParameters>(response);
|
||||
}
|
||||
|
||||
async getAIParameters(id: string): Promise<Types.AIParameters> {
|
||||
const response = await fetch(`${this.baseUrl}/ai/parameters/${id}`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.AIParameters>(response);
|
||||
}
|
||||
|
||||
async getUserAIParameters(userId: string): Promise<Types.AIParameters[]> {
|
||||
const response = await fetch(`${this.baseUrl}/users/${userId}/ai/parameters`, {
|
||||
headers: this.defaultHeaders
|
||||
});
|
||||
|
||||
return handleApiResponse<Types.AIParameters[]>(response);
|
||||
}

// ============================
// Error Handling Helper
// ============================

async handleRequest<T>(requestFn: () => Promise<Response>): Promise<T> {
|
||||
try {
|
||||
const response = await requestFn();
|
||||
return await handleApiResponse<T>(response);
|
||||
} catch (error) {
|
||||
// Log error for debugging
|
||||
console.error('API request failed:', error);
|
||||
throw error;
|
||||
}
|
||||
}
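
  // Illustrative sketch (my own addition, hypothetical endpoint): wrapping a raw fetch
  // with handleRequest so errors are logged and the response is parsed like the typed
  // methods above.
  /*
  const health = await apiClient.handleRequest<{ status: string }>(() =>
    fetch(`${apiClient.getBaseUrl()}/health`)
  );
  */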
|
||||
|
||||
// ============================
|
||||
// Utility Methods
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Update authorization token for future requests
|
||||
*/
|
||||
setAuthToken(token: string): void {
|
||||
this.defaultHeaders['Authorization'] = `Bearer ${token}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove authorization token
|
||||
*/
|
||||
clearAuthToken(): void {
|
||||
delete this.defaultHeaders['Authorization'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current base URL
|
||||
*/
|
||||
getBaseUrl(): string {
|
||||
return this.baseUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update base URL
|
||||
*/
|
||||
setBaseUrl(url: string): void {
|
||||
this.baseUrl = url.replace(/\/$/, '');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Usage Examples
|
||||
// ============================
|
||||
|
||||
/*
|
||||
// Initialize API client
|
||||
const apiClient = new ApiClient('https://api.yourjobplatform.com');
|
||||
|
||||
// Login and set auth token
|
||||
try {
|
||||
const authResponse = await apiClient.login('user@example.com', 'password');
|
||||
apiClient.setAuthToken(authResponse.accessToken);
|
||||
console.log('Logged in as:', authResponse.user);
|
||||
} catch (error) {
|
||||
console.error('Login failed:', error);
|
||||
}
|
||||
|
||||
// Create a new candidate
|
||||
try {
|
||||
const newCandidate = await apiClient.createCandidate({
|
||||
email: 'candidate@example.com',
|
||||
status: 'active',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
skills: [],
|
||||
experience: [],
|
||||
education: [],
|
||||
preferredJobTypes: ['full-time'],
|
||||
location: {
|
||||
city: 'San Francisco',
|
||||
country: 'USA'
|
||||
},
|
||||
languages: [],
|
||||
certifications: []
|
||||
});
|
||||
console.log('Created candidate:', newCandidate);
|
||||
} catch (error) {
|
||||
console.error('Failed to create candidate:', error);
|
||||
}
|
||||
|
||||
// Search for jobs
|
||||
try {
|
||||
const jobResults = await apiClient.searchJobs('software engineer', {
|
||||
location: 'San Francisco',
|
||||
experienceLevel: 'senior'
|
||||
});
|
||||
|
||||
console.log(`Found ${jobResults.total} jobs:`);
|
||||
jobResults.data.forEach(job => {
|
||||
console.log(`- ${job.title} at ${job.location.city}`);
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Job search failed:', error);
|
||||
}
|
||||
|
||||
// Get paginated candidates
|
||||
try {
|
||||
const candidates = await apiClient.getCandidates({
|
||||
page: 1,
|
||||
limit: 10,
|
||||
sortBy: 'createdAt',
|
||||
sortOrder: 'desc',
|
||||
filters: {
|
||||
status: 'active',
|
||||
skills: ['javascript', 'react']
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`Page ${candidates.page} of ${candidates.totalPages}`);
|
||||
console.log(`${candidates.data.length} candidates on this page`);
|
||||
} catch (error) {
|
||||
console.error('Failed to get candidates:', error);
|
||||
}
|
||||
|
||||
// Start a chat session
|
||||
try {
|
||||
const chatSession = await apiClient.createChatSession({
|
||||
type: 'job_search',
|
||||
aiParameters: {
|
||||
name: 'Job Search Assistant',
|
||||
model: 'gpt-4',
|
||||
temperature: 0.7,
|
||||
maxTokens: 2000,
|
||||
topP: 0.95,
|
||||
frequencyPenalty: 0.0,
|
||||
presencePenalty: 0.0,
|
||||
isDefault: false,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date()
|
||||
}
|
||||
});
|
||||
|
||||
// Send a message
|
||||
const message = await apiClient.sendMessage(
|
||||
chatSession.id,
|
||||
'Help me find software engineering jobs in San Francisco'
|
||||
);
|
||||
|
||||
console.log('AI Response:', message.content);
|
||||
} catch (error) {
|
||||
console.error('Chat session failed:', error);
|
||||
}
|
||||
*/
|
||||
|
||||
// ============================
|
||||
// React Hook Examples
|
||||
// ============================
|
||||
|
||||
/*
|
||||
// Custom hooks for React applications
|
||||
import { useState, useEffect } from 'react';
|
||||
|
||||
export function useApiClient() {
|
||||
const [client] = useState(() => new ApiClient(process.env.REACT_APP_API_URL || ''));
|
||||
return client;
|
||||
}
|
||||
|
||||
export function useCandidates(request?: Partial<PaginatedRequest>) {
  const [data, setData] = useState<PaginatedResponse<Types.Candidate> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const apiClient = useApiClient();

  // Declared at component scope (not inside useEffect) so it can also be exposed as `refetch`
  const fetchCandidates = async () => {
    try {
      setLoading(true);
      setError(null);
      const result = await apiClient.getCandidates(request);
      setData(result);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to fetch candidates');
    } finally {
      setLoading(false);
    }
  };

  useEffect(() => {
    fetchCandidates();
  }, [request]);

  return { data, loading, error, refetch: fetchCandidates };
}
|
||||
|
||||
export function useJobs(request?: Partial<PaginatedRequest>) {
  const [data, setData] = useState<PaginatedResponse<Types.Job> | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  const apiClient = useApiClient();

  // Declared at component scope so it can also be exposed as `refetch`
  const fetchJobs = async () => {
    try {
      setLoading(true);
      setError(null);
      const result = await apiClient.getJobs(request);
      setData(result);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to fetch jobs');
    } finally {
      setLoading(false);
    }
  };

  useEffect(() => {
    fetchJobs();
  }, [request]);

  return { data, loading, error, refetch: fetchJobs };
}
|
||||
|
||||
// Usage in React component:
|
||||
function CandidateList() {
|
||||
const { data: candidates, loading, error } = useCandidates({
|
||||
limit: 10,
|
||||
sortBy: 'createdAt'
|
||||
});
|
||||
|
||||
if (loading) return <div>Loading candidates...</div>;
|
||||
if (error) return <div>Error: {error}</div>;
|
||||
if (!candidates) return <div>No candidates found</div>;
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h2>Candidates ({candidates.total})</h2>
|
||||
{candidates.data.map(candidate => (
|
||||
<div key={candidate.id}>
|
||||
{candidate.firstName} {candidate.lastName} - {candidate.email}
|
||||
</div>
|
||||
))}
|
||||
|
||||
{candidates.hasMore && (
|
||||
<button>Load More</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
*/
|
||||
|
||||
export default ApiClient;
|
379
frontend/src/types/conversion.ts
Normal file
379
frontend/src/types/conversion.ts
Normal file
@ -0,0 +1,379 @@
|
||||
/**
|
||||
* Type Conversion Utilities
|
||||
*
|
||||
* This file provides utilities to convert between TypeScript and Python/API formats,
|
||||
* ensuring data consistency between frontend and backend.
|
||||
*/
|
||||
|
||||
// ============================
|
||||
// Generic Conversion Functions
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Converts a camelCase object to snake_case for sending to the Python backend
|
||||
*/
|
||||
export function toSnakeCase<T extends Record<string, any>>(obj: T): Record<string, any> {
|
||||
if (!obj || typeof obj !== 'object') return obj;
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => toSnakeCase(item));
|
||||
}
|
||||
|
||||
const result: Record<string, any> = {};
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
const snakeCaseKey = camelToSnake(key);
|
||||
|
||||
if (value === null || value === undefined) {
|
||||
result[snakeCaseKey] = value;
|
||||
} else if (Array.isArray(value)) {
|
||||
result[snakeCaseKey] = value.map(item =>
|
||||
typeof item === 'object' && item !== null ? toSnakeCase(item) : item
|
||||
);
|
||||
} else if (value instanceof Date) {
|
||||
// Convert Date to ISO string for Python datetime
|
||||
result[snakeCaseKey] = value.toISOString();
|
||||
} else if (typeof value === 'object') {
|
||||
result[snakeCaseKey] = toSnakeCase(value);
|
||||
} else {
|
||||
result[snakeCaseKey] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a snake_case object to camelCase for TypeScript/JavaScript
|
||||
*/
|
||||
export function toCamelCase<T>(obj: Record<string, any>): T {
|
||||
if (!obj || typeof obj !== 'object') return obj as T;
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => toCamelCase(item)) as T;
|
||||
}
|
||||
|
||||
const result: Record<string, any> = {};
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
const camelCaseKey = snakeToCamel(key);
|
||||
|
||||
if (value === null || value === undefined) {
|
||||
result[camelCaseKey] = value;
|
||||
} else if (Array.isArray(value)) {
|
||||
result[camelCaseKey] = value.map(item =>
|
||||
typeof item === 'object' && item !== null ? toCamelCase(item) : item
|
||||
);
|
||||
} else if (typeof value === 'string' && isIsoDateString(value)) {
|
||||
// Convert ISO date string to Date object
|
||||
result[camelCaseKey] = new Date(value);
|
||||
} else if (typeof value === 'object') {
|
||||
result[camelCaseKey] = toCamelCase(value);
|
||||
} else {
|
||||
result[camelCaseKey] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return result as T;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to convert camelCase to snake_case
|
||||
*/
|
||||
function camelToSnake(str: string): string {
|
||||
return str.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to convert snake_case to camelCase
|
||||
*/
|
||||
function snakeToCamel(str: string): string {
|
||||
return str.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a string is an ISO date format
|
||||
*/
|
||||
function isIsoDateString(value: string): boolean {
|
||||
if (typeof value !== 'string') return false;
|
||||
return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}:\d{2})?$/.test(value);
|
||||
}
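
// Illustrative round-trip (my own sketch, not part of the original utilities): hypothetical
// field names showing how toSnakeCase/toCamelCase rename keys and handle Date values.
/*
const outgoing = toSnakeCase({ firstName: 'John', lastLogin: new Date('2023-10-22') });
// -> { first_name: 'John', last_login: '2023-10-22T00:00:00.000Z' }

const incoming = toCamelCase<{ firstName: string; lastLogin: Date }>({
  first_name: 'John',
  last_login: '2023-10-22T00:00:00.000Z'
});
// -> { firstName: 'John', lastLogin: new Date('2023-10-22T00:00:00.000Z') }
*/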
|
||||
|
||||
// ============================
|
||||
// API Request/Response Formatting
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Format data for API requests (converts to format expected by Python backend)
|
||||
*/
|
||||
export function formatApiRequest<T extends Record<string, any>>(data: T): Record<string, any> {
|
||||
if (!data) return data;
|
||||
|
||||
// Create a new object to avoid mutating the original
|
||||
const formatted: Record<string, any> = {};
|
||||
|
||||
// Convert dates to ISO strings and handle nested objects
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
if (value instanceof Date) {
|
||||
formatted[key] = value.toISOString();
|
||||
} else if (Array.isArray(value)) {
|
||||
formatted[key] = value.map(item => {
|
||||
if (item instanceof Date) {
|
||||
return item.toISOString();
|
||||
} else if (typeof item === 'object' && item !== null) {
|
||||
return formatApiRequest(item);
|
||||
}
|
||||
return item;
|
||||
});
|
||||
} else if (typeof value === 'object' && value !== null) {
|
||||
formatted[key] = formatApiRequest(value);
|
||||
} else {
|
||||
formatted[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return formatted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse API responses and convert to TypeScript format
|
||||
*/
|
||||
export function parseApiResponse<T>(data: any): ApiResponse<T> {
|
||||
if (!data || typeof data !== 'object') {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: 'INVALID_RESPONSE',
|
||||
message: 'Invalid response format'
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Convert any snake_case fields to camelCase and parse dates
|
||||
const parsed = toCamelCase<ApiResponse<T>>(data);
|
||||
|
||||
return parsed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse paginated API responses
|
||||
*/
|
||||
export function parsePaginatedResponse<T>(
|
||||
data: any,
|
||||
itemParser?: (item: any) => T
|
||||
): ApiResponse<PaginatedResponse<T>> {
|
||||
const apiResponse = parseApiResponse<PaginatedResponse<any>>(data);
|
||||
|
||||
if (!apiResponse.success || !apiResponse.data) {
|
||||
return apiResponse as ApiResponse<PaginatedResponse<T>>;
|
||||
}
|
||||
|
||||
const paginatedData = apiResponse.data;
|
||||
|
||||
// Apply item parser if provided
|
||||
if (itemParser && Array.isArray(paginatedData.data)) {
|
||||
return {
|
||||
...apiResponse,
|
||||
data: {
|
||||
...paginatedData,
|
||||
data: paginatedData.data.map(itemParser)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return apiResponse as ApiResponse<PaginatedResponse<T>>;
|
||||
}
|
||||
|
||||
// ============================
|
||||
// URL Parameter Formatting
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Convert object to URL search parameters
|
||||
*/
|
||||
export function toUrlParams(obj: Record<string, any>): URLSearchParams {
|
||||
const params = new URLSearchParams();
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
if (value !== null && value !== undefined) {
|
||||
if (Array.isArray(value)) {
|
||||
// Handle arrays by adding multiple params with same key
|
||||
value.forEach(item => {
|
||||
params.append(key, String(item));
|
||||
});
|
||||
} else if (value instanceof Date) {
|
||||
params.append(key, value.toISOString());
|
||||
} else if (typeof value === 'object') {
|
||||
// For nested objects, we could flatten or JSON stringify
|
||||
params.append(key, JSON.stringify(value));
|
||||
} else {
|
||||
params.append(key, String(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Validation Helpers
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Check if response is a successful API response
|
||||
*/
|
||||
export function isSuccessResponse<T>(response: any): response is SuccessApiResponse<T> {
|
||||
return response && typeof response === 'object' && response.success === true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if response is an error API response
|
||||
*/
|
||||
export function isErrorResponse(response: any): response is ErrorApiResponse {
|
||||
return response && typeof response === 'object' && response.success === false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract data from API response or throw error
|
||||
*/
|
||||
export function extractApiData<T>(response: ApiResponse<T>): T {
|
||||
if (isSuccessResponse(response) && response.data !== undefined) {
|
||||
return response.data;
|
||||
}
|
||||
|
||||
const errorMessage = isErrorResponse(response)
|
||||
? response.error?.message || 'Unknown API error'
|
||||
: 'Invalid API response format';
|
||||
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Type Definitions (matching generated types)
|
||||
// ============================
|
||||
|
||||
export interface ApiResponse<T> {
|
||||
success: boolean;
|
||||
data?: T;
|
||||
error?: {
|
||||
code: string;
|
||||
message: string;
|
||||
details?: any;
|
||||
};
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface SuccessApiResponse<T> extends ApiResponse<T> {
|
||||
success: true;
|
||||
data: T;
|
||||
}
|
||||
|
||||
export interface ErrorApiResponse extends ApiResponse<never> {
|
||||
success: false;
|
||||
error: {
|
||||
code: string;
|
||||
message: string;
|
||||
details?: any;
|
||||
};
|
||||
}
|
||||
|
||||
export interface PaginatedResponse<T> {
|
||||
data: T[];
|
||||
total: number;
|
||||
page: number;
|
||||
limit: number;
|
||||
totalPages: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
export interface PaginatedRequest {
|
||||
page?: number;
|
||||
limit?: number;
|
||||
sortBy?: string;
|
||||
sortOrder?: 'asc' | 'desc';
|
||||
filters?: Record<string, any>;
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Convenience Functions for Common Operations
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Create a paginated request with defaults
|
||||
*/
|
||||
export function createPaginatedRequest(params: Partial<PaginatedRequest> = {}): PaginatedRequest {
|
||||
return {
|
||||
page: 1,
|
||||
limit: 20,
|
||||
sortOrder: 'desc',
|
||||
...params
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle API response with automatic error throwing
|
||||
*/
|
||||
export async function handleApiResponse<T>(response: Response): Promise<T> {
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`HTTP ${response.status}: ${errorText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const apiResponse = parseApiResponse<T>(data);
|
||||
|
||||
return extractApiData(apiResponse);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle paginated API response
|
||||
*/
|
||||
export async function handlePaginatedApiResponse<T>(
|
||||
response: Response,
|
||||
itemParser?: (item: any) => T
|
||||
): Promise<PaginatedResponse<T>> {
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`HTTP ${response.status}: ${errorText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const apiResponse = parsePaginatedResponse<T>(data, itemParser);
|
||||
|
||||
return extractApiData(apiResponse);
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Debug Utilities
|
||||
// ============================
|
||||
|
||||
/**
|
||||
* Log conversion for debugging
|
||||
*/
|
||||
export function debugConversion<T>(obj: T, label: string = 'Object'): T {
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
console.group(`🔄 ${label} Conversion`);
|
||||
console.log('Original:', obj);
|
||||
if (typeof obj === 'object' && obj !== null) {
|
||||
console.log('Formatted for API:', formatApiRequest(obj as any));
|
||||
}
|
||||
console.groupEnd();
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
export default {
|
||||
toSnakeCase,
|
||||
toCamelCase,
|
||||
formatApiRequest,
|
||||
parseApiResponse,
|
||||
parsePaginatedResponse,
|
||||
toUrlParams,
|
||||
isSuccessResponse,
|
||||
isErrorResponse,
|
||||
extractApiData,
|
||||
createPaginatedRequest,
|
||||
handleApiResponse,
|
||||
handlePaginatedApiResponse,
|
||||
debugConversion
|
||||
};
|
640
frontend/src/types/types.ts
Normal file
@ -0,0 +1,640 @@
|
||||
// Generated TypeScript types from Pydantic models
|
||||
// Source: src/models.py
|
||||
// Generated on: 2025-05-27T23:44:38.806039
|
||||
// DO NOT EDIT MANUALLY - This file is auto-generated
|
||||
|
||||
// ============================
|
||||
// Enums
|
||||
// ============================
|
||||
|
||||
export type AIModelType = "gpt-4" | "gpt-3.5-turbo" | "claude-3" | "claude-3-opus" | "custom";
|
||||
|
||||
export type ActivityType = "login" | "search" | "view_job" | "apply_job" | "message" | "update_profile" | "chat";
|
||||
|
||||
export type ApplicationStatus = "applied" | "reviewing" | "interview" | "offer" | "rejected" | "accepted" | "withdrawn";
|
||||
|
||||
export type ChatContextType = "job_search" | "candidate_screening" | "interview_prep" | "resume_review" | "general";
|
||||
|
||||
export type ChatSenderType = "user" | "ai" | "system";
|
||||
|
||||
export type ColorBlindMode = "protanopia" | "deuteranopia" | "tritanopia" | "none";
|
||||
|
||||
export type DataSourceType = "document" | "website" | "api" | "database" | "internal";
|
||||
|
||||
export type EmploymentType = "full-time" | "part-time" | "contract" | "internship" | "freelance";
|
||||
|
||||
export type FontSize = "small" | "medium" | "large";
|
||||
|
||||
export type InterviewRecommendation = "strong_hire" | "hire" | "no_hire" | "strong_no_hire";
|
||||
|
||||
export type InterviewType = "phone" | "video" | "onsite" | "technical" | "behavioral";
|
||||
|
||||
export type LanguageProficiency = "basic" | "conversational" | "fluent" | "native";
|
||||
|
||||
export type MFAMethod = "app" | "sms" | "email";
|
||||
|
||||
export type NotificationType = "email" | "push" | "in_app";
|
||||
|
||||
export type ProcessingStepType = "extract" | "transform" | "chunk" | "embed" | "filter" | "summarize";
|
||||
|
||||
export type SalaryPeriod = "hour" | "day" | "month" | "year";
|
||||
|
||||
export type SearchType = "similarity" | "mmr" | "hybrid" | "keyword";
|
||||
|
||||
export type SkillLevel = "beginner" | "intermediate" | "advanced" | "expert";
|
||||
|
||||
export type SocialPlatform = "linkedin" | "twitter" | "github" | "dribbble" | "behance" | "website" | "other";
|
||||
|
||||
export type SortOrder = "asc" | "desc";
|
||||
|
||||
export type ThemePreference = "light" | "dark" | "system";
|
||||
|
||||
export type UserGender = "female" | "male";
|
||||
|
||||
export type UserStatus = "active" | "inactive" | "pending" | "banned";
|
||||
|
||||
export type UserType = "candidate" | "employer" | "guest";
|
||||
|
||||
export type VectorStoreType = "pinecone" | "qdrant" | "faiss" | "milvus" | "weaviate";
|
||||
|
||||
// ============================
|
||||
// Interfaces
|
||||
// ============================
|
||||
|
||||
export interface AIParameters {
|
||||
id?: string;
|
||||
userId?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
model: "gpt-4" | "gpt-3.5-turbo" | "claude-3" | "claude-3-opus" | "custom";
|
||||
temperature: number;
|
||||
maxTokens: number;
|
||||
topP: number;
|
||||
frequencyPenalty: number;
|
||||
presencePenalty: number;
|
||||
systemPrompt?: string;
|
||||
isDefault: boolean;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
customModelConfig?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface AccessibilitySettings {
|
||||
fontSize: "small" | "medium" | "large";
|
||||
highContrast: boolean;
|
||||
reduceMotion: boolean;
|
||||
screenReader: boolean;
|
||||
colorBlindMode?: "protanopia" | "deuteranopia" | "tritanopia" | "none";
|
||||
}
|
||||
|
||||
export interface Analytics {
|
||||
id?: string;
|
||||
entityType: "job" | "candidate" | "chat" | "system" | "employer";
|
||||
entityId: string;
|
||||
metricType: string;
|
||||
value: number;
|
||||
timestamp: Date;
|
||||
dimensions?: Record<string, any>;
|
||||
segment?: string;
|
||||
}
|
||||
|
||||
export interface ApiResponse {
|
||||
success: boolean;
|
||||
data?: any;
|
||||
error?: ErrorDetail;
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface ApplicationDecision {
|
||||
status: "accepted" | "rejected";
|
||||
reason?: string;
|
||||
date: Date;
|
||||
by: string;
|
||||
}
|
||||
|
||||
export interface Attachment {
|
||||
id?: string;
|
||||
fileName: string;
|
||||
fileType: string;
|
||||
fileSize: number;
|
||||
fileUrl: string;
|
||||
uploadedAt: Date;
|
||||
isProcessed: boolean;
|
||||
processingResult?: any;
|
||||
thumbnailUrl?: string;
|
||||
}
|
||||
|
||||
export interface AuthResponse {
|
||||
accessToken: string;
|
||||
refreshToken: string;
|
||||
user: BaseUser;
|
||||
expiresAt: number;
|
||||
}
|
||||
|
||||
export interface Authentication {
|
||||
userId: string;
|
||||
passwordHash: string;
|
||||
salt: string;
|
||||
refreshTokens: Array<RefreshToken>;
|
||||
resetPasswordToken?: string;
|
||||
resetPasswordExpiry?: Date;
|
||||
lastPasswordChange: Date;
|
||||
mfaEnabled: boolean;
|
||||
mfaMethod?: "app" | "sms" | "email";
|
||||
mfaSecret?: string;
|
||||
loginAttempts: number;
|
||||
lockedUntil?: Date;
|
||||
}
|
||||
|
||||
export interface BaseUser {
|
||||
id?: string;
|
||||
username: string;
|
||||
email: string;
|
||||
phone?: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
lastLogin?: Date;
|
||||
profileImage?: string;
|
||||
status: "active" | "inactive" | "pending" | "banned";
|
||||
}
|
||||
|
||||
export interface BaseUserWithType {
|
||||
id?: string;
|
||||
username: string;
|
||||
email: string;
|
||||
phone?: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
lastLogin?: Date;
|
||||
profileImage?: string;
|
||||
status: "active" | "inactive" | "pending" | "banned";
|
||||
userType: "candidate" | "employer" | "guest";
|
||||
}
|
||||
|
||||
export interface Candidate {
|
||||
id?: string;
|
||||
username: string;
|
||||
email: string;
|
||||
phone?: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
lastLogin?: Date;
|
||||
profileImage?: string;
|
||||
status: "active" | "inactive" | "pending" | "banned";
|
||||
userType?: "UserType.CANDIDATE";
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
fullName: string;
|
||||
description?: string;
|
||||
resume?: string;
|
||||
skills: Array<Skill>;
|
||||
experience: Array<WorkExperience>;
|
||||
questions?: Array<CandidateQuestion>;
|
||||
education: Array<Education>;
|
||||
preferredJobTypes: Array<"full-time" | "part-time" | "contract" | "internship" | "freelance">;
|
||||
desiredSalary?: DesiredSalary;
|
||||
location: Location;
|
||||
availabilityDate?: Date;
|
||||
summary?: string;
|
||||
languages: Array<Language>;
|
||||
certifications: Array<Certification>;
|
||||
jobApplications?: Array<JobApplication>;
|
||||
hasProfile?: boolean;
|
||||
age?: number;
|
||||
gender?: "female" | "male";
|
||||
ethnicity?: string;
|
||||
}
|
||||
|
||||
export interface CandidateContact {
|
||||
email: string;
|
||||
phone?: string;
|
||||
}
|
||||
|
||||
export interface CandidateListResponse {
|
||||
success: boolean;
|
||||
data?: Array<Candidate>;
|
||||
error?: ErrorDetail;
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface CandidateQuestion {
|
||||
question: string;
|
||||
tunables?: Tunables;
|
||||
}
|
||||
|
||||
export interface CandidateResponse {
|
||||
success: boolean;
|
||||
data?: Candidate;
|
||||
error?: ErrorDetail;
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface Certification {
|
||||
id?: string;
|
||||
name: string;
|
||||
issuingOrganization: string;
|
||||
issueDate: Date;
|
||||
expirationDate?: Date;
|
||||
credentialId?: string;
|
||||
credentialUrl?: string;
|
||||
}
|
||||
|
||||
export interface ChatContext {
|
||||
type: "job_search" | "candidate_screening" | "interview_prep" | "resume_review" | "general";
|
||||
relatedEntityId?: string;
|
||||
relatedEntityType?: "job" | "candidate" | "employer";
|
||||
aiParameters: AIParameters;
|
||||
additionalContext?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface ChatMessage {
|
||||
id?: string;
|
||||
sessionId: string;
|
||||
sender: "user" | "ai" | "system";
|
||||
senderId?: string;
|
||||
content: string;
|
||||
timestamp: Date;
|
||||
attachments?: Array<Attachment>;
|
||||
reactions?: Array<MessageReaction>;
|
||||
isEdited?: boolean;
|
||||
editHistory?: Array<EditHistory>;
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface ChatSession {
|
||||
id?: string;
|
||||
userId?: string;
|
||||
guestId?: string;
|
||||
createdAt: Date;
|
||||
lastActivity: Date;
|
||||
title?: string;
|
||||
context: ChatContext;
|
||||
messages?: Array<ChatMessage>;
|
||||
isArchived?: boolean;
|
||||
systemPrompt?: string;
|
||||
}
|
||||
|
||||
export interface CustomQuestion {
|
||||
question: string;
|
||||
answer: string;
|
||||
}
|
||||
|
||||
export interface DataSourceConfiguration {
|
||||
id?: string;
|
||||
ragConfigId: string;
|
||||
name: string;
|
||||
sourceType: "document" | "website" | "api" | "database" | "internal";
|
||||
connectionDetails: Record<string, any>;
|
||||
processingPipeline: Array<ProcessingStep>;
|
||||
refreshSchedule?: string;
|
||||
lastRefreshed?: Date;
|
||||
status: "active" | "pending" | "error" | "processing";
|
||||
errorDetails?: string;
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface DesiredSalary {
|
||||
amount: number;
|
||||
currency: string;
|
||||
period: "hour" | "day" | "month" | "year";
|
||||
}
|
||||
|
||||
export interface EditHistory {
|
||||
content: string;
|
||||
editedAt: Date;
|
||||
editedBy: string;
|
||||
}
|
||||
|
||||
export interface Education {
|
||||
id?: string;
|
||||
institution: string;
|
||||
degree: string;
|
||||
fieldOfStudy: string;
|
||||
startDate: Date;
|
||||
endDate?: Date;
|
||||
isCurrent: boolean;
|
||||
gpa?: number;
|
||||
achievements?: Array<string>;
|
||||
location?: Location;
|
||||
}
|
||||
|
||||
export interface Employer {
|
||||
id?: string;
|
||||
username: string;
|
||||
email: string;
|
||||
phone?: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
lastLogin?: Date;
|
||||
profileImage?: string;
|
||||
status: "active" | "inactive" | "pending" | "banned";
|
||||
userType?: "UserType.EMPLOYER";
|
||||
companyName: string;
|
||||
industry: string;
|
||||
description?: string;
|
||||
companySize: string;
|
||||
companyDescription: string;
|
||||
websiteUrl?: string;
|
||||
jobs?: Array<Job>;
|
||||
location: Location;
|
||||
companyLogo?: string;
|
||||
socialLinks?: Array<SocialLink>;
|
||||
poc?: PointOfContact;
|
||||
}
|
||||
|
||||
export interface EmployerResponse {
|
||||
success: boolean;
|
||||
data?: Employer;
|
||||
error?: ErrorDetail;
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface ErrorDetail {
|
||||
code: string;
|
||||
message: string;
|
||||
details?: any;
|
||||
}
|
||||
|
||||
export interface Guest {
|
||||
id?: string;
|
||||
sessionId: string;
|
||||
createdAt: Date;
|
||||
lastActivity: Date;
|
||||
convertedToUserId?: string;
|
||||
ipAddress?: string;
|
||||
userAgent?: string;
|
||||
}
|
||||
|
||||
export interface InterviewFeedback {
|
||||
id?: string;
|
||||
interviewId: string;
|
||||
reviewerId: string;
|
||||
technicalScore: number;
|
||||
culturalScore: number;
|
||||
overallScore: number;
|
||||
strengths: Array<string>;
|
||||
weaknesses: Array<string>;
|
||||
recommendation: "strong_hire" | "hire" | "no_hire" | "strong_no_hire";
|
||||
comments: string;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
isVisible: boolean;
|
||||
skillAssessments?: Array<SkillAssessment>;
|
||||
}
|
||||
|
||||
export interface InterviewSchedule {
|
||||
id?: string;
|
||||
applicationId: string;
|
||||
scheduledDate: Date;
|
||||
endDate: Date;
|
||||
interviewType: "phone" | "video" | "onsite" | "technical" | "behavioral";
|
||||
interviewers: Array<string>;
|
||||
location?: string | Location;
|
||||
notes?: string;
|
||||
feedback?: InterviewFeedback;
|
||||
status: "scheduled" | "completed" | "cancelled" | "rescheduled";
|
||||
meetingLink?: string;
|
||||
}
|
||||
|
||||
export interface Job {
|
||||
id?: string;
|
||||
title: string;
|
||||
description: string;
|
||||
responsibilities: Array<string>;
|
||||
requirements: Array<string>;
|
||||
preferredSkills?: Array<string>;
|
||||
employerId: string;
|
||||
location: Location;
|
||||
salaryRange?: SalaryRange;
|
||||
employmentType: "full-time" | "part-time" | "contract" | "internship" | "freelance";
|
||||
datePosted: Date;
|
||||
applicationDeadline?: Date;
|
||||
isActive: boolean;
|
||||
applicants?: Array<JobApplication>;
|
||||
department?: string;
|
||||
reportsTo?: string;
|
||||
benefits?: Array<string>;
|
||||
visaSponsorship?: boolean;
|
||||
featuredUntil?: Date;
|
||||
views?: number;
|
||||
applicationCount?: number;
|
||||
}
|
||||
|
||||
export interface JobApplication {
|
||||
id?: string;
|
||||
jobId: string;
|
||||
candidateId: string;
|
||||
status: "applied" | "reviewing" | "interview" | "offer" | "rejected" | "accepted" | "withdrawn";
|
||||
appliedDate: Date;
|
||||
updatedDate: Date;
|
||||
resumeVersion: string;
|
||||
coverLetter?: string;
|
||||
notes?: string;
|
||||
interviewSchedules?: Array<InterviewSchedule>;
|
||||
customQuestions?: Array<CustomQuestion>;
|
||||
candidateContact?: CandidateContact;
|
||||
decision?: ApplicationDecision;
|
||||
}
|
||||
|
||||
export interface JobListResponse {
|
||||
success: boolean;
|
||||
data?: Array<Job>;
|
||||
error?: ErrorDetail;
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface JobResponse {
|
||||
success: boolean;
|
||||
data?: Job;
|
||||
error?: ErrorDetail;
|
||||
meta?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface Language {
|
||||
language: string;
|
||||
proficiency: "basic" | "conversational" | "fluent" | "native";
|
||||
}
|
||||
|
||||
export interface Location {
|
||||
city: string;
|
||||
state?: string;
|
||||
country: string;
|
||||
postalCode?: string;
|
||||
latitude?: number;
|
||||
longitude?: number;
|
||||
remote?: boolean;
|
||||
hybridOptions?: Array<string>;
|
||||
address?: string;
|
||||
}
|
||||
|
||||
export interface MessageReaction {
|
||||
userId: string;
|
||||
reaction: string;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
export interface NotificationPreference {
|
||||
type: "email" | "push" | "in_app";
|
||||
events: Array<string>;
|
||||
isEnabled: boolean;
|
||||
}
|
||||
|
||||
export interface PaginatedRequest {
|
||||
page?: number;
|
||||
limit?: number;
|
||||
sortBy?: string;
|
||||
sortOrder?: "asc" | "desc";
|
||||
filters?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface PaginatedResponse {
|
||||
data: Array<any>;
|
||||
total: number;
|
||||
page: number;
|
||||
limit: number;
|
||||
totalPages: number;
|
||||
hasMore: boolean;
|
||||
}
|
||||
|
||||
export interface PointOfContact {
|
||||
name: string;
|
||||
position: string;
|
||||
email: string;
|
||||
phone?: string;
|
||||
}
|
||||
|
||||
export interface ProcessingStep {
|
||||
id?: string;
|
||||
type: "extract" | "transform" | "chunk" | "embed" | "filter" | "summarize";
|
||||
parameters: Record<string, any>;
|
||||
order: number;
|
||||
dependsOn?: Array<string>;
|
||||
}
|
||||
|
||||
export interface Query {
|
||||
prompt: string;
|
||||
tunables?: Tunables;
|
||||
agentOptions?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface RAGConfiguration {
|
||||
id?: string;
|
||||
userId: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
dataSourceConfigurations: Array<DataSourceConfiguration>;
|
||||
embeddingModel: string;
|
||||
vectorStoreType: "pinecone" | "qdrant" | "faiss" | "milvus" | "weaviate";
|
||||
retrievalParameters: RetrievalParameters;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
isDefault: boolean;
|
||||
version: number;
|
||||
isActive: boolean;
|
||||
}
|
||||
|
||||
export interface RefreshToken {
|
||||
token: string;
|
||||
expiresAt: Date;
|
||||
device: string;
|
||||
ipAddress: string;
|
||||
isRevoked: boolean;
|
||||
revokedReason?: string;
|
||||
}
|
||||
|
||||
export interface RetrievalParameters {
|
||||
searchType: "similarity" | "mmr" | "hybrid" | "keyword";
|
||||
topK: number;
|
||||
similarityThreshold?: number;
|
||||
rerankerModel?: string;
|
||||
useKeywordBoost: boolean;
|
||||
filterOptions?: Record<string, any>;
|
||||
contextWindow: number;
|
||||
}
|
||||
|
||||
export interface SalaryRange {
|
||||
min: number;
|
||||
max: number;
|
||||
currency: string;
|
||||
period: "hour" | "day" | "month" | "year";
|
||||
isVisible: boolean;
|
||||
}
|
||||
|
||||
export interface SearchQuery {
|
||||
query: string;
|
||||
filters?: Record<string, any>;
|
||||
page?: number;
|
||||
limit?: number;
|
||||
sortBy?: string;
|
||||
sortOrder?: "asc" | "desc";
|
||||
}
|
||||
|
||||
export interface Skill {
|
||||
id?: string;
|
||||
name: string;
|
||||
category: string;
|
||||
level: "beginner" | "intermediate" | "advanced" | "expert";
|
||||
yearsOfExperience?: number;
|
||||
}
|
||||
|
||||
export interface SkillAssessment {
|
||||
skillName: string;
|
||||
score: number;
|
||||
comments?: string;
|
||||
}
|
||||
|
||||
export interface SocialLink {
|
||||
platform: "linkedin" | "twitter" | "github" | "dribbble" | "behance" | "website" | "other";
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface Tunables {
|
||||
enableRAG?: boolean;
|
||||
enableTools?: boolean;
|
||||
enableContext?: boolean;
|
||||
}
|
||||
|
||||
export interface UserActivity {
|
||||
id?: string;
|
||||
userId?: string;
|
||||
guestId?: string;
|
||||
activityType: "login" | "search" | "view_job" | "apply_job" | "message" | "update_profile" | "chat";
|
||||
timestamp: Date;
|
||||
metadata: Record<string, any>;
|
||||
ipAddress?: string;
|
||||
userAgent?: string;
|
||||
sessionId?: string;
|
||||
}
|
||||
|
||||
export interface UserPreference {
|
||||
userId: string;
|
||||
theme: "light" | "dark" | "system";
|
||||
notifications: Array<NotificationPreference>;
|
||||
accessibility: AccessibilitySettings;
|
||||
dashboardLayout?: Record<string, any>;
|
||||
language: string;
|
||||
timezone: string;
|
||||
emailFrequency: "immediate" | "daily" | "weekly" | "never";
|
||||
}
|
||||
|
||||
export interface WorkExperience {
|
||||
id?: string;
|
||||
companyName: string;
|
||||
position: string;
|
||||
startDate: Date;
|
||||
endDate?: Date;
|
||||
isCurrent: boolean;
|
||||
description: string;
|
||||
skills: Array<string>;
|
||||
location: Location;
|
||||
achievements?: Array<string>;
|
||||
}
|
||||
|
||||
// ============================
|
||||
// Union Types
|
||||
// ============================
|
||||
|
||||
export type User = Candidate | Employer;
|
||||
|
||||
// Export all types
|
||||
export type { };
|
@ -3,7 +3,7 @@ global:
|
||||
scrape_configs:
|
||||
- job_name: 'backstory'
|
||||
scrape_interval: 5s
|
||||
metrics_path: /metrics
|
||||
metrics_path: /api/1.0/metrics
|
||||
scheme: https
|
||||
static_configs:
|
||||
- targets: ['backstory:8911']
|
||||
@ -12,7 +12,7 @@ scrape_configs:
|
||||
|
||||
- job_name: 'backstory-prod'
|
||||
scrape_interval: 5s
|
||||
metrics_path: /metrics
|
||||
metrics_path: /api/1.0/metrics
|
||||
scheme: http
|
||||
static_configs:
|
||||
- targets: ['backstory-prod:8911']
|
||||
|
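Note on the metrics_path change above: the backend now serves Prometheus metrics under its API prefix (/api/1.0/metrics) instead of /metrics. Below is a minimal sketch of how that endpoint can be exposed using the prometheus-fastapi-instrumentator package installed in the Dockerfile; the actual wiring in the backend source may differ.

from fastapi import FastAPI
from prometheus_fastapi_instrumentator import Instrumentator

app = FastAPI()

# Expose metrics under the API prefix so the scrape_configs above can reach them.
Instrumentator().instrument(app).expose(app, endpoint="/api/1.0/metrics")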
32
redis.conf
Normal file
@ -0,0 +1,32 @@
|
||||
# Network
|
||||
bind 0.0.0.0
|
||||
port 6379
|
||||
protected-mode no
|
||||
|
||||
# General
|
||||
daemonize no
|
||||
supervised no
|
||||
loglevel notice
|
||||
logfile ""
|
||||
|
||||
# Persistence
|
||||
save 900 1
|
||||
save 300 10
|
||||
save 60 10000
|
||||
stop-writes-on-bgsave-error yes
|
||||
rdbcompression yes
|
||||
rdbchecksum yes
|
||||
dbfilename dump.rdb
|
||||
dir /data
|
||||
|
||||
# Memory management
|
||||
maxmemory 256mb
|
||||
maxmemory-policy allkeys-lru
|
||||
|
||||
# Security (optional - add password protection)
|
||||
# requirepass your_secure_password
|
||||
|
||||
# Performance
|
||||
tcp-keepalive 300
|
||||
timeout 0
|
||||
tcp-backlog 511
|
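A quick way to confirm a running container picked up the redis.conf above is to query it with the same redis.asyncio client the backend uses. A minimal sketch, assuming the compose service name redis and the REDIS_URL default used in this commit:

import asyncio
import os

import redis.asyncio as redis


async def check_redis_config():
    client = redis.Redis.from_url(os.getenv("REDIS_URL", "redis://redis:6379"), decode_responses=True)
    print(await client.ping())                          # True when the server is reachable
    print(await client.config_get("maxmemory-policy"))  # expected: {'maxmemory-policy': 'allkeys-lru'}
    await client.aclose()


asyncio.run(check_redis_config())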
666
src/backend/database.py
Normal file
@ -0,0 +1,666 @@
|
||||
import redis.asyncio as redis # type: ignore
|
||||
from typing import Optional, Dict, List, Any
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timedelta, UTC
|
||||
import asyncio
|
||||
from models import (
|
||||
# User models
|
||||
Candidate, Employer, BaseUser, Guest, Authentication, AuthResponse,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class _RedisManager:
|
||||
def __init__(self):
|
||||
self.redis_client: Optional[redis.Redis] = None
|
||||
self.redis_url = os.getenv("REDIS_URL", "redis://redis:6379")
|
||||
self._connection_pool: Optional[redis.ConnectionPool] = None
|
||||
self._is_connected = False
|
||||
|
||||
async def connect(self):
|
||||
"""Initialize Redis connection with connection pooling"""
|
||||
if self._is_connected and self.redis_client:
|
||||
logger.info("Redis already connected")
|
||||
return
|
||||
|
||||
try:
|
||||
# Create connection pool for better resource management
|
||||
self._connection_pool = redis.ConnectionPool.from_url(
|
||||
self.redis_url,
|
||||
encoding="utf-8",
|
||||
decode_responses=True,
|
||||
max_connections=20,
|
||||
retry_on_timeout=True,
|
||||
socket_keepalive=True,
|
||||
socket_keepalive_options={},
|
||||
health_check_interval=30
|
||||
)
|
||||
|
||||
self.redis_client = redis.Redis(
|
||||
connection_pool=self._connection_pool
|
||||
)
|
||||
|
||||
if not self.redis_client:
|
||||
raise RuntimeError("Redis client not initialized")
|
||||
|
||||
# Test connection
|
||||
await self.redis_client.ping()
|
||||
self._is_connected = True
|
||||
logger.info("Successfully connected to Redis")
|
||||
|
||||
# Log Redis info
|
||||
info = await self.redis_client.info()
|
||||
logger.info(f"Redis version: {info.get('redis_version', 'unknown')}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to Redis: {e}")
|
||||
self._is_connected = False
|
||||
self.redis_client = None
|
||||
self._connection_pool = None
|
||||
raise
|
||||
|
||||
async def disconnect(self):
|
||||
"""Close Redis connection gracefully"""
|
||||
if not self._is_connected:
|
||||
logger.info("Redis already disconnected")
|
||||
return
|
||||
|
||||
try:
|
||||
if self.redis_client:
|
||||
# Wait for any pending operations to complete
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
# Close the client
|
||||
await self.redis_client.aclose()
|
||||
logger.info("Redis client closed")
|
||||
|
||||
if self._connection_pool:
|
||||
# Close the connection pool
|
||||
await self._connection_pool.aclose()
|
||||
logger.info("Redis connection pool closed")
|
||||
|
||||
self._is_connected = False
|
||||
self.redis_client = None
|
||||
self._connection_pool = None
|
||||
|
||||
logger.info("Successfully disconnected from Redis")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during Redis disconnect: {e}")
|
||||
# Force cleanup even if there's an error
|
||||
self._is_connected = False
|
||||
self.redis_client = None
|
||||
self._connection_pool = None
|
||||
|
||||
def get_client(self) -> redis.Redis:
|
||||
"""Get Redis client instance"""
|
||||
if not self._is_connected or not self.redis_client:
|
||||
raise RuntimeError("Redis client not initialized or disconnected")
|
||||
return self.redis_client
|
||||
|
||||
@property
|
||||
def is_connected(self) -> bool:
|
||||
"""Check if Redis is connected"""
|
||||
return self._is_connected and self.redis_client is not None
|
||||
|
||||
async def health_check(self) -> dict:
|
||||
"""Perform health check on Redis connection"""
|
||||
if not self.is_connected:
|
||||
return {"status": "disconnected", "error": "Redis not connected"}
|
||||
|
||||
if not self.redis_client:
|
||||
raise RuntimeError("Redis client not initialized")
|
||||
|
||||
try:
|
||||
# Test basic operations
|
||||
await self.redis_client.ping()
|
||||
info = await self.redis_client.info()
|
||||
|
||||
return {
|
||||
"status": "healthy",
|
||||
"redis_version": info.get("redis_version", "unknown"),
|
||||
"uptime_seconds": info.get("uptime_in_seconds", 0),
|
||||
"connected_clients": info.get("connected_clients", 0),
|
||||
"used_memory_human": info.get("used_memory_human", "unknown"),
|
||||
"total_commands_processed": info.get("total_commands_processed", 0)
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Redis health check failed: {e}")
|
||||
return {"status": "error", "error": str(e)}
|
||||
|
||||
async def force_save(self, background: bool = True) -> bool:
|
||||
"""Force Redis to save data to disk"""
|
||||
if not self.is_connected:
|
||||
logger.warning("Cannot save: Redis not connected")
|
||||
return False
|
||||
|
||||
try:
|
||||
if not self.redis_client:
|
||||
raise RuntimeError("Redis client not initialized")
|
||||
|
||||
if background:
|
||||
# Non-blocking background save
|
||||
await self.redis_client.bgsave()
|
||||
logger.info("Background save initiated")
|
||||
else:
|
||||
# Blocking save
|
||||
await self.redis_client.save()
|
||||
logger.info("Synchronous save completed")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Redis save failed: {e}")
|
||||
return False
|
||||
|
||||
async def get_info(self) -> Optional[dict]:
|
||||
"""Get Redis server information"""
|
||||
if not self.is_connected:
|
||||
return None
|
||||
|
||||
try:
|
||||
if not self.redis_client:
|
||||
raise RuntimeError("Redis client not initialized")
|
||||
return await self.redis_client.info()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get Redis info: {e}")
|
||||
return None
|
||||
|
||||
class RedisDatabase:
|
||||
def __init__(self, redis_client: redis.Redis):
|
||||
self.redis_client = redis_client
|
||||
|
||||
# Redis key prefixes for different data types
|
||||
self.KEY_PREFIXES = {
|
||||
'candidates': 'candidate:',
|
||||
'employers': 'employer:',
|
||||
'jobs': 'job:',
|
||||
'job_applications': 'job_application:',
|
||||
'chat_sessions': 'chat_session:',
|
||||
'chat_messages': 'chat_messages:', # This will store lists
|
||||
'ai_parameters': 'ai_parameters:',
|
||||
'users': 'user:',
|
||||
}
|
||||
|
||||
def _serialize(self, data: Any) -> str:
|
||||
"""Serialize data to JSON string for Redis storage"""
|
||||
if data is None:
|
||||
return ""
|
||||
return json.dumps(data, default=str) # default=str handles datetime objects
|
||||
|
||||
def _deserialize(self, data: str) -> Any:
|
||||
"""Deserialize JSON string from Redis"""
|
||||
if not data:
|
||||
return None
|
||||
try:
|
||||
return json.loads(data)
|
||||
except json.JSONDecodeError:
|
||||
logger.error(f"Failed to deserialize data: {data}")
|
||||
return None
|
||||
|
||||
# Candidates operations
|
||||
async def get_candidate(self, candidate_id: str) -> Optional[Dict]:
|
||||
"""Get candidate by ID"""
|
||||
key = f"{self.KEY_PREFIXES['candidates']}{candidate_id}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_candidate(self, candidate_id: str, candidate_data: Dict):
|
||||
"""Set candidate data"""
|
||||
key = f"{self.KEY_PREFIXES['candidates']}{candidate_id}"
|
||||
await self.redis_client.set(key, self._serialize(candidate_data))
|
||||
|
||||
async def get_all_candidates(self) -> Dict[str, Any]:
|
||||
"""Get all candidates"""
|
||||
pattern = f"{self.KEY_PREFIXES['candidates']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
# Use pipeline for efficiency
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
candidate_id = key.replace(self.KEY_PREFIXES['candidates'], '')
|
||||
result[candidate_id] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_candidate(self, candidate_id: str):
|
||||
"""Delete candidate"""
|
||||
key = f"{self.KEY_PREFIXES['candidates']}{candidate_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Employers operations
|
||||
async def get_employer(self, employer_id: str) -> Optional[Dict]:
|
||||
"""Get employer by ID"""
|
||||
key = f"{self.KEY_PREFIXES['employers']}{employer_id}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_employer(self, employer_id: str, employer_data: Dict):
|
||||
"""Set employer data"""
|
||||
key = f"{self.KEY_PREFIXES['employers']}{employer_id}"
|
||||
await self.redis_client.set(key, self._serialize(employer_data))
|
||||
|
||||
async def get_all_employers(self) -> Dict[str, Any]:
|
||||
"""Get all employers"""
|
||||
pattern = f"{self.KEY_PREFIXES['employers']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
employer_id = key.replace(self.KEY_PREFIXES['employers'], '')
|
||||
result[employer_id] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_employer(self, employer_id: str):
|
||||
"""Delete employer"""
|
||||
key = f"{self.KEY_PREFIXES['employers']}{employer_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Jobs operations
|
||||
async def get_job(self, job_id: str) -> Optional[Dict]:
|
||||
"""Get job by ID"""
|
||||
key = f"{self.KEY_PREFIXES['jobs']}{job_id}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_job(self, job_id: str, job_data: Dict):
|
||||
"""Set job data"""
|
||||
key = f"{self.KEY_PREFIXES['jobs']}{job_id}"
|
||||
await self.redis_client.set(key, self._serialize(job_data))
|
||||
|
||||
async def get_all_jobs(self) -> Dict[str, Any]:
|
||||
"""Get all jobs"""
|
||||
pattern = f"{self.KEY_PREFIXES['jobs']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
job_id = key.replace(self.KEY_PREFIXES['jobs'], '')
|
||||
result[job_id] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_job(self, job_id: str):
|
||||
"""Delete job"""
|
||||
key = f"{self.KEY_PREFIXES['jobs']}{job_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Job Applications operations
|
||||
async def get_job_application(self, application_id: str) -> Optional[Dict]:
|
||||
"""Get job application by ID"""
|
||||
key = f"{self.KEY_PREFIXES['job_applications']}{application_id}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_job_application(self, application_id: str, application_data: Dict):
|
||||
"""Set job application data"""
|
||||
key = f"{self.KEY_PREFIXES['job_applications']}{application_id}"
|
||||
await self.redis_client.set(key, self._serialize(application_data))
|
||||
|
||||
async def get_all_job_applications(self) -> Dict[str, Any]:
|
||||
"""Get all job applications"""
|
||||
pattern = f"{self.KEY_PREFIXES['job_applications']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
app_id = key.replace(self.KEY_PREFIXES['job_applications'], '')
|
||||
result[app_id] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_job_application(self, application_id: str):
|
||||
"""Delete job application"""
|
||||
key = f"{self.KEY_PREFIXES['job_applications']}{application_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Chat Sessions operations
|
||||
async def get_chat_session(self, session_id: str) -> Optional[Dict]:
|
||||
"""Get chat session by ID"""
|
||||
key = f"{self.KEY_PREFIXES['chat_sessions']}{session_id}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_chat_session(self, session_id: str, session_data: Dict):
|
||||
"""Set chat session data"""
|
||||
key = f"{self.KEY_PREFIXES['chat_sessions']}{session_id}"
|
||||
await self.redis_client.set(key, self._serialize(session_data))
|
||||
|
||||
async def get_all_chat_sessions(self) -> Dict[str, Any]:
|
||||
"""Get all chat sessions"""
|
||||
pattern = f"{self.KEY_PREFIXES['chat_sessions']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
session_id = key.replace(self.KEY_PREFIXES['chat_sessions'], '')
|
||||
result[session_id] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_chat_session(self, session_id: str):
|
||||
"""Delete chat session"""
|
||||
key = f"{self.KEY_PREFIXES['chat_sessions']}{session_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Chat Messages operations (stored as lists)
|
||||
async def get_chat_messages(self, session_id: str) -> List[Dict]:
|
||||
"""Get chat messages for a session"""
|
||||
key = f"{self.KEY_PREFIXES['chat_messages']}{session_id}"
|
||||
messages = await self.redis_client.lrange(key, 0, -1)
|
||||
return [self._deserialize(msg) for msg in messages if msg]
|
||||
|
||||
async def add_chat_message(self, session_id: str, message_data: Dict):
|
||||
"""Add a chat message to a session"""
|
||||
key = f"{self.KEY_PREFIXES['chat_messages']}{session_id}"
|
||||
await self.redis_client.rpush(key, self._serialize(message_data))
|
||||
|
||||
async def set_chat_messages(self, session_id: str, messages: List[Dict]):
|
||||
"""Set all chat messages for a session (replaces existing)"""
|
||||
key = f"{self.KEY_PREFIXES['chat_messages']}{session_id}"
|
||||
|
||||
# Clear existing messages
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Add new messages
|
||||
if messages:
|
||||
serialized_messages = [self._serialize(msg) for msg in messages]
|
||||
await self.redis_client.rpush(key, *serialized_messages)
|
||||
|
||||
async def get_all_chat_messages(self) -> Dict[str, List[Dict]]:
|
||||
"""Get all chat messages grouped by session"""
|
||||
pattern = f"{self.KEY_PREFIXES['chat_messages']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
result = {}
|
||||
for key in keys:
|
||||
session_id = key.replace(self.KEY_PREFIXES['chat_messages'], '')
|
||||
messages = await self.redis_client.lrange(key, 0, -1)
|
||||
result[session_id] = [self._deserialize(msg) for msg in messages if msg]
|
||||
|
||||
return result
|
||||
|
||||
async def delete_chat_messages(self, session_id: str):
|
||||
"""Delete all chat messages for a session"""
|
||||
key = f"{self.KEY_PREFIXES['chat_messages']}{session_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# AI Parameters operations
|
||||
async def get_ai_parameters(self, param_id: str) -> Optional[Dict]:
|
||||
"""Get AI parameters by ID"""
|
||||
key = f"{self.KEY_PREFIXES['ai_parameters']}{param_id}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_ai_parameters(self, param_id: str, param_data: Dict):
|
||||
"""Set AI parameters data"""
|
||||
key = f"{self.KEY_PREFIXES['ai_parameters']}{param_id}"
|
||||
await self.redis_client.set(key, self._serialize(param_data))
|
||||
|
||||
async def get_all_ai_parameters(self) -> Dict[str, Any]:
|
||||
"""Get all AI parameters"""
|
||||
pattern = f"{self.KEY_PREFIXES['ai_parameters']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
param_id = key.replace(self.KEY_PREFIXES['ai_parameters'], '')
|
||||
result[param_id] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_ai_parameters(self, param_id: str):
|
||||
"""Delete AI parameters"""
|
||||
key = f"{self.KEY_PREFIXES['ai_parameters']}{param_id}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Users operations (for auth)
|
||||
async def get_user(self, login: str) -> Optional[Dict]:
|
||||
"""Get user by email or username"""
|
||||
if '@' in login:
|
||||
email = login.lower()
|
||||
key = f"{self.KEY_PREFIXES['users']}{email}"
|
||||
else:
|
||||
username = login.lower()
|
||||
key = f"{self.KEY_PREFIXES['users']}{username}"
|
||||
data = await self.redis_client.get(key)
|
||||
return self._deserialize(data) if data else None
|
||||
|
||||
async def set_user(self, user: BaseUser, user_data: Dict):
|
||||
"""Set user data"""
|
||||
email_key = f"{self.KEY_PREFIXES['users']}{user.email.lower()}"
|
||||
username_key = f"{self.KEY_PREFIXES['users']}{user.username.lower()}"
|
||||
serialized_data = self._serialize(user_data)
|
||||
await self.redis_client.set(email_key, serialized_data)
|
||||
await self.redis_client.set(username_key, serialized_data)
|
||||
|
||||
async def get_all_users(self) -> Dict[str, Any]:
|
||||
"""Get all users"""
|
||||
pattern = f"{self.KEY_PREFIXES['users']}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
pipe = self.redis_client.pipeline()
|
||||
for key in keys:
|
||||
pipe.get(key)
|
||||
values = await pipe.execute()
|
||||
|
||||
result = {}
|
||||
for key, value in zip(keys, values):
|
||||
email = key.replace(self.KEY_PREFIXES['users'], '')
|
||||
result[email] = self._deserialize(value)
|
||||
|
||||
return result
|
||||
|
||||
async def delete_user(self, email: str):
|
||||
"""Delete user"""
|
||||
key = f"{self.KEY_PREFIXES['users']}{email}"
|
||||
await self.redis_client.delete(key)
|
||||
|
||||
# Utility methods
|
||||
async def clear_all_data(self):
|
||||
"""Clear all data from Redis (use with caution!)"""
|
||||
for prefix in self.KEY_PREFIXES.values():
|
||||
pattern = f"{prefix}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
if keys:
|
||||
await self.redis_client.delete(*keys)
|
||||
|
||||
async def get_stats(self) -> Dict[str, int]:
|
||||
"""Get statistics about stored data"""
|
||||
stats = {}
|
||||
for data_type, prefix in self.KEY_PREFIXES.items():
|
||||
pattern = f"{prefix}*"
|
||||
keys = await self.redis_client.keys(pattern)
|
||||
stats[data_type] = len(keys)
|
||||
return stats
|
||||
|
||||
# Global Redis manager instance
|
||||
redis_manager = _RedisManager()
|
||||
|
||||
class DatabaseManager:
|
||||
"""Enhanced database manager with graceful shutdown capabilities"""
|
||||
|
||||
def __init__(self):
|
||||
self.db: Optional[RedisDatabase] = None
|
||||
self._shutdown_initiated = False
|
||||
self._active_requests = 0
|
||||
self._shutdown_timeout = int(os.getenv("SHUTDOWN_TIMEOUT", "30")) # seconds
|
||||
self._backup_on_shutdown = os.getenv("BACKUP_ON_SHUTDOWN", "false").lower() == "true"
|
||||
|
||||
async def initialize(self):
|
||||
"""Initialize database connection"""
|
||||
try:
|
||||
# Connect to Redis
|
||||
await redis_manager.connect()
|
||||
logger.info("Redis connection established")
|
||||
|
||||
# Create database instance
|
||||
self.db = RedisDatabase(redis_manager.get_client())
|
||||
|
||||
# Test connection and log stats
|
||||
if not redis_manager.redis_client:
|
||||
raise RuntimeError("Redis client not initialized")
|
||||
await redis_manager.redis_client.ping()
|
||||
stats = await self.db.get_stats()
|
||||
logger.info(f"Database initialized successfully. Stats: {stats}")
|
||||
|
||||
return self.db
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to initialize database: {e}")
|
||||
raise
|
||||
|
||||
async def backup_data(self) -> Optional[str]:
|
||||
"""Create a backup of critical data before shutdown"""
|
||||
if not self.db:
|
||||
return None
|
||||
|
||||
try:
|
||||
backup_data = {
|
||||
"timestamp": datetime.now(UTC).isoformat(),
|
||||
"stats": await self.db.get_stats(),
|
||||
"users": await self.db.get_all_users(),
|
||||
# Add other critical data as needed
|
||||
}
|
||||
|
||||
backup_filename = f"backup_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}.json"
|
||||
|
||||
# Save to local file (you might want to save to cloud storage instead)
|
||||
with open(backup_filename, 'w') as f:
|
||||
json.dump(backup_data, f, indent=2, default=str)
|
||||
|
||||
logger.info(f"Backup created: {backup_filename}")
|
||||
return backup_filename
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Backup failed: {e}")
|
||||
return None
|
||||
|
||||
async def graceful_shutdown(self):
|
||||
"""Perform graceful shutdown with optional backup"""
|
||||
self._shutdown_initiated = True
|
||||
logger.info("Initiating graceful shutdown...")
|
||||
|
||||
# Wait for active requests to complete (with timeout)
|
||||
wait_time = 0
|
||||
while self._active_requests > 0 and wait_time < self._shutdown_timeout:
|
||||
logger.info(f"Waiting for {self._active_requests} active requests to complete...")
|
||||
await asyncio.sleep(1)
|
||||
wait_time += 1
|
||||
|
||||
if self._active_requests > 0:
|
||||
logger.warning(f"Shutdown timeout reached. {self._active_requests} requests may be interrupted.")
|
||||
|
||||
# Create backup if configured
|
||||
if self._backup_on_shutdown:
|
||||
backup_file = await self.backup_data()
|
||||
if backup_file:
|
||||
logger.info(f"Pre-shutdown backup completed: {backup_file}")
|
||||
|
||||
# Force Redis to save data to disk
|
||||
try:
|
||||
if redis_manager.redis_client:
|
||||
# Try BGSAVE first (non-blocking)
|
||||
try:
|
||||
await redis_manager.redis_client.bgsave()
|
||||
logger.info("Background save initiated")
|
||||
|
||||
# Wait a bit for background save to start
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Background save failed, trying synchronous save: {e}")
|
||||
try:
|
||||
# Fallback to synchronous save
|
||||
await redis_manager.redis_client.save()
|
||||
logger.info("Synchronous save completed")
|
||||
except Exception as e2:
|
||||
logger.warning(f"Synchronous save also failed (Redis persistence may be disabled): {e2}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during Redis save: {e}")
|
||||
|
||||
# Close Redis connection
|
||||
try:
|
||||
await redis_manager.disconnect()
|
||||
logger.info("Redis connection closed successfully")
|
||||
except Exception as e:
|
||||
logger.error(f"Error closing Redis connection: {e}")
|
||||
|
||||
logger.info("Graceful shutdown completed")
|
||||
|
||||
def increment_requests(self):
|
||||
"""Track active requests"""
|
||||
self._active_requests += 1
|
||||
|
||||
def decrement_requests(self):
|
||||
"""Track completed requests"""
|
||||
self._active_requests = max(0, self._active_requests - 1)
|
||||
|
||||
@property
|
||||
def is_shutting_down(self) -> bool:
|
||||
"""Check if shutdown is in progress"""
|
||||
return self._shutdown_initiated
|
||||
|
||||
def get_database(self) -> RedisDatabase:
|
||||
"""Get database instance"""
|
||||
if self.db is None:
|
||||
raise RuntimeError("Database not initialized")
|
||||
if self._shutdown_initiated:
|
||||
raise RuntimeError("Application is shutting down")
|
||||
return self.db
|
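For context, a sketch of how DatabaseManager above is meant to plug into the FastAPI application lifecycle. The names database_manager and lifespan are illustrative; the real wiring lives in the backend entry point, which is not part of this file:

from contextlib import asynccontextmanager

from fastapi import FastAPI

from database import DatabaseManager

database_manager = DatabaseManager()


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Connect to Redis, build the RedisDatabase wrapper, and make it available to routes.
    app.state.db = await database_manager.initialize()
    try:
        yield
    finally:
        # Wait for in-flight requests, optionally back up, persist, and disconnect.
        await database_manager.graceful_shutdown()


app = FastAPI(lifespan=lifespan)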
63
src/backend/defines.py
Normal file
@ -0,0 +1,63 @@
|
||||
import os
|
||||
|
||||
ollama_api_url = "http://ollama:11434" # Default Ollama local endpoint
|
||||
|
||||
user_dir = "/opt/backstory/users"
|
||||
user_info_file = "info.json" # Relative to "{user_dir}/{user}"
|
||||
default_username = "jketreno"
|
||||
rag_content_dir = "rag-content" # Relative to "{user_dir}/{user}"
|
||||
# Path to candidate full resume
|
||||
resume_doc_dir = f"{rag_content_dir}/resume" # Relative to "{user_dir}/{user}"
|
||||
resume_doc = "resume.md"
|
||||
persist_directory = "db" # Relative to "{user_dir}/{user}"
|
||||
|
||||
# Model name License Notes
|
||||
# model = "deepseek-r1:7b" # MIT Tool calls don"t work
|
||||
# model = "gemma3:4b" # Gemma Requires newer ollama https://ai.google.dev/gemma/terms
|
||||
# model = "llama3.2" # Llama Good results; qwen seems slightly better https://huggingface.co/meta-llama/Llama-3.2-1B/blob/main/LICENSE.txt
|
||||
# model = "mistral:7b" # Apache 2.0 Tool calls don"t work
|
||||
model = "qwen2.5:7b" # Apache 2.0 Good results
|
||||
# model = "qwen3:8b" # Apache 2.0 Requires newer ollama
|
||||
model = os.getenv("MODEL_NAME", model)
|
||||
|
||||
# Embedding model for producing vectors to use in RAG
|
||||
embedding_model = os.getenv("EMBEDDING_MODEL_NAME", "mxbai-embed-large")
|
||||
|
||||
# Maximum context size to allow the LLM to use. This starts
|
||||
# smaller and will go up if different agents are requesting larger
|
||||
# contexts. Changing context size requires the LLM to reload, which
|
||||
# can take a few seconds.
|
||||
max_context = 2048 * 8 * 2
|
||||
|
||||
# Where to store session json files
|
||||
context_dir = "/opt/backstory/sessions"
|
||||
|
||||
# Location of frontend container's build output mapped into the container
|
||||
static_content = os.getenv("STATIC_DIRECTORY", "/opt/backstory/frontend/deployed")
|
||||
|
||||
logging_level = os.getenv("LOGGING_LEVEL", "INFO").upper()
|
||||
|
||||
# RAG and Vector DB settings
|
||||
## Where to read RAG content
|
||||
|
||||
chunk_buffer = 5 # Number of lines before and after chunk beyond the portion used in embedding (to return to callers)
|
||||
|
||||
# Maximum number of entries for ChromaDB to find
|
||||
default_rag_top_k = 50
|
||||
|
||||
# Cosine Distance Equivalent Similarity Retrieval Characteristics
|
||||
# 0.2 - 0.3 0.85 - 0.90 Very strict, highly precise results only
|
||||
# 0.3 - 0.5 0.75 - 0.85 Strong relevance, good precision
|
||||
# 0.5 - 0.7 0.65 - 0.75 Balanced precision/recall
|
||||
# 0.7 - 0.9 0.55 - 0.65 Higher recall, more inclusive
|
||||
# 0.9 - 1.2 0.40 - 0.55 Very inclusive, may include tangential content
|
||||
default_rag_threshold = 0.75
|
||||
|
||||
# Only used for testing; backstory-prod does not use this
|
||||
key_path = "/opt/backstory/keys/key.pem"
|
||||
cert_path = "/opt/backstory/keys/cert.pem"
|
||||
|
||||
host = os.getenv("BACKSTORY_HOST", "0.0.0.0")
|
||||
port = int(os.getenv("BACKSTORY_PORT", "8911"))
|
||||
api_prefix = "/api/1.0"
|
||||
debug = os.getenv("BACKSTORY_DEBUG", "false").lower() in ("true", "1", "yes")
|
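The cosine-distance table in defines.py above is consistent with the mapping similarity ≈ 1 - distance / 2. A small sketch (an interpretation of the comment, not code from this commit):

def distance_to_similarity(cosine_distance: float) -> float:
    # Matches the pairs listed in the comment, e.g. 0.5 -> 0.75 and 0.7 -> 0.65.
    return 1.0 - cosine_distance / 2.0


assert abs(distance_to_similarity(0.5) - 0.75) < 1e-9
assert abs(distance_to_similarity(1.2) - 0.40) < 1e-9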
207
src/backend/focused_test.py
Normal file
@ -0,0 +1,207 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
Focused test script that tests the most important functionality
|
||||
without getting caught up in serialization format complexities
|
||||
"""
|
||||
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from models import (
|
||||
UserStatus, UserType, SkillLevel, EmploymentType,
|
||||
Candidate, Employer, Location, Skill, AIParameters, AIModelType
|
||||
)
|
||||
|
||||
def test_model_creation():
|
||||
"""Test that we can create models successfully"""
|
||||
print("🧪 Testing model creation...")
|
||||
|
||||
# Create supporting objects
|
||||
location = Location(city="Austin", country="USA")
|
||||
skill = Skill(name="Python", category="Programming", level=SkillLevel.ADVANCED)
|
||||
|
||||
# Create candidate
|
||||
candidate = Candidate(
|
||||
email="test@example.com",
|
||||
username="test_candidate",
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
status=UserStatus.ACTIVE,
|
||||
firstName="John",
|
||||
lastName="Doe",
|
||||
fullName="John Doe",
|
||||
skills=[skill],
|
||||
experience=[],
|
||||
education=[],
|
||||
preferredJobTypes=[EmploymentType.FULL_TIME],
|
||||
location=location,
|
||||
languages=[],
|
||||
certifications=[]
|
||||
)
|
||||
|
||||
# Create employer
|
||||
employer = Employer(
|
||||
email="hr@company.com",
|
||||
username="test_employer",
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
status=UserStatus.ACTIVE,
|
||||
companyName="Test Company",
|
||||
industry="Technology",
|
||||
companySize="50-200",
|
||||
companyDescription="A test company",
|
||||
location=location
|
||||
)
|
||||
|
||||
print(f"✅ Candidate: {candidate.first_name} {candidate.last_name}")
|
||||
print(f"✅ Employer: {employer.company_name}")
|
||||
print(f"✅ User types: {candidate.user_type}, {employer.user_type}")
|
||||
|
||||
return candidate, employer
|
||||
|
||||
def test_json_api_format():
|
||||
"""Test JSON serialization in API format (the most important use case)"""
|
||||
print("\n📡 Testing JSON API format...")
|
||||
|
||||
candidate, employer = test_model_creation()
|
||||
|
||||
# Serialize to JSON (API format)
|
||||
candidate_json = candidate.model_dump_json(by_alias=True)
|
||||
employer_json = employer.model_dump_json(by_alias=True)
|
||||
|
||||
print(f"✅ Candidate JSON: {len(candidate_json)} chars")
|
||||
print(f"✅ Employer JSON: {len(employer_json)} chars")
|
||||
|
||||
# Deserialize from JSON
|
||||
candidate_back = Candidate.model_validate_json(candidate_json)
|
||||
employer_back = Employer.model_validate_json(employer_json)
|
||||
|
||||
# Verify data integrity
|
||||
assert candidate_back.email == candidate.email
|
||||
assert candidate_back.first_name == candidate.first_name
|
||||
assert employer_back.company_name == employer.company_name
|
||||
|
||||
print(f"✅ JSON round-trip successful")
|
||||
print(f"✅ Data integrity verified")
|
||||
|
||||
return True
|
||||
|
||||
def test_api_dict_format():
|
||||
"""Test dictionary format with aliases (for API requests/responses)"""
|
||||
print("\n📊 Testing API dictionary format...")
|
||||
|
||||
candidate, employer = test_model_creation()
|
||||
|
||||
# Create API format dictionaries
|
||||
candidate_dict = candidate.model_dump(by_alias=True)
|
||||
employer_dict = employer.model_dump(by_alias=True)
|
||||
|
||||
# Verify camelCase aliases are used
|
||||
assert "firstName" in candidate_dict
|
||||
assert "lastName" in candidate_dict
|
||||
assert "createdAt" in candidate_dict
|
||||
assert "companyName" in employer_dict
|
||||
|
||||
print(f"✅ API format dictionaries created")
|
||||
print(f"✅ CamelCase aliases verified")
|
||||
|
||||
# Test deserializing from API format
|
||||
candidate_back = Candidate.model_validate(candidate_dict)
|
||||
employer_back = Employer.model_validate(employer_dict)
|
||||
|
||||
assert candidate_back.email == candidate.email
|
||||
assert employer_back.company_name == employer.company_name
|
||||
|
||||
print(f"✅ API format round-trip successful")
|
||||
|
||||
return True
|
||||
|
||||
def test_validation_constraints():
|
||||
"""Test that validation constraints work"""
|
||||
print("\n🔒 Testing validation constraints...")
|
||||
|
||||
# Test AI Parameters with constraints
|
||||
valid_params = AIParameters(
|
||||
name="Test Config",
|
||||
model=AIModelType.GPT_4,
|
||||
temperature=0.7, # Valid: 0-1
|
||||
maxTokens=2000, # Valid: > 0
|
||||
topP=0.95, # Valid: 0-1
|
||||
frequencyPenalty=0.0, # Valid: -2 to 2
|
||||
presencePenalty=0.0, # Valid: -2 to 2
|
||||
isDefault=True,
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now()
|
||||
)
|
||||
print(f"✅ Valid AI parameters created")
|
||||
|
||||
# Test constraint violation
|
||||
try:
|
||||
invalid_params = AIParameters(
|
||||
name="Invalid Config",
|
||||
model=AIModelType.GPT_4,
|
||||
temperature=1.5, # Invalid: > 1
|
||||
maxTokens=2000,
|
||||
topP=0.95,
|
||||
frequencyPenalty=0.0,
|
||||
presencePenalty=0.0,
|
||||
isDefault=True,
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now()
|
||||
)
|
||||
print("❌ Should have rejected invalid temperature")
|
||||
return False
|
||||
except Exception:
|
||||
print(f"✅ Constraint validation working")
|
||||
|
||||
return True
|
||||
|
||||
def test_enum_values():
|
||||
"""Test that enum values work correctly"""
|
||||
print("\n📋 Testing enum values...")
|
||||
|
||||
# Test that enum values are properly handled
|
||||
candidate, employer = test_model_creation()
|
||||
|
||||
# Check enum values in serialization
|
||||
candidate_dict = candidate.model_dump(by_alias=True)
|
||||
|
||||
assert candidate_dict["status"] == "active"
|
||||
assert candidate_dict["userType"] == "candidate"
|
||||
assert employer.user_type == UserType.EMPLOYER
|
||||
|
||||
print(f"✅ Enum values correctly serialized")
|
||||
print(f"✅ User types: candidate={candidate.user_type}, employer={employer.user_type}")
|
||||
|
||||
return True
|
||||
|
||||
def main():
|
||||
"""Run all focused tests"""
|
||||
print("🎯 Focused Pydantic Model Tests")
|
||||
print("=" * 40)
|
||||
|
||||
try:
|
||||
test_model_creation()
|
||||
test_json_api_format()
|
||||
test_api_dict_format()
|
||||
test_validation_constraints()
|
||||
test_enum_values()
|
||||
|
||||
print(f"\n🎉 All focused tests passed!")
|
||||
print("=" * 40)
|
||||
print("✅ Models work correctly")
|
||||
print("✅ JSON API format works")
|
||||
print("✅ Validation constraints work")
|
||||
print("✅ Enum values work")
|
||||
print("✅ Ready for type generation!")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"\n❌ Test failed: {type(e).__name__}: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = main()
|
||||
sys.exit(0 if success else 1)
|
461
src/backend/generate_types.py
Normal file
@ -0,0 +1,461 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
Enhanced Type Generator - Generate TypeScript types from Pydantic models
|
||||
Now with command line parameters, pre-test validation, and TypeScript compilation
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import subprocess
|
||||
from typing import Any, Dict, List, Optional, Union, get_origin, get_args
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
|
||||
def run_command(command: str, description: str, cwd: str | None = None) -> bool:
|
||||
"""Run a command and return success status"""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
command,
|
||||
shell=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
cwd=cwd
|
||||
)
|
||||
|
||||
if result.returncode == 0:
|
||||
print(f"✅ {description}")
|
||||
return True
|
||||
else:
|
||||
print(f"❌ {description} failed:")
|
||||
if result.stderr.strip():
|
||||
print(f" Error: {result.stderr.strip()}")
|
||||
if result.stdout.strip():
|
||||
print(f" Output: {result.stdout.strip()}")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ {description} failed with exception: {e}")
|
||||
return False
|
||||
|
||||
def run_focused_test() -> bool:
|
||||
"""Run the focused test to validate models before generating types"""
|
||||
print("🧪 Running focused test to validate models...")
|
||||
|
||||
# Get the directory of the currently executing script
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
test_file_path = os.path.join(script_dir, "focused_test.py")
|
||||
|
||||
if not os.path.exists(test_file_path):
|
||||
print("❌ focused_test.py not found - skipping model validation")
|
||||
return False
|
||||
|
||||
return run_command(f"python {test_file_path}", "Model validation")
|
||||
|
||||
def check_typescript_available() -> bool:
|
||||
"""Check if TypeScript compiler is available"""
|
||||
return run_command("npx tsc --version", "TypeScript version check")
|
||||
|
||||
# Add current directory to Python path so we can import models
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
sys.path.insert(0, current_dir)
|
||||
|
||||
try:
|
||||
from pydantic import BaseModel # type: ignore
|
||||
except ImportError as e:
|
||||
print(f"Error importing pydantic: {e}")
|
||||
print("Make sure pydantic is installed: pip install pydantic")
|
||||
sys.exit(1)
|
||||
|
||||
def python_type_to_typescript(python_type: Any) -> str:
|
||||
"""Convert a Python type to TypeScript type string"""
|
||||
|
||||
# Handle None/null
|
||||
if python_type is type(None):
|
||||
return "null"
|
||||
|
||||
# Handle basic types
|
||||
if python_type == str:
|
||||
return "string"
|
||||
elif python_type == int or python_type == float:
|
||||
return "number"
|
||||
elif python_type == bool:
|
||||
return "boolean"
|
||||
elif python_type == dict or python_type == Dict:
|
||||
return "Record<string, any>"
|
||||
elif python_type == list or python_type == List:
|
||||
return "Array<any>"
|
||||
|
||||
# Handle typing generics
|
||||
origin = get_origin(python_type)
|
||||
args = get_args(python_type)
|
||||
|
||||
if origin is Union:
|
||||
# Handle Optional (Union[T, None])
|
||||
if len(args) == 2 and type(None) in args:
|
||||
non_none_type = next(arg for arg in args if arg is not type(None))
|
||||
return python_type_to_typescript(non_none_type)
|
||||
|
||||
# Handle other unions
|
||||
union_types = [python_type_to_typescript(arg) for arg in args if arg is not type(None)]
|
||||
return " | ".join(union_types)
|
||||
|
||||
elif origin is list or origin is List:
|
||||
if args:
|
||||
item_type = python_type_to_typescript(args[0])
|
||||
return f"Array<{item_type}>"
|
||||
return "Array<any>"
|
||||
|
||||
elif origin is dict or origin is Dict:
|
||||
if len(args) == 2:
|
||||
key_type = python_type_to_typescript(args[0])
|
||||
value_type = python_type_to_typescript(args[1])
|
||||
return f"Record<{key_type}, {value_type}>"
|
||||
return "Record<string, any>"
|
||||
|
||||
# Handle Literal types
|
||||
if hasattr(python_type, '__origin__') and str(python_type.__origin__).endswith('Literal'):
|
||||
if args:
|
||||
literal_values = [f'"{arg}"' if isinstance(arg, str) else str(arg) for arg in args]
|
||||
return " | ".join(literal_values)
|
||||
|
||||
# Handle Enum types
|
||||
if isinstance(python_type, type) and issubclass(python_type, Enum):
|
||||
enum_values = [f'"{v.value}"' for v in python_type]
|
||||
return " | ".join(enum_values)
|
||||
|
||||
# Handle datetime
|
||||
if python_type == datetime:
|
||||
return "Date"
|
||||
|
||||
# Handle Pydantic models
|
||||
if isinstance(python_type, type) and issubclass(python_type, BaseModel):
|
||||
return python_type.__name__
|
||||
|
||||
# Handle string representations
|
||||
type_str = str(python_type)
|
||||
if "EmailStr" in type_str:
|
||||
return "string"
|
||||
elif "HttpUrl" in type_str:
|
||||
return "string"
|
||||
elif "UUID" in type_str:
|
||||
return "string"
|
||||
|
||||
# Default fallback
|
||||
return "any"
|
||||
|
||||
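As a hedged illustration of the conversions implemented above (not part of the original file), the following checks show the expected mappings; they rely on the typing and datetime imports already at the top of this module:

# Illustrative checks of python_type_to_typescript; not part of the generated module.
assert python_type_to_typescript(str) == "string"
assert python_type_to_typescript(Optional[List[str]]) == "Array<string>"   # Optional unwraps to the inner type
assert python_type_to_typescript(Dict[str, int]) == "Record<string, number>"
assert python_type_to_typescript(datetime) == "Date"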
def snake_to_camel(snake_str: str) -> str:
|
||||
"""Convert snake_case to camelCase"""
|
||||
components = snake_str.split('_')
|
||||
return components[0] + ''.join(x.title() for x in components[1:])
|
||||
|
||||
def process_pydantic_model(model_class) -> Dict[str, Any]:
|
||||
"""Process a Pydantic model and return TypeScript interface definition"""
|
||||
interface_name = model_class.__name__
|
||||
properties = []
|
||||
|
||||
# Get fields from the model
|
||||
if hasattr(model_class, 'model_fields'):
|
||||
# Pydantic v2
|
||||
fields = model_class.model_fields
|
||||
for field_name, field_info in fields.items():
|
||||
ts_name = snake_to_camel(field_name)
|
||||
|
||||
# Check for alias
|
||||
if hasattr(field_info, 'alias') and field_info.alias:
|
||||
ts_name = field_info.alias
|
||||
|
||||
# Get type annotation
|
||||
field_type = getattr(field_info, 'annotation', str)
|
||||
ts_type = python_type_to_typescript(field_type)
|
||||
|
||||
# Check if optional
|
||||
is_optional = False
|
||||
if hasattr(field_info, 'is_required'):
|
||||
is_optional = not field_info.is_required()
|
||||
elif hasattr(field_info, 'default'):
|
||||
is_optional = field_info.default is not None
|
||||
|
||||
properties.append({
|
||||
'name': ts_name,
|
||||
'type': ts_type,
|
||||
'optional': is_optional
|
||||
})
|
||||
|
||||
elif hasattr(model_class, '__fields__'):
|
||||
# Pydantic v1
|
||||
fields = model_class.__fields__
|
||||
for field_name, field_info in fields.items():
|
||||
ts_name = snake_to_camel(field_name)
|
||||
|
||||
if hasattr(field_info, 'alias') and field_info.alias:
|
||||
ts_name = field_info.alias
|
||||
|
||||
field_type = getattr(field_info, 'annotation', getattr(field_info, 'type_', str))
|
||||
ts_type = python_type_to_typescript(field_type)
|
||||
|
||||
is_optional = not getattr(field_info, 'required', True)
|
||||
if hasattr(field_info, 'default') and field_info.default is not None:
|
||||
is_optional = True
|
||||
|
||||
properties.append({
|
||||
'name': ts_name,
|
||||
'type': ts_type,
|
||||
'optional': is_optional
|
||||
})
|
||||
|
||||
return {
|
||||
'name': interface_name,
|
||||
'properties': properties
|
||||
}
|
||||
|
||||
def process_enum(enum_class) -> Dict[str, Any]:
|
||||
"""Process an Enum and return TypeScript type definition"""
|
||||
enum_name = enum_class.__name__
|
||||
values = [f'"{v.value}"' for v in enum_class]
|
||||
if len(values) == 0:
|
||||
raise ValueError(f"Enum class '{enum_name}' has no values.")
|
||||
return {
|
||||
'name': enum_name,
|
||||
'values': " | ".join(values)
|
||||
}
|
||||
|
||||
def generate_typescript_interfaces(source_file: str):
|
||||
"""Generate TypeScript interfaces from models"""
|
||||
|
||||
print(f"📖 Scanning {source_file} for Pydantic models and enums...")
|
||||
|
||||
# Import the models module dynamically
|
||||
try:
|
||||
import importlib.util
|
||||
spec = importlib.util.spec_from_file_location("models", source_file)
|
||||
if spec is None or spec.loader is None:
|
||||
raise ImportError(f"Could not load module from {source_file}")
|
||||
|
||||
models_module = importlib.util.module_from_spec(spec)
|
||||
sys.modules["models"] = models_module
|
||||
spec.loader.exec_module(models_module)
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error importing {source_file}: {e}")
|
||||
return None
|
||||
|
||||
interfaces = []
|
||||
enums = []
|
||||
|
||||
# Scan the models module
|
||||
for name in dir(models_module):
|
||||
obj = getattr(models_module, name)
|
||||
|
||||
# Skip private attributes
|
||||
if name.startswith('_'):
|
||||
continue
|
||||
|
||||
try:
|
||||
# Check if it's a Pydantic model
|
||||
if (isinstance(obj, type) and
|
||||
issubclass(obj, BaseModel) and
|
||||
obj != BaseModel):
|
||||
|
||||
interface = process_pydantic_model(obj)
|
||||
interfaces.append(interface)
|
||||
print(f" ✅ Found Pydantic model: {name}")
|
||||
|
||||
# Check if it's an Enum
|
||||
elif (isinstance(obj, type) and
|
||||
issubclass(obj, Enum)):
|
||||
|
||||
enum_def = process_enum(obj)
|
||||
enums.append(enum_def)
|
||||
print(f" ✅ Found enum: {name}")
|
||||
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Error processing {name}: {e}")
|
||||
continue
|
||||
|
||||
print(f"\n📊 Found {len(interfaces)} interfaces and {len(enums)} enums")
|
||||
|
||||
# Generate TypeScript content
|
||||
ts_content = f"""// Generated TypeScript types from Pydantic models
|
||||
// Source: {source_file}
|
||||
// Generated on: {datetime.now().isoformat()}
|
||||
// DO NOT EDIT MANUALLY - This file is auto-generated
|
||||
|
||||
"""
|
||||
|
||||
# Add enums
|
||||
if enums:
|
||||
ts_content += "// ============================\n"
|
||||
ts_content += "// Enums\n"
|
||||
ts_content += "// ============================\n\n"
|
||||
|
||||
for enum_def in enums:
|
||||
ts_content += f"export type {enum_def['name']} = {enum_def['values']};\n\n"
|
||||
|
||||
# Add interfaces
|
||||
if interfaces:
|
||||
ts_content += "// ============================\n"
|
||||
ts_content += "// Interfaces\n"
|
||||
ts_content += "// ============================\n\n"
|
||||
|
||||
for interface in interfaces:
|
||||
ts_content += f"export interface {interface['name']} {{\n"
|
||||
|
||||
for prop in interface['properties']:
|
||||
optional_marker = "?" if prop['optional'] else ""
|
||||
ts_content += f" {prop['name']}{optional_marker}: {prop['type']};\n"
|
||||
|
||||
ts_content += "}\n\n"
|
||||
|
||||
# Add user union type if we have user types
|
||||
user_interfaces = [i for i in interfaces if i['name'] in ['Candidate', 'Employer']]
|
||||
if len(user_interfaces) >= 2:
|
||||
ts_content += "// ============================\n"
|
||||
ts_content += "// Union Types\n"
|
||||
ts_content += "// ============================\n\n"
|
||||
user_type_names = [i['name'] for i in user_interfaces]
|
||||
ts_content += f"export type User = {' | '.join(user_type_names)};\n\n"
|
||||
|
||||
# Add export statement
|
||||
ts_content += "// Export all types\n"
|
||||
ts_content += "export type { };\n"
|
||||
|
||||
return ts_content
|
||||
|
||||
def compile_typescript(ts_file: str) -> bool:
|
||||
"""Compile TypeScript file to check for syntax errors"""
|
||||
print(f"🔧 Compiling TypeScript file to check syntax...")
|
||||
|
||||
# Check if TypeScript is available
|
||||
if not check_typescript_available():
|
||||
print("⚠️ TypeScript compiler not available - skipping compilation check")
|
||||
print(" To install: npm install -g typescript")
|
||||
return True # Don't fail if TS isn't available
|
||||
|
||||
# Run TypeScript compiler in check mode
|
||||
return run_command(
|
||||
f"npx tsc --noEmit --skipLibCheck {ts_file}",
|
||||
"TypeScript syntax validation"
|
||||
)
|
||||
|
||||
def main():
|
||||
"""Main function with command line argument parsing"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Generate TypeScript types from Pydantic models',
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
Examples:
|
||||
python generate_types.py # Use defaults
|
||||
python generate_types.py --source models.py --output types.ts # Specify files
|
||||
python generate_types.py --skip-test # Skip model validation
|
||||
python generate_types.py --skip-compile # Skip TS compilation
|
||||
python generate_types.py --source models.py --output types.ts --skip-test --skip-compile
|
||||
"""
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--source', '-s',
|
||||
default='models.py',
|
||||
help='Source Python file with Pydantic models (default: models.py)'
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--output', '-o',
|
||||
default='types.ts',
|
||||
help='Output TypeScript file (default: types.ts)'
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--skip-test',
|
||||
action='store_true',
|
||||
help='Skip running focused_test.py before generation'
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--skip-compile',
|
||||
action='store_true',
|
||||
help='Skip TypeScript compilation check after generation'
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--version', '-v',
|
||||
action='version',
|
||||
version='TypeScript Generator 2.0'
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
print("🚀 Enhanced TypeScript Type Generator")
|
||||
print("=" * 50)
|
||||
print(f"📁 Source file: {args.source}")
|
||||
print(f"📁 Output file: {args.output}")
|
||||
print()
|
||||
|
||||
try:
|
||||
# Step 1: Validate source file exists
|
||||
if not os.path.exists(args.source):
|
||||
print(f"❌ Source file '{args.source}' not found")
|
||||
sys.exit(1)
|
||||
|
||||
# Step 2: Run focused test (unless skipped)
|
||||
if not args.skip_test:
|
||||
if not run_focused_test():
|
||||
print("❌ Model validation failed - aborting type generation")
|
||||
sys.exit(1)
|
||||
print()
|
||||
else:
|
||||
print("⏭️ Skipping model validation test")
|
||||
print()
|
||||
|
||||
# Step 3: Generate TypeScript content
|
||||
print("🔄 Generating TypeScript types...")
|
||||
ts_content = generate_typescript_interfaces(args.source)
|
||||
|
||||
if ts_content is None:
|
||||
print("❌ Failed to generate TypeScript content")
|
||||
sys.exit(1)
|
||||
|
||||
# Step 4: Write to output file
|
||||
with open(args.output, 'w') as f:
|
||||
f.write(ts_content)
|
||||
|
||||
file_size = len(ts_content)
|
||||
print(f"✅ TypeScript types generated: {args.output} ({file_size} characters)")
|
||||
|
||||
# Step 5: Compile TypeScript (unless skipped)
|
||||
if not args.skip_compile:
|
||||
print()
|
||||
if not compile_typescript(args.output):
|
||||
print("❌ TypeScript compilation failed - check the generated file")
|
||||
sys.exit(1)
|
||||
else:
|
||||
print("⏭️ Skipping TypeScript compilation check")
|
||||
|
||||
# Step 6: Success summary
|
||||
print(f"\n🎉 Type generation completed successfully!")
|
||||
print("=" * 50)
|
||||
print(f"✅ Generated {args.output} from {args.source}")
|
||||
print(f"✅ File size: {file_size} characters")
|
||||
if not args.skip_test:
|
||||
print("✅ Model validation passed")
|
||||
if not args.skip_compile:
|
||||
print("✅ TypeScript syntax validated")
|
||||
print(f"\n💡 Usage in your TypeScript project:")
|
||||
print(f" import {{ Candidate, Employer, Job }} from './{Path(args.output).stem}';")
|
||||
|
||||
return True
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print(f"\n⏹️ Type generation cancelled by user")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
print(f"\n❌ Error generating types: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = main()
|
||||
sys.exit(0 if success else 1)
|
54
src/backend/logger.py
Normal file
@ -0,0 +1,54 @@
import os
import warnings
import logging
import defines

def _setup_logging(level=defines.logging_level) -> logging.Logger:
    os.environ["TORCH_CPP_LOG_LEVEL"] = "ERROR"
    warnings.filterwarnings(
        "ignore", message="Overriding a previously registered kernel"
    )
    warnings.filterwarnings("ignore", message="Warning only once for all operators")
    warnings.filterwarnings("ignore", message=".*Couldn't find ffmpeg or avconv.*")
    warnings.filterwarnings("ignore", message="'force_all_finite' was renamed to")
    warnings.filterwarnings("ignore", message="n_jobs value 1 overridden")
    warnings.filterwarnings("ignore", message=".*websocket.*is deprecated")

    numeric_level = getattr(logging, level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError(f"Invalid log level: {level}")

    # Create a custom formatter
    formatter = logging.Formatter(
        fmt="%(levelname)s - %(filename)s:%(lineno)d - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S"
    )

    # Create a handler (e.g., StreamHandler for console output)
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)

    # Configure root logger
    logging.basicConfig(
        level=numeric_level,
        handlers=[handler],  # Use only your handler
        force=True,
    )

    # Set levels for noisy loggers
    for noisy_logger in (
        "uvicorn",
        "uvicorn.error",
        "uvicorn.access",
        "fastapi",
        "starlette",
    ):
        logger = logging.getLogger(noisy_logger)
        logger.setLevel(logging.WARNING)
        logger.handlers = []  # Remove default handlers
        logger.addHandler(handler)  # Add your custom handler

    logger = logging.getLogger(__name__)
    return logger

logger = _setup_logging(level=defines.logging_level)
logger.debug(f"Logging initialized with level: {defines.logging_level}")
988
src/backend/main.py
Normal file
@ -0,0 +1,988 @@
|
||||
from fastapi import FastAPI, HTTPException, Depends, Query, Path, Body, status, APIRouter, Request # type: ignore
|
||||
from fastapi.middleware.cors import CORSMiddleware # type: ignore
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials # type: ignore
|
||||
from fastapi.responses import JSONResponse # type: ignore
|
||||
from fastapi.staticfiles import StaticFiles # type: ignore
|
||||
import uvicorn # type: ignore
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import datetime, timedelta, UTC
|
||||
import uuid
|
||||
import jwt
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
import redis.asyncio as redis # type: ignore
|
||||
import re
|
||||
import asyncio
|
||||
import signal
|
||||
import json
|
||||
|
||||
# Prometheus
|
||||
from prometheus_client import Summary # type: ignore
|
||||
from prometheus_fastapi_instrumentator import Instrumentator # type: ignore
|
||||
from prometheus_client import CollectorRegistry, Counter # type: ignore
|
||||
|
||||
# Import Pydantic models
|
||||
from models import (
|
||||
# User models
|
||||
Candidate, Employer, BaseUser, Guest, Authentication, AuthResponse,
|
||||
|
||||
# Job models
|
||||
Job, JobApplication, ApplicationStatus,
|
||||
|
||||
# Chat models
|
||||
ChatSession, ChatMessage, ChatContext,
|
||||
|
||||
# AI models
|
||||
AIParameters,
|
||||
|
||||
# Supporting models
|
||||
Location, Skill, WorkExperience, Education
|
||||
)
|
||||
|
||||
import defines
|
||||
from logger import logger
|
||||
from database import RedisDatabase, redis_manager, DatabaseManager
|
||||
from metrics import Metrics
|
||||
|
||||
# Initialize FastAPI app
|
||||
# ============================
|
||||
# Startup Event
|
||||
# ============================
|
||||
db_manager = DatabaseManager()
|
||||
|
||||
prev_int = signal.getsignal(signal.SIGINT)
|
||||
prev_term = signal.getsignal(signal.SIGTERM)
|
||||
|
||||
def signal_handler(signum, frame):
|
||||
logger.info(f"⚠️ Received signal {signum!r}, shutting down…")
|
||||
# now call the old handler (it might raise KeyboardInterrupt or exit)
|
||||
if signum == signal.SIGINT and callable(prev_int):
|
||||
prev_int(signum, frame)
|
||||
elif signum == signal.SIGTERM and callable(prev_term):
|
||||
prev_term(signum, frame)
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
# Startup
|
||||
logger.info("🚀 Starting Backstory API")
|
||||
logger.info(f"📝 API Documentation available at: http://{defines.host}:{defines.port}{defines.api_prefix}/docs")
|
||||
logger.info("🔗 API endpoints prefixed with: /api/1.0")
|
||||
if os.path.exists(defines.static_content):
|
||||
logger.info(f"📁 Serving static files from: {defines.static_content}")
|
||||
|
||||
try:
|
||||
# Initialize database
|
||||
await db_manager.initialize()
|
||||
|
||||
# Seed development data if needed
|
||||
if defines.debug:
|
||||
await seed_development_data()
|
||||
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
|
||||
logger.info("🚀 Application startup completed")
|
||||
|
||||
yield # Application is running
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to start application: {e}")
|
||||
raise
|
||||
|
||||
finally:
|
||||
# Shutdown
|
||||
logger.info("Application shutdown requested")
|
||||
await db_manager.graceful_shutdown()
|
||||
|
||||
app = FastAPI(
|
||||
lifespan=lifespan,
|
||||
title="Backstory API",
|
||||
description="FastAPI backend for Backstory platform with TypeScript frontend",
|
||||
version="1.0.0",
|
||||
docs_url=f"{defines.api_prefix}/docs",
|
||||
redoc_url=f"{defines.api_prefix}/redoc",
|
||||
openapi_url=f"{defines.api_prefix}/openapi.json",
|
||||
)
|
||||
|
||||
ssl_enabled = os.getenv("SSL_ENABLED", "true").lower() == "true"
|
||||
if ssl_enabled:
|
||||
allow_origins = ["https://battle-linux.ketrenos.com:3000",
|
||||
"https://backstory-beta.ketrenos.com"]
|
||||
else:
|
||||
allow_origins = ["http://battle-linux.ketrenos.com:3000",
|
||||
"http://backstory-beta.ketrenos.com"]
|
||||
|
||||
# Add CORS middleware
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=allow_origins,
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Security
|
||||
security = HTTPBearer()
|
||||
SECRET_KEY = os.getenv("SECRET_KEY", "26fc1f29bd4599f5f29200b6ca083531")
|
||||
ALGORITHM = "HS256"
|
||||
|
||||
# ============================
|
||||
# Authentication Utilities
|
||||
# ============================
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
|
||||
to_encode = data.copy()
|
||||
if expires_delta:
|
||||
expire = datetime.now(UTC) + expires_delta
|
||||
else:
|
||||
expire = datetime.now(UTC) + timedelta(hours=24)
|
||||
to_encode.update({"exp": expire})
|
||||
encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
|
||||
return encoded_jwt
|
||||
|
||||
def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)):
|
||||
try:
|
||||
payload = jwt.decode(credentials.credentials, SECRET_KEY, algorithms=[ALGORITHM])
|
||||
user_id: str = payload.get("sub")
|
||||
if user_id is None:
|
||||
raise HTTPException(status_code=401, detail="Invalid authentication credentials")
|
||||
return user_id
|
||||
except jwt.PyJWTError:
|
||||
raise HTTPException(status_code=401, detail="Invalid authentication credentials")
|
||||
|
||||
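A minimal, illustrative sketch of the token round trip these two helpers implement; the user id is a made-up value:

# Illustrative only: encode a token, then decode it the same way verify_token does.
token = create_access_token(data={"sub": "user-123"}, expires_delta=timedelta(minutes=5))
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
assert payload["sub"] == "user-123"   # verify_token would return this user id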
async def get_current_user(
|
||||
user_id: str = Depends(verify_token),
|
||||
database: RedisDatabase = Depends(lambda: db_manager.get_database())
|
||||
):
|
||||
"""Get current user from database"""
|
||||
try:
|
||||
# Check candidates first
|
||||
candidate = await database.get_candidate(user_id)
|
||||
if candidate:
|
||||
return Candidate.model_validate(candidate)
|
||||
|
||||
# Check employers
|
||||
employer = await database.get_employer(user_id)
|
||||
if employer:
|
||||
return Employer.model_validate(employer)
|
||||
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting current user: {e}")
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
# ============================
|
||||
# Helper Functions
|
||||
# ============================
|
||||
async def get_database() -> RedisDatabase:
|
||||
"""
|
||||
FastAPI dependency to get database instance with shutdown protection
|
||||
"""
|
||||
return db_manager.get_database()
|
||||
|
||||
def create_success_response(data: Any, meta: Optional[Dict] = None) -> Dict:
|
||||
return {
|
||||
"success": True,
|
||||
"data": data,
|
||||
"meta": meta
|
||||
}
|
||||
|
||||
def create_error_response(code: str, message: str, details: Any = None) -> Dict:
|
||||
return {
|
||||
"success": False,
|
||||
"error": {
|
||||
"code": code,
|
||||
"message": message,
|
||||
"details": details
|
||||
}
|
||||
}
|
||||
|
||||
def create_paginated_response(
|
||||
data: List[Any],
|
||||
page: int,
|
||||
limit: int,
|
||||
total: int
|
||||
) -> Dict:
|
||||
total_pages = (total + limit - 1) // limit
|
||||
has_more = page < total_pages
|
||||
|
||||
return {
|
||||
"data": data,
|
||||
"total": total,
|
||||
"page": page,
|
||||
"limit": limit,
|
||||
"totalPages": total_pages,
|
||||
"hasMore": has_more
|
||||
}
|
||||
|
||||
def filter_and_paginate(
|
||||
items: List[Any],
|
||||
page: int = 1,
|
||||
limit: int = 20,
|
||||
sort_by: Optional[str] = None,
|
||||
sort_order: str = "desc",
|
||||
filters: Optional[Dict] = None
|
||||
) -> tuple:
|
||||
"""Filter, sort, and paginate items"""
|
||||
filtered_items = items.copy()
|
||||
|
||||
# Apply filters (simplified filtering logic)
|
||||
if filters:
|
||||
for key, value in filters.items():
|
||||
if filtered_items and isinstance(filtered_items[0], dict) and key in filtered_items[0]:
|
||||
filtered_items = [item for item in filtered_items if item.get(key) == value]
|
||||
elif hasattr(filtered_items[0], key) if filtered_items else False:
|
||||
filtered_items = [item for item in filtered_items
|
||||
if getattr(item, key, None) == value]
|
||||
|
||||
# Sort items
|
||||
if sort_by and filtered_items:
|
||||
reverse = sort_order.lower() == "desc"
|
||||
try:
|
||||
if isinstance(filtered_items[0], dict):
|
||||
filtered_items.sort(key=lambda x: x.get(sort_by, ""), reverse=reverse)
|
||||
else:
|
||||
filtered_items.sort(key=lambda x: getattr(x, sort_by, ""), reverse=reverse)
|
||||
except (AttributeError, TypeError):
|
||||
pass # Skip sorting if attribute doesn't exist or isn't comparable
|
||||
|
||||
# Paginate
|
||||
total = len(filtered_items)
|
||||
start = (page - 1) * limit
|
||||
end = start + limit
|
||||
paginated_items = filtered_items[start:end]
|
||||
|
||||
return paginated_items, total
|
||||
|
||||
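A brief illustrative check of filter_and_paginate on plain dictionaries (values are made up):

# Illustrative only.
items = [{"name": "a", "views": 2}, {"name": "b", "views": 5}, {"name": "c", "views": 1}]
page_items, total = filter_and_paginate(items, page=1, limit=2, sort_by="views", sort_order="desc")
assert total == 3
assert [i["name"] for i in page_items] == ["b", "a"]   # highest views first, two per page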
# ============================
|
||||
# API Router Setup
|
||||
# ============================
|
||||
|
||||
# Create API router with prefix
|
||||
api_router = APIRouter(prefix="/api/1.0")
|
||||
|
||||
# ============================
|
||||
# Authentication Endpoints
|
||||
# ============================
|
||||
|
||||
@api_router.post("/auth/login")
|
||||
async def login(
|
||||
login: str = Body(...),
|
||||
password: str = Body(...),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Login endpoint"""
|
||||
try:
|
||||
# Check if user exists (simplified - in real app, check hashed password)
|
||||
user_data = await database.get_user(login)
|
||||
if not user_data:
|
||||
logger.info(f"⚠️ Login attempt with non-existent email: {login}")
|
||||
return JSONResponse(
|
||||
status_code=401,
|
||||
content=create_error_response("AUTH_FAILED", "Invalid credentials")
|
||||
)
|
||||
|
||||
logger.info(f"🔑 User {login} logged in successfully")
|
||||
|
||||
# Create tokens
|
||||
access_token = create_access_token(data={"sub": user_data["id"]})
|
||||
refresh_token = create_access_token(
|
||||
data={"sub": user_data["id"], "type": "refresh"},
|
||||
expires_delta=timedelta(days=30)
|
||||
)
|
||||
|
||||
# Get user object
|
||||
user = None
|
||||
if user_data["type"] == "candidate":
|
||||
logger.info(f"🔑 User {login} is a candidate")
|
||||
candidate_data = await database.get_candidate(user_data["id"])
|
||||
if candidate_data:
|
||||
user = Candidate.model_validate(candidate_data)
|
||||
elif user_data["type"] == "employer":
|
||||
logger.info(f"🔑 User {login} is a employer")
|
||||
employer_data = await database.get_employer(user_data["id"])
|
||||
if employer_data:
|
||||
user = Employer.model_validate(employer_data)
|
||||
|
||||
if not user:
|
||||
return JSONResponse(
|
||||
status_code=404,
|
||||
content=create_error_response("USER_NOT_FOUND", "User not found")
|
||||
)
|
||||
|
||||
auth_response = AuthResponse(
|
||||
accessToken=access_token,
|
||||
refreshToken=refresh_token,
|
||||
user=user,
|
||||
expiresAt=int((datetime.now(UTC) + timedelta(hours=24)).timestamp())
|
||||
)
|
||||
|
||||
return create_success_response(auth_response.model_dump(by_alias=True))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Login error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content=create_error_response("LOGIN_ERROR", str(e))
|
||||
)
|
||||
|
||||
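A hedged client-side sketch of calling this endpoint; the host, port, and credentials are assumptions rather than values taken from this commit:

# Illustrative only; assumes the API is reachable at this host/port.
import requests

resp = requests.post(
    "http://localhost:8911/api/1.0/auth/login",
    json={"login": "john.doe@example.com", "password": "not-checked-in-this-version"},
)
body = resp.json()
if body.get("success"):
    access_token = body["data"]["accessToken"]   # sent later as "Authorization: Bearer <token>"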
@api_router.post("/auth/refresh")
|
||||
async def refresh_token_endpoint(
|
||||
refreshToken: str = Body(..., alias="refreshToken"),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Refresh token endpoint"""
|
||||
try:
|
||||
# Verify refresh token
|
||||
payload = jwt.decode(refreshToken, SECRET_KEY, algorithms=[ALGORITHM])
|
||||
user_id = payload.get("sub")
|
||||
token_type = payload.get("type")
|
||||
|
||||
if not user_id or token_type != "refresh":
|
||||
return JSONResponse(
|
||||
status_code=401,
|
||||
content=create_error_response("INVALID_TOKEN", "Invalid refresh token")
|
||||
)
|
||||
|
||||
# Create new access token
|
||||
access_token = create_access_token(data={"sub": user_id})
|
||||
|
||||
# Get user
|
||||
user = None
|
||||
candidate_data = await database.get_candidate(user_id)
|
||||
if candidate_data:
|
||||
user = Candidate.model_validate(candidate_data)
|
||||
else:
|
||||
employer_data = await database.get_employer(user_id)
|
||||
if employer_data:
|
||||
user = Employer.model_validate(employer_data)
|
||||
|
||||
if not user:
|
||||
return JSONResponse(
|
||||
status_code=404,
|
||||
content=create_error_response("USER_NOT_FOUND", "User not found")
|
||||
)
|
||||
|
||||
auth_response = AuthResponse(
|
||||
accessToken=access_token,
|
||||
refreshToken=refreshToken, # Keep same refresh token
|
||||
user=user,
|
||||
expiresAt=int((datetime.now(UTC) + timedelta(hours=24)).timestamp())
|
||||
)
|
||||
|
||||
return create_success_response(auth_response.model_dump(by_alias=True))
|
||||
|
||||
except jwt.PyJWTError:
|
||||
return JSONResponse(
|
||||
status_code=401,
|
||||
content=create_error_response("INVALID_TOKEN", "Invalid refresh token")
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Token refresh error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content=create_error_response("REFRESH_ERROR", str(e))
|
||||
)
|
||||
|
||||
# ============================
|
||||
# Candidate Endpoints
|
||||
# ============================
|
||||
|
||||
@api_router.post("/candidates")
|
||||
async def create_candidate(
|
||||
candidate_data: Dict[str, Any] = Body(...),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Create a new candidate"""
|
||||
try:
|
||||
# Add required fields
|
||||
candidate_data["id"] = str(uuid.uuid4())
|
||||
candidate_data["createdAt"] = datetime.now(UTC).isoformat()
|
||||
candidate_data["updatedAt"] = datetime.now(UTC).isoformat()
|
||||
|
||||
# Create candidate
|
||||
candidate = Candidate.model_validate(candidate_data)
|
||||
await database.set_candidate(candidate.id, candidate.model_dump())
|
||||
|
||||
# Add to users for auth (simplified)
|
||||
await database.set_user(candidate, {
|
||||
"id": candidate.id,
|
||||
"type": "candidate"
|
||||
})
|
||||
|
||||
return create_success_response(candidate.model_dump(by_alias=True))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Candidate creation error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("CREATION_FAILED", str(e))
|
||||
)
|
||||
|
||||
@api_router.get("/candidates/{candidate_id}")
|
||||
async def get_candidate(
|
||||
candidate_id: str = Path(...),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Get a candidate by ID"""
|
||||
try:
|
||||
candidate_data = await database.get_candidate(candidate_id)
|
||||
if not candidate_data:
|
||||
return JSONResponse(
|
||||
status_code=404,
|
||||
content=create_error_response("NOT_FOUND", "Candidate not found")
|
||||
)
|
||||
|
||||
candidate = Candidate.model_validate(candidate_data)
|
||||
return create_success_response(candidate.model_dump(by_alias=True))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Get candidate error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content=create_error_response("FETCH_ERROR", str(e))
|
||||
)
|
||||
|
||||
@api_router.patch("/candidates/{candidate_id}")
|
||||
async def update_candidate(
|
||||
candidate_id: str = Path(...),
|
||||
updates: Dict[str, Any] = Body(...),
|
||||
current_user = Depends(get_current_user),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Update a candidate"""
|
||||
try:
|
||||
candidate_data = await database.get_candidate(candidate_id)
|
||||
if not candidate_data:
|
||||
return JSONResponse(
|
||||
status_code=404,
|
||||
content=create_error_response("NOT_FOUND", "Candidate not found")
|
||||
)
|
||||
|
||||
candidate = Candidate.model_validate(candidate_data)
|
||||
|
||||
# Check authorization (user can only update their own profile)
|
||||
if candidate.id != current_user.id:
|
||||
return JSONResponse(
|
||||
status_code=403,
|
||||
content=create_error_response("FORBIDDEN", "Cannot update another user's profile")
|
||||
)
|
||||
|
||||
# Apply updates
|
||||
updates["updatedAt"] = datetime.now(UTC).isoformat()
|
||||
candidate_dict = candidate.model_dump()
|
||||
candidate_dict.update(updates)
|
||||
|
||||
updated_candidate = Candidate.model_validate(candidate_dict)
|
||||
await database.set_candidate(candidate_id, updated_candidate.model_dump())
|
||||
|
||||
return create_success_response(updated_candidate.model_dump(by_alias=True))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Update candidate error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("UPDATE_FAILED", str(e))
|
||||
)
|
||||
|
||||
@api_router.get("/candidates")
|
||||
async def get_candidates(
|
||||
page: int = Query(1, ge=1),
|
||||
limit: int = Query(20, ge=1, le=100),
|
||||
sortBy: Optional[str] = Query(None, alias="sortBy"),
|
||||
sortOrder: str = Query("desc", pattern="^(asc|desc)$", alias="sortOrder"),
|
||||
filters: Optional[str] = Query(None),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Get paginated list of candidates"""
|
||||
try:
|
||||
# Parse filters if provided
|
||||
filter_dict = None
|
||||
if filters:
|
||||
filter_dict = json.loads(filters)
|
||||
|
||||
# Get all candidates from Redis
|
||||
all_candidates_data = await database.get_all_candidates()
|
||||
candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]
|
||||
|
||||
paginated_candidates, total = filter_and_paginate(
|
||||
candidates_list, page, limit, sortBy, sortOrder, filter_dict
|
||||
)
|
||||
|
||||
paginated_response = create_paginated_response(
|
||||
[c.model_dump(by_alias=True) for c in paginated_candidates],
|
||||
page, limit, total
|
||||
)
|
||||
|
||||
return create_success_response(paginated_response)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Get candidates error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("FETCH_FAILED", str(e))
|
||||
)
|
||||
|
||||
@api_router.get("/candidates/search")
|
||||
async def search_candidates(
|
||||
query: str = Query(...),
|
||||
filters: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
limit: int = Query(20, ge=1, le=100),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Search candidates"""
|
||||
try:
|
||||
# Parse filters
|
||||
filter_dict = {}
|
||||
if filters:
|
||||
filter_dict = json.loads(filters)
|
||||
|
||||
# Get all candidates from Redis
|
||||
all_candidates_data = await database.get_all_candidates()
|
||||
candidates_list = [Candidate.model_validate(data) for data in all_candidates_data.values()]
|
||||
|
||||
# Filter by search query
|
||||
if query:
|
||||
query_lower = query.lower()
|
||||
candidates_list = [
|
||||
c for c in candidates_list
|
||||
if (query_lower in c.first_name.lower() or
|
||||
query_lower in c.last_name.lower() or
|
||||
query_lower in c.email.lower() or
|
||||
any(query_lower in skill.name.lower() for skill in c.skills))
|
||||
]
|
||||
|
||||
paginated_candidates, total = filter_and_paginate(
|
||||
candidates_list, page, limit, filters=filter_dict
|
||||
)
|
||||
|
||||
paginated_response = create_paginated_response(
|
||||
[c.model_dump(by_alias=True) for c in paginated_candidates],
|
||||
page, limit, total
|
||||
)
|
||||
|
||||
return create_success_response(paginated_response)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Search candidates error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("SEARCH_FAILED", str(e))
|
||||
)
|
||||
|
||||
# ============================
|
||||
# Job Endpoints
|
||||
# ============================
|
||||
|
||||
@api_router.post("/jobs")
|
||||
async def create_job(
|
||||
job_data: Dict[str, Any] = Body(...),
|
||||
current_user = Depends(get_current_user),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Create a new job"""
|
||||
try:
|
||||
# Verify user is an employer
|
||||
if not isinstance(current_user, Employer):
|
||||
return JSONResponse(
|
||||
status_code=403,
|
||||
content=create_error_response("FORBIDDEN", "Only employers can create jobs")
|
||||
)
|
||||
|
||||
# Add required fields
|
||||
job_data["id"] = str(uuid.uuid4())
|
||||
job_data["datePosted"] = datetime.now(UTC).isoformat()
|
||||
job_data["views"] = 0
|
||||
job_data["applicationCount"] = 0
|
||||
job_data["employerId"] = current_user.id
|
||||
|
||||
job = Job.model_validate(job_data)
|
||||
await database.set_job(job.id, job.model_dump())
|
||||
|
||||
return create_success_response(job.model_dump(by_alias=True))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Job creation error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("CREATION_FAILED", str(e))
|
||||
)
|
||||
|
||||
@api_router.get("/jobs/{job_id}")
|
||||
async def get_job(
|
||||
job_id: str = Path(...),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Get a job by ID"""
|
||||
try:
|
||||
job_data = await database.get_job(job_id)
|
||||
if not job_data:
|
||||
return JSONResponse(
|
||||
status_code=404,
|
||||
content=create_error_response("NOT_FOUND", "Job not found")
|
||||
)
|
||||
|
||||
# Increment view count
|
||||
job_data["views"] = job_data.get("views", 0) + 1
|
||||
await database.set_job(job_id, job_data)
|
||||
|
||||
job = Job.model_validate(job_data)
|
||||
return create_success_response(job.model_dump(by_alias=True))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Get job error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content=create_error_response("FETCH_ERROR", str(e))
|
||||
)
|
||||
|
||||
@api_router.get("/jobs")
|
||||
async def get_jobs(
|
||||
page: int = Query(1, ge=1),
|
||||
limit: int = Query(20, ge=1, le=100),
|
||||
sortBy: Optional[str] = Query(None, alias="sortBy"),
|
||||
sortOrder: str = Query("desc", pattern="^(asc|desc)$", alias="sortOrder"),
|
||||
filters: Optional[str] = Query(None),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Get paginated list of jobs"""
|
||||
try:
|
||||
filter_dict = None
|
||||
if filters:
|
||||
filter_dict = json.loads(filters)
|
||||
|
||||
# Get all jobs from Redis
|
||||
all_jobs_data = await database.get_all_jobs()
|
||||
jobs_list = [Job.model_validate(data) for data in all_jobs_data.values() if data.get("is_active", True)]
|
||||
|
||||
paginated_jobs, total = filter_and_paginate(
|
||||
jobs_list, page, limit, sortBy, sortOrder, filter_dict
|
||||
)
|
||||
|
||||
paginated_response = create_paginated_response(
|
||||
[j.model_dump(by_alias=True) for j in paginated_jobs],
|
||||
page, limit, total
|
||||
)
|
||||
|
||||
return create_success_response(paginated_response)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Get jobs error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("FETCH_FAILED", str(e))
|
||||
)
|
||||
|
||||
@api_router.get("/jobs/search")
|
||||
async def search_jobs(
|
||||
query: str = Query(...),
|
||||
filters: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
limit: int = Query(20, ge=1, le=100),
|
||||
database: RedisDatabase = Depends(get_database)
|
||||
):
|
||||
"""Search jobs"""
|
||||
try:
|
||||
filter_dict = {}
|
||||
if filters:
|
||||
filter_dict = json.loads(filters)
|
||||
|
||||
# Get all jobs from Redis
|
||||
all_jobs_data = await database.get_all_jobs()
|
||||
jobs_list = [Job.model_validate(data) for data in all_jobs_data.values() if data.get("is_active", True)]
|
||||
|
||||
if query:
|
||||
query_lower = query.lower()
|
||||
jobs_list = [
|
||||
j for j in jobs_list
|
||||
if (query_lower in j.title.lower() or
|
||||
query_lower in j.description.lower() or
|
||||
any(query_lower in skill.lower() for skill in (j.preferred_skills or [])))
|
||||
]
|
||||
|
||||
paginated_jobs, total = filter_and_paginate(
|
||||
jobs_list, page, limit, filters=filter_dict
|
||||
)
|
||||
|
||||
paginated_response = create_paginated_response(
|
||||
[j.model_dump(by_alias=True) for j in paginated_jobs],
|
||||
page, limit, total
|
||||
)
|
||||
|
||||
return create_success_response(paginated_response)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Search jobs error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content=create_error_response("SEARCH_FAILED", str(e))
|
||||
)
|
||||
|
||||
# ============================
|
||||
# Health Check and Info Endpoints
|
||||
# ============================
|
||||
async def get_redis() -> redis.Redis:
|
||||
"""Dependency to get Redis client"""
|
||||
return redis_manager.get_client()
|
||||
|
||||
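get_redis returns the client owned by redis_manager (defined in database.py, not shown here). As a hedged sketch, a typical redis.asyncio client setup looks roughly like this; the URL and db number are assumptions:

# Illustrative only; redis_manager in database.py is the real owner of this client.
async def make_client(url: str = "redis://redis:6379", db: int = 0) -> redis.Redis:
    client = redis.from_url(url, db=db, decode_responses=True)
    await client.ping()   # fail fast if Redis is unreachable
    return client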
@app.get("/health")
|
||||
async def enhanced_health_check():
|
||||
"""Enhanced health check endpoint"""
|
||||
try:
|
||||
database = db_manager.get_database()
|
||||
if not redis_manager.redis_client:
|
||||
raise RuntimeError("Redis client not initialized")
|
||||
|
||||
# Test Redis connection
|
||||
await redis_manager.redis_client.ping()
|
||||
|
||||
# Get database stats
|
||||
stats = await database.get_stats()
|
||||
|
||||
# Redis info
|
||||
redis_info = await redis_manager.redis_client.info()
|
||||
|
||||
return {
|
||||
"status": "healthy",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"database": {
|
||||
"status": "connected",
|
||||
"stats": stats
|
||||
},
|
||||
"redis": {
|
||||
"version": redis_info.get("redis_version", "unknown"),
|
||||
"uptime": redis_info.get("uptime_in_seconds", 0),
|
||||
"memory_used": redis_info.get("used_memory_human", "unknown")
|
||||
},
|
||||
"application": {
|
||||
"active_requests": db_manager._active_requests,
|
||||
"shutting_down": db_manager.is_shutting_down
|
||||
}
|
||||
}
|
||||
|
||||
except RuntimeError as e:
|
||||
return {"status": "shutting_down", "message": str(e)}
|
||||
except Exception as e:
|
||||
logger.error(f"Health check failed: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
@api_router.get("/redis/stats")
|
||||
async def redis_stats(redis_client: redis.Redis = Depends(get_redis)):
|
||||
try:
|
||||
info = await redis_client.info()
|
||||
return {
|
||||
"connected_clients": info.get("connected_clients"),
|
||||
"used_memory_human": info.get("used_memory_human"),
|
||||
"total_commands_processed": info.get("total_commands_processed"),
|
||||
"keyspace_hits": info.get("keyspace_hits"),
|
||||
"keyspace_misses": info.get("keyspace_misses"),
|
||||
"uptime_in_seconds": info.get("uptime_in_seconds")
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=503, detail=f"Redis stats unavailable: {e}")
|
||||
|
||||
@api_router.get("/")
|
||||
async def api_info():
|
||||
"""API information endpoint"""
|
||||
return {
|
||||
"message": "Backstory API",
|
||||
"version": "1.0.0",
|
||||
"prefix": defines.api_prefix,
|
||||
"documentation": f"{defines.api_prefix}/docs",
|
||||
"health": f"{defines.api_prefix}/health"
|
||||
}
|
||||
|
||||
# ============================
|
||||
# Include Router in App
|
||||
# ============================
|
||||
|
||||
# Include the API router
|
||||
app.include_router(api_router)
|
||||
|
||||
# ============================
|
||||
# Debug logging
|
||||
# ============================
|
||||
logger.info(f"Debug mode is {'enabled' if defines.debug else 'disabled'}")
|
||||
|
||||
@app.middleware("http")
|
||||
async def log_requests(request: Request, call_next):
|
||||
try:
|
||||
if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path):
|
||||
logger.info(f"Request path: {request.url.path}, Method: {request.method}, Remote: {request.client.host}")
|
||||
response = await call_next(request)
|
||||
if defines.debug and not re.match(rf"{defines.api_prefix}/metrics", request.url.path):
|
||||
logger.info(f"Response status: {response.status_code}, Path: {request.url.path}, Method: {request.method}")
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing request: {str(e)}, Path: {request.url.path}, Method: {request.method}")
|
||||
return JSONResponse(status_code=400, content={"detail": "Invalid HTTP request"})
|
||||
|
||||
# ============================
|
||||
# Request tracking middleware
|
||||
# ============================
|
||||
@app.middleware("http")
|
||||
async def track_requests(request, call_next):
|
||||
"""Middleware to track active requests during shutdown"""
|
||||
if db_manager.is_shutting_down:
|
||||
return JSONResponse(status_code=503, content={"error": "Application is shutting down"})
|
||||
|
||||
db_manager.increment_requests()
|
||||
try:
|
||||
response = await call_next(request)
|
||||
return response
|
||||
finally:
|
||||
db_manager.decrement_requests()
|
||||
|
||||
# ============================
|
||||
# FastAPI Metrics
|
||||
# ============================
|
||||
prometheus_collector = CollectorRegistry()
|
||||
|
||||
# Keep the Instrumentator instance alive
|
||||
instrumentator = Instrumentator(
|
||||
should_group_status_codes=True,
|
||||
should_ignore_untemplated=True,
|
||||
should_group_untemplated=True,
|
||||
excluded_handlers=[f"{defines.api_prefix}/metrics"],
|
||||
registry=prometheus_collector
|
||||
)
|
||||
|
||||
# Instrument the FastAPI app
|
||||
instrumentator.instrument(app)
|
||||
|
||||
# Expose the /metrics endpoint
|
||||
logger.info(f"Exposing Prometheus metrics at {defines.api_prefix}/metrics")
|
||||
instrumentator.expose(app, endpoint=f"{defines.api_prefix}/metrics")
|
||||
|
||||
# ============================
|
||||
# Static File Serving
|
||||
# ============================
|
||||
|
||||
# Serve static files (for frontend build)
|
||||
# This should be last to not interfere with API routes
|
||||
if os.path.exists(defines.static_content):
|
||||
app.mount("/", StaticFiles(directory=defines.static_content, html=True), name="static")
|
||||
else:
|
||||
logger.info(f"⚠️ Static directory '{defines.static_content}' not found. Static file serving disabled.")
|
||||
|
||||
# Root endpoint when no static files
|
||||
@app.get("/", include_in_schema=False)
|
||||
async def root():
|
||||
"""Root endpoint with API information (when no static files)"""
|
||||
return {
|
||||
"message": "Backstory API",
|
||||
"version": "1.0.0",
|
||||
"api_prefix": defines.api_prefix,
|
||||
"documentation": f"{defines.api_prefix}/docs",
|
||||
"health": f"{defines.api_prefix}/health"
|
||||
}
|
||||
|
||||
# ============================
|
||||
# Development Data Seeding
|
||||
# ============================
|
||||
|
||||
async def seed_development_data():
|
||||
"""Seed the database with development data"""
|
||||
try:
|
||||
database = db_manager.get_database()
|
||||
|
||||
# Check if data already exists
|
||||
stats = await database.get_stats()
|
||||
if stats.get('candidates', 0) > 0:
|
||||
logger.info("✅ Development data already exists, skipping seeding")
|
||||
return
|
||||
|
||||
# Create sample location
|
||||
sample_location = Location(
|
||||
city="San Francisco",
|
||||
state="CA",
|
||||
country="USA",
|
||||
postalCode="94102"
|
||||
)
|
||||
|
||||
# Create sample candidate
|
||||
candidate_id = str(uuid.uuid4())
|
||||
sample_candidate = Candidate(
|
||||
id=candidate_id,
|
||||
email="john.doe@example.com",
|
||||
createdAt=datetime.now(UTC),
|
||||
updatedAt=datetime.now(UTC),
|
||||
status="active",
|
||||
firstName="John",
|
||||
lastName="Doe",
|
||||
fullName="John Doe",
|
||||
username="johndoe",
|
||||
skills=[],
|
||||
experience=[],
|
||||
education=[],
|
||||
preferredJobTypes=["full-time"],
|
||||
location=sample_location,
|
||||
languages=[],
|
||||
certifications=[]
|
||||
)
|
||||
|
||||
await database.set_candidate(candidate_id, sample_candidate.model_dump())
|
||||
await database.set_user(sample_candidate, {"id": candidate_id, "type": "candidate"})
|
||||
|
||||
# Create sample employer
|
||||
employer_id = str(uuid.uuid4())
|
||||
sample_employer = Employer(
|
||||
id=employer_id,
|
||||
email="hr@techcorp.com",
|
||||
createdAt=datetime.now(UTC),
|
||||
updatedAt=datetime.now(UTC),
|
||||
status="active",
|
||||
companyName="TechCorp",
|
||||
industry="Technology",
|
||||
companySize="100-500",
|
||||
companyDescription="Leading technology company",
|
||||
location=sample_location
|
||||
)
|
||||
|
||||
await database.set_employer(employer_id, sample_employer.model_dump())
|
||||
await database.set_user(sample_employer, {"id": employer_id, "type": "employer"})
|
||||
|
||||
# Create sample job
|
||||
job_id = str(uuid.uuid4())
|
||||
sample_job = Job(
|
||||
id=job_id,
|
||||
title="Senior Software Engineer",
|
||||
description="We are looking for a senior software engineer...",
|
||||
responsibilities=["Develop software", "Lead projects", "Mentor juniors"],
|
||||
requirements=["5+ years experience", "Python expertise"],
|
||||
preferredSkills=["FastAPI", "React", "PostgreSQL"],
|
||||
employerId=employer_id,
|
||||
location=sample_location,
|
||||
employmentType="full-time",
|
||||
datePosted=datetime.now(UTC),
|
||||
isActive=True,
|
||||
views=0,
|
||||
applicationCount=0
|
||||
)
|
||||
|
||||
await database.set_job(job_id, sample_job.model_dump())
|
||||
|
||||
logger.info("✅ Development data seeded successfully")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"⚠️ Failed to seed development data: {e}")
|
||||
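# (Illustrative sketch, not part of the original file.) In a development build the
# seeding helper could be invoked once at startup, e.g. from a lifespan hook; the
# PRODUCTION flag check shown here is an assumption, not existing code.
#
#   if os.environ.get("PRODUCTION", "0") == "0":
#       await seed_development_data()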
|
||||
if __name__ == "__main__":
|
||||
host = defines.host
|
||||
port = defines.port
|
||||
if ssl_enabled:
|
||||
logger.info(f"Starting web server at https://{host}:{port}")
|
||||
uvicorn.run(
|
||||
app="main:app",
|
||||
host=host,
|
||||
port=port,
|
||||
log_config=None,
|
||||
ssl_keyfile=defines.key_path,
|
||||
ssl_certfile=defines.cert_path,
|
||||
reload=True,
|
||||
)
|
||||
else:
|
||||
logger.info(f"Starting web server at http://{host}:{port}")
|
||||
uvicorn.run(app=app, host=host, port=port, log_config=None)
|
94
src/backend/metrics.py
Normal file
@ -0,0 +1,94 @@
|
||||
from prometheus_client import Counter, Histogram # type: ignore
|
||||
from threading import Lock
|
||||
|
||||
def singleton(cls):
|
||||
instance = None
|
||||
lock = Lock()
|
||||
|
||||
def get_instance(*args, **kwargs):
|
||||
nonlocal instance
|
||||
with lock:
|
||||
if instance is None:
|
||||
instance = cls(*args, **kwargs)
|
||||
return instance
|
||||
|
||||
return get_instance
|
||||
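# --- Illustrative usage sketch (not part of the original file) ----------------
# Because the Metrics class below is wrapped with @singleton, every call to
# Metrics() returns the same shared instance, so counters accumulate process-wide:
#
#   from prometheus_client import CollectorRegistry
#   registry = CollectorRegistry()
#   metrics = Metrics(prometheus_collector=registry)
#   assert metrics is Metrics(prometheus_collector=registry)
#   metrics.prepare_count.labels(agent="chat").inc()
#   with metrics.prepare_duration.labels(agent="chat").time():
#       pass  # timed work goes here
# ------------------------------------------------------------------------------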
|
||||
|
||||
@singleton
|
||||
class Metrics:
|
||||
def __init__(self, *args, prometheus_collector, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.prometheus_collector = prometheus_collector
|
||||
|
||||
self.prepare_count: Counter = Counter(
|
||||
name="prepare_total",
|
||||
documentation="Total messages prepared by agent type",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.prepare_duration: Histogram = Histogram(
|
||||
name="prepare_duration",
|
||||
documentation="Preparation duration by agent type",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.process_count: Counter = Counter(
|
||||
name="process",
|
||||
documentation="Total messages processed by agent type",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.process_duration: Histogram = Histogram(
|
||||
name="process_duration",
|
||||
documentation="Processing duration by agent type",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.tool_count: Counter = Counter(
|
||||
name="tool_total",
|
||||
documentation="Total messages tooled by agent type",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.tool_duration: Histogram = Histogram(
|
||||
name="tool_duration",
|
||||
documentation="Tool duration by agent type",
|
||||
buckets=(0.1, 0.5, 1.0, 2.0, float("inf")),
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.generate_count: Counter = Counter(
|
||||
name="generate_total",
|
||||
documentation="Total messages generated by agent type",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.generate_duration: Histogram = Histogram(
|
||||
name="generate_duration",
|
||||
documentation="Generate duration by agent type",
|
||||
buckets=(0.1, 0.5, 1.0, 2.0, float("inf")),
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.tokens_prompt: Counter = Counter(
|
||||
name="tokens_prompt",
|
||||
documentation="Total tokens passed as prompt to LLM",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
||||
|
||||
self.tokens_eval: Counter = Counter(
|
||||
name="tokens_eval",
|
||||
documentation="Total tokens returned by LLM",
|
||||
labelnames=("agent",),
|
||||
registry=self.prometheus_collector,
|
||||
)
|
745
src/backend/models.py
Normal file
@ -0,0 +1,745 @@
|
||||
from typing import List, Dict, Optional, Any, Union, Literal, TypeVar, Generic, Annotated
|
||||
from pydantic import BaseModel, Field, EmailStr, HttpUrl, validator, model_validator  # type: ignore
|
||||
from pydantic.types import constr, conint # type: ignore
|
||||
from datetime import datetime, date
|
||||
from enum import Enum
|
||||
import uuid
|
||||
|
||||
# Generic type variable
|
||||
T = TypeVar('T')
|
||||
|
||||
# ============================
|
||||
# Enums
|
||||
# ============================
|
||||
|
||||
class UserType(str, Enum):
|
||||
CANDIDATE = "candidate"
|
||||
EMPLOYER = "employer"
|
||||
GUEST = "guest"
|
||||
|
||||
class UserGender(str, Enum):
|
||||
FEMALE = "female"
|
||||
MALE = "male"
|
||||
|
||||
class UserStatus(str, Enum):
|
||||
ACTIVE = "active"
|
||||
INACTIVE = "inactive"
|
||||
PENDING = "pending"
|
||||
BANNED = "banned"
|
||||
|
||||
class SkillLevel(str, Enum):
|
||||
BEGINNER = "beginner"
|
||||
INTERMEDIATE = "intermediate"
|
||||
ADVANCED = "advanced"
|
||||
EXPERT = "expert"
|
||||
|
||||
class EmploymentType(str, Enum):
|
||||
FULL_TIME = "full-time"
|
||||
PART_TIME = "part-time"
|
||||
CONTRACT = "contract"
|
||||
INTERNSHIP = "internship"
|
||||
FREELANCE = "freelance"
|
||||
|
||||
class InterviewType(str, Enum):
|
||||
PHONE = "phone"
|
||||
VIDEO = "video"
|
||||
ONSITE = "onsite"
|
||||
TECHNICAL = "technical"
|
||||
BEHAVIORAL = "behavioral"
|
||||
|
||||
class ApplicationStatus(str, Enum):
|
||||
APPLIED = "applied"
|
||||
REVIEWING = "reviewing"
|
||||
INTERVIEW = "interview"
|
||||
OFFER = "offer"
|
||||
REJECTED = "rejected"
|
||||
ACCEPTED = "accepted"
|
||||
WITHDRAWN = "withdrawn"
|
||||
|
||||
class InterviewRecommendation(str, Enum):
|
||||
STRONG_HIRE = "strong_hire"
|
||||
HIRE = "hire"
|
||||
NO_HIRE = "no_hire"
|
||||
STRONG_NO_HIRE = "strong_no_hire"
|
||||
|
||||
class ChatSenderType(str, Enum):
|
||||
USER = "user"
|
||||
AI = "ai"
|
||||
SYSTEM = "system"
|
||||
|
||||
class ChatContextType(str, Enum):
|
||||
JOB_SEARCH = "job_search"
|
||||
CANDIDATE_SCREENING = "candidate_screening"
|
||||
INTERVIEW_PREP = "interview_prep"
|
||||
RESUME_REVIEW = "resume_review"
|
||||
GENERAL = "general"
|
||||
|
||||
class AIModelType(str, Enum):
|
||||
GPT_4 = "gpt-4"
|
||||
GPT_35_TURBO = "gpt-3.5-turbo"
|
||||
CLAUDE_3 = "claude-3"
|
||||
CLAUDE_3_OPUS = "claude-3-opus"
|
||||
CUSTOM = "custom"
|
||||
|
||||
class MFAMethod(str, Enum):
|
||||
APP = "app"
|
||||
SMS = "sms"
|
||||
EMAIL = "email"
|
||||
|
||||
class VectorStoreType(str, Enum):
|
||||
PINECONE = "pinecone"
|
||||
QDRANT = "qdrant"
|
||||
FAISS = "faiss"
|
||||
MILVUS = "milvus"
|
||||
WEAVIATE = "weaviate"
|
||||
|
||||
class DataSourceType(str, Enum):
|
||||
DOCUMENT = "document"
|
||||
WEBSITE = "website"
|
||||
API = "api"
|
||||
DATABASE = "database"
|
||||
INTERNAL = "internal"
|
||||
|
||||
class ProcessingStepType(str, Enum):
|
||||
EXTRACT = "extract"
|
||||
TRANSFORM = "transform"
|
||||
CHUNK = "chunk"
|
||||
EMBED = "embed"
|
||||
FILTER = "filter"
|
||||
SUMMARIZE = "summarize"
|
||||
|
||||
class SearchType(str, Enum):
|
||||
SIMILARITY = "similarity"
|
||||
MMR = "mmr"
|
||||
HYBRID = "hybrid"
|
||||
KEYWORD = "keyword"
|
||||
|
||||
class ActivityType(str, Enum):
|
||||
LOGIN = "login"
|
||||
SEARCH = "search"
|
||||
VIEW_JOB = "view_job"
|
||||
APPLY_JOB = "apply_job"
|
||||
MESSAGE = "message"
|
||||
UPDATE_PROFILE = "update_profile"
|
||||
CHAT = "chat"
|
||||
|
||||
class ThemePreference(str, Enum):
|
||||
LIGHT = "light"
|
||||
DARK = "dark"
|
||||
SYSTEM = "system"
|
||||
|
||||
class NotificationType(str, Enum):
|
||||
EMAIL = "email"
|
||||
PUSH = "push"
|
||||
IN_APP = "in_app"
|
||||
|
||||
class FontSize(str, Enum):
|
||||
SMALL = "small"
|
||||
MEDIUM = "medium"
|
||||
LARGE = "large"
|
||||
|
||||
class SalaryPeriod(str, Enum):
|
||||
HOUR = "hour"
|
||||
DAY = "day"
|
||||
MONTH = "month"
|
||||
YEAR = "year"
|
||||
|
||||
class LanguageProficiency(str, Enum):
|
||||
BASIC = "basic"
|
||||
CONVERSATIONAL = "conversational"
|
||||
FLUENT = "fluent"
|
||||
NATIVE = "native"
|
||||
|
||||
class SocialPlatform(str, Enum):
|
||||
LINKEDIN = "linkedin"
|
||||
TWITTER = "twitter"
|
||||
GITHUB = "github"
|
||||
DRIBBBLE = "dribbble"
|
||||
BEHANCE = "behance"
|
||||
WEBSITE = "website"
|
||||
OTHER = "other"
|
||||
|
||||
class ColorBlindMode(str, Enum):
|
||||
PROTANOPIA = "protanopia"
|
||||
DEUTERANOPIA = "deuteranopia"
|
||||
TRITANOPIA = "tritanopia"
|
||||
NONE = "none"
|
||||
|
||||
class SortOrder(str, Enum):
|
||||
ASC = "asc"
|
||||
DESC = "desc"
|
||||
|
||||
# ============================
|
||||
# Supporting Models
|
||||
# ============================
|
||||
|
||||
class Tunables(BaseModel):
|
||||
enable_rag: bool = Field(True, alias="enableRAG")
|
||||
enable_tools: bool = Field(True, alias="enableTools")
|
||||
enable_context: bool = Field(True, alias="enableContext")
|
||||
|
||||
class CandidateQuestion(BaseModel):
|
||||
question: str
|
||||
tunables: Optional[Tunables] = None
|
||||
|
||||
class Location(BaseModel):
|
||||
city: str
|
||||
state: Optional[str] = None
|
||||
country: str
|
||||
postal_code: Optional[str] = Field(None, alias="postalCode")
|
||||
latitude: Optional[float] = None
|
||||
longitude: Optional[float] = None
|
||||
remote: Optional[bool] = None
|
||||
hybrid_options: Optional[List[str]] = Field(None, alias="hybridOptions")
|
||||
address: Optional[str] = None
|
||||
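# (Illustrative sketch, not part of the original file.) Fields use camelCase aliases,
# so API-shaped payloads construct and serialize directly:
#
#   loc = Location(city="Portland", country="USA", postalCode="97201")
#   loc.postal_code                  # -> "97201" (snake_case attribute in Python)
#   loc.model_dump(by_alias=True)    # -> {..., "postalCode": "97201", ...}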
|
||||
class Skill(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
name: str
|
||||
category: str
|
||||
level: SkillLevel
|
||||
years_of_experience: Optional[int] = Field(None, alias="yearsOfExperience")
|
||||
|
||||
class WorkExperience(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
company_name: str = Field(..., alias="companyName")
|
||||
position: str
|
||||
start_date: datetime = Field(..., alias="startDate")
|
||||
end_date: Optional[datetime] = Field(None, alias="endDate")
|
||||
is_current: bool = Field(..., alias="isCurrent")
|
||||
description: str
|
||||
skills: List[str]
|
||||
location: Location
|
||||
achievements: Optional[List[str]] = None
|
||||
|
||||
class Education(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
institution: str
|
||||
degree: str
|
||||
field_of_study: str = Field(..., alias="fieldOfStudy")
|
||||
start_date: datetime = Field(..., alias="startDate")
|
||||
end_date: Optional[datetime] = Field(None, alias="endDate")
|
||||
is_current: bool = Field(..., alias="isCurrent")
|
||||
gpa: Optional[float] = None
|
||||
achievements: Optional[List[str]] = None
|
||||
location: Optional[Location] = None
|
||||
|
||||
class Language(BaseModel):
|
||||
language: str
|
||||
proficiency: LanguageProficiency
|
||||
|
||||
class Certification(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
name: str
|
||||
issuing_organization: str = Field(..., alias="issuingOrganization")
|
||||
issue_date: datetime = Field(..., alias="issueDate")
|
||||
expiration_date: Optional[datetime] = Field(None, alias="expirationDate")
|
||||
credential_id: Optional[str] = Field(None, alias="credentialId")
|
||||
credential_url: Optional[HttpUrl] = Field(None, alias="credentialUrl")
|
||||
|
||||
class SocialLink(BaseModel):
|
||||
platform: SocialPlatform
|
||||
url: HttpUrl
|
||||
|
||||
class DesiredSalary(BaseModel):
|
||||
amount: float
|
||||
currency: str
|
||||
period: SalaryPeriod
|
||||
|
||||
class SalaryRange(BaseModel):
|
||||
min: float
|
||||
max: float
|
||||
currency: str
|
||||
period: SalaryPeriod
|
||||
is_visible: bool = Field(..., alias="isVisible")
|
||||
|
||||
class PointOfContact(BaseModel):
|
||||
name: str
|
||||
position: str
|
||||
email: EmailStr
|
||||
phone: Optional[str] = None
|
||||
|
||||
class RefreshToken(BaseModel):
|
||||
token: str
|
||||
expires_at: datetime = Field(..., alias="expiresAt")
|
||||
device: str
|
||||
ip_address: str = Field(..., alias="ipAddress")
|
||||
is_revoked: bool = Field(..., alias="isRevoked")
|
||||
revoked_reason: Optional[str] = Field(None, alias="revokedReason")
|
||||
|
||||
class Attachment(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
file_name: str = Field(..., alias="fileName")
|
||||
file_type: str = Field(..., alias="fileType")
|
||||
file_size: int = Field(..., alias="fileSize")
|
||||
file_url: str = Field(..., alias="fileUrl")
|
||||
uploaded_at: datetime = Field(..., alias="uploadedAt")
|
||||
is_processed: bool = Field(..., alias="isProcessed")
|
||||
processing_result: Optional[Any] = Field(None, alias="processingResult")
|
||||
thumbnail_url: Optional[str] = Field(None, alias="thumbnailUrl")
|
||||
|
||||
class MessageReaction(BaseModel):
|
||||
user_id: str = Field(..., alias="userId")
|
||||
reaction: str
|
||||
timestamp: datetime
|
||||
|
||||
class EditHistory(BaseModel):
|
||||
content: str
|
||||
edited_at: datetime = Field(..., alias="editedAt")
|
||||
edited_by: str = Field(..., alias="editedBy")
|
||||
|
||||
class CustomQuestion(BaseModel):
|
||||
question: str
|
||||
answer: str
|
||||
|
||||
class CandidateContact(BaseModel):
|
||||
email: EmailStr
|
||||
phone: Optional[str] = None
|
||||
|
||||
class ApplicationDecision(BaseModel):
|
||||
status: Literal["accepted", "rejected"]
|
||||
reason: Optional[str] = None
|
||||
date: datetime
|
||||
by: str
|
||||
|
||||
class SkillAssessment(BaseModel):
|
||||
skill_name: str = Field(..., alias="skillName")
|
||||
score: Annotated[float, Field(ge=0, le=10)]
|
||||
comments: Optional[str] = None
|
||||
|
||||
class NotificationPreference(BaseModel):
|
||||
type: NotificationType
|
||||
events: List[str]
|
||||
is_enabled: bool = Field(..., alias="isEnabled")
|
||||
|
||||
class AccessibilitySettings(BaseModel):
|
||||
font_size: FontSize = Field(..., alias="fontSize")
|
||||
high_contrast: bool = Field(..., alias="highContrast")
|
||||
reduce_motion: bool = Field(..., alias="reduceMotion")
|
||||
screen_reader: bool = Field(..., alias="screenReader")
|
||||
color_blind_mode: Optional[ColorBlindMode] = Field(None, alias="colorBlindMode")
|
||||
|
||||
class ProcessingStep(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
type: ProcessingStepType
|
||||
parameters: Dict[str, Any]
|
||||
order: int
|
||||
depends_on: Optional[List[str]] = Field(None, alias="dependsOn")
|
||||
|
||||
class RetrievalParameters(BaseModel):
|
||||
search_type: SearchType = Field(..., alias="searchType")
|
||||
top_k: int = Field(..., alias="topK")
|
||||
similarity_threshold: Optional[float] = Field(None, alias="similarityThreshold")
|
||||
reranker_model: Optional[str] = Field(None, alias="rerankerModel")
|
||||
use_keyword_boost: bool = Field(..., alias="useKeywordBoost")
|
||||
filter_options: Optional[Dict[str, Any]] = Field(None, alias="filterOptions")
|
||||
context_window: int = Field(..., alias="contextWindow")
|
||||
|
||||
class ErrorDetail(BaseModel):
|
||||
code: str
|
||||
message: str
|
||||
details: Optional[Any] = None
|
||||
|
||||
# ============================
|
||||
# Main Models
|
||||
# ============================
|
||||
|
||||
# Base user model without user_type field
|
||||
class BaseUser(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
email: EmailStr
|
||||
phone: Optional[str] = None
|
||||
created_at: datetime = Field(..., alias="createdAt")
|
||||
updated_at: datetime = Field(..., alias="updatedAt")
|
||||
last_login: Optional[datetime] = Field(None, alias="lastLogin")
|
||||
profile_image: Optional[str] = Field(None, alias="profileImage")
|
||||
status: UserStatus
|
||||
|
||||
class Config:
|
||||
use_enum_values = True
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
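# (Illustrative note, not part of the original file.) With populate_by_name enabled,
# subclasses such as Candidate accept either naming convention when validating,
# e.g. "firstName" from an API payload or first_name from Python code:
#
#   Candidate.model_validate({..., "firstName": "Ada", "lastName": "Lovelace", ...})
#   Candidate(..., first_name="Ada", last_name="Lovelace", ...)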
|
||||
# Generic base user with user_type for API responses
|
||||
class BaseUserWithType(BaseUser):
|
||||
user_type: UserType = Field(..., alias="userType")
|
||||
|
||||
class Candidate(BaseUser):
|
||||
user_type: Literal[UserType.CANDIDATE] = Field(UserType.CANDIDATE, alias="userType")
|
||||
username: str
|
||||
first_name: str = Field(..., alias="firstName")
|
||||
last_name: str = Field(..., alias="lastName")
|
||||
full_name: str = Field(..., alias="fullName")
|
||||
description: Optional[str] = None
|
||||
resume: Optional[str] = None
|
||||
skills: List[Skill]
|
||||
experience: List[WorkExperience]
|
||||
questions: List[CandidateQuestion] = []
|
||||
education: List[Education]
|
||||
preferred_job_types: List[EmploymentType] = Field(..., alias="preferredJobTypes")
|
||||
desired_salary: Optional[DesiredSalary] = Field(None, alias="desiredSalary")
|
||||
location: Location
|
||||
availability_date: Optional[datetime] = Field(None, alias="availabilityDate")
|
||||
summary: Optional[str] = None
|
||||
languages: List[Language]
|
||||
certifications: List[Certification]
|
||||
job_applications: Optional[List["JobApplication"]] = Field(None, alias="jobApplications")
|
||||
has_profile: bool = Field(default=False, alias="hasProfile")
|
||||
# Used for AI generated personas
|
||||
age: Optional[int] = None
|
||||
gender: Optional[UserGender] = None
|
||||
ethnicity: Optional[str] = None
|
||||
|
||||
class Employer(BaseUser):
|
||||
user_type: Literal[UserType.EMPLOYER] = Field(UserType.EMPLOYER, alias="userType")
|
||||
company_name: str = Field(..., alias="companyName")
|
||||
industry: str
|
||||
description: Optional[str] = None
|
||||
company_size: str = Field(..., alias="companySize")
|
||||
company_description: str = Field(..., alias="companyDescription")
|
||||
website_url: Optional[HttpUrl] = Field(None, alias="websiteUrl")
|
||||
jobs: Optional[List["Job"]] = None
|
||||
location: Location
|
||||
company_logo: Optional[str] = Field(None, alias="companyLogo")
|
||||
social_links: Optional[List[SocialLink]] = Field(None, alias="socialLinks")
|
||||
poc: Optional[PointOfContact] = None
|
||||
|
||||
class Guest(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
session_id: str = Field(..., alias="sessionId")
|
||||
created_at: datetime = Field(..., alias="createdAt")
|
||||
last_activity: datetime = Field(..., alias="lastActivity")
|
||||
converted_to_user_id: Optional[str] = Field(None, alias="convertedToUserId")
|
||||
ip_address: Optional[str] = Field(None, alias="ipAddress")
|
||||
user_agent: Optional[str] = Field(None, alias="userAgent")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class Authentication(BaseModel):
|
||||
user_id: str = Field(..., alias="userId")
|
||||
password_hash: str = Field(..., alias="passwordHash")
|
||||
salt: str
|
||||
refresh_tokens: List[RefreshToken] = Field(..., alias="refreshTokens")
|
||||
reset_password_token: Optional[str] = Field(None, alias="resetPasswordToken")
|
||||
reset_password_expiry: Optional[datetime] = Field(None, alias="resetPasswordExpiry")
|
||||
last_password_change: datetime = Field(..., alias="lastPasswordChange")
|
||||
mfa_enabled: bool = Field(..., alias="mfaEnabled")
|
||||
mfa_method: Optional[MFAMethod] = Field(None, alias="mfaMethod")
|
||||
mfa_secret: Optional[str] = Field(None, alias="mfaSecret")
|
||||
login_attempts: int = Field(..., alias="loginAttempts")
|
||||
locked_until: Optional[datetime] = Field(None, alias="lockedUntil")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class AuthResponse(BaseModel):
|
||||
access_token: str = Field(..., alias="accessToken")
|
||||
refresh_token: str = Field(..., alias="refreshToken")
|
||||
user: Candidate | Employer
|
||||
expires_at: int = Field(..., alias="expiresAt")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class Job(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
title: str
|
||||
description: str
|
||||
responsibilities: List[str]
|
||||
requirements: List[str]
|
||||
preferred_skills: Optional[List[str]] = Field(None, alias="preferredSkills")
|
||||
employer_id: str = Field(..., alias="employerId")
|
||||
location: Location
|
||||
salary_range: Optional[SalaryRange] = Field(None, alias="salaryRange")
|
||||
employment_type: EmploymentType = Field(..., alias="employmentType")
|
||||
date_posted: datetime = Field(..., alias="datePosted")
|
||||
application_deadline: Optional[datetime] = Field(None, alias="applicationDeadline")
|
||||
is_active: bool = Field(..., alias="isActive")
|
||||
applicants: Optional[List["JobApplication"]] = None
|
||||
department: Optional[str] = None
|
||||
reports_to: Optional[str] = Field(None, alias="reportsTo")
|
||||
benefits: Optional[List[str]] = None
|
||||
visa_sponsorship: Optional[bool] = Field(None, alias="visaSponsorship")
|
||||
featured_until: Optional[datetime] = Field(None, alias="featuredUntil")
|
||||
views: int = 0
|
||||
application_count: int = Field(0, alias="applicationCount")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class InterviewFeedback(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
interview_id: str = Field(..., alias="interviewId")
|
||||
reviewer_id: str = Field(..., alias="reviewerId")
|
||||
technical_score: Annotated[float, Field(ge=0, le=10)] = Field(..., alias="technicalScore")
|
||||
cultural_score: Annotated[float, Field(ge=0, le=10)] = Field(..., alias="culturalScore")
|
||||
overall_score: Annotated[float, Field(ge=0, le=10)] = Field(..., alias="overallScore")
|
||||
strengths: List[str]
|
||||
weaknesses: List[str]
|
||||
recommendation: InterviewRecommendation
|
||||
comments: str
|
||||
created_at: datetime = Field(..., alias="createdAt")
|
||||
updated_at: datetime = Field(..., alias="updatedAt")
|
||||
is_visible: bool = Field(..., alias="isVisible")
|
||||
skill_assessments: Optional[List[SkillAssessment]] = Field(None, alias="skillAssessments")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class InterviewSchedule(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
application_id: str = Field(..., alias="applicationId")
|
||||
scheduled_date: datetime = Field(..., alias="scheduledDate")
|
||||
end_date: datetime = Field(..., alias="endDate")
|
||||
interview_type: InterviewType = Field(..., alias="interviewType")
|
||||
interviewers: List[str]
|
||||
location: Optional[Union[str, Location]] = None
|
||||
notes: Optional[str] = None
|
||||
feedback: Optional[InterviewFeedback] = None
|
||||
status: Literal["scheduled", "completed", "cancelled", "rescheduled"]
|
||||
meeting_link: Optional[HttpUrl] = Field(None, alias="meetingLink")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class JobApplication(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
job_id: str = Field(..., alias="jobId")
|
||||
candidate_id: str = Field(..., alias="candidateId")
|
||||
status: ApplicationStatus
|
||||
applied_date: datetime = Field(..., alias="appliedDate")
|
||||
updated_date: datetime = Field(..., alias="updatedDate")
|
||||
resume_version: str = Field(..., alias="resumeVersion")
|
||||
cover_letter: Optional[str] = Field(None, alias="coverLetter")
|
||||
notes: Optional[str] = None
|
||||
interview_schedules: Optional[List[InterviewSchedule]] = Field(None, alias="interviewSchedules")
|
||||
custom_questions: Optional[List[CustomQuestion]] = Field(None, alias="customQuestions")
|
||||
candidate_contact: Optional[CandidateContact] = Field(None, alias="candidateContact")
|
||||
decision: Optional[ApplicationDecision] = None
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class AIParameters(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
user_id: Optional[str] = Field(None, alias="userId")
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
model: AIModelType
|
||||
temperature: Annotated[float, Field(ge=0, le=1)]
|
||||
max_tokens: Annotated[int, Field(gt=0)] = Field(..., alias="maxTokens")
|
||||
top_p: Annotated[float, Field(ge=0, le=1)] = Field(..., alias="topP")
|
||||
frequency_penalty: Annotated[float, Field(ge=-2, le=2)] = Field(..., alias="frequencyPenalty")
|
||||
presence_penalty: Annotated[float, Field(ge=-2, le=2)] = Field(..., alias="presencePenalty")
|
||||
system_prompt: Optional[str] = Field(None, alias="systemPrompt")
|
||||
is_default: bool = Field(..., alias="isDefault")
|
||||
created_at: datetime = Field(..., alias="createdAt")
|
||||
updated_at: datetime = Field(..., alias="updatedAt")
|
||||
custom_model_config: Optional[Dict[str, Any]] = Field(None, alias="customModelConfig")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class ChatContext(BaseModel):
|
||||
type: ChatContextType
|
||||
related_entity_id: Optional[str] = Field(None, alias="relatedEntityId")
|
||||
related_entity_type: Optional[Literal["job", "candidate", "employer"]] = Field(None, alias="relatedEntityType")
|
||||
ai_parameters: AIParameters = Field(..., alias="aiParameters")
|
||||
additional_context: Optional[Dict[str, Any]] = Field(None, alias="additionalContext")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class ChatMessage(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
session_id: str = Field(..., alias="sessionId")
|
||||
sender: ChatSenderType
|
||||
sender_id: Optional[str] = Field(None, alias="senderId")
|
||||
content: str
|
||||
timestamp: datetime
|
||||
attachments: Optional[List[Attachment]] = None
|
||||
reactions: Optional[List[MessageReaction]] = None
|
||||
is_edited: bool = Field(False, alias="isEdited")
|
||||
edit_history: Optional[List[EditHistory]] = Field(None, alias="editHistory")
|
||||
metadata: Optional[Dict[str, Any]] = None
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class ChatSession(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
user_id: Optional[str] = Field(None, alias="userId")
|
||||
guest_id: Optional[str] = Field(None, alias="guestId")
|
||||
created_at: datetime = Field(..., alias="createdAt")
|
||||
last_activity: datetime = Field(..., alias="lastActivity")
|
||||
title: Optional[str] = None
|
||||
context: ChatContext
|
||||
messages: Optional[List[ChatMessage]] = None
|
||||
is_archived: bool = Field(False, alias="isArchived")
|
||||
system_prompt: Optional[str] = Field(None, alias="systemPrompt")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
    @model_validator(mode="after")
    def validate_user_or_guest(self):
        # Require at least one identifier: a registered user or a guest session.
        if not self.user_id and not self.guest_id:
            raise ValueError('Either user_id or guest_id must be provided')
        return self
|
||||
|
||||
class DataSourceConfiguration(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
rag_config_id: str = Field(..., alias="ragConfigId")
|
||||
name: str
|
||||
source_type: DataSourceType = Field(..., alias="sourceType")
|
||||
connection_details: Dict[str, Any] = Field(..., alias="connectionDetails")
|
||||
processing_pipeline: List[ProcessingStep] = Field(..., alias="processingPipeline")
|
||||
refresh_schedule: Optional[str] = Field(None, alias="refreshSchedule")
|
||||
last_refreshed: Optional[datetime] = Field(None, alias="lastRefreshed")
|
||||
status: Literal["active", "pending", "error", "processing"]
|
||||
error_details: Optional[str] = Field(None, alias="errorDetails")
|
||||
metadata: Optional[Dict[str, Any]] = None
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class RAGConfiguration(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
user_id: str = Field(..., alias="userId")
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
data_source_configurations: List[DataSourceConfiguration] = Field(..., alias="dataSourceConfigurations")
|
||||
embedding_model: str = Field(..., alias="embeddingModel")
|
||||
vector_store_type: VectorStoreType = Field(..., alias="vectorStoreType")
|
||||
retrieval_parameters: RetrievalParameters = Field(..., alias="retrievalParameters")
|
||||
created_at: datetime = Field(..., alias="createdAt")
|
||||
updated_at: datetime = Field(..., alias="updatedAt")
|
||||
is_default: bool = Field(..., alias="isDefault")
|
||||
version: int
|
||||
is_active: bool = Field(..., alias="isActive")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class UserActivity(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
user_id: Optional[str] = Field(None, alias="userId")
|
||||
guest_id: Optional[str] = Field(None, alias="guestId")
|
||||
activity_type: ActivityType = Field(..., alias="activityType")
|
||||
timestamp: datetime
|
||||
metadata: Dict[str, Any]
|
||||
ip_address: Optional[str] = Field(None, alias="ipAddress")
|
||||
user_agent: Optional[str] = Field(None, alias="userAgent")
|
||||
session_id: Optional[str] = Field(None, alias="sessionId")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
    @model_validator(mode="after")
    def validate_user_or_guest(self):
        # Require at least one identifier: a registered user or a guest session.
        if not self.user_id and not self.guest_id:
            raise ValueError('Either user_id or guest_id must be provided')
        return self
|
||||
|
||||
class Analytics(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
entity_type: Literal["job", "candidate", "chat", "system", "employer"] = Field(..., alias="entityType")
|
||||
entity_id: str = Field(..., alias="entityId")
|
||||
metric_type: str = Field(..., alias="metricType")
|
||||
value: float
|
||||
timestamp: datetime
|
||||
dimensions: Optional[Dict[str, Any]] = None
|
||||
segment: Optional[str] = None
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class UserPreference(BaseModel):
|
||||
user_id: str = Field(..., alias="userId")
|
||||
theme: ThemePreference
|
||||
notifications: List[NotificationPreference]
|
||||
accessibility: AccessibilitySettings
|
||||
dashboard_layout: Optional[Dict[str, Any]] = Field(None, alias="dashboardLayout")
|
||||
language: str
|
||||
timezone: str
|
||||
email_frequency: Literal["immediate", "daily", "weekly", "never"] = Field(..., alias="emailFrequency")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
# ============================
|
||||
# API Request/Response Models
|
||||
# ============================
|
||||
class Query(BaseModel):
|
||||
prompt: str
|
||||
tunables: Optional[Tunables] = None
|
||||
agent_options: Optional[Dict[str, Any]] = Field(None, alias="agentOptions")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class PaginatedRequest(BaseModel):
|
||||
page: Annotated[int, Field(ge=1)] = 1
|
||||
limit: Annotated[int, Field(ge=1, le=100)] = 20
|
||||
sort_by: Optional[str] = Field(None, alias="sortBy")
|
||||
sort_order: Optional[SortOrder] = Field(None, alias="sortOrder")
|
||||
filters: Optional[Dict[str, Any]] = None
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class SearchQuery(BaseModel):
|
||||
query: str
|
||||
filters: Optional[Dict[str, Any]] = None
|
||||
page: Annotated[int, Field(ge=1)] = 1
|
||||
limit: Annotated[int, Field(ge=1, le=100)] = 20
|
||||
sort_by: Optional[str] = Field(None, alias="sortBy")
|
||||
sort_order: Optional[SortOrder] = Field(None, alias="sortOrder")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class PaginatedResponse(BaseModel):
|
||||
data: List[Any] # Will be typed specifically when used
|
||||
total: int
|
||||
page: int
|
||||
limit: int
|
||||
total_pages: int = Field(..., alias="totalPages")
|
||||
has_more: bool = Field(..., alias="hasMore")
|
||||
class Config:
|
||||
populate_by_name = True # Allow both field names and aliases
|
||||
|
||||
class ApiResponse(BaseModel):
|
||||
success: bool
|
||||
data: Optional[Any] = None # Will be typed specifically when used
|
||||
error: Optional[ErrorDetail] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
|
||||
# Specific typed response models for common use cases
|
||||
class CandidateResponse(BaseModel):
|
||||
success: bool
|
||||
data: Optional[Candidate] = None
|
||||
error: Optional[ErrorDetail] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
|
||||
class EmployerResponse(BaseModel):
|
||||
success: bool
|
||||
data: Optional[Employer] = None
|
||||
error: Optional[ErrorDetail] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
|
||||
class JobResponse(BaseModel):
|
||||
success: bool
|
||||
data: Optional["Job"] = None
|
||||
error: Optional[ErrorDetail] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
|
||||
class CandidateListResponse(BaseModel):
|
||||
success: bool
|
||||
data: Optional[List[Candidate]] = None
|
||||
error: Optional[ErrorDetail] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
|
||||
class JobListResponse(BaseModel):
|
||||
success: bool
|
||||
data: Optional[List["Job"]] = None
|
||||
error: Optional[ErrorDetail] = None
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
|
||||
# Forward references resolution (Pydantic v2: model_rebuild)
Candidate.model_rebuild()
Employer.model_rebuild()
ChatSession.model_rebuild()
JobApplication.model_rebuild()
Job.model_rebuild()
|
207
src/focused_test.py
Normal file
@ -0,0 +1,207 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
Focused test script that tests the most important functionality
|
||||
without getting caught up in serialization format complexities
|
||||
"""
|
||||
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from models import (
|
||||
UserStatus, UserType, SkillLevel, EmploymentType,
|
||||
Candidate, Employer, Location, Skill, AIParameters, AIModelType
|
||||
)
|
||||
|
||||
def test_model_creation():
|
||||
"""Test that we can create models successfully"""
|
||||
print("🧪 Testing model creation...")
|
||||
|
||||
# Create supporting objects
|
||||
location = Location(city="Austin", country="USA")
|
||||
skill = Skill(name="Python", category="Programming", level=SkillLevel.ADVANCED)
|
||||
|
||||
# Create candidate
|
||||
candidate = Candidate(
|
||||
email="test@example.com",
|
||||
username="test_candidate",
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
status=UserStatus.ACTIVE,
|
||||
firstName="John",
|
||||
lastName="Doe",
|
||||
fullName="John Doe",
|
||||
skills=[skill],
|
||||
experience=[],
|
||||
education=[],
|
||||
preferredJobTypes=[EmploymentType.FULL_TIME],
|
||||
location=location,
|
||||
languages=[],
|
||||
certifications=[]
|
||||
)
|
||||
|
||||
# Create employer
|
||||
employer = Employer(
|
||||
email="hr@company.com",
|
||||
username="test_employer",
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
status=UserStatus.ACTIVE,
|
||||
companyName="Test Company",
|
||||
industry="Technology",
|
||||
companySize="50-200",
|
||||
companyDescription="A test company",
|
||||
location=location
|
||||
)
|
||||
|
||||
print(f"✅ Candidate: {candidate.first_name} {candidate.last_name}")
|
||||
print(f"✅ Employer: {employer.company_name}")
|
||||
print(f"✅ User types: {candidate.user_type}, {employer.user_type}")
|
||||
|
||||
return candidate, employer
|
||||
|
||||
def test_json_api_format():
|
||||
"""Test JSON serialization in API format (the most important use case)"""
|
||||
print("\n📡 Testing JSON API format...")
|
||||
|
||||
candidate, employer = test_model_creation()
|
||||
|
||||
# Serialize to JSON (API format)
|
||||
candidate_json = candidate.model_dump_json(by_alias=True)
|
||||
employer_json = employer.model_dump_json(by_alias=True)
|
||||
|
||||
print(f"✅ Candidate JSON: {len(candidate_json)} chars")
|
||||
print(f"✅ Employer JSON: {len(employer_json)} chars")
|
||||
|
||||
# Deserialize from JSON
|
||||
candidate_back = Candidate.model_validate_json(candidate_json)
|
||||
employer_back = Employer.model_validate_json(employer_json)
|
||||
|
||||
# Verify data integrity
|
||||
assert candidate_back.email == candidate.email
|
||||
assert candidate_back.first_name == candidate.first_name
|
||||
assert employer_back.company_name == employer.company_name
|
||||
|
||||
print(f"✅ JSON round-trip successful")
|
||||
print(f"✅ Data integrity verified")
|
||||
|
||||
return True
|
||||
|
||||
def test_api_dict_format():
|
||||
"""Test dictionary format with aliases (for API requests/responses)"""
|
||||
print("\n📊 Testing API dictionary format...")
|
||||
|
||||
candidate, employer = test_model_creation()
|
||||
|
||||
# Create API format dictionaries
|
||||
candidate_dict = candidate.model_dump(by_alias=True)
|
||||
employer_dict = employer.model_dump(by_alias=True)
|
||||
|
||||
# Verify camelCase aliases are used
|
||||
assert "firstName" in candidate_dict
|
||||
assert "lastName" in candidate_dict
|
||||
assert "createdAt" in candidate_dict
|
||||
assert "companyName" in employer_dict
|
||||
|
||||
print(f"✅ API format dictionaries created")
|
||||
print(f"✅ CamelCase aliases verified")
|
||||
|
||||
# Test deserializing from API format
|
||||
candidate_back = Candidate.model_validate(candidate_dict)
|
||||
employer_back = Employer.model_validate(employer_dict)
|
||||
|
||||
assert candidate_back.email == candidate.email
|
||||
assert employer_back.company_name == employer.company_name
|
||||
|
||||
print(f"✅ API format round-trip successful")
|
||||
|
||||
return True
|
||||
|
||||
def test_validation_constraints():
|
||||
"""Test that validation constraints work"""
|
||||
print("\n🔒 Testing validation constraints...")
|
||||
|
||||
# Test AI Parameters with constraints
|
||||
valid_params = AIParameters(
|
||||
name="Test Config",
|
||||
model=AIModelType.GPT_4,
|
||||
temperature=0.7, # Valid: 0-1
|
||||
maxTokens=2000, # Valid: > 0
|
||||
topP=0.95, # Valid: 0-1
|
||||
frequencyPenalty=0.0, # Valid: -2 to 2
|
||||
presencePenalty=0.0, # Valid: -2 to 2
|
||||
isDefault=True,
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now()
|
||||
)
|
||||
print(f"✅ Valid AI parameters created")
|
||||
|
||||
# Test constraint violation
|
||||
try:
|
||||
invalid_params = AIParameters(
|
||||
name="Invalid Config",
|
||||
model=AIModelType.GPT_4,
|
||||
temperature=1.5, # Invalid: > 1
|
||||
maxTokens=2000,
|
||||
topP=0.95,
|
||||
frequencyPenalty=0.0,
|
||||
presencePenalty=0.0,
|
||||
isDefault=True,
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now()
|
||||
)
|
||||
print("❌ Should have rejected invalid temperature")
|
||||
return False
|
||||
except Exception:
|
||||
print(f"✅ Constraint validation working")
|
||||
|
||||
return True
|
||||
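# (Illustrative refinement, not part of the original file.) The broad `except Exception`
# above could be narrowed to Pydantic's ValidationError, which also reports exactly
# which constraint failed:
#
#   from pydantic import ValidationError
#   try:
#       AIParameters(name="x", model=AIModelType.GPT_4, temperature=1.5, maxTokens=10,
#                    topP=0.5, frequencyPenalty=0.0, presencePenalty=0.0, isDefault=False,
#                    createdAt=datetime.now(), updatedAt=datetime.now())
#   except ValidationError as exc:
#       print(exc.errors())  # e.g. temperature must be less than or equal to 1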
|
||||
def test_enum_values():
|
||||
"""Test that enum values work correctly"""
|
||||
print("\n📋 Testing enum values...")
|
||||
|
||||
# Test that enum values are properly handled
|
||||
candidate, employer = test_model_creation()
|
||||
|
||||
# Check enum values in serialization
|
||||
candidate_dict = candidate.model_dump(by_alias=True)
|
||||
|
||||
assert candidate_dict["status"] == "active"
|
||||
assert candidate_dict["userType"] == "candidate"
|
||||
assert employer.user_type == UserType.EMPLOYER
|
||||
|
||||
print(f"✅ Enum values correctly serialized")
|
||||
print(f"✅ User types: candidate={candidate.user_type}, employer={employer.user_type}")
|
||||
|
||||
return True
|
||||
|
||||
def main():
|
||||
"""Run all focused tests"""
|
||||
print("🎯 Focused Pydantic Model Tests")
|
||||
print("=" * 40)
|
||||
|
||||
try:
|
||||
test_model_creation()
|
||||
test_json_api_format()
|
||||
test_api_dict_format()
|
||||
test_validation_constraints()
|
||||
test_enum_values()
|
||||
|
||||
print(f"\n🎉 All focused tests passed!")
|
||||
print("=" * 40)
|
||||
print("✅ Models work correctly")
|
||||
print("✅ JSON API format works")
|
||||
print("✅ Validation constraints work")
|
||||
print("✅ Enum values work")
|
||||
print("✅ Ready for type generation!")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"\n❌ Test failed: {type(e).__name__}: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = main()
|
||||
sys.exit(0 if success else 1)
|
461
src/generate_types.py
Normal file
@ -0,0 +1,461 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
Enhanced Type Generator - Generate TypeScript types from Pydantic models
|
||||
Now with command line parameters, pre-test validation, and TypeScript compilation
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import subprocess
|
||||
from typing import Any, Dict, List, Optional, Union, get_origin, get_args
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
|
||||
def run_command(command: str, description: str, cwd: str | None = None) -> bool:
|
||||
"""Run a command and return success status"""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
command,
|
||||
shell=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
cwd=cwd
|
||||
)
|
||||
|
||||
if result.returncode == 0:
|
||||
print(f"✅ {description}")
|
||||
return True
|
||||
else:
|
||||
print(f"❌ {description} failed:")
|
||||
if result.stderr.strip():
|
||||
print(f" Error: {result.stderr.strip()}")
|
||||
if result.stdout.strip():
|
||||
print(f" Output: {result.stdout.strip()}")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ {description} failed with exception: {e}")
|
||||
return False
|
||||
|
||||
def run_focused_test() -> bool:
|
||||
"""Run the focused test to validate models before generating types"""
|
||||
print("🧪 Running focused test to validate models...")
|
||||
|
||||
# Get the directory of the currently executing script
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
test_file_path = os.path.join(script_dir, "focused_test.py")
|
||||
|
||||
if not os.path.exists(test_file_path):
|
||||
print("❌ focused_test.py not found - skipping model validation")
|
||||
return False
|
||||
|
||||
return run_command(f"python {test_file_path}", "Model validation")
|
||||
|
||||
def check_typescript_available() -> bool:
|
||||
"""Check if TypeScript compiler is available"""
|
||||
return run_command("npx tsc --version", "TypeScript version check")
|
||||
|
||||
# Add current directory to Python path so we can import models
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
sys.path.insert(0, current_dir)
|
||||
|
||||
try:
|
||||
from pydantic import BaseModel # type: ignore
|
||||
except ImportError as e:
|
||||
print(f"Error importing pydantic: {e}")
|
||||
print("Make sure pydantic is installed: pip install pydantic")
|
||||
sys.exit(1)
|
||||
|
||||
def python_type_to_typescript(python_type: Any) -> str:
|
||||
"""Convert a Python type to TypeScript type string"""
|
||||
|
||||
# Handle None/null
|
||||
if python_type is type(None):
|
||||
return "null"
|
||||
|
||||
# Handle basic types
|
||||
if python_type == str:
|
||||
return "string"
|
||||
elif python_type == int or python_type == float:
|
||||
return "number"
|
||||
elif python_type == bool:
|
||||
return "boolean"
|
||||
elif python_type == dict or python_type == Dict:
|
||||
return "Record<string, any>"
|
||||
elif python_type == list or python_type == List:
|
||||
return "Array<any>"
|
||||
|
||||
# Handle typing generics
|
||||
origin = get_origin(python_type)
|
||||
args = get_args(python_type)
|
||||
|
||||
if origin is Union:
|
||||
# Handle Optional (Union[T, None])
|
||||
if len(args) == 2 and type(None) in args:
|
||||
non_none_type = next(arg for arg in args if arg is not type(None))
|
||||
return python_type_to_typescript(non_none_type)
|
||||
|
||||
# Handle other unions
|
||||
union_types = [python_type_to_typescript(arg) for arg in args if arg is not type(None)]
|
||||
return " | ".join(union_types)
|
||||
|
||||
elif origin is list or origin is List:
|
||||
if args:
|
||||
item_type = python_type_to_typescript(args[0])
|
||||
return f"Array<{item_type}>"
|
||||
return "Array<any>"
|
||||
|
||||
elif origin is dict or origin is Dict:
|
||||
if len(args) == 2:
|
||||
key_type = python_type_to_typescript(args[0])
|
||||
value_type = python_type_to_typescript(args[1])
|
||||
return f"Record<{key_type}, {value_type}>"
|
||||
return "Record<string, any>"
|
||||
|
||||
# Handle Literal types
|
||||
if hasattr(python_type, '__origin__') and str(python_type.__origin__).endswith('Literal'):
|
||||
if args:
|
||||
literal_values = [f'"{arg}"' if isinstance(arg, str) else str(arg) for arg in args]
|
||||
return " | ".join(literal_values)
|
||||
|
||||
# Handle Enum types
|
||||
if isinstance(python_type, type) and issubclass(python_type, Enum):
|
||||
enum_values = [f'"{v.value}"' for v in python_type]
|
||||
return " | ".join(enum_values)
|
||||
|
||||
# Handle datetime
|
||||
if python_type == datetime:
|
||||
return "Date"
|
||||
|
||||
# Handle Pydantic models
|
||||
if isinstance(python_type, type) and issubclass(python_type, BaseModel):
|
||||
return python_type.__name__
|
||||
|
||||
# Handle string representations
|
||||
type_str = str(python_type)
|
||||
if "EmailStr" in type_str:
|
||||
return "string"
|
||||
elif "HttpUrl" in type_str:
|
||||
return "string"
|
||||
elif "UUID" in type_str:
|
||||
return "string"
|
||||
|
||||
# Default fallback
|
||||
return "any"
|
||||
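# (Illustrative examples, not part of the original file.) Expected mappings for a
# few annotations used in models.py:
#
#   python_type_to_typescript(Optional[str])     -> "string"
#   python_type_to_typescript(List[int])         -> "Array<number>"
#   python_type_to_typescript(Dict[str, float])  -> "Record<string, number>"
#   python_type_to_typescript(datetime)          -> "Date"
#   python_type_to_typescript(UserStatus)        -> '"active" | "inactive" | "pending" | "banned"'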
|
||||
def snake_to_camel(snake_str: str) -> str:
|
||||
"""Convert snake_case to camelCase"""
|
||||
components = snake_str.split('_')
|
||||
return components[0] + ''.join(x.title() for x in components[1:])
|
||||
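# (Illustrative examples, not part of the original file.)
#   snake_to_camel("first_name")  -> "firstName"
#   snake_to_camel("created_at")  -> "createdAt"
#   snake_to_camel("id")          -> "id"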
|
||||
def process_pydantic_model(model_class) -> Dict[str, Any]:
|
||||
"""Process a Pydantic model and return TypeScript interface definition"""
|
||||
interface_name = model_class.__name__
|
||||
properties = []
|
||||
|
||||
# Get fields from the model
|
||||
if hasattr(model_class, 'model_fields'):
|
||||
# Pydantic v2
|
||||
fields = model_class.model_fields
|
||||
for field_name, field_info in fields.items():
|
||||
ts_name = snake_to_camel(field_name)
|
||||
|
||||
# Check for alias
|
||||
if hasattr(field_info, 'alias') and field_info.alias:
|
||||
ts_name = field_info.alias
|
||||
|
||||
# Get type annotation
|
||||
field_type = getattr(field_info, 'annotation', str)
|
||||
ts_type = python_type_to_typescript(field_type)
|
||||
|
||||
# Check if optional
|
||||
is_optional = False
|
||||
if hasattr(field_info, 'is_required'):
|
||||
is_optional = not field_info.is_required()
|
||||
elif hasattr(field_info, 'default'):
|
||||
is_optional = field_info.default is not None
|
||||
|
||||
properties.append({
|
||||
'name': ts_name,
|
||||
'type': ts_type,
|
||||
'optional': is_optional
|
||||
})
|
||||
|
||||
elif hasattr(model_class, '__fields__'):
|
||||
# Pydantic v1
|
||||
fields = model_class.__fields__
|
||||
for field_name, field_info in fields.items():
|
||||
ts_name = snake_to_camel(field_name)
|
||||
|
||||
if hasattr(field_info, 'alias') and field_info.alias:
|
||||
ts_name = field_info.alias
|
||||
|
||||
field_type = getattr(field_info, 'annotation', getattr(field_info, 'type_', str))
|
||||
ts_type = python_type_to_typescript(field_type)
|
||||
|
||||
is_optional = not getattr(field_info, 'required', True)
|
||||
if hasattr(field_info, 'default') and field_info.default is not None:
|
||||
is_optional = True
|
||||
|
||||
properties.append({
|
||||
'name': ts_name,
|
||||
'type': ts_type,
|
||||
'optional': is_optional
|
||||
})
|
||||
|
||||
return {
|
||||
'name': interface_name,
|
||||
'properties': properties
|
||||
}
|
||||
|
||||
def process_enum(enum_class) -> Dict[str, Any]:
|
||||
"""Process an Enum and return TypeScript type definition"""
|
||||
enum_name = enum_class.__name__
|
||||
values = [f'"{v.value}"' for v in enum_class]
|
||||
if len(values) == 0:
|
||||
raise ValueError(f"Enum class '{enum_name}' has no values.")
|
||||
return {
|
||||
'name': enum_name,
|
||||
'values': " | ".join(values)
|
||||
}
|
||||
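# (Illustrative example, not part of the original file.) For the UserStatus enum in
# models.py, process_enum would return roughly:
#
#   {"name": "UserStatus", "values": '"active" | "inactive" | "pending" | "banned"'}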
|
||||
def generate_typescript_interfaces(source_file: str):
|
||||
"""Generate TypeScript interfaces from models"""
|
||||
|
||||
print(f"📖 Scanning {source_file} for Pydantic models and enums...")
|
||||
|
||||
# Import the models module dynamically
|
||||
try:
|
||||
import importlib.util
|
||||
spec = importlib.util.spec_from_file_location("models", source_file)
|
||||
if spec is None or spec.loader is None:
|
||||
raise ImportError(f"Could not load module from {source_file}")
|
||||
|
||||
models_module = importlib.util.module_from_spec(spec)
|
||||
sys.modules["models"] = models_module
|
||||
spec.loader.exec_module(models_module)
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error importing {source_file}: {e}")
|
||||
return None
|
||||
|
||||
interfaces = []
|
||||
enums = []
|
||||
|
||||
# Scan the models module
|
||||
for name in dir(models_module):
|
||||
obj = getattr(models_module, name)
|
||||
|
||||
# Skip private attributes
|
||||
if name.startswith('_'):
|
||||
continue
|
||||
|
||||
try:
|
||||
# Check if it's a Pydantic model
|
||||
if (isinstance(obj, type) and
|
||||
issubclass(obj, BaseModel) and
|
||||
obj != BaseModel):
|
||||
|
||||
interface = process_pydantic_model(obj)
|
||||
interfaces.append(interface)
|
||||
print(f" ✅ Found Pydantic model: {name}")
|
||||
|
||||
# Check if it's an Enum
|
||||
elif (isinstance(obj, type) and
|
||||
issubclass(obj, Enum)):
|
||||
|
||||
enum_def = process_enum(obj)
|
||||
enums.append(enum_def)
|
||||
print(f" ✅ Found enum: {name}")
|
||||
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Error processing {name}: {e}")
|
||||
continue
|
||||
|
||||
print(f"\n📊 Found {len(interfaces)} interfaces and {len(enums)} enums")
|
||||
|
||||
# Generate TypeScript content
|
||||
ts_content = f"""// Generated TypeScript types from Pydantic models
|
||||
// Source: {source_file}
|
||||
// Generated on: {datetime.now().isoformat()}
|
||||
// DO NOT EDIT MANUALLY - This file is auto-generated
|
||||
|
||||
"""
|
||||
|
||||
# Add enums
|
||||
if enums:
|
||||
ts_content += "// ============================\n"
|
||||
ts_content += "// Enums\n"
|
||||
ts_content += "// ============================\n\n"
|
||||
|
||||
for enum_def in enums:
|
||||
ts_content += f"export type {enum_def['name']} = {enum_def['values']};\n\n"
|
||||
|
||||
# Add interfaces
|
||||
if interfaces:
|
||||
ts_content += "// ============================\n"
|
||||
ts_content += "// Interfaces\n"
|
||||
ts_content += "// ============================\n\n"
|
||||
|
||||
for interface in interfaces:
|
||||
ts_content += f"export interface {interface['name']} {{\n"
|
||||
|
||||
for prop in interface['properties']:
|
||||
optional_marker = "?" if prop['optional'] else ""
|
||||
ts_content += f" {prop['name']}{optional_marker}: {prop['type']};\n"
|
||||
|
||||
ts_content += "}\n\n"
|
||||
|
||||
# Add user union type if we have user types
|
||||
user_interfaces = [i for i in interfaces if i['name'] in ['Candidate', 'Employer']]
|
||||
if len(user_interfaces) >= 2:
|
||||
ts_content += "// ============================\n"
|
||||
ts_content += "// Union Types\n"
|
||||
ts_content += "// ============================\n\n"
|
||||
user_type_names = [i['name'] for i in user_interfaces]
|
||||
ts_content += f"export type User = {' | '.join(user_type_names)};\n\n"
|
||||
|
||||
# Add export statement
|
||||
ts_content += "// Export all types\n"
|
||||
ts_content += "export type { };\n"
|
||||
|
||||
return ts_content
|
||||
|
||||
def compile_typescript(ts_file: str) -> bool:
|
||||
"""Compile TypeScript file to check for syntax errors"""
|
||||
print(f"🔧 Compiling TypeScript file to check syntax...")
|
||||
|
||||
    # Check if TypeScript is available
    if not check_typescript_available():
        print("⚠️  TypeScript compiler not available - skipping compilation check")
        print("   To install: npm install -g typescript")
        return True  # Don't fail if TS isn't available

    # Run TypeScript compiler in check mode
    return run_command(
        f"npx tsc --noEmit --skipLibCheck {ts_file}",
        "TypeScript syntax validation"
    )

def main():
    """Main function with command line argument parsing"""
    parser = argparse.ArgumentParser(
        description='Generate TypeScript types from Pydantic models',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python generate_types.py                                          # Use defaults
  python generate_types.py --source models.py --output types.ts     # Specify files
  python generate_types.py --skip-test                              # Skip model validation
  python generate_types.py --skip-compile                           # Skip TS compilation
  python generate_types.py --source models.py --output types.ts --skip-test --skip-compile
        """
    )

    parser.add_argument(
        '--source', '-s',
        default='models.py',
        help='Source Python file with Pydantic models (default: models.py)'
    )

    parser.add_argument(
        '--output', '-o',
        default='types.ts',
        help='Output TypeScript file (default: types.ts)'
    )

    parser.add_argument(
        '--skip-test',
        action='store_true',
        help='Skip running focused_test.py before generation'
    )

    parser.add_argument(
        '--skip-compile',
        action='store_true',
        help='Skip TypeScript compilation check after generation'
    )

    parser.add_argument(
        '--version', '-v',
        action='version',
        version='TypeScript Generator 2.0'
    )

    args = parser.parse_args()

    print("🚀 Enhanced TypeScript Type Generator")
    print("=" * 50)
    print(f"📁 Source file: {args.source}")
    print(f"📁 Output file: {args.output}")
    print()

    try:
        # Step 1: Validate source file exists
        if not os.path.exists(args.source):
            print(f"❌ Source file '{args.source}' not found")
            sys.exit(1)

        # Step 2: Run focused test (unless skipped)
        if not args.skip_test:
            if not run_focused_test():
                print("❌ Model validation failed - aborting type generation")
                sys.exit(1)
            print()
        else:
            print("⏭️  Skipping model validation test")
            print()

        # Step 3: Generate TypeScript content
        print("🔄 Generating TypeScript types...")
        ts_content = generate_typescript_interfaces(args.source)

        if ts_content is None:
            print("❌ Failed to generate TypeScript content")
            sys.exit(1)

        # Step 4: Write to output file
        with open(args.output, 'w') as f:
            f.write(ts_content)

        file_size = len(ts_content)
        print(f"✅ TypeScript types generated: {args.output} ({file_size} characters)")

        # Step 5: Compile TypeScript (unless skipped)
        if not args.skip_compile:
            print()
            if not compile_typescript(args.output):
                print("❌ TypeScript compilation failed - check the generated file")
                sys.exit(1)
        else:
            print("⏭️  Skipping TypeScript compilation check")

        # Step 6: Success summary
        print("\n🎉 Type generation completed successfully!")
        print("=" * 50)
        print(f"✅ Generated {args.output} from {args.source}")
        print(f"✅ File size: {file_size} characters")
        if not args.skip_test:
            print("✅ Model validation passed")
        if not args.skip_compile:
            print("✅ TypeScript syntax validated")
        print("\n💡 Usage in your TypeScript project:")
        print(f"   import {{ Candidate, Employer, Job }} from './{Path(args.output).stem}';")

        return True

    except KeyboardInterrupt:
        print("\n⏹️  Type generation cancelled by user")
        return False

    except Exception as e:
        print(f"\n❌ Error generating types: {e}")
        import traceback
        traceback.print_exc()
        return False

if __name__ == "__main__":
    success = main()
    sys.exit(0 if success else 1)
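
Note: the generator can also be driven programmatically instead of through its CLI. The following is a minimal sketch, not part of the commit; it assumes this script is importable as generate_types and that generate_typescript_interfaces() can be called directly, and the output path is illustrative (it mirrors the frontend/src/types bind mount added in docker-compose):

    from pathlib import Path
    from generate_types import generate_typescript_interfaces  # assumption: module name matches this script

    # Generate TypeScript source from the Pydantic models; returns None on failure
    ts = generate_typescript_interfaces("models.py")
    if ts is None:
        raise SystemExit("type generation failed")

    out = Path("frontend/src/types/types.ts")  # illustrative path
    out.parent.mkdir(parents=True, exist_ok=True)
    out.write_text(ts)
    print(f"wrote {len(ts)} characters to {out}")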
@ -48,11 +48,13 @@ try_import("prometheus_fastapi_instrumentator")

import ollama
from contextlib import asynccontextmanager
from fastapi import FastAPI, Request, HTTPException # type: ignore
from fastapi import FastAPI, Request, HTTPException, Depends # type: ignore
from fastapi.responses import JSONResponse, StreamingResponse, FileResponse, RedirectResponse # type: ignore
from fastapi.middleware.cors import CORSMiddleware # type: ignore
import uvicorn # type: ignore
import numpy as np # type: ignore
from utils import redis_manager
import redis.asyncio as redis # type: ignore

# Prometheus
from prometheus_client import Summary # type: ignore
@ -227,12 +229,17 @@ def is_valid_uuid(value: str) -> bool:
class WebServer:
    @asynccontextmanager
    async def lifespan(self, app: FastAPI):
        # Startup
        await redis_manager.connect()

        # Shutdown
        yield
        for user in self.users:
            if user.observer:
                user.observer.stop()
                user.observer.join()
                logger.info("File watcher stopped")
        await redis_manager.disconnect()

    def __init__(self, llm, model=MODEL_NAME):
        self.app = FastAPI(lifespan=self.lifespan)
@ -279,6 +286,10 @@ class WebServer:

        self.setup_routes()

    async def get_redis(self) -> redis.Redis:
        """Dependency to get Redis client"""
        return redis_manager.get_client()

    def sanitize_input(self, input: str):
        # Validate input: allow only alphanumeric, underscores, and hyphens
        if not re.match(r'^[a-zA-Z0-9._-]+$', input): # alphanumeric, _, -, and . are valid
@ -965,9 +976,28 @@ class WebServer:
        )

        @self.app.get("/api/health")
        async def health_check():
            return JSONResponse({"status": "healthy"})
        async def health_check(redis_client: redis.Redis = Depends(self.get_redis)):
            try:
                await redis_client.ping()
                return {"status": "healthy", "redis": "connected"}
            except Exception as e:
                raise HTTPException(status_code=503, detail=f"Redis connection failed: {e}")

        @self.app.get("/api/redis/stats")
        async def redis_stats(redis_client: redis.Redis = Depends(self.get_redis)):
            try:
                info = await redis_client.info()
                return {
                    "connected_clients": info.get("connected_clients"),
                    "used_memory_human": info.get("used_memory_human"),
                    "total_commands_processed": info.get("total_commands_processed"),
                    "keyspace_hits": info.get("keyspace_hits"),
                    "keyspace_misses": info.get("keyspace_misses"),
                    "uptime_in_seconds": info.get("uptime_in_seconds")
                }
            except Exception as e:
                raise HTTPException(status_code=503, detail=f"Redis stats unavailable: {e}")

        @self.app.get("/{path:path}")
        async def serve_static(path: str, request: Request):
            full_path = os.path.join(defines.static_content, path)
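
Note: a quick way to exercise the two new endpoints once the stack is up. This is a sketch, not part of the commit; it assumes httpx is installed in the calling environment, and BASE is a placeholder for whichever port docker-compose actually publishes for the backend:

    import httpx

    BASE = "http://localhost:8911"  # placeholder: adjust to the backend's published port

    # /api/health returns 200 with {"status": "healthy", "redis": "connected"} when Redis answers PING,
    # and 503 when the connection fails.
    print(httpx.get(f"{BASE}/api/health").json())

    # /api/redis/stats surfaces a subset of INFO: clients, memory, command and hit/miss counters, uptime.
    print(httpx.get(f"{BASE}/api/redis/stats").json())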
@ -18,6 +18,7 @@ from .agents import class_registry, AnyAgent, Agent, __all__ as agents_all
from .metrics import Metrics
from .check_serializable import check_serializable
from .profile_image import generate_image, ImageRequest
from .redis_client import redis_manager

__all__ = [
    "Agent",
@ -28,6 +29,7 @@ __all__ = [
    "Conversation",
    "Metrics",
    "RagEntry",
    "redis_manager",
    "ChromaDBFileWatcher",
    'ChromaDBGetResponse',
    "start_file_watcher",
51
src/utils/redis_client.py
Normal file
@ -0,0 +1,51 @@
import redis.asyncio as redis # type: ignore
from typing import Optional
import os
import logging

logger = logging.getLogger(__name__)

class RedisManager:
    def __init__(self):
        self.redis_client: Optional[redis.Redis] = None
        self.redis_url = os.getenv("REDIS_URL", "redis://redis:6379")

    async def connect(self):
        """Initialize Redis connection"""
        try:
            self.redis_client = redis.from_url(
                self.redis_url,
                encoding="utf-8",
                decode_responses=True,
                max_connections=20,
                retry_on_timeout=True,
                socket_keepalive=True,
                socket_keepalive_options={},
                health_check_interval=30
            )

            # Test connection
            if not self.redis_client:
                raise Exception("redis client not available")

            await self.redis_client.ping()
            logger.info("Successfully connected to Redis")

        except Exception as e:
            logger.error(f"Failed to connect to Redis: {e}")
            raise

    async def disconnect(self):
        """Close Redis connection"""
        if self.redis_client:
            await self.redis_client.close()
            logger.info("Disconnected from Redis")

    def get_client(self) -> redis.Redis:
        """Get Redis client instance"""
        if not self.redis_client:
            raise RuntimeError("Redis client not initialized")
        return self.redis_client

# Global Redis manager instance
redis_manager = RedisManager()
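
Note: a minimal standalone usage sketch for RedisManager, not part of the commit. It assumes a reachable Redis at REDIS_URL and imports the module directly so the rest of the utils package is not pulled in; the key name is illustrative only:

    import asyncio
    from utils.redis_client import redis_manager

    async def smoke_test():
        # Connect, round-trip a value, then disconnect (hypothetical key name)
        await redis_manager.connect()
        client = redis_manager.get_client()
        await client.set("backstory:smoke-test", "ok", ex=60)
        # decode_responses=True means get() returns a str, not bytes
        assert await client.get("backstory:smoke-test") == "ok"
        await redis_manager.disconnect()

    asyncio.run(smoke_test())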