first commit
This commit is contained in:
13
app/routers/__init__.py
Normal file
13
app/routers/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""
|
||||
API routers package.
|
||||
"""
|
||||
|
||||
from app.routers.sessions import router as sessions_router
|
||||
from app.routers.tryouts import router as tryouts_router
|
||||
from app.routers.reports import router as reports_router
|
||||
|
||||
__all__ = [
|
||||
"sessions_router",
|
||||
"tryouts_router",
|
||||
"reports_router",
|
||||
]
|
||||
249
app/routers/admin.py
Normal file
249
app/routers/admin.py
Normal file
@@ -0,0 +1,249 @@
|
||||
"""
|
||||
Admin API router for custom admin actions.
|
||||
|
||||
Provides admin-specific endpoints for triggering calibration,
|
||||
toggling AI generation, and resetting normalization.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Header, HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.config import get_settings
|
||||
from app.database import get_db
|
||||
from app.models import Tryout, TryoutStats
|
||||
from app.services.irt_calibration import (
|
||||
calibrate_all,
|
||||
CALIBRATION_SAMPLE_THRESHOLD,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/admin", tags=["admin"])
|
||||
settings = get_settings()
|
||||
|
||||
|
||||
def get_admin_website_id(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Resolve the admin website identifier from the X-Website-ID header.

    Args:
        x_website_id: Raw header value, or None when the header is absent

    Returns:
        Website ID parsed as an integer

    Raises:
        HTTPException: 400 when the header is missing or not an integer
    """
    # Reject requests that omit the tenant header outright.
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )

    try:
        website_id = int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
    return website_id
|
||||
|
||||
|
||||
@router.post(
    "/{tryout_id}/calibrate",
    summary="Trigger IRT calibration",
    description="Trigger IRT calibration for all items in this tryout with sufficient response data.",
)
async def admin_trigger_calibration(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_admin_website_id),
) -> Dict[str, Any]:
    """
    Trigger IRT calibration for all items in a tryout.

    Runs calibration for items with >= min_calibration_sample responses.
    Updates item.irt_b, item.irt_se, and item.calibrated status.

    Args:
        tryout_id: Tryout identifier
        db: Database session
        website_id: Website ID from header

    Returns:
        Calibration results summary

    Raises:
        HTTPException: If tryout not found or calibration fails
    """
    # FIX: return annotation was Dict[str, any] — builtin `any` is a function,
    # not a type; typing.Any is the correct spelling.

    # Verify the tryout exists for this website before doing expensive work.
    tryout_result = await db.execute(
        select(Tryout).where(
            Tryout.website_id == website_id,
            Tryout.tryout_id == tryout_id,
        )
    )
    tryout = tryout_result.scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # Run calibration; fall back to the global threshold when the tryout
    # does not define its own minimum sample size.
    result = await calibrate_all(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
        min_sample_size=tryout.min_calibration_sample or CALIBRATION_SAMPLE_THRESHOLD,
    )

    return {
        "tryout_id": tryout_id,
        "total_items": result.total_items,
        "calibrated_items": result.calibrated_items,
        "failed_items": result.failed_items,
        # Service reports a 0..1 fraction; expose a rounded percentage.
        "calibration_percentage": round(result.calibration_percentage * 100, 2),
        "ready_for_irt": result.ready_for_irt,
        "message": f"Calibration complete: {result.calibrated_items}/{result.total_items} items calibrated",
    }
|
||||
|
||||
|
||||
@router.post(
    "/{tryout_id}/toggle-ai-generation",
    summary="Toggle AI generation",
    description="Toggle AI question generation for a tryout.",
)
async def admin_toggle_ai_generation(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_admin_website_id),
) -> Dict[str, Any]:
    """
    Toggle AI generation for a tryout.

    Updates Tryout.ai_generation_enabled field.

    Args:
        tryout_id: Tryout identifier
        db: Database session
        website_id: Website ID from header

    Returns:
        Updated AI generation status

    Raises:
        HTTPException: If tryout not found
    """
    # Get tryout scoped to the caller's website.
    result = await db.execute(
        select(Tryout).where(
            Tryout.website_id == website_id,
            Tryout.tryout_id == tryout_id,
        )
    )
    tryout = result.scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # Flip the flag and persist.
    tryout.ai_generation_enabled = not tryout.ai_generation_enabled
    await db.commit()
    await db.refresh(tryout)

    # BUG FIX: the original assigned this to a local named `status`, which
    # shadowed the imported fastapi `status` module for the WHOLE function
    # body — making the 404 branch above raise UnboundLocalError instead of
    # HTTPException. A distinct local name avoids the shadowing.
    new_state = "enabled" if tryout.ai_generation_enabled else "disabled"
    return {
        "tryout_id": tryout_id,
        "ai_generation_enabled": tryout.ai_generation_enabled,
        "message": f"AI generation {new_state} for tryout {tryout_id}",
    }
|
||||
|
||||
|
||||
@router.post(
    "/{tryout_id}/reset-normalization",
    summary="Reset normalization",
    description="Reset normalization to static values and clear incremental stats.",
)
async def admin_reset_normalization(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_admin_website_id),
) -> Dict[str, Any]:
    """
    Reset normalization for a tryout.

    Resets rataan, sb to static values and clears incremental stats.

    Args:
        tryout_id: Tryout identifier
        db: Database session
        website_id: Website ID from header

    Returns:
        Reset statistics

    Raises:
        HTTPException: If tryout or stats not found
    """
    # FIX: return annotation was Dict[str, any]; typing.Any is the correct type.

    # Get the stats row for this tryout; without it there is nothing to reset.
    stats_result = await db.execute(
        select(TryoutStats).where(
            TryoutStats.website_id == website_id,
            TryoutStats.tryout_id == tryout_id,
        )
    )
    stats = stats_result.scalar_one_or_none()

    if stats is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"TryoutStats for {tryout_id} not found for website {website_id}",
        )

    # Get tryout for its configured static normalization values.
    tryout_result = await db.execute(
        select(Tryout).where(
            Tryout.website_id == website_id,
            Tryout.tryout_id == tryout_id,
        )
    )
    tryout = tryout_result.scalar_one_or_none()

    if tryout:
        # Reset to the tryout's configured static values.
        # NOTE(review): assumes static_rataan/static_sb are non-null when a
        # tryout row exists — confirm, otherwise stats get None here.
        stats.rataan = tryout.static_rataan
        stats.sb = tryout.static_sb
    else:
        # No tryout row: fall back to system-wide defaults.
        stats.rataan = 500.0
        stats.sb = 100.0

    # Clear incremental stats; remember the old count for the response.
    old_participant_count = stats.participant_count
    stats.participant_count = 0
    stats.total_nm_sum = 0.0
    stats.total_nm_sq_sum = 0.0
    stats.min_nm = None
    stats.max_nm = None
    stats.last_calculated = None

    await db.commit()
    await db.refresh(stats)

    return {
        "tryout_id": tryout_id,
        "rataan": stats.rataan,
        "sb": stats.sb,
        "cleared_stats": {
            "previous_participant_count": old_participant_count,
        },
        "message": f"Normalization reset to static values (rataan={stats.rataan}, sb={stats.sb}). Incremental stats cleared.",
    }
|
||||
292
app/routers/ai.py
Normal file
292
app/routers/ai.py
Normal file
@@ -0,0 +1,292 @@
|
||||
"""
|
||||
AI Generation Router.
|
||||
|
||||
Admin endpoints for AI question generation playground.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import and_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.models.item import Item
|
||||
from app.schemas.ai import (
|
||||
AIGeneratePreviewRequest,
|
||||
AIGeneratePreviewResponse,
|
||||
AISaveRequest,
|
||||
AISaveResponse,
|
||||
AIStatsResponse,
|
||||
)
|
||||
from app.services.ai_generation import (
|
||||
generate_question,
|
||||
get_ai_stats,
|
||||
save_ai_question,
|
||||
validate_ai_model,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/admin/ai", tags=["admin", "ai-generation"])
|
||||
|
||||
|
||||
@router.post(
    "/generate-preview",
    response_model=AIGeneratePreviewResponse,
    summary="Preview AI-generated question",
    description="""
Generate a question preview using AI without saving to database.

This is an admin playground endpoint for testing AI generation quality.
Admins can retry unlimited times until satisfied with the result.

Requirements:
- basis_item_id must reference an existing item at 'sedang' level
- target_level must be 'mudah' or 'sulit'
- ai_model must be a supported OpenRouter model
""",
    responses={
        200: {"description": "Question generated successfully (preview mode)"},
        400: {"description": "Invalid request (wrong level, unsupported model)"},
        404: {"description": "Basis item not found"},
        500: {"description": "AI generation failed"},
    },
)
async def generate_preview(
    request: AIGeneratePreviewRequest,
    db: Annotated[AsyncSession, Depends(get_db)],
) -> AIGeneratePreviewResponse:
    """
    Generate AI question preview (no database save).

    - **basis_item_id**: ID of the sedang-level question to base generation on
    - **target_level**: Target difficulty (mudah/sulit)
    - **ai_model**: OpenRouter model to use (default: qwen/qwen-2.5-coder-32b-instruct)
    """
    # Validate AI model before touching the database.
    if not validate_ai_model(request.ai_model):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Unsupported AI model: {request.ai_model}. "
            f"Supported models: qwen/qwen-2.5-coder-32b-instruct, meta-llama/llama-3.3-70b-instruct",
        )

    # Fetch basis item.
    result = await db.execute(
        select(Item).where(Item.id == request.basis_item_id)
    )
    basis_item = result.scalar_one_or_none()

    if not basis_item:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Basis item not found: {request.basis_item_id}",
        )

    # Generation only works from the middle ('sedang') difficulty level.
    if basis_item.level != "sedang":
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Basis item must be 'sedang' level, got: {basis_item.level}",
        )

    # Generate question; all failures are returned as a non-success response
    # body (not an HTTP error) so the admin playground can show them inline.
    try:
        generated = await generate_question(
            basis_item=basis_item,
            target_level=request.target_level,
            ai_model=request.ai_model,
        )

        if not generated:
            return AIGeneratePreviewResponse(
                success=False,
                error="AI generation failed. Please check logs or try again.",
                ai_model=request.ai_model,
                basis_item_id=request.basis_item_id,
                target_level=request.target_level,
            )

        return AIGeneratePreviewResponse(
            success=True,
            stem=generated.stem,
            options=generated.options,
            correct=generated.correct,
            explanation=generated.explanation,
            ai_model=request.ai_model,
            basis_item_id=request.basis_item_id,
            target_level=request.target_level,
            cached=False,
        )

    except Exception as e:
        # FIX: use logger.exception with lazy %-args instead of
        # logger.error(f"...") — records the traceback and defers formatting.
        logger.exception("AI preview generation failed: %s", e)
        return AIGeneratePreviewResponse(
            success=False,
            error=f"AI generation error: {str(e)}",
            ai_model=request.ai_model,
            basis_item_id=request.basis_item_id,
            target_level=request.target_level,
        )
|
||||
|
||||
|
||||
@router.post(
    "/generate-save",
    response_model=AISaveResponse,
    summary="Save AI-generated question",
    description="""
Save an AI-generated question to the database.

This endpoint creates a new Item record with:
- generated_by='ai'
- ai_model from request
- basis_item_id linking to original question
- calibrated=False (will be calculated later)
""",
    responses={
        200: {"description": "Question saved successfully"},
        400: {"description": "Invalid request data"},
        404: {"description": "Basis item or tryout not found"},
        409: {"description": "Item already exists at this slot/level"},
        500: {"description": "Database save failed"},
    },
)
async def generate_save(
    request: AISaveRequest,
    db: Annotated[AsyncSession, Depends(get_db)],
) -> AISaveResponse:
    """
    Save AI-generated question to database.

    - **stem**: Question text
    - **options**: Dict with A, B, C, D options
    - **correct**: Correct answer (A/B/C/D)
    - **explanation**: Answer explanation (optional)
    - **tryout_id**: Tryout identifier
    - **website_id**: Website identifier
    - **basis_item_id**: Original item ID this was generated from
    - **slot**: Question slot position
    - **level**: Difficulty level
    - **ai_model**: AI model used for generation
    """
    # Verify basis item exists before accepting the save.
    basis_result = await db.execute(
        select(Item).where(Item.id == request.basis_item_id)
    )
    basis_item = basis_result.scalar_one_or_none()

    if not basis_item:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Basis item not found: {request.basis_item_id}",
        )

    # Check for duplicate (same tryout, website, slot, level) — each slot/level
    # combination may hold only one item.
    existing_result = await db.execute(
        select(Item).where(
            and_(
                Item.tryout_id == request.tryout_id,
                Item.website_id == request.website_id,
                Item.slot == request.slot,
                Item.level == request.level,
            )
        )
    )
    existing = existing_result.scalar_one_or_none()

    if existing:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Item already exists at slot={request.slot}, level={request.level} "
            f"for tryout={request.tryout_id}",
        )

    # Create GeneratedQuestion from request.
    # NOTE(review): this import is function-local, presumably to avoid a
    # circular import between the schemas and this router — confirm before
    # hoisting it to module level.
    from app.schemas.ai import GeneratedQuestion

    generated_data = GeneratedQuestion(
        stem=request.stem,
        options=request.options,
        correct=request.correct,
        explanation=request.explanation,
    )

    # Save to database; the service returns the new item ID (falsy on failure).
    item_id = await save_ai_question(
        generated_data=generated_data,
        tryout_id=request.tryout_id,
        website_id=request.website_id,
        basis_item_id=request.basis_item_id,
        slot=request.slot,
        level=request.level,
        ai_model=request.ai_model,
        db=db,
    )

    if not item_id:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to save AI-generated question",
        )

    return AISaveResponse(
        success=True,
        item_id=item_id,
    )
|
||||
|
||||
|
||||
@router.get(
    "/stats",
    response_model=AIStatsResponse,
    summary="Get AI generation statistics",
    description="""
Get statistics about AI-generated questions.

Returns:
- Total AI-generated items count
- Items count by model
- Cache hit rate (placeholder)
""",
)
async def get_stats(
    db: Annotated[AsyncSession, Depends(get_db)],
) -> AIStatsResponse:
    """
    Get AI generation statistics.
    """
    data = await get_ai_stats(db)

    # Map the service dict onto the response schema field by field, so a
    # missing key fails loudly here rather than deep inside validation.
    return AIStatsResponse(
        total_ai_items=data["total_ai_items"],
        items_by_model=data["items_by_model"],
        cache_hit_rate=data["cache_hit_rate"],
        total_cache_hits=data["total_cache_hits"],
        total_requests=data["total_requests"],
    )
|
||||
|
||||
|
||||
@router.get(
    "/models",
    summary="List supported AI models",
    description="Returns list of supported AI models for question generation.",
)
async def list_models() -> dict:
    """
    List supported AI models.
    """
    # Build each model entry separately, then assemble the response payload.
    qwen = {
        "id": "qwen/qwen-2.5-coder-32b-instruct",
        "name": "Qwen 2.5 Coder 32B",
        "description": "Fast and efficient model for question generation",
    }
    llama = {
        "id": "meta-llama/llama-3.3-70b-instruct",
        "name": "Llama 3.3 70B",
        "description": "High-quality model with better reasoning",
    }
    return {"models": [qwen, llama]}
|
||||
324
app/routers/import_export.py
Normal file
324
app/routers/import_export.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Import/Export API router for Excel question migration.
|
||||
|
||||
Endpoints:
|
||||
- POST /api/v1/import/preview: Preview Excel import without saving
|
||||
- POST /api/v1/import/questions: Import questions from Excel to database
|
||||
- GET /api/v1/export/questions: Export questions to Excel file
|
||||
"""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, File, Form, Header, HTTPException, UploadFile, status
|
||||
from fastapi.responses import FileResponse
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.services.excel_import import (
|
||||
bulk_insert_items,
|
||||
export_questions_to_excel,
|
||||
parse_excel_import,
|
||||
validate_excel_structure,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/api/v1/import-export", tags=["import-export"])
|
||||
|
||||
|
||||
def get_website_id_from_header(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Extract and validate the website ID from the X-Website-ID header.

    Args:
        x_website_id: Raw header value, or None if the header was omitted

    Returns:
        Website ID as an integer

    Raises:
        HTTPException: 400 if the header is missing or not numeric
    """
    # A missing header is a client error, reported explicitly.
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )

    try:
        parsed = int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
    return parsed
|
||||
|
||||
|
||||
@router.post(
    "/preview",
    summary="Preview Excel import",
    description="Parse Excel file and return preview without saving to database.",
)
async def preview_import(
    file: UploadFile = File(..., description="Excel file (.xlsx)"),
    website_id: int = Depends(get_website_id_from_header),
) -> dict:
    """
    Preview Excel import without saving to database.

    Args:
        file: Excel file upload (.xlsx format)
        website_id: Website ID from header

    Returns:
        Dict with:
        - items_count: Number of items parsed
        - preview: List of item previews (first 5 only when validation passes)
        - validation_errors: List of validation errors if any
        - has_errors: Whether any validation errors were found

    Raises:
        HTTPException: If file format is invalid or parsing fails
    """
    # Cheap extension check before reading any file content.
    if not file.filename or not file.filename.lower().endswith('.xlsx'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File must be .xlsx format",
        )

    # Spool the upload to a temp file because the Excel helpers work on paths.
    # NOTE(review): file.read() loads the entire upload into memory — assumes
    # an upstream request-size limit exists; confirm.
    try:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".xlsx") as temp_file:
            content = await file.read()
            temp_file.write(content)
            temp_file_path = temp_file.name
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to save uploaded file: {str(e)}",
        )

    try:
        # Validate Excel structure (sheet layout) before row-level parsing.
        validation = validate_excel_structure(temp_file_path)
        if not validation["valid"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail={
                    "error": "Invalid Excel structure",
                    "validation_errors": validation["errors"],
                },
            )

        # Parse Excel (tryout_id is optional for preview)
        tryout_id = "preview"  # Use dummy tryout_id for preview
        result = parse_excel_import(
            temp_file_path,
            website_id=website_id,
            tryout_id=tryout_id
        )

        # Row-level problems are returned in the body (not raised) so the
        # client can display them alongside the parsed items.
        if result["validation_errors"]:
            return {
                "items_count": result["items_count"],
                "preview": result["items"],
                "validation_errors": result["validation_errors"],
                "has_errors": True,
            }

        # Return limited preview (first 5 items)
        preview_items = result["items"][:5]

        return {
            "items_count": result["items_count"],
            "preview": preview_items,
            "validation_errors": [],
            "has_errors": False,
        }

    finally:
        # Clean up the temporary file on every path, success or failure.
        if os.path.exists(temp_file_path):
            os.unlink(temp_file_path)
|
||||
|
||||
|
||||
@router.post(
    "/questions",
    summary="Import questions from Excel",
    description="Parse Excel file and import questions to database with 100% data integrity.",
)
async def import_questions(
    file: UploadFile = File(..., description="Excel file (.xlsx)"),
    website_id: int = Depends(get_website_id_from_header),
    tryout_id: str = Form(..., description="Tryout identifier"),
    db: AsyncSession = Depends(get_db),
) -> dict:
    """
    Import questions from Excel to database.

    Validates file format, parses Excel content, checks for duplicates,
    and performs bulk insert with rollback on error.

    Args:
        file: Excel file upload (.xlsx format)
        website_id: Website ID from header
        tryout_id: Tryout identifier
        db: Async database session

    Returns:
        Dict with:
        - imported: Number of items successfully imported
        - duplicates: Number of duplicate items skipped
        - errors: List of errors if any

    Raises:
        HTTPException: If file format is invalid, validation fails, or import fails
    """
    # Cheap extension check before reading any file content.
    if not file.filename or not file.filename.lower().endswith('.xlsx'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File must be .xlsx format",
        )

    # Spool the upload to a temp file because the Excel helpers work on paths.
    try:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".xlsx") as temp_file:
            content = await file.read()
            temp_file.write(content)
            temp_file_path = temp_file.name
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to save uploaded file: {str(e)}",
        )

    try:
        # Validate Excel structure (sheet layout) before row-level parsing.
        validation = validate_excel_structure(temp_file_path)
        if not validation["valid"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail={
                    "error": "Invalid Excel structure",
                    "validation_errors": validation["errors"],
                },
            )

        # Parse Excel into item dicts scoped to this website/tryout.
        result = parse_excel_import(
            temp_file_path,
            website_id=website_id,
            tryout_id=tryout_id
        )

        # Unlike the preview endpoint, row-level validation errors block the
        # import entirely (all-or-nothing integrity).
        if result["validation_errors"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail={
                    "error": "Validation failed",
                    "validation_errors": result["validation_errors"],
                },
            )

        # Check if items were parsed at all.
        if result["items_count"] == 0:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="No items found in Excel file",
            )

        # Bulk insert items in one operation.
        insert_result = await bulk_insert_items(result["items"], db)

        # Check for insertion errors.
        if insert_result["errors"]:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail={
                    "error": "Import failed",
                    "errors": insert_result["errors"],
                },
            )

        # Duplicates are surfaced as a 409 even though some rows were inserted;
        # the detail payload carries both counts so the client can report them.
        if insert_result["duplicate_count"] > 0:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail={
                    "message": f"Import completed with {insert_result['duplicate_count']} duplicate(s) skipped",
                    "imported": insert_result["inserted_count"],
                    "duplicates": insert_result["duplicate_count"],
                },
            )

        return {
            "message": "Import successful",
            "imported": insert_result["inserted_count"],
            "duplicates": insert_result["duplicate_count"],
        }

    finally:
        # Clean up the temporary file on every path, success or failure.
        if os.path.exists(temp_file_path):
            os.unlink(temp_file_path)
|
||||
|
||||
|
||||
@router.get(
    "/export/questions",
    summary="Export questions to Excel",
    description="Export questions for a tryout to Excel file in standardized format.",
)
async def export_questions(
    tryout_id: str,
    website_id: int = Depends(get_website_id_from_header),
    db: AsyncSession = Depends(get_db),
) -> FileResponse:
    """
    Export questions to Excel file.

    Creates Excel file with standardized format:
    - Row 2: KUNCI (answer key)
    - Row 4: TK (p-values)
    - Row 5: BOBOT (weights)
    - Rows 6+: Question data

    Args:
        tryout_id: Tryout identifier
        website_id: Website ID from header
        db: Async database session

    Returns:
        FileResponse with Excel file

    Raises:
        HTTPException: If tryout has no questions or export fails
    """
    try:
        # The export service writes the workbook to disk and returns its path.
        output_path = await export_questions_to_excel(
            tryout_id=tryout_id,
            website_id=website_id,
            db=db
        )

        # Return file for download.
        # NOTE(review): the exported file is never deleted after the response
        # is sent — confirm whether a background cleanup task is needed to
        # avoid accumulating files on disk.
        filename = f"tryout_{tryout_id}_questions.xlsx"
        return FileResponse(
            path=output_path,
            media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            filename=filename,
        )

    except ValueError as e:
        # ValueError presumably signals "tryout/questions not found" from the
        # service layer — mapped to 404; confirm against the service contract.
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        )
    except Exception as e:
        # Any other failure is surfaced as a 500 with the error message.
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Export failed: {str(e)}",
        )
|
||||
279
app/routers/normalization.py
Normal file
279
app/routers/normalization.py
Normal file
@@ -0,0 +1,279 @@
|
||||
"""
|
||||
Normalization API router for dynamic normalization management.
|
||||
|
||||
Endpoints:
|
||||
- GET /tryout/{tryout_id}/normalization: Get normalization configuration
|
||||
- PUT /tryout/{tryout_id}/normalization: Update normalization settings
|
||||
- POST /tryout/{tryout_id}/normalization/reset: Reset normalization stats
|
||||
- GET /tryout/{tryout_id}/normalization/validate: Validate dynamic normalization
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.services.config_management import (
|
||||
get_normalization_config,
|
||||
reset_normalization_stats,
|
||||
toggle_normalization_mode,
|
||||
update_config,
|
||||
)
|
||||
from app.services.normalization import (
|
||||
validate_dynamic_normalization,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/tryout", tags=["normalization"])
|
||||
|
||||
|
||||
def get_website_id_from_header(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Resolve the tenant website ID from the X-Website-ID request header.

    Args:
        x_website_id: Raw header value, or None when the header is absent

    Returns:
        Website ID as an integer

    Raises:
        HTTPException: 400 when the header is absent or non-numeric
    """
    # Both failure modes map to 400 with a message naming the exact problem.
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )

    try:
        return int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
|
||||
|
||||
|
||||
@router.get(
    "/{tryout_id}/normalization",
    summary="Get normalization configuration",
    description="Retrieve current normalization configuration including mode, static values, dynamic values, and threshold status.",
)
async def get_normalization_endpoint(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Get normalization configuration for a tryout.

    Returns:
        Normalization configuration with:
        - mode (static/dynamic/hybrid)
        - current rataan, sb (from TryoutStats)
        - static_rataan, static_sb (from Tryout config)
        - participant_count
        - threshold_status (ready for dynamic or not)

    Raises:
        HTTPException: If tryout not found
    """
    # The config service raises ValueError for an unknown tryout; translate
    # that into a 404 for the API consumer.
    try:
        return await get_normalization_config(db, website_id, tryout_id)
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(exc),
        )
|
||||
|
||||
|
||||
@router.put(
    "/{tryout_id}/normalization",
    summary="Update normalization settings",
    description="Update normalization mode and static values for a tryout.",
)
async def update_normalization_endpoint(
    tryout_id: str,
    normalization_mode: Optional[str] = None,
    static_rataan: Optional[float] = None,
    static_sb: Optional[float] = None,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Update normalization settings for a tryout.

    Args:
        tryout_id: Tryout identifier.
        normalization_mode: New normalization mode (static/dynamic/hybrid).
        static_rataan: New static mean value (must be > 0).
        static_sb: New static standard deviation (must be > 0).
        db: Database session.
        website_id: Website ID from the X-Website-ID header.

    Returns:
        The updated normalization configuration.

    Raises:
        HTTPException: 400 on validation failure or when no fields are
            provided; 404 when the tryout does not exist.
    """
    updates: Dict[str, object] = {}

    if normalization_mode is not None:
        if normalization_mode not in ("static", "dynamic", "hybrid"):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid normalization_mode: {normalization_mode}. Must be 'static', 'dynamic', or 'hybrid'",
            )
        updates["normalization_mode"] = normalization_mode

    # static_rataan and static_sb share the same "strictly positive"
    # constraint; validate both through one loop instead of two
    # copy-pasted branches (previous code duplicated this check).
    for field_name, value in (("static_rataan", static_rataan), ("static_sb", static_sb)):
        if value is not None:
            if value <= 0:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"{field_name} must be greater than 0",
                )
            updates[field_name] = value

    if not updates:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="No updates provided",
        )

    try:
        # Persist the changes, then re-read so the response reflects the
        # stored state (including any derived/threshold fields).
        await update_config(db, website_id, tryout_id, updates)
        return await get_normalization_config(db, website_id, tryout_id)
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        )
|
||||
|
||||
|
||||
@router.post(
    "/{tryout_id}/normalization/reset",
    summary="Reset normalization stats",
    description="Reset TryoutStats to initial values and switch to static normalization mode.",
)
async def reset_normalization_endpoint(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Reset normalization stats for a tryout.

    Clears TryoutStats back to its initial state (participant_count=0,
    running sums cleared) and switches normalization_mode to "static".

    Args:
        tryout_id: Tryout identifier.
        db: Database session.
        website_id: Website ID from the X-Website-ID header.

    Returns:
        A success message plus the reset participant count and the
        resulting normalization mode.

    Raises:
        HTTPException: 404 when the tryout does not exist.
    """
    try:
        fresh_stats = await reset_normalization_stats(db, website_id, tryout_id)
        current = await get_normalization_config(db, website_id, tryout_id)
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(exc),
        )

    return {
        "message": "Normalization stats reset successfully",
        "tryout_id": tryout_id,
        "participant_count": fresh_stats.participant_count,
        "normalization_mode": current["normalization_mode"],
    }
|
||||
|
||||
|
||||
@router.get(
    "/{tryout_id}/normalization/validate",
    summary="Validate dynamic normalization",
    description="Validate that dynamic normalization produces expected distribution (mean≈500±5, SD≈100±5).",
)
async def validate_normalization_endpoint(
    tryout_id: str,
    target_mean: float = 500.0,
    target_sd: float = 100.0,
    mean_tolerance: float = 5.0,
    sd_tolerance: float = 5.0,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Validate dynamic normalization for a tryout.

    Checks whether the computed rataan and sb fall within the given
    tolerances of the target values.

    Args:
        tryout_id: Tryout identifier.
        target_mean: Target mean (default: 500).
        target_sd: Target standard deviation (default: 100).
        mean_tolerance: Allowed deviation from target mean (default: 5).
        sd_tolerance: Allowed deviation from target SD (default: 5).
        db: Database session.
        website_id: Website ID from the X-Website-ID header.

    Returns:
        Validation payload with is_valid, the echoed targets/tolerances,
        and the detailed validation breakdown.

    Raises:
        HTTPException: 404 when the tryout does not exist.
    """
    try:
        is_valid, details = await validate_dynamic_normalization(
            db=db,
            website_id=website_id,
            tryout_id=tryout_id,
            target_mean=target_mean,
            target_sd=target_sd,
            mean_tolerance=mean_tolerance,
            sd_tolerance=sd_tolerance,
        )
    except ValueError as exc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(exc),
        )

    # Echo the inputs alongside the verdict so callers can interpret the
    # result without re-reading their own request.
    return {
        "tryout_id": tryout_id,
        "is_valid": is_valid,
        "target_mean": target_mean,
        "target_sd": target_sd,
        "mean_tolerance": mean_tolerance,
        "sd_tolerance": sd_tolerance,
        "details": details,
    }
|
||||
792
app/routers/reports.py
Normal file
792
app/routers/reports.py
Normal file
@@ -0,0 +1,792 @@
|
||||
"""
|
||||
Reports API router for comprehensive reporting.
|
||||
|
||||
Endpoints:
|
||||
- GET /reports/student/performance: Get student performance report
|
||||
- GET /reports/items/analysis: Get item analysis report
|
||||
- GET /reports/calibration/status: Get calibration status report
|
||||
- GET /reports/tryout/comparison: Get tryout comparison report
|
||||
- POST /reports/schedule: Schedule a report
|
||||
- GET /reports/export/{schedule_id}/{format}: Export scheduled report
|
||||
"""
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import List, Literal, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, status
|
||||
from fastapi.responses import FileResponse
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.schemas.report import (
|
||||
StudentPerformanceReportOutput,
|
||||
AggregatePerformanceStatsOutput,
|
||||
StudentPerformanceRecordOutput,
|
||||
ItemAnalysisReportOutput,
|
||||
ItemAnalysisRecordOutput,
|
||||
CalibrationStatusReportOutput,
|
||||
CalibrationItemStatusOutput,
|
||||
TryoutComparisonReportOutput,
|
||||
TryoutComparisonRecordOutput,
|
||||
ReportScheduleRequest,
|
||||
ReportScheduleOutput,
|
||||
ReportScheduleResponse,
|
||||
ExportResponse,
|
||||
)
|
||||
from app.services.reporting import (
|
||||
generate_student_performance_report,
|
||||
generate_item_analysis_report,
|
||||
generate_calibration_status_report,
|
||||
generate_tryout_comparison_report,
|
||||
export_report_to_csv,
|
||||
export_report_to_excel,
|
||||
export_report_to_pdf,
|
||||
schedule_report,
|
||||
get_scheduled_report,
|
||||
list_scheduled_reports,
|
||||
cancel_scheduled_report,
|
||||
StudentPerformanceReport,
|
||||
ItemAnalysisReport,
|
||||
CalibrationStatusReport,
|
||||
TryoutComparisonReport,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/reports", tags=["reports"])
|
||||
|
||||
|
||||
def get_website_id_from_header(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Extract and validate the website ID from the X-Website-ID header.

    Args:
        x_website_id: Raw header value supplied by FastAPI, or None when
            the header is absent.

    Returns:
        The website ID parsed as an integer.

    Raises:
        HTTPException: 400 when the header is missing or not an integer.
    """
    # Missing header — reject before attempting to parse.
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )

    try:
        parsed = int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
    return parsed
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Student Performance Report Endpoints
|
||||
# =============================================================================
|
||||
|
||||
@router.get(
    "/student/performance",
    response_model=StudentPerformanceReportOutput,
    summary="Get student performance report",
    description="Generate student performance report with individual and aggregate statistics.",
)
async def get_student_performance_report(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    date_start: Optional[datetime] = None,
    date_end: Optional[datetime] = None,
    format_type: Literal["individual", "aggregate", "both"] = "both",
) -> StudentPerformanceReportOutput:
    """
    Get the student performance report for a tryout.

    Returns individual student records and/or aggregate statistics,
    optionally restricted to a date window.
    """
    # Only build a date-range filter when at least one bound was given;
    # omitted bounds are simply left out of the dict.
    date_range = None
    if date_start or date_end:
        date_range = {
            key: bound
            for key, bound in (("start", date_start), ("end", date_end))
            if bound
        }

    report = await generate_student_performance_report(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
        date_range=date_range,
        format_type=format_type,
    )
    return _convert_student_performance_report(report)
|
||||
|
||||
|
||||
def _convert_student_performance_report(report: StudentPerformanceReport) -> StudentPerformanceReportOutput:
    """
    Convert a StudentPerformanceReport dataclass into its Pydantic output model.

    Copies the aggregate statistics and every individual record field by
    field, and serializes the optional date range to ISO-8601 strings so
    the response body is JSON-friendly.
    """
    # The dataclass keeps datetime objects; the output schema expects
    # ISO strings, so convert each present bound.
    date_range_str = None
    if report.date_range:
        date_range_str = {}
        if report.date_range.get("start"):
            date_range_str["start"] = report.date_range["start"].isoformat()
        if report.date_range.get("end"):
            date_range_str["end"] = report.date_range["end"].isoformat()

    return StudentPerformanceReportOutput(
        generated_at=report.generated_at,
        tryout_id=report.tryout_id,
        website_id=report.website_id,
        date_range=date_range_str,
        # Tryout-level summary statistics.
        aggregate=AggregatePerformanceStatsOutput(
            tryout_id=report.aggregate.tryout_id,
            participant_count=report.aggregate.participant_count,
            avg_nm=report.aggregate.avg_nm,
            std_nm=report.aggregate.std_nm,
            min_nm=report.aggregate.min_nm,
            max_nm=report.aggregate.max_nm,
            median_nm=report.aggregate.median_nm,
            avg_nn=report.aggregate.avg_nn,
            std_nn=report.aggregate.std_nn,
            avg_theta=report.aggregate.avg_theta,
            pass_rate=report.aggregate.pass_rate,
            avg_time_spent=report.aggregate.avg_time_spent,
        ),
        # One output record per student session.
        individual_records=[
            StudentPerformanceRecordOutput(
                session_id=r.session_id,
                wp_user_id=r.wp_user_id,
                tryout_id=r.tryout_id,
                NM=r.NM,
                NN=r.NN,
                theta=r.theta,
                theta_se=r.theta_se,
                total_benar=r.total_benar,
                time_spent=r.time_spent,
                start_time=r.start_time,
                end_time=r.end_time,
                scoring_mode_used=r.scoring_mode_used,
                rataan_used=r.rataan_used,
                sb_used=r.sb_used,
            )
            for r in report.individual_records
        ],
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Item Analysis Report Endpoints
|
||||
# =============================================================================
|
||||
|
||||
@router.get(
    "/items/analysis",
    response_model=ItemAnalysisReportOutput,
    summary="Get item analysis report",
    description="Generate item analysis report with difficulty, discrimination, and information functions.",
)
async def get_item_analysis_report(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    filter_by: Optional[Literal["difficulty", "calibrated", "discrimination"]] = None,
    difficulty_level: Optional[Literal["mudah", "sedang", "sulit"]] = None,
) -> ItemAnalysisReportOutput:
    """
    Get item analysis report.

    Returns item difficulty, discrimination, and information function data.

    Args:
        tryout_id: Tryout identifier.
        db: Database session.
        website_id: Website ID from the X-Website-ID header.
        filter_by: Optional filter dimension (difficulty/calibrated/discrimination).
        difficulty_level: Optional difficulty-level filter (mudah/sedang/sulit).

    Returns:
        ItemAnalysisReportOutput with per-item CTT and IRT statistics.
    """
    report = await generate_item_analysis_report(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
        filter_by=filter_by,
        difficulty_level=difficulty_level,
    )

    # Map the service-layer dataclass onto the response schema, copying
    # each item record field by field.
    return ItemAnalysisReportOutput(
        generated_at=report.generated_at,
        tryout_id=report.tryout_id,
        website_id=report.website_id,
        total_items=report.total_items,
        items=[
            ItemAnalysisRecordOutput(
                item_id=r.item_id,
                slot=r.slot,
                level=r.level,
                ctt_p=r.ctt_p,
                ctt_bobot=r.ctt_bobot,
                ctt_category=r.ctt_category,
                irt_b=r.irt_b,
                irt_se=r.irt_se,
                calibrated=r.calibrated,
                calibration_sample_size=r.calibration_sample_size,
                correctness_rate=r.correctness_rate,
                item_total_correlation=r.item_total_correlation,
                information_values=r.information_values,
                optimal_theta_range=r.optimal_theta_range,
            )
            for r in report.items
        ],
        summary=report.summary,
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Calibration Status Report Endpoints
|
||||
# =============================================================================
|
||||
|
||||
@router.get(
    "/calibration/status",
    response_model=CalibrationStatusReportOutput,
    summary="Get calibration status report",
    description="Generate calibration status report with progress tracking and readiness metrics.",
)
async def get_calibration_status_report(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
) -> CalibrationStatusReportOutput:
    """
    Get calibration status report.

    Returns calibration progress, items awaiting calibration, and IRT
    readiness status.

    Args:
        tryout_id: Tryout identifier.
        db: Database session.
        website_id: Website ID from the X-Website-ID header.

    Returns:
        CalibrationStatusReportOutput populated from the generated report.
    """
    report = await generate_calibration_status_report(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
    )

    def _to_status_output(r) -> CalibrationItemStatusOutput:
        """Map one calibration item record onto its output schema."""
        return CalibrationItemStatusOutput(
            item_id=r.item_id,
            slot=r.slot,
            level=r.level,
            sample_size=r.sample_size,
            calibrated=r.calibrated,
            irt_b=r.irt_b,
            irt_se=r.irt_se,
            ctt_p=r.ctt_p,
        )

    # Both item lists use the same record-to-schema mapping; previously
    # the eight-field construction was copy-pasted for each list.
    return CalibrationStatusReportOutput(
        generated_at=report.generated_at,
        tryout_id=report.tryout_id,
        website_id=report.website_id,
        total_items=report.total_items,
        calibrated_items=report.calibrated_items,
        calibration_percentage=report.calibration_percentage,
        items_awaiting_calibration=[
            _to_status_output(r) for r in report.items_awaiting_calibration
        ],
        avg_calibration_sample_size=report.avg_calibration_sample_size,
        estimated_time_to_90_percent=report.estimated_time_to_90_percent,
        ready_for_irt_rollout=report.ready_for_irt_rollout,
        items=[_to_status_output(r) for r in report.items],
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Tryout Comparison Report Endpoints
|
||||
# =============================================================================
|
||||
|
||||
@router.get(
    "/tryout/comparison",
    response_model=TryoutComparisonReportOutput,
    summary="Get tryout comparison report",
    description="Generate tryout comparison report across dates or subjects.",
)
async def get_tryout_comparison_report(
    tryout_ids: str,  # Comma-separated list
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    group_by: Literal["date", "subject"] = "date",
) -> TryoutComparisonReportOutput:
    """
    Get tryout comparison report.

    Compares tryouts across dates or subjects.

    Args:
        tryout_ids: Comma-separated tryout identifiers (at least two).
        db: Database session.
        website_id: Website ID from the X-Website-ID header.
        group_by: Comparison dimension, "date" or "subject".

    Returns:
        TryoutComparisonReportOutput with per-tryout records and trends.

    Raises:
        HTTPException: 400 when fewer than two non-empty IDs are given.
    """
    # Drop blank entries so inputs like "a,b," or "a, ,b" neither count
    # toward the minimum nor reach the service as empty IDs.
    tryout_id_list = [tid.strip() for tid in tryout_ids.split(",") if tid.strip()]

    if len(tryout_id_list) < 2:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="At least 2 tryout IDs are required for comparison",
        )

    report = await generate_tryout_comparison_report(
        tryout_ids=tryout_id_list,
        website_id=website_id,
        db=db,
        group_by=group_by,
    )

    return TryoutComparisonReportOutput(
        generated_at=report.generated_at,
        comparison_type=report.comparison_type,
        tryouts=[
            TryoutComparisonRecordOutput(
                tryout_id=r.tryout_id,
                date=r.date,
                subject=r.subject,
                participant_count=r.participant_count,
                avg_nm=r.avg_nm,
                avg_nn=r.avg_nn,
                avg_theta=r.avg_theta,
                std_nm=r.std_nm,
                calibration_percentage=r.calibration_percentage,
            )
            for r in report.tryouts
        ],
        trends=report.trends,
        normalization_impact=report.normalization_impact,
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report Scheduling Endpoints
|
||||
# =============================================================================
|
||||
|
||||
@router.post(
    "/schedule",
    response_model=ReportScheduleResponse,
    summary="Schedule a report",
    description="Schedule a report for automatic generation on a daily, weekly, or monthly basis.",
)
async def create_report_schedule(
    request: ReportScheduleRequest,
    db: AsyncSession = Depends(get_db),
) -> ReportScheduleResponse:
    """
    Schedule a report for automatic generation.

    Registers the schedule with the reporting service and echoes back
    the new schedule ID and its first planned run time.
    """
    new_schedule_id = schedule_report(
        report_type=request.report_type,
        schedule=request.schedule,
        tryout_ids=request.tryout_ids,
        website_id=request.website_id,
        recipients=request.recipients,
        export_format=request.export_format,
    )

    # Look the entry back up to report when it will first run.
    entry = get_scheduled_report(new_schedule_id)
    next_run = entry.next_run if entry else None

    return ReportScheduleResponse(
        schedule_id=new_schedule_id,
        message=f"Report scheduled successfully for {request.schedule} generation",
        next_run=next_run,
    )
|
||||
|
||||
|
||||
@router.get(
    "/schedule/{schedule_id}",
    response_model=ReportScheduleOutput,
    summary="Get scheduled report details",
    description="Get details of a scheduled report.",
)
async def get_scheduled_report_details(
    schedule_id: str,
    website_id: int = Depends(get_website_id_from_header),
) -> ReportScheduleOutput:
    """
    Get the configuration and status of one scheduled report.

    Raises:
        HTTPException: 404 when the schedule does not exist, 403 when it
            belongs to a different website.
    """
    entry = get_scheduled_report(schedule_id)

    if not entry:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Scheduled report {schedule_id} not found",
        )

    # Schedules are scoped per website; never expose another tenant's entry.
    if entry.website_id != website_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied to this scheduled report",
        )

    # The output schema mirrors the stored entry attribute-for-attribute.
    field_names = (
        "schedule_id",
        "report_type",
        "schedule",
        "tryout_ids",
        "website_id",
        "recipients",
        "format",
        "created_at",
        "last_run",
        "next_run",
        "is_active",
    )
    return ReportScheduleOutput(
        **{name: getattr(entry, name) for name in field_names}
    )
|
||||
|
||||
|
||||
@router.get(
    "/schedule",
    response_model=List[ReportScheduleOutput],
    summary="List scheduled reports",
    description="List all scheduled reports for a website.",
)
async def list_scheduled_reports_endpoint(
    website_id: int = Depends(get_website_id_from_header),
) -> List[ReportScheduleOutput]:
    """
    List all scheduled reports for the current website.
    """
    # The output schema mirrors the stored entry attribute-for-attribute.
    field_names = (
        "schedule_id",
        "report_type",
        "schedule",
        "tryout_ids",
        "website_id",
        "recipients",
        "format",
        "created_at",
        "last_run",
        "next_run",
        "is_active",
    )
    return [
        ReportScheduleOutput(**{name: getattr(entry, name) for name in field_names})
        for entry in list_scheduled_reports(website_id=website_id)
    ]
|
||||
|
||||
|
||||
@router.delete(
    "/schedule/{schedule_id}",
    summary="Cancel scheduled report",
    description="Cancel a scheduled report.",
)
async def cancel_scheduled_report_endpoint(
    schedule_id: str,
    website_id: int = Depends(get_website_id_from_header),
) -> dict:
    """
    Cancel a scheduled report, removing it from the system.

    Raises:
        HTTPException: 404 when the schedule does not exist, 403 when it
            belongs to a different website, 500 when cancellation fails.
    """
    entry = get_scheduled_report(schedule_id)

    if not entry:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Scheduled report {schedule_id} not found",
        )

    # Schedules are tenant-scoped; refuse cross-website cancellation.
    if entry.website_id != website_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied to this scheduled report",
        )

    if not cancel_scheduled_report(schedule_id):
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to cancel scheduled report",
        )

    return {
        "message": f"Scheduled report {schedule_id} cancelled successfully",
        "schedule_id": schedule_id,
    }
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report Export Endpoints
|
||||
# =============================================================================
|
||||
|
||||
@router.get(
    "/export/{schedule_id}/{format}",
    summary="Export scheduled report",
    description="Generate and export a scheduled report in the specified format.",
)
async def export_scheduled_report(
    schedule_id: str,
    format: Literal["csv", "xlsx", "pdf"],
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Export a scheduled report.

    Regenerates the report described by the schedule on demand and
    streams it back as a file download in the requested format.

    Args:
        schedule_id: Identifier of the scheduled report.
        format: Export format — csv, xlsx, or pdf.
        db: Database session.
        website_id: Website ID from the X-Website-ID header.

    Returns:
        FileResponse with the exported file.

    Raises:
        HTTPException: 404 when the schedule does not exist, 403 when it
            belongs to a different website, 500 when generation or export
            fails.
    """
    scheduled = get_scheduled_report(schedule_id)

    if not scheduled:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Scheduled report {schedule_id} not found",
        )

    # Schedules are tenant-scoped; refuse cross-website exports.
    if scheduled.website_id != website_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied to this scheduled report",
        )

    # Generate report based on type
    report = None
    base_filename = f"report_{scheduled.report_type}_{schedule_id}"

    try:
        # Single-tryout report types use the first configured tryout ID;
        # with no IDs, `report` stays None and we fail with a 500 below.
        if scheduled.report_type == "student_performance":
            if len(scheduled.tryout_ids) > 0:
                report = await generate_student_performance_report(
                    tryout_id=scheduled.tryout_ids[0],
                    website_id=website_id,
                    db=db,
                )
        elif scheduled.report_type == "item_analysis":
            if len(scheduled.tryout_ids) > 0:
                report = await generate_item_analysis_report(
                    tryout_id=scheduled.tryout_ids[0],
                    website_id=website_id,
                    db=db,
                )
        elif scheduled.report_type == "calibration_status":
            if len(scheduled.tryout_ids) > 0:
                report = await generate_calibration_status_report(
                    tryout_id=scheduled.tryout_ids[0],
                    website_id=website_id,
                    db=db,
                )
        elif scheduled.report_type == "tryout_comparison":
            # Comparison reports operate on the full tryout ID list.
            report = await generate_tryout_comparison_report(
                tryout_ids=scheduled.tryout_ids,
                website_id=website_id,
                db=db,
            )

        if not report:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to generate report",
            )

        # Export to requested format
        if format == "csv":
            file_path = export_report_to_csv(report, base_filename)
            media_type = "text/csv"
        elif format == "xlsx":
            file_path = export_report_to_excel(report, base_filename)
            media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        else:  # pdf
            file_path = export_report_to_pdf(report, base_filename)
            media_type = "application/pdf"

        # Return file
        return FileResponse(
            path=file_path,
            media_type=media_type,
            filename=os.path.basename(file_path),
        )

    except HTTPException:
        # Re-raise our own HTTP errors untouched so the broad handler
        # below does not rewrap them as 500s.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to export report: {str(e)}",
        )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Direct Export Endpoints (without scheduling)
|
||||
# =============================================================================
|
||||
|
||||
@router.get(
    "/student/performance/export/{format}",
    summary="Export student performance report directly",
    description="Generate and export student performance report directly without scheduling.",
)
async def export_student_performance_direct(
    format: Literal["csv", "xlsx", "pdf"],
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    date_start: Optional[datetime] = None,
    date_end: Optional[datetime] = None,
):
    """Generate and download a student performance report without scheduling."""
    # Only build a date-range filter when at least one bound was given.
    date_range = None
    if date_start or date_end:
        date_range = {
            key: bound
            for key, bound in (("start", date_start), ("end", date_end))
            if bound
        }

    report = await generate_student_performance_report(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
        date_range=date_range,
    )

    base_filename = f"student_performance_{tryout_id}"

    # Dispatch table: format -> (exporter, MIME type).
    exporters = {
        "csv": (export_report_to_csv, "text/csv"),
        "xlsx": (export_report_to_excel, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
        "pdf": (export_report_to_pdf, "application/pdf"),
    }
    export_fn, media_type = exporters[format]
    file_path = export_fn(report, base_filename)

    return FileResponse(
        path=file_path,
        media_type=media_type,
        filename=os.path.basename(file_path),
    )
|
||||
|
||||
|
||||
@router.get(
    "/items/analysis/export/{format}",
    summary="Export item analysis report directly",
    description="Generate and export item analysis report directly without scheduling.",
)
async def export_item_analysis_direct(
    format: Literal["csv", "xlsx", "pdf"],
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    filter_by: Optional[Literal["difficulty", "calibrated", "discrimination"]] = None,
    difficulty_level: Optional[Literal["mudah", "sedang", "sulit"]] = None,
):
    """Generate and download an item analysis report without scheduling."""
    report = await generate_item_analysis_report(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
        filter_by=filter_by,
        difficulty_level=difficulty_level,
    )

    base_filename = f"item_analysis_{tryout_id}"

    # Dispatch table: format -> (exporter, MIME type).
    exporters = {
        "csv": (export_report_to_csv, "text/csv"),
        "xlsx": (export_report_to_excel, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
        "pdf": (export_report_to_pdf, "application/pdf"),
    }
    export_fn, media_type = exporters[format]
    file_path = export_fn(report, base_filename)

    return FileResponse(
        path=file_path,
        media_type=media_type,
        filename=os.path.basename(file_path),
    )
|
||||
|
||||
|
||||
@router.get(
    "/calibration/status/export/{format}",
    summary="Export calibration status report directly",
    description="Generate and export calibration status report directly without scheduling.",
)
async def export_calibration_status_direct(
    format: Literal["csv", "xlsx", "pdf"],
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """Generate and download a calibration status report without scheduling."""
    report = await generate_calibration_status_report(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
    )

    base_filename = f"calibration_status_{tryout_id}"

    # Dispatch table: format -> (exporter, MIME type).
    exporters = {
        "csv": (export_report_to_csv, "text/csv"),
        "xlsx": (export_report_to_excel, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
        "pdf": (export_report_to_pdf, "application/pdf"),
    }
    export_fn, media_type = exporters[format]
    file_path = export_fn(report, base_filename)

    return FileResponse(
        path=file_path,
        media_type=media_type,
        filename=os.path.basename(file_path),
    )
|
||||
|
||||
|
||||
@router.get(
    "/tryout/comparison/export/{format}",
    summary="Export tryout comparison report directly",
    description="Generate and export tryout comparison report directly without scheduling.",
)
async def export_tryout_comparison_direct(
    format: Literal["csv", "xlsx", "pdf"],
    tryout_ids: str,  # Comma-separated
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    group_by: Literal["date", "subject"] = "date",
):
    """
    Generate and download a tryout comparison report without scheduling.

    Args:
        format: Export format — csv, xlsx, or pdf.
        tryout_ids: Comma-separated tryout identifiers (at least two).
        db: Database session.
        website_id: Website ID from the X-Website-ID header.
        group_by: Comparison dimension, "date" or "subject".

    Returns:
        FileResponse with the exported file.

    Raises:
        HTTPException: 400 when fewer than two non-empty IDs are given.
    """
    # Drop blank entries so inputs like "a,b," or "a, ,b" neither count
    # toward the minimum nor reach the service as empty IDs.
    tryout_id_list = [tid.strip() for tid in tryout_ids.split(",") if tid.strip()]

    if len(tryout_id_list) < 2:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="At least 2 tryout IDs are required for comparison",
        )

    report = await generate_tryout_comparison_report(
        tryout_ids=tryout_id_list,
        website_id=website_id,
        db=db,
        group_by=group_by,
    )

    base_filename = "tryout_comparison"

    if format == "csv":
        file_path = export_report_to_csv(report, base_filename)
        media_type = "text/csv"
    elif format == "xlsx":
        file_path = export_report_to_excel(report, base_filename)
        media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    else:
        file_path = export_report_to_pdf(report, base_filename)
        media_type = "application/pdf"

    return FileResponse(
        path=file_path,
        media_type=media_type,
        filename=os.path.basename(file_path),
    )
|
||||
402
app/routers/sessions.py
Normal file
402
app/routers/sessions.py
Normal file
@@ -0,0 +1,402 @@
|
||||
"""
|
||||
Session API router for tryout session management.
|
||||
|
||||
Endpoints:
|
||||
- POST /session/{session_id}/complete: Submit answers and complete session
|
||||
- GET /session/{session_id}: Get session details
|
||||
- POST /session: Create new session
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.database import get_db
|
||||
from app.models.item import Item
|
||||
from app.models.session import Session
|
||||
from app.models.tryout import Tryout
|
||||
from app.models.tryout_stats import TryoutStats
|
||||
from app.models.user_answer import UserAnswer
|
||||
from app.schemas.session import (
|
||||
SessionCompleteRequest,
|
||||
SessionCompleteResponse,
|
||||
SessionCreateRequest,
|
||||
SessionResponse,
|
||||
UserAnswerOutput,
|
||||
)
|
||||
from app.services.ctt_scoring import (
|
||||
calculate_ctt_bobot,
|
||||
calculate_ctt_nm,
|
||||
calculate_ctt_nn,
|
||||
get_total_bobot_max,
|
||||
update_tryout_stats,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/session", tags=["sessions"])
|
||||
|
||||
|
||||
def get_website_id_from_header(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Extract and validate the tenant website ID from the X-Website-ID header.

    Args:
        x_website_id: Raw header value, or None when the header is absent.

    Returns:
        The website ID parsed as an integer.

    Raises:
        HTTPException: 400 when the header is missing or not an integer.
    """
    # Missing header is a client error, reported explicitly.
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )
    try:
        parsed = int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
    return parsed
|
||||
|
||||
|
||||
@router.post(
    "/{session_id}/complete",
    response_model=SessionCompleteResponse,
    summary="Complete session with answers",
    description="Submit user answers, calculate CTT scores, and complete the session.",
)
async def complete_session(
    session_id: str,
    request: SessionCompleteRequest,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
) -> SessionCompleteResponse:
    """
    Complete a session by submitting answers and calculating CTT scores.

    Process:
    1. Validate session exists and is not completed
    2. For each answer: check is_correct, calculate bobot_earned
    3. Save UserAnswer records
    4. Calculate CTT scores (total_benar, total_bobot_earned, NM)
    5. Update Session with CTT results
    6. Update TryoutStats incrementally
    7. Return session with scores

    Args:
        session_id: Unique session identifier
        request: Session completion request with end_time and user_answers
        db: Database session
        website_id: Website ID from header

    Returns:
        SessionCompleteResponse with CTT scores

    Raises:
        HTTPException: If session not found, already completed, or validation fails
    """
    # Get session with tryout relationship eagerly loaded (needed below for
    # normalization settings without a lazy-load on the async session).
    result = await db.execute(
        select(Session)
        .options(selectinload(Session.tryout))
        .where(
            Session.session_id == session_id,
            Session.website_id == website_id,
        )
    )
    session = result.scalar_one_or_none()

    if session is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session {session_id} not found",
        )

    # Completing twice would double-count stats; reject.
    if session.is_completed:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Session is already completed",
        )

    # Get tryout configuration
    tryout = session.tryout

    # Load all items for this tryout once, keyed by id, to score answers.
    items_result = await db.execute(
        select(Item).where(
            Item.website_id == website_id,
            Item.tryout_id == session.tryout_id,
        )
    )
    items = {item.id: item for item in items_result.scalars().all()}

    # Process each answer
    total_benar = 0
    total_bobot_earned = 0.0
    user_answer_records = []

    for answer_input in request.user_answers:
        item = items.get(answer_input.item_id)

        if item is None:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Item {answer_input.item_id} not found in tryout {session.tryout_id}",
            )

        # Answers are compared case-insensitively against the key.
        is_correct = answer_input.response.upper() == item.correct_answer.upper()

        # Bobot (weight) is only earned on a correct answer; items without a
        # configured ctt_bobot contribute 0.
        bobot_earned = 0.0
        if is_correct:
            total_benar += 1
            if item.ctt_bobot is not None:
                bobot_earned = item.ctt_bobot
            total_bobot_earned += bobot_earned

        # Create UserAnswer record
        user_answer = UserAnswer(
            session_id=session.session_id,
            wp_user_id=session.wp_user_id,
            website_id=website_id,
            tryout_id=session.tryout_id,
            item_id=item.id,
            response=answer_input.response.upper(),
            is_correct=is_correct,
            time_spent=answer_input.time_spent,
            scoring_mode_used=session.scoring_mode_used,
            bobot_earned=bobot_earned,
        )
        user_answer_records.append(user_answer)
        db.add(user_answer)

    # Calculate total_bobot_max for NM calculation
    try:
        total_bobot_max = await get_total_bobot_max(
            db, website_id, session.tryout_id, level="sedang"
        )
    except ValueError:
        # Fallback: calculate from items we have
        total_bobot_max = sum(
            item.ctt_bobot or 0 for item in items.values() if item.level == "sedang"
        )
        if total_bobot_max == 0:
            # If no bobot values, use count of questions
            total_bobot_max = len(items)

    # Calculate CTT NM (Nilai Mentah)
    nm = calculate_ctt_nm(total_bobot_earned, total_bobot_max)

    # Resolve normalization parameters based on tryout configuration.
    if tryout.normalization_mode == "static":
        rataan = tryout.static_rataan
        sb = tryout.static_sb
    else:
        # "dynamic" and "hybrid" share the same rule (the two branches were
        # previously duplicated verbatim): use live cohort stats once enough
        # participants exist, otherwise fall back to the static values.
        stats_result = await db.execute(
            select(TryoutStats).where(
                TryoutStats.website_id == website_id,
                TryoutStats.tryout_id == session.tryout_id,
            )
        )
        stats = stats_result.scalar_one_or_none()

        if stats and stats.participant_count >= tryout.min_sample_for_dynamic:
            # Individual stat fields may still be NULL; fall back per-field.
            rataan = stats.rataan or tryout.static_rataan
            sb = stats.sb or tryout.static_sb
        else:
            # Not enough data, use static values
            rataan = tryout.static_rataan
            sb = tryout.static_sb

    # Calculate CTT NN (Nilai Nasional)
    nn = calculate_ctt_nn(nm, rataan, sb)

    # Update session with results
    session.end_time = request.end_time
    session.is_completed = True
    session.total_benar = total_benar
    session.total_bobot_earned = total_bobot_earned
    session.NM = nm
    session.NN = nn
    session.rataan_used = rataan
    session.sb_used = sb

    # Update tryout stats incrementally
    await update_tryout_stats(db, website_id, session.tryout_id, nm)

    # Commit all changes
    await db.commit()

    # Refresh to get updated relationships
    await db.refresh(session)

    # Build response
    return SessionCompleteResponse(
        id=session.id,
        session_id=session.session_id,
        wp_user_id=session.wp_user_id,
        website_id=session.website_id,
        tryout_id=session.tryout_id,
        start_time=session.start_time,
        end_time=session.end_time,
        is_completed=session.is_completed,
        scoring_mode_used=session.scoring_mode_used,
        total_benar=session.total_benar,
        total_bobot_earned=session.total_bobot_earned,
        NM=session.NM,
        NN=session.NN,
        rataan_used=session.rataan_used,
        sb_used=session.sb_used,
        user_answers=[
            UserAnswerOutput(
                id=ua.id,
                item_id=ua.item_id,
                response=ua.response,
                is_correct=ua.is_correct,
                time_spent=ua.time_spent,
                bobot_earned=ua.bobot_earned,
                scoring_mode_used=ua.scoring_mode_used,
            )
            for ua in user_answer_records
        ],
    )
|
||||
|
||||
|
||||
@router.get(
    "/{session_id}",
    response_model=SessionResponse,
    summary="Get session details",
    description="Retrieve session details including scores if completed.",
)
async def get_session(
    session_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
) -> SessionResponse:
    """
    Fetch a single session scoped to the calling website.

    Args:
        session_id: Unique session identifier
        db: Database session
        website_id: Website ID from header

    Returns:
        SessionResponse with session details

    Raises:
        HTTPException: 404 if the session does not exist for this website
    """
    query = select(Session).where(
        Session.session_id == session_id,
        Session.website_id == website_id,
    )
    session = (await db.execute(query)).scalar_one_or_none()

    if session is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Session {session_id} not found",
        )

    return SessionResponse.model_validate(session)
|
||||
|
||||
|
||||
@router.post(
    "/",
    response_model=SessionResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Create new session",
    description="Create a new tryout session for a student.",
)
async def create_session(
    request: SessionCreateRequest,
    db: AsyncSession = Depends(get_db),
) -> SessionResponse:
    """
    Create a new session.

    Args:
        request: Session creation request
        db: Database session

    Returns:
        SessionResponse with created session

    Raises:
        HTTPException: If tryout not found or session already exists
    """
    # The referenced tryout must exist for this website.
    tryout_query = select(Tryout).where(
        Tryout.website_id == request.website_id,
        Tryout.tryout_id == request.tryout_id,
    )
    tryout = (await db.execute(tryout_query)).scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {request.tryout_id} not found for website {request.website_id}",
        )

    # session_id is globally unique; reject duplicates with 409.
    duplicate_query = select(Session).where(Session.session_id == request.session_id)
    if (await db.execute(duplicate_query)).scalar_one_or_none():
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Session {request.session_id} already exists",
        )

    # Fresh session starts now (UTC), incomplete, with zeroed score fields.
    session = Session(
        session_id=request.session_id,
        wp_user_id=request.wp_user_id,
        website_id=request.website_id,
        tryout_id=request.tryout_id,
        scoring_mode_used=request.scoring_mode,
        start_time=datetime.now(timezone.utc),
        is_completed=False,
        total_benar=0,
        total_bobot_earned=0.0,
    )

    db.add(session)
    await db.commit()
    await db.refresh(session)

    return SessionResponse.model_validate(session)
|
||||
458
app/routers/tryouts.py
Normal file
458
app/routers/tryouts.py
Normal file
@@ -0,0 +1,458 @@
|
||||
"""
|
||||
Tryout API router for tryout configuration and management.
|
||||
|
||||
Endpoints:
|
||||
- GET /tryout/{tryout_id}/config: Get tryout configuration
|
||||
- PUT /tryout/{tryout_id}/normalization: Update normalization settings
|
||||
- GET /tryout: List tryouts for a website
|
||||
"""
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, status
from sqlalchemy import Integer, cast, func, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.database import get_db
|
||||
from app.models.item import Item
|
||||
from app.models.tryout import Tryout
|
||||
from app.models.tryout_stats import TryoutStats
|
||||
from app.schemas.tryout import (
|
||||
NormalizationUpdateRequest,
|
||||
NormalizationUpdateResponse,
|
||||
TryoutConfigBrief,
|
||||
TryoutConfigResponse,
|
||||
TryoutStatsResponse,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/tryout", tags=["tryouts"])
|
||||
|
||||
|
||||
def get_website_id_from_header(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Dependency: read the X-Website-ID header and return it as an int.

    Args:
        x_website_id: Raw header value, or None when the header is absent.

    Returns:
        The website ID as an integer.

    Raises:
        HTTPException: 400 when the header is missing or non-numeric.
    """
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )
    try:
        website_id = int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
    else:
        return website_id
|
||||
|
||||
|
||||
@router.get(
    "/{tryout_id}/config",
    response_model=TryoutConfigResponse,
    summary="Get tryout configuration",
    description="Retrieve tryout configuration including scoring mode, normalization settings, and current stats.",
)
async def get_tryout_config(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
) -> TryoutConfigResponse:
    """
    Return the full configuration of one tryout, including live stats.

    Returns:
        TryoutConfigResponse with scoring_mode, normalization_mode, and current_stats

    Raises:
        HTTPException: 404 if the tryout does not exist for this website
    """
    # Eager-load stats so the response can be built without a lazy load.
    config_query = (
        select(Tryout)
        .options(selectinload(Tryout.stats))
        .where(
            Tryout.website_id == website_id,
            Tryout.tryout_id == tryout_id,
        )
    )
    tryout = (await db.execute(config_query)).scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # Stats are optional: a tryout with no participants yet has none.
    stats = tryout.stats
    current_stats = (
        TryoutStatsResponse(
            participant_count=stats.participant_count,
            rataan=stats.rataan,
            sb=stats.sb,
            min_nm=stats.min_nm,
            max_nm=stats.max_nm,
            last_calculated=stats.last_calculated,
        )
        if stats
        else None
    )

    return TryoutConfigResponse(
        id=tryout.id,
        website_id=tryout.website_id,
        tryout_id=tryout.tryout_id,
        name=tryout.name,
        description=tryout.description,
        scoring_mode=tryout.scoring_mode,
        selection_mode=tryout.selection_mode,
        normalization_mode=tryout.normalization_mode,
        min_sample_for_dynamic=tryout.min_sample_for_dynamic,
        static_rataan=tryout.static_rataan,
        static_sb=tryout.static_sb,
        ai_generation_enabled=tryout.ai_generation_enabled,
        hybrid_transition_slot=tryout.hybrid_transition_slot,
        min_calibration_sample=tryout.min_calibration_sample,
        theta_estimation_method=tryout.theta_estimation_method,
        fallback_to_ctt_on_error=tryout.fallback_to_ctt_on_error,
        current_stats=current_stats,
        created_at=tryout.created_at,
        updated_at=tryout.updated_at,
    )
|
||||
|
||||
|
||||
@router.put(
    "/{tryout_id}/normalization",
    response_model=NormalizationUpdateResponse,
    summary="Update normalization settings",
    description="Update normalization mode and static values for a tryout.",
)
async def update_normalization(
    tryout_id: str,
    request: NormalizationUpdateRequest,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
) -> NormalizationUpdateResponse:
    """
    Partially update the normalization configuration of a tryout.

    Only fields present (non-None) in the request body are applied.

    Args:
        tryout_id: Tryout identifier
        request: Normalization update request
        db: Database session
        website_id: Website ID from header

    Returns:
        NormalizationUpdateResponse with updated settings

    Raises:
        HTTPException: 404 if the tryout does not exist for this website
    """
    tryout_query = select(Tryout).where(
        Tryout.website_id == website_id,
        Tryout.tryout_id == tryout_id,
    )
    tryout = (await db.execute(tryout_query)).scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # Apply each optional field only when the client supplied it.
    for attr, value in (
        ("normalization_mode", request.normalization_mode),
        ("static_rataan", request.static_rataan),
        ("static_sb", request.static_sb),
    ):
        if value is not None:
            setattr(tryout, attr, value)

    # Read the current participant count for the response (0 if no stats row).
    stats_query = select(TryoutStats).where(
        TryoutStats.website_id == website_id,
        TryoutStats.tryout_id == tryout_id,
    )
    stats = (await db.execute(stats_query)).scalar_one_or_none()
    current_participant_count = stats.participant_count if stats else 0

    await db.commit()
    await db.refresh(tryout)

    return NormalizationUpdateResponse(
        tryout_id=tryout.tryout_id,
        normalization_mode=tryout.normalization_mode,
        static_rataan=tryout.static_rataan,
        static_sb=tryout.static_sb,
        will_switch_to_dynamic_at=tryout.min_sample_for_dynamic,
        current_participant_count=current_participant_count,
    )
|
||||
|
||||
|
||||
@router.get(
    "/",
    response_model=List[TryoutConfigBrief],
    summary="List tryouts",
    description="List all tryouts for a website.",
)
async def list_tryouts(
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
) -> List[TryoutConfigBrief]:
    """
    List every tryout belonging to the calling website.

    Args:
        db: Database session
        website_id: Website ID from header

    Returns:
        List of TryoutConfigBrief summaries (stats eagerly loaded)
    """
    listing_query = (
        select(Tryout)
        .options(selectinload(Tryout.stats))
        .where(Tryout.website_id == website_id)
    )
    rows = (await db.execute(listing_query)).scalars().all()

    def _to_brief(t) -> TryoutConfigBrief:
        # Tryouts without a stats row report zero participants.
        return TryoutConfigBrief(
            tryout_id=t.tryout_id,
            name=t.name,
            scoring_mode=t.scoring_mode,
            selection_mode=t.selection_mode,
            normalization_mode=t.normalization_mode,
            participant_count=t.stats.participant_count if t.stats else 0,
        )

    return [_to_brief(t) for t in rows]
|
||||
|
||||
|
||||
@router.get(
    "/{tryout_id}/calibration-status",
    summary="Get calibration status",
    description="Get IRT calibration status for items in this tryout.",
)
async def get_calibration_status(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Get calibration status for items in a tryout.

    Returns statistics on how many items are calibrated and ready for IRT.

    Args:
        tryout_id: Tryout identifier
        db: Database session
        website_id: Website ID from header

    Returns:
        Calibration status summary

    Raises:
        HTTPException: If tryout not found
    """
    # Verify tryout exists
    tryout_result = await db.execute(
        select(Tryout).where(
            Tryout.website_id == website_id,
            Tryout.tryout_id == tryout_id,
        )
    )
    tryout = tryout_result.scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # Aggregate calibration statistics in one query.
    # BUGFIX: the original used func.sum(func.cast(Item.calibrated,
    # type_=func.INTEGER)), which is invalid SQLAlchemy — func.cast is a
    # generic function stub (it takes no type_ kwarg and func.INTEGER is not
    # a type), so the endpoint raised at runtime. The correct construct is
    # sqlalchemy.cast(column, Integer), which counts True flags as 1.
    stats_result = await db.execute(
        select(
            func.count().label("total_items"),
            func.sum(cast(Item.calibrated, Integer)).label("calibrated_items"),
            func.avg(Item.calibration_sample_size).label("avg_sample_size"),
        ).where(
            Item.website_id == website_id,
            Item.tryout_id == tryout_id,
        )
    )
    stats = stats_result.first()

    # SUM/AVG over zero rows yield NULL; coerce to 0.
    total_items = stats.total_items or 0
    calibrated_items = stats.calibrated_items or 0
    calibration_percentage = (calibrated_items / total_items * 100) if total_items > 0 else 0

    return {
        "tryout_id": tryout_id,
        "total_items": total_items,
        "calibrated_items": calibrated_items,
        "calibration_percentage": round(calibration_percentage, 2),
        "avg_sample_size": round(stats.avg_sample_size, 2) if stats.avg_sample_size else 0,
        "min_calibration_sample": tryout.min_calibration_sample,
        # 90% of items calibrated is the readiness threshold for IRT scoring.
        "ready_for_irt": calibration_percentage >= 90,
    }
|
||||
|
||||
|
||||
@router.post(
    "/{tryout_id}/calibrate",
    summary="Trigger IRT calibration",
    description="Trigger IRT calibration for all items in this tryout with sufficient response data.",
)
async def trigger_calibration(
    tryout_id: str,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Run IRT calibration over every item in a tryout.

    Items with at least min_calibration_sample responses get their
    item.irt_b, item.irt_se, and item.calibrated fields updated.

    Args:
        tryout_id: Tryout identifier
        db: Database session
        website_id: Website ID from header

    Returns:
        Calibration results summary

    Raises:
        HTTPException: 404 if the tryout does not exist for this website
    """
    # Imported lazily to avoid a circular import at module load time.
    from app.services.irt_calibration import (
        calibrate_all,
        CALIBRATION_SAMPLE_THRESHOLD,
    )

    tryout_query = select(Tryout).where(
        Tryout.website_id == website_id,
        Tryout.tryout_id == tryout_id,
    )
    tryout = (await db.execute(tryout_query)).scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # Per-tryout sample threshold wins; fall back to the service default.
    result = await calibrate_all(
        tryout_id=tryout_id,
        website_id=website_id,
        db=db,
        min_sample_size=tryout.min_calibration_sample or CALIBRATION_SAMPLE_THRESHOLD,
    )

    return {
        "tryout_id": tryout_id,
        "total_items": result.total_items,
        "calibrated_items": result.calibrated_items,
        "failed_items": result.failed_items,
        "calibration_percentage": round(result.calibration_percentage * 100, 2),
        "ready_for_irt": result.ready_for_irt,
        "message": f"Calibration complete: {result.calibrated_items}/{result.total_items} items calibrated",
    }
|
||||
|
||||
|
||||
@router.post(
    "/{tryout_id}/calibrate/{item_id}",
    summary="Trigger IRT calibration for single item",
    description="Trigger IRT calibration for a specific item.",
)
async def trigger_item_calibration(
    tryout_id: str,
    item_id: int,
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
):
    """
    Run IRT calibration for one item in a tryout.

    Args:
        tryout_id: Tryout identifier
        item_id: Item ID to calibrate
        db: Database session
        website_id: Website ID from header

    Returns:
        Calibration result for the item

    Raises:
        HTTPException: 404 if the tryout or item does not exist
    """
    # Imported lazily to avoid a circular import at module load time.
    from app.services.irt_calibration import calibrate_item, CALIBRATION_SAMPLE_THRESHOLD

    tryout_query = select(Tryout).where(
        Tryout.website_id == website_id,
        Tryout.tryout_id == tryout_id,
    )
    tryout = (await db.execute(tryout_query)).scalar_one_or_none()

    if tryout is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Tryout {tryout_id} not found for website {website_id}",
        )

    # The item must belong to this website AND this tryout.
    item_query = select(Item).where(
        Item.id == item_id,
        Item.website_id == website_id,
        Item.tryout_id == tryout_id,
    )
    item = (await db.execute(item_query)).scalar_one_or_none()

    if item is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Item {item_id} not found in tryout {tryout_id}",
        )

    # Per-tryout sample threshold wins; fall back to the service default.
    result = await calibrate_item(
        item_id=item_id,
        db=db,
        min_sample_size=tryout.min_calibration_sample or CALIBRATION_SAMPLE_THRESHOLD,
    )

    return {
        "item_id": result.item_id,
        "status": result.status.value,
        "irt_b": result.irt_b,
        "irt_se": result.irt_se,
        "sample_size": result.sample_size,
        "message": result.message,
    }
|
||||
384
app/routers/wordpress.py
Normal file
384
app/routers/wordpress.py
Normal file
@@ -0,0 +1,384 @@
|
||||
"""
|
||||
WordPress Integration API Router.
|
||||
|
||||
Endpoints:
|
||||
- POST /wordpress/sync_users: Synchronize users from WordPress
|
||||
- POST /wordpress/verify_session: Verify WordPress session/token
|
||||
- GET /wordpress/website/{website_id}/users: Get all users for a website
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, status
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
from app.models.website import Website
|
||||
from app.schemas.wordpress import (
|
||||
SyncUsersResponse,
|
||||
SyncStatsResponse,
|
||||
UserListResponse,
|
||||
VerifySessionRequest,
|
||||
VerifySessionResponse,
|
||||
WordPressUserResponse,
|
||||
)
|
||||
from app.services.wordpress_auth import (
|
||||
get_wordpress_user,
|
||||
sync_wordpress_users,
|
||||
verify_website_exists,
|
||||
verify_wordpress_token,
|
||||
get_or_create_user,
|
||||
WordPressAPIError,
|
||||
WordPressRateLimitError,
|
||||
WordPressTokenInvalidError,
|
||||
WebsiteNotFoundError,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/wordpress", tags=["wordpress"])
|
||||
|
||||
|
||||
def get_website_id_from_header(
    x_website_id: Optional[str] = Header(None, alias="X-Website-ID"),
) -> int:
    """
    Dependency that parses the required X-Website-ID header into an int.

    Args:
        x_website_id: Raw header value, or None when the header is absent.

    Returns:
        The website ID as an integer.

    Raises:
        HTTPException: 400 when the header is missing or not numeric.
    """
    if x_website_id is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID header is required",
        )
    try:
        result = int(x_website_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="X-Website-ID must be a valid integer",
        )
    return result
|
||||
|
||||
|
||||
async def get_valid_website(
    website_id: int,
    db: AsyncSession,
) -> Website:
    """
    Resolve a website_id to its Website row, translating the service-layer
    "not found" error into an HTTP 404.

    Args:
        website_id: Website identifier
        db: Database session

    Returns:
        Website model instance

    Raises:
        HTTPException: 404 when the website does not exist
    """
    try:
        website = await verify_website_exists(website_id, db)
    except WebsiteNotFoundError:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Website {website_id} not found",
        )
    return website
|
||||
|
||||
|
||||
def _extract_bearer_token(authorization: Optional[str]) -> str:
    """
    Extract the token from a "Bearer {token}" Authorization header value.

    Args:
        authorization: Raw Authorization header value, or None if absent

    Returns:
        The bearer token string

    Raises:
        HTTPException: 401 if the header is missing or malformed
    """
    if authorization is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Authorization header is required",
        )
    parts = authorization.split()
    if len(parts) != 2 or parts[0].lower() != "bearer":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid Authorization header format. Use: Bearer {token}",
        )
    return parts[1]


@router.post(
    "/sync_users",
    response_model=SyncUsersResponse,
    summary="Synchronize users from WordPress",
    description="Fetch all users from WordPress API and sync to local database. Requires admin WordPress token.",
)
async def sync_users_endpoint(
    db: AsyncSession = Depends(get_db),
    website_id: int = Depends(get_website_id_from_header),
    authorization: Optional[str] = Header(None, alias="Authorization"),
) -> SyncUsersResponse:
    """
    Synchronize users from WordPress to local database.

    Process:
        1. Validate website_id exists
        2. Extract admin token from Authorization header
        3. Fetch all users from WordPress API
        4. Upsert: update existing users, insert new users
        5. Return sync statistics

    Args:
        db: Database session
        website_id: Website ID from header
        authorization: Authorization header with Bearer token

    Returns:
        SyncUsersResponse with sync statistics

    Raises:
        HTTPException: If website not found, token invalid, rate limited,
            or the WordPress API is unavailable
    """
    # 404 early if the website is unknown.
    await get_valid_website(website_id, db)

    # 401 if the Authorization header is missing or malformed.
    admin_token = _extract_bearer_token(authorization)

    try:
        sync_stats = await sync_wordpress_users(
            website_id=website_id,
            admin_token=admin_token,
            db=db,
        )

        return SyncUsersResponse(
            synced=SyncStatsResponse(
                inserted=sync_stats.inserted,
                updated=sync_stats.updated,
                total=sync_stats.total,
                errors=sync_stats.errors,
            ),
            website_id=website_id,
            message=f"Sync completed: {sync_stats.inserted} inserted, {sync_stats.updated} updated",
        )

    # Each domain error is re-raised as the matching HTTP status, chained
    # with `from e` so the root cause survives in logs (flake8-bugbear B904).
    except WordPressTokenInvalidError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=str(e),
        ) from e
    except WordPressRateLimitError as e:
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail=str(e),
        ) from e
    except WordPressAPIError as e:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=str(e),
        ) from e
    except WebsiteNotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
||||
|
||||
|
||||
@router.post(
    "/verify_session",
    response_model=VerifySessionResponse,
    summary="Verify WordPress session",
    description="Verify WordPress JWT token and user identity.",
)
async def verify_session_endpoint(
    request: VerifySessionRequest,
    db: AsyncSession = Depends(get_db),
) -> VerifySessionResponse:
    """
    Verify WordPress session/token.

    Process:
        1. Validate website_id exists
        2. Call WordPress API to verify token
        3. Verify wp_user_id matches token owner
        4. Get or create local user
        5. Return validation result

    Args:
        request: VerifySessionRequest with wp_user_id, token, website_id
        db: Database session

    Returns:
        VerifySessionResponse with validation result. Invalid credentials
        produce valid=False rather than an HTTP error.

    Raises:
        HTTPException: If website not found, rate limited, or API error
    """
    # 404 early if the website is unknown.
    await get_valid_website(request.website_id, db)

    try:
        # Verify token with WordPress.
        wp_user_info = await verify_wordpress_token(
            token=request.token,
            website_id=request.website_id,
            wp_user_id=request.wp_user_id,
            db=db,
        )

        # The service returns None when the token does not belong to the
        # claimed wp_user_id (or credentials otherwise do not match).
        if wp_user_info is None:
            return VerifySessionResponse(
                valid=False,
                error="User ID mismatch or invalid credentials",
            )

        # Get or create local user record for this WordPress identity.
        user = await get_or_create_user(
            wp_user_id=request.wp_user_id,
            website_id=request.website_id,
            db=db,
        )

        return VerifySessionResponse(
            valid=True,
            user=WordPressUserResponse.model_validate(user),
            wp_user_info={
                "username": wp_user_info.username,
                "email": wp_user_info.email,
                "display_name": wp_user_info.display_name,
                "roles": wp_user_info.roles,
            },
        )

    except WordPressTokenInvalidError as e:
        # An invalid token is an expected outcome for this endpoint, so it
        # is reported in the response body rather than as an HTTP error.
        return VerifySessionResponse(
            valid=False,
            error=f"Invalid credentials: {str(e)}",
        )
    # Infrastructure errors are re-raised as HTTP errors, chained with
    # `from e` to preserve the root cause (flake8-bugbear B904).
    except WordPressRateLimitError as e:
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail=str(e),
        ) from e
    except WordPressAPIError as e:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=str(e),
        ) from e
    except WebsiteNotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
|
||||
|
||||
|
||||
@router.get(
    "/website/{website_id}/users",
    response_model=UserListResponse,
    summary="Get users for website",
    description="Retrieve all users for a specific website from local database with pagination.",
)
async def get_website_users(
    website_id: int,
    db: AsyncSession = Depends(get_db),
    page: int = 1,
    page_size: int = 50,
) -> UserListResponse:
    """
    Get all users for a website, paginated.

    Args:
        website_id: Website identifier
        db: Database session
        page: Page number, 1-based (values below 1 are clamped to 1)
        page_size: Users per page (clamped to the range 1..100)

    Returns:
        UserListResponse with the requested page of users plus totals

    Raises:
        HTTPException: 404 if the website does not exist
    """
    # 404 early if the website is unknown.
    await get_valid_website(website_id, db)

    # Normalize pagination inputs: page >= 1, 1 <= page_size <= 100.
    if page < 1:
        page = 1
    if page_size < 1:
        page_size = 1
    elif page_size > 100:
        page_size = 100

    # Total matching rows, for the pagination metadata.
    count_row = await db.execute(
        select(func.count()).select_from(User).where(User.website_id == website_id)
    )
    total = count_row.scalar() or 0

    # Ceiling division; an empty result set still reports one (empty) page.
    total_pages = (total + page_size - 1) // page_size if total > 0 else 1
    skip = (page - 1) * page_size

    rows = await db.execute(
        select(User)
        .where(User.website_id == website_id)
        .order_by(User.id)
        .offset(skip)
        .limit(page_size)
    )
    matched = rows.scalars().all()

    return UserListResponse(
        users=[WordPressUserResponse.model_validate(u) for u in matched],
        total=total,
        page=page,
        page_size=page_size,
        total_pages=total_pages,
    )
||||
|
||||
|
||||
@router.get(
    "/website/{website_id}/user/{wp_user_id}",
    response_model=WordPressUserResponse,
    summary="Get specific user",
    description="Retrieve a specific user by WordPress user ID.",
)
async def get_user_endpoint(
    website_id: int,
    wp_user_id: str,
    db: AsyncSession = Depends(get_db),
) -> WordPressUserResponse:
    """
    Get a specific user by WordPress user ID.

    Args:
        website_id: Website identifier
        wp_user_id: WordPress user ID
        db: Database session

    Returns:
        WordPressUserResponse with user data

    Raises:
        HTTPException: 404 if the website or the user is not found
    """
    # 404 early if the website itself does not exist.
    await get_valid_website(website_id, db)

    found = await get_wordpress_user(
        wp_user_id=wp_user_id,
        website_id=website_id,
        db=db,
    )

    if found is not None:
        return WordPressUserResponse.model_validate(found)

    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"User {wp_user_id} not found for website {website_id}",
    )
|
||||
Reference in New Issue
Block a user