Add basis workspace filters, stale-on-reimport, and variant usage metrics
This commit is contained in:
118
alembic/versions/20260404_000003_ai_runs_and_variant_status.py
Normal file
118
alembic/versions/20260404_000003_ai_runs_and_variant_status.py
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
"""add ai generation runs and item variant lifecycle fields
|
||||||
|
|
||||||
|
Revision ID: 20260404_000003
|
||||||
|
Revises: 20260402_000002
|
||||||
|
Create Date: 2026-04-04 10:10:00
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
revision: str = "20260404_000003"
|
||||||
|
down_revision: Union[str, None] = "20260402_000002"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Apply the migration.

    Creates the ``ai_generation_runs`` audit table, adds AI-variant
    provenance/lifecycle columns to ``items``, rebuilds the slot index as
    non-unique (so several variants may share a slot), and finally removes
    the temporary server default from ``variant_status``.
    """
    # Audit table: one row per admin-triggered generation request.
    op.create_table(
        "ai_generation_runs",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("basis_item_id", sa.Integer(), nullable=False),
        sa.Column("source_snapshot_question_id", sa.Integer(), nullable=True),
        sa.Column("target_level", sa.String(length=50), nullable=False),
        sa.Column("requested_count", sa.Integer(), nullable=False, server_default="1"),
        sa.Column("model", sa.String(length=255), nullable=False),
        sa.Column("prompt_version", sa.String(length=50), nullable=False, server_default="v1"),
        sa.Column("operator_notes", sa.Text(), nullable=True),
        sa.Column("created_by", sa.String(length=255), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.ForeignKeyConstraint(["basis_item_id"], ["items.id"], ondelete="CASCADE", onupdate="CASCADE"),
        sa.ForeignKeyConstraint(
            ["source_snapshot_question_id"],
            ["tryout_snapshot_questions.id"],
            ondelete="SET NULL",
            onupdate="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    for index_name, column_name in (
        ("ix_ai_generation_runs_basis_item_id", "basis_item_id"),
        ("ix_ai_generation_runs_source_snapshot_question_id", "source_snapshot_question_id"),
    ):
        op.create_index(index_name, "ai_generation_runs", [column_name], unique=False)

    # New provenance / review-lifecycle columns on items. ``variant_status``
    # gets a temporary server default so existing rows backfill to "active".
    new_item_columns = (
        sa.Column("generation_run_id", sa.Integer(), nullable=True),
        sa.Column("source_snapshot_question_id", sa.Integer(), nullable=True),
        sa.Column("variant_status", sa.String(length=50), nullable=False, server_default="active"),
        sa.Column("reviewed_by", sa.String(length=255), nullable=True),
        sa.Column("reviewed_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("review_notes", sa.Text(), nullable=True),
    )
    for column in new_item_columns:
        op.add_column("items", column)

    for fk_name, remote_table, local_column in (
        ("fk_items_generation_run_id", "ai_generation_runs", "generation_run_id"),
        ("fk_items_source_snapshot_question_id", "tryout_snapshot_questions", "source_snapshot_question_id"),
    ):
        op.create_foreign_key(
            fk_name,
            "items",
            remote_table,
            [local_column],
            ["id"],
            ondelete="SET NULL",
            onupdate="CASCADE",
        )
    op.create_index("ix_items_generation_run_id", "items", ["generation_run_id"], unique=False)
    op.create_index(
        "ix_items_source_snapshot_question_id",
        "items",
        ["source_snapshot_question_id"],
        unique=False,
    )
    op.create_index("ix_items_variant_status", "items", ["variant_status"], unique=False)

    # Relax the slot index: multiple AI variants may now occupy one slot,
    # so the previous uniqueness guarantee no longer holds.
    op.drop_index("ix_items_tryout_id_website_id_slot", table_name="items")
    op.create_index(
        "ix_items_tryout_id_website_id_slot",
        "items",
        ["tryout_id", "website_id", "slot", "level"],
        unique=False,
    )

    # Backfill is done; require application code to set variant_status.
    op.alter_column("items", "variant_status", server_default=None)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the migration.

    Restores the unique slot index on ``items``, removes the
    provenance/lifecycle columns (with their indexes and foreign keys),
    and drops the ``ai_generation_runs`` table.

    NOTE(review): restoring ``unique=True`` will fail if variant rows
    created while the index was non-unique still share a slot; prune
    duplicates before downgrading.
    """
    op.drop_index("ix_items_tryout_id_website_id_slot", table_name="items")
    op.create_index(
        "ix_items_tryout_id_website_id_slot",
        "items",
        ["tryout_id", "website_id", "slot", "level"],
        unique=True,
    )

    for index_name in (
        "ix_items_variant_status",
        "ix_items_source_snapshot_question_id",
        "ix_items_generation_run_id",
    ):
        op.drop_index(index_name, table_name="items")
    for fk_name in (
        "fk_items_source_snapshot_question_id",
        "fk_items_generation_run_id",
    ):
        op.drop_constraint(fk_name, "items", type_="foreignkey")
    for column_name in (
        "review_notes",
        "reviewed_at",
        "reviewed_by",
        "variant_status",
        "source_snapshot_question_id",
        "generation_run_id",
    ):
        op.drop_column("items", column_name)

    op.drop_index("ix_ai_generation_runs_source_snapshot_question_id", table_name="ai_generation_runs")
    op.drop_index("ix_ai_generation_runs_basis_item_id", table_name="ai_generation_runs")
    op.drop_table("ai_generation_runs")
|
||||||
924
app/admin_web.py
924
app/admin_web.py
File diff suppressed because it is too large
Load Diff
@@ -5,6 +5,7 @@ Exports all SQLAlchemy ORM models for use in the application.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from app.database import Base
|
from app.database import Base
|
||||||
|
from app.models.ai_generation_run import AIGenerationRun
|
||||||
from app.models.item import Item
|
from app.models.item import Item
|
||||||
from app.models.session import Session
|
from app.models.session import Session
|
||||||
from app.models.tryout import Tryout
|
from app.models.tryout import Tryout
|
||||||
@@ -17,6 +18,7 @@ from app.models.website import Website
|
|||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"Base",
|
"Base",
|
||||||
|
"AIGenerationRun",
|
||||||
"User",
|
"User",
|
||||||
"Website",
|
"Website",
|
||||||
"Tryout",
|
"Tryout",
|
||||||
|
|||||||
72
app/models/ai_generation_run.py
Normal file
72
app/models/ai_generation_run.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
"""
|
||||||
|
AI generation run model.
|
||||||
|
|
||||||
|
Represents one admin generation request that can produce one or many variants.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from sqlalchemy import DateTime, ForeignKey, Integer, String, Text, func
|
||||||
|
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||||
|
|
||||||
|
from app.database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class AIGenerationRun(Base):
    """One admin-initiated AI generation request.

    A single run records the basis item, the requested difficulty and
    count, and the model/prompt used; it may produce many Item variants
    (see ``generated_items``).
    """

    __tablename__ = "ai_generation_runs"

    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)

    # Provenance: the item this run was derived from (required) and the
    # imported snapshot question it traces back to (optional).
    basis_item_id: Mapped[int] = mapped_column(
        ForeignKey("items.id", ondelete="CASCADE", onupdate="CASCADE"),
        nullable=False,
        index=True,
        comment="Basis item ID",
    )
    source_snapshot_question_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("tryout_snapshot_questions.id", ondelete="SET NULL", onupdate="CASCADE"),
        nullable=True,
        index=True,
        comment="Source snapshot question ID",
    )

    # Request parameters chosen by the operator.
    target_level: Mapped[str] = mapped_column(
        String(50), nullable=False, comment="Target level (mudah/sulit)"
    )
    requested_count: Mapped[int] = mapped_column(
        Integer, nullable=False, default=1, comment="Requested output count"
    )
    model: Mapped[str] = mapped_column(
        String(255), nullable=False, comment="Model identifier"
    )
    prompt_version: Mapped[str] = mapped_column(
        String(50), nullable=False, default="v1", comment="Prompt template version"
    )
    operator_notes: Mapped[Optional[str]] = mapped_column(
        Text, nullable=True, comment="Optional admin notes"
    )

    # Audit fields.
    created_by: Mapped[str] = mapped_column(
        String(255), nullable=False, comment="Admin username"
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False, server_default=func.now()
    )

    # Items produced by this run; mirrored by Item.generation_run.
    generated_items: Mapped[list["Item"]] = relationship(
        "Item",
        back_populates="generation_run",
        lazy="selectin",
    )
|
||||||
@@ -155,6 +155,39 @@ class Item(Base):
|
|||||||
nullable=True,
|
nullable=True,
|
||||||
comment="Original item ID (for AI variants)",
|
comment="Original item ID (for AI variants)",
|
||||||
)
|
)
|
||||||
|
generation_run_id: Mapped[Union[int, None]] = mapped_column(
|
||||||
|
ForeignKey("ai_generation_runs.id", ondelete="SET NULL", onupdate="CASCADE"),
|
||||||
|
nullable=True,
|
||||||
|
index=True,
|
||||||
|
comment="AI generation run ID",
|
||||||
|
)
|
||||||
|
source_snapshot_question_id: Mapped[Union[int, None]] = mapped_column(
|
||||||
|
ForeignKey("tryout_snapshot_questions.id", ondelete="SET NULL", onupdate="CASCADE"),
|
||||||
|
nullable=True,
|
||||||
|
index=True,
|
||||||
|
comment="Source snapshot question ID",
|
||||||
|
)
|
||||||
|
variant_status: Mapped[str] = mapped_column(
|
||||||
|
String(50),
|
||||||
|
nullable=False,
|
||||||
|
default="active",
|
||||||
|
comment="Lifecycle status (active/draft/approved/rejected/archived/stale)",
|
||||||
|
)
|
||||||
|
reviewed_by: Mapped[Union[str, None]] = mapped_column(
|
||||||
|
String(255),
|
||||||
|
nullable=True,
|
||||||
|
comment="Reviewer username",
|
||||||
|
)
|
||||||
|
reviewed_at: Mapped[Union[datetime, None]] = mapped_column(
|
||||||
|
DateTime(timezone=True),
|
||||||
|
nullable=True,
|
||||||
|
comment="Review timestamp",
|
||||||
|
)
|
||||||
|
review_notes: Mapped[Union[str, None]] = mapped_column(
|
||||||
|
Text,
|
||||||
|
nullable=True,
|
||||||
|
comment="Review notes",
|
||||||
|
)
|
||||||
|
|
||||||
# Timestamps
|
# Timestamps
|
||||||
created_at: Mapped[datetime] = mapped_column(
|
created_at: Mapped[datetime] = mapped_column(
|
||||||
@@ -187,6 +220,11 @@ class Item(Base):
|
|||||||
lazy="selectin",
|
lazy="selectin",
|
||||||
cascade="all, delete-orphan",
|
cascade="all, delete-orphan",
|
||||||
)
|
)
|
||||||
|
generation_run: Mapped[Union["AIGenerationRun", None]] = relationship(
|
||||||
|
"AIGenerationRun",
|
||||||
|
back_populates="generated_items",
|
||||||
|
lazy="selectin",
|
||||||
|
)
|
||||||
|
|
||||||
# Constraints and indexes
|
# Constraints and indexes
|
||||||
__table_args__ = (
|
__table_args__ = (
|
||||||
@@ -203,10 +241,11 @@ class Item(Base):
|
|||||||
"website_id",
|
"website_id",
|
||||||
"slot",
|
"slot",
|
||||||
"level",
|
"level",
|
||||||
unique=True,
|
unique=False,
|
||||||
),
|
),
|
||||||
Index("ix_items_calibrated", "calibrated"),
|
Index("ix_items_calibrated", "calibrated"),
|
||||||
Index("ix_items_basis_item_id", "basis_item_id"),
|
Index("ix_items_basis_item_id", "basis_item_id"),
|
||||||
|
Index("ix_items_variant_status", "variant_status"),
|
||||||
# IRT b parameter constraint [-3, +3]
|
# IRT b parameter constraint [-3, +3]
|
||||||
CheckConstraint(
|
CheckConstraint(
|
||||||
"irt_b IS NULL OR (irt_b >= -3 AND irt_b <= 3)",
|
"irt_b IS NULL OR (irt_b >= -3 AND irt_b <= 3)",
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
|
|||||||
|
|
||||||
from app.core.config import get_settings
|
from app.core.config import get_settings
|
||||||
from app.models.item import Item
|
from app.models.item import Item
|
||||||
|
from app.models.ai_generation_run import AIGenerationRun
|
||||||
from app.models.tryout import Tryout
|
from app.models.tryout import Tryout
|
||||||
from app.models.user_answer import UserAnswer
|
from app.models.user_answer import UserAnswer
|
||||||
from app.schemas.ai import GeneratedQuestion
|
from app.schemas.ai import GeneratedQuestion
|
||||||
@@ -493,6 +494,9 @@ async def save_ai_question(
|
|||||||
level: Literal["mudah", "sedang", "sulit"],
|
level: Literal["mudah", "sedang", "sulit"],
|
||||||
ai_model: str,
|
ai_model: str,
|
||||||
db: AsyncSession,
|
db: AsyncSession,
|
||||||
|
generation_run_id: int | None = None,
|
||||||
|
source_snapshot_question_id: int | None = None,
|
||||||
|
variant_status: str = "draft",
|
||||||
) -> Optional[int]:
|
) -> Optional[int]:
|
||||||
"""
|
"""
|
||||||
Save AI-generated question to database.
|
Save AI-generated question to database.
|
||||||
@@ -523,6 +527,9 @@ async def save_ai_question(
|
|||||||
generated_by="ai",
|
generated_by="ai",
|
||||||
ai_model=ai_model,
|
ai_model=ai_model,
|
||||||
basis_item_id=basis_item_id,
|
basis_item_id=basis_item_id,
|
||||||
|
generation_run_id=generation_run_id,
|
||||||
|
source_snapshot_question_id=source_snapshot_question_id,
|
||||||
|
variant_status=variant_status,
|
||||||
calibrated=False,
|
calibrated=False,
|
||||||
ctt_p=None,
|
ctt_p=None,
|
||||||
ctt_bobot=None,
|
ctt_bobot=None,
|
||||||
@@ -547,6 +554,50 @@ async def save_ai_question(
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
async def create_generation_run(
    basis_item_id: int,
    target_level: Literal["mudah", "sulit"],
    requested_count: int,
    model: str,
    created_by: str,
    db: AsyncSession,
    source_snapshot_question_id: int | None = None,
    operator_notes: str | None = None,
    prompt_version: str = "v1",
) -> int:
    """Persist an ``AIGenerationRun`` row and return its primary key.

    The session is flushed (not committed) so the generated ID is
    available inside the caller's transaction.
    """
    run_fields = dict(
        basis_item_id=basis_item_id,
        source_snapshot_question_id=source_snapshot_question_id,
        target_level=target_level,
        requested_count=requested_count,
        model=model,
        prompt_version=prompt_version,
        operator_notes=operator_notes,
        created_by=created_by,
    )
    run = AIGenerationRun(**run_fields)
    db.add(run)
    await db.flush()
    return int(run.id)
|
||||||
|
|
||||||
|
|
||||||
|
async def generate_questions_batch(
    basis_item: Item,
    target_level: Literal["mudah", "sulit"],
    ai_model: str,
    count: int,
) -> list[GeneratedQuestion]:
    """Generate up to ``count`` question variants sequentially.

    Each attempt calls ``generate_question`` once; attempts that return
    ``None`` are skipped, so the result may hold fewer than ``count``
    entries.
    """
    results: list[GeneratedQuestion] = []
    attempts_left = count
    while attempts_left > 0:
        attempts_left -= 1
        candidate = await generate_question(
            basis_item=basis_item,
            target_level=target_level,
            ai_model=ai_model,
        )
        if candidate is not None:
            results.append(candidate)
    return results
|
||||||
|
|
||||||
|
|
||||||
async def get_ai_stats(db: AsyncSession) -> Dict[str, Any]:
|
async def get_ai_stats(db: AsyncSession) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
Get AI generation statistics.
|
Get AI generation statistics.
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ from typing import Any
|
|||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
from app.models import TryoutImportSnapshot, TryoutSnapshotQuestion, Website
|
from app.models import Item, TryoutImportSnapshot, TryoutSnapshotQuestion, Website
|
||||||
|
|
||||||
SOURCE_FORMAT = "sejoli_json"
|
SOURCE_FORMAT = "sejoli_json"
|
||||||
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
|
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
|
||||||
@@ -292,7 +292,8 @@ async def import_tryout_json_snapshot(payload: dict[str, Any], website_id: int,
|
|||||||
new_questions += 1
|
new_questions += 1
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if existing.content_checksum != question["content_checksum"]:
|
content_changed = existing.content_checksum != question["content_checksum"]
|
||||||
|
if content_changed:
|
||||||
existing.question_title = question["title"] or question["question"]
|
existing.question_title = question["title"] or question["question"]
|
||||||
existing.question_html = question["question"]
|
existing.question_html = question["question"]
|
||||||
existing.explanation_html = question["explanation"]
|
existing.explanation_html = question["explanation"]
|
||||||
@@ -313,6 +314,18 @@ async def import_tryout_json_snapshot(payload: dict[str, Any], website_id: int,
|
|||||||
existing.is_active = True
|
existing.is_active = True
|
||||||
existing.last_seen_at = now
|
existing.last_seen_at = now
|
||||||
|
|
||||||
|
# If source content changed, mark AI children derived from this source as stale.
|
||||||
|
if content_changed:
|
||||||
|
stale_variants_result = await db.execute(
|
||||||
|
select(Item).where(
|
||||||
|
Item.generated_by == "ai",
|
||||||
|
Item.source_snapshot_question_id == existing.id,
|
||||||
|
Item.variant_status.in_(["draft", "approved", "active"]),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for variant in stale_variants_result.scalars().all():
|
||||||
|
variant.variant_status = "stale"
|
||||||
|
|
||||||
removed_questions = 0
|
removed_questions = 0
|
||||||
for source_question_id, existing in existing_questions.items():
|
for source_question_id, existing in existing_questions.items():
|
||||||
if existing.is_active and source_question_id not in incoming_ids:
|
if existing.is_active and source_question_id not in incoming_ids:
|
||||||
@@ -320,6 +333,15 @@ async def import_tryout_json_snapshot(payload: dict[str, Any], website_id: int,
|
|||||||
existing.latest_snapshot_id = snapshot.id
|
existing.latest_snapshot_id = snapshot.id
|
||||||
existing.last_seen_at = now
|
existing.last_seen_at = now
|
||||||
removed_questions += 1
|
removed_questions += 1
|
||||||
|
stale_removed_result = await db.execute(
|
||||||
|
select(Item).where(
|
||||||
|
Item.generated_by == "ai",
|
||||||
|
Item.source_snapshot_question_id == existing.id,
|
||||||
|
Item.variant_status.in_(["draft", "approved", "active"]),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for variant in stale_removed_result.scalars().all():
|
||||||
|
variant.variant_status = "stale"
|
||||||
|
|
||||||
imported_tryouts.append(
|
imported_tryouts.append(
|
||||||
{
|
{
|
||||||
|
|||||||
Reference in New Issue
Block a user