# File: ss-tools/backend/src/models/task.py
# (116 lines, 3.7 KiB, Python — file-listing metadata commented out so the module parses)
# [DEF:TaskModels:Module]
#
# @COMPLEXITY: 1
# @SEMANTICS: database, task, record, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for task execution records.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> sqlalchemy
#
# @INVARIANT: TaskRecord uses a UUID-string primary key; TaskLogRecord uses an autoincrement integer primary key.
# [SECTION: IMPORTS]
from sqlalchemy import Column, String, DateTime, JSON, ForeignKey, Text, Integer, Index
from sqlalchemy.sql import func
from .mapping import Base
import uuid
# [/SECTION]
# [DEF:TaskRecord:Class]
# @COMPLEXITY: 1
# @PURPOSE: Represents a persistent record of a task execution.
class TaskRecord(Base):
    """Persistent record of a single task execution.

    One row per task run; the lifecycle timestamps and outcome fields
    (status, error, result) are filled in as the task progresses.
    """

    __tablename__ = "task_records"

    # UUID string primary key; the lambda defers uuid4() so each insert gets a fresh id.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    # Task category, e.g. "backup", "migration".
    type = Column(String, nullable=False)
    # Lifecycle state: "PENDING", "RUNNING", "SUCCESS", "FAILED".
    status = Column(String, nullable=False)
    # Optional link to the environment the task ran against (FK -> environments.id).
    environment_id = Column(String, ForeignKey("environments.id"), nullable=True)
    # Execution window; both stay NULL until the task actually starts / finishes.
    started_at = Column(DateTime(timezone=True), nullable=True)
    finished_at = Column(DateTime(timezone=True), nullable=True)
    # Structured logs as JSON (legacy, kept for backward compatibility — see TaskLogRecord).
    logs = Column(JSON, nullable=True)
    # Error text, if any (presumably populated when status is "FAILED" — confirm against task runner).
    error = Column(String, nullable=True)
    # Arbitrary JSON result payload produced by the task.
    result = Column(JSON, nullable=True)
    # Row creation time, set server-side by the database (now()).
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    # JSON parameters the task was invoked with.
    params = Column(JSON, nullable=True)
# [/DEF:TaskRecord:Class]
# [DEF:TaskLogRecord:Class]
# @PURPOSE: Represents a single persistent log entry for a task.
# @COMPLEXITY: 3
# @RELATION: DEPENDS_ON -> TaskRecord
# @INVARIANT: Each log entry belongs to exactly one task.
#
# @TEST_CONTRACT: TaskLogCreate ->
# {
# required_fields: {
# task_id: str,
# timestamp: datetime,
# level: str,
# source: str,
# message: str
# },
# optional_fields: {
# metadata_json: str,
# id: int
# },
# invariants: [
# "task_id matches an existing TaskRecord.id"
# ]
# }
#
# @TEST_FIXTURE: basic_info_log ->
# {
# task_id: "00000000-0000-0000-0000-000000000000",
# timestamp: "2026-02-26T12:00:00Z",
# level: "INFO",
# source: "system",
# message: "Task initialization complete"
# }
#
# @TEST_EDGE: missing_required_field ->
# {
# timestamp: "2026-02-26T12:00:00Z",
# level: "ERROR",
# source: "system",
# message: "Missing task_id"
# }
#
# @TEST_EDGE: invalid_type ->
# {
# task_id: "00000000-0000-0000-0000-000000000000",
# timestamp: "2026-02-26T12:00:00Z",
# level: 500,
# source: "system",
# message: "Integer level"
# }
#
# @TEST_EDGE: empty_message ->
# {
# task_id: "00000000-0000-0000-0000-000000000000",
# timestamp: "2026-02-26T12:00:00Z",
# level: "DEBUG",
# source: "system",
# message: ""
# }
#
# @TEST_INVARIANT: exact_one_task_association -> verifies: [basic_info_log, missing_required_field]
class TaskLogRecord(Base):
    """Single persistent log entry; each row belongs to exactly one TaskRecord."""

    __tablename__ = "task_logs"

    # Autoincrement integer PK (NOTE: deviates from the module's UUID-string PK convention).
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning task (ON DELETE CASCADE at the FK level; NOTE(review): SQLite enforces
    # foreign keys only when the foreign_keys pragma is enabled — confirm in engine setup).
    task_id = Column(String, ForeignKey("task_records.id", ondelete="CASCADE"), nullable=False, index=True)
    # Event time (timezone-aware); single-column index for time-range queries.
    timestamp = Column(DateTime(timezone=True), nullable=False, index=True)
    # Severity label: INFO, WARNING, ERROR, DEBUG.
    level = Column(String(16), nullable=False)
    # Log producer (plugin, superset_api, git, etc.); defaults to "system".
    source = Column(String(64), nullable=False, default="system")
    # Log message body.
    message = Column(Text, nullable=False)
    # Optional JSON-encoded string carrying additional metadata.
    metadata_json = Column(Text, nullable=True)

    # Composite indexes for efficient per-task filtering by time, level, and source.
    __table_args__ = (
        Index('ix_task_logs_task_timestamp', 'task_id', 'timestamp'),
        Index('ix_task_logs_task_level', 'task_id', 'level'),
        Index('ix_task_logs_task_source', 'task_id', 'source'),
    )
# [/DEF:TaskLogRecord:Class]
# [/DEF:TaskModels:Module]