refactor(semantics): migrate legacy @TIER to @COMPLEXITY annotations

- Replaced @TIER: TRIVIAL with @COMPLEXITY: 1
- Replaced @TIER: STANDARD with @COMPLEXITY: 3
- Replaced @TIER: CRITICAL with @COMPLEXITY: 5
- Manually elevated specific critical/complex components to levels 2 and 4
- Ignored legacy, specs, and node_modules directories
- Updated generated semantic map
This commit is contained in: (branch list not captured)
Date:   2026-03-16 10:06:44 +03:00
Parent: 321e0eb2db
Commit: 274510fc38
321 changed files with 30,101 additions and 58,483 deletions

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.auth:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, auth, routes, login, logout
# @PURPOSE: Authentication API endpoints.
# @LAYER: API
@@ -24,13 +24,13 @@ import starlette.requests
# [/SECTION]
# [DEF:router:Variable]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: APIRouter instance for authentication routes.
router = APIRouter(prefix="/api/auth", tags=["auth"])
# [/DEF:router:Variable]
# [DEF:login_for_access_token:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Authenticates a user and returns a JWT access token.
# @PRE: form_data contains username and password.
# @POST: Returns a Token object on success.
@@ -58,7 +58,7 @@ async def login_for_access_token(
# [/DEF:login_for_access_token:Function]
# [DEF:read_users_me:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves the profile of the currently authenticated user.
# @PRE: Valid JWT token provided.
# @POST: Returns the current user's data.
@@ -71,7 +71,7 @@ async def read_users_me(current_user: UserSchema = Depends(get_current_user)):
# [/DEF:read_users_me:Function]
# [DEF:logout:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Logs out the current user (placeholder for session revocation).
# @PRE: Valid JWT token provided.
# @POST: Returns success message.
@@ -85,7 +85,7 @@ async def logout(current_user: UserSchema = Depends(get_current_user)):
# [/DEF:logout:Function]
# [DEF:login_adfs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Initiates the ADFS OIDC login flow.
# @POST: Redirects the user to ADFS.
@router.get("/login/adfs")
@@ -101,7 +101,7 @@ async def login_adfs(request: starlette.requests.Request):
# [/DEF:login_adfs:Function]
# [DEF:auth_callback_adfs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Handles the callback from ADFS after successful authentication.
# @POST: Provisions user JIT and returns session token.
@router.get("/callback/adfs", name="auth_callback_adfs")

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__init__:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: routes, lazy-import, module-registry
# @PURPOSE: Provide lazy route module loading to avoid heavyweight imports during tests.
# @LAYER: API
@@ -10,7 +10,7 @@ __all__ = ['plugins', 'tasks', 'settings', 'connections', 'environments', 'mappi
# [DEF:__getattr__:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Lazily import route module by attribute name.
# @PRE: name is module candidate exposed in __all__.
# @POST: Returns imported submodule or raises AttributeError.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_assistant_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, assistant, api, confirmation, status
# @PURPOSE: Validate assistant API endpoint logic via direct async handler invocation.
# @LAYER: UI (API Tests)
@@ -26,7 +26,7 @@ from src.models.assistant import (
# [DEF:_run_async:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Execute async endpoint handler in synchronous test context.
# @PRE: coroutine is awaitable endpoint invocation.
# @POST: Returns coroutine result or raises propagated exception.
@@ -36,7 +36,7 @@ def _run_async(coroutine):
# [/DEF:_run_async:Function]
# [DEF:_FakeTask:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Lightweight task stub used by assistant API tests.
class _FakeTask:
def __init__(self, task_id: str, status: str = "RUNNING", user_id: str = "u-admin"):
@@ -47,7 +47,7 @@ class _FakeTask:
# [/DEF:_FakeTask:Class]
# [DEF:_FakeTaskManager:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Minimal async-compatible TaskManager fixture for deterministic test flows.
class _FakeTaskManager:
def __init__(self):
@@ -71,7 +71,7 @@ class _FakeTaskManager:
# [/DEF:_FakeTaskManager:Class]
# [DEF:_FakeConfigManager:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Environment config fixture with dev/prod aliases for parser tests.
class _FakeConfigManager:
def get_environments(self):
@@ -87,7 +87,7 @@ class _FakeConfigManager:
)
# [/DEF:_FakeConfigManager:Class]
# [DEF:_admin_user:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build admin principal fixture.
# @PRE: Test harness requires authenticated admin-like principal object.
# @POST: Returns user stub with Admin role.
@@ -98,7 +98,7 @@ def _admin_user():
# [/DEF:_admin_user:Function]
# [DEF:_limited_user:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build non-admin principal fixture.
# @PRE: Test harness requires restricted principal for deny scenarios.
# @POST: Returns user stub without admin privileges.
@@ -109,7 +109,7 @@ def _limited_user():
# [/DEF:_limited_user:Function]
# [DEF:_FakeQuery:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Minimal chainable query object for fake SQLAlchemy-like DB behavior in tests.
class _FakeQuery:
def __init__(self, rows):
@@ -141,7 +141,7 @@ class _FakeQuery:
# [/DEF:_FakeQuery:Class]
# [DEF:_FakeDb:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: In-memory fake database implementing subset of Session interface used by assistant routes.
class _FakeDb:
def __init__(self):
@@ -191,7 +191,7 @@ class _FakeDb:
# [/DEF:_FakeDb:Class]
# [DEF:_clear_assistant_state:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Reset in-memory assistant registries for isolation between tests.
# @PRE: Assistant module globals may contain residues from previous test runs.
# @POST: In-memory conversation/confirmation/audit dictionaries are empty.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_assistant_authz:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, assistant, authz, confirmation, rbac
# @PURPOSE: Verify assistant confirmation ownership, expiration, and deny behavior for restricted users.
# @LAYER: UI (API Tests)
@@ -28,7 +28,7 @@ from src.models.assistant import (
# [DEF:_run_async:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Execute async endpoint handler in synchronous test context.
# @PRE: coroutine is awaitable endpoint invocation.
# @POST: Returns coroutine result or raises propagated exception.
@@ -38,7 +38,7 @@ def _run_async(coroutine):
# [/DEF:_run_async:Function]
# [DEF:_FakeTask:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Lightweight task model used for assistant authz tests.
class _FakeTask:
def __init__(self, task_id: str, status: str = "RUNNING", user_id: str = "u-admin"):
@@ -49,7 +49,7 @@ class _FakeTask:
# [/DEF:_FakeTask:Class]
# [DEF:_FakeTaskManager:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Minimal task manager for deterministic operation creation and lookup.
class _FakeTaskManager:
def __init__(self):
@@ -73,7 +73,7 @@ class _FakeTaskManager:
# [/DEF:_FakeTaskManager:Class]
# [DEF:_FakeConfigManager:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Provide deterministic environment aliases required by intent parsing.
class _FakeConfigManager:
def get_environments(self):
@@ -85,7 +85,7 @@ class _FakeConfigManager:
# [/DEF:_FakeConfigManager:Class]
# [DEF:_admin_user:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build admin principal fixture.
# @PRE: Test requires privileged principal for risky operations.
# @POST: Returns admin-like user stub with Admin role.
@@ -96,7 +96,7 @@ def _admin_user():
# [/DEF:_admin_user:Function]
# [DEF:_other_admin_user:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build second admin principal fixture for ownership tests.
# @PRE: Ownership mismatch scenario needs distinct authenticated actor.
# @POST: Returns alternate admin-like user stub.
@@ -107,7 +107,7 @@ def _other_admin_user():
# [/DEF:_other_admin_user:Function]
# [DEF:_limited_user:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build limited principal without required assistant execution privileges.
# @PRE: Permission denial scenario needs non-admin actor.
# @POST: Returns restricted user stub.
@@ -118,7 +118,7 @@ def _limited_user():
# [/DEF:_limited_user:Function]
# [DEF:_FakeQuery:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Minimal chainable query object for fake DB interactions.
class _FakeQuery:
def __init__(self, rows):
@@ -150,7 +150,7 @@ class _FakeQuery:
# [/DEF:_FakeQuery:Class]
# [DEF:_FakeDb:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: In-memory session substitute for assistant route persistence calls.
class _FakeDb:
def __init__(self):
@@ -197,7 +197,7 @@ class _FakeDb:
# [/DEF:_FakeDb:Class]
# [DEF:_clear_assistant_state:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Reset assistant process-local state between test cases.
# @PRE: Assistant globals may contain state from prior tests.
# @POST: Assistant in-memory state dictionaries are cleared.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.tests.api.routes.test_clean_release_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, api, clean-release, checks, reports
# @PURPOSE: Contract tests for clean release checks and reports endpoints.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_clean_release_legacy_compat:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Compatibility tests for legacy clean-release API paths retained during v2 migration.
# @LAYER: Tests
# @RELATION: TESTS -> backend.src.api.routes.clean_release

View File

@@ -1,5 +1,5 @@
# [DEF:backend.tests.api.routes.test_clean_release_source_policy:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, api, clean-release, source-policy
# @PURPOSE: Validate API behavior for source isolation violations in clean release preparation.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:test_clean_release_v2_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: API contract tests for redesigned clean release endpoints.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:test_clean_release_v2_release_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: API contract test scaffolding for clean release approval and publication endpoints.
# @LAYER: Domain
# @RELATION: IMPLEMENTS -> clean_release_v2_release_api_contracts

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_connections_routes:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Verifies connection routes bootstrap their table before CRUD access.
# @LAYER: API
# @RELATION: VERIFIES -> backend.src.api.routes.connections

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_dashboards:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for Dashboards API endpoints
# @LAYER: API
# @RELATION: TESTS -> backend.src.api.routes.dashboards

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_datasets:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: datasets, api, tests, pagination, mapping, docs
# @PURPOSE: Unit tests for Datasets API endpoints
# @LAYER: API

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_git_status_route:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, git, api, status, no_repo
# @PURPOSE: Validate status endpoint behavior for missing and error repository states.
# @LAYER: Domain (Tests)

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.__tests__.test_migration_routes:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for migration API route handlers.
# @LAYER: API
# @RELATION: VERIFIES -> backend.src.api.routes.migration

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.__tests__.test_profile_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, profile, api, preferences, lookup, contract
# @PURPOSE: Verifies profile API route contracts for preference read/update and Superset account lookup.
# @LAYER: API

View File

@@ -1,5 +1,5 @@
# [DEF:backend.tests.test_reports_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, reports, api, contract, pagination, filtering
# @PURPOSE: Contract tests for GET /api/reports defaults, pagination, and filtering behavior.
# @LAYER: Domain (Tests)

View File

@@ -1,5 +1,5 @@
# [DEF:backend.tests.test_reports_detail_api:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, reports, api, detail, diagnostics
# @PURPOSE: Contract tests for GET /api/reports/{report_id} detail endpoint behavior.
# @LAYER: Domain (Tests)

View File

@@ -1,5 +1,5 @@
# [DEF:backend.tests.test_reports_openapi_conformance:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, reports, openapi, conformance
# @PURPOSE: Validate implemented reports payload shape against OpenAPI-required top-level contract fields.
# @LAYER: Domain (Tests)

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.admin:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, admin, users, roles, permissions
# @PURPOSE: Admin API endpoints for user and role management.
# @LAYER: API
@@ -36,7 +36,7 @@ router = APIRouter(prefix="/api/admin", tags=["admin"])
# [/DEF:router:Variable]
# [DEF:list_users:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lists all registered users.
# @PRE: Current user has 'Admin' role.
# @POST: Returns a list of UserSchema objects.
@@ -53,7 +53,7 @@ async def list_users(
# [/DEF:list_users:Function]
# [DEF:create_user:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Creates a new local user.
# @PRE: Current user has 'Admin' role.
# @POST: New user is created in the database.
@@ -91,7 +91,7 @@ async def create_user(
# [/DEF:create_user:Function]
# [DEF:update_user:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates an existing user.
@router.put("/users/{user_id}", response_model=UserSchema)
async def update_user(
@@ -126,7 +126,7 @@ async def update_user(
# [/DEF:update_user:Function]
# [DEF:delete_user:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Deletes a user.
@router.delete("/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_user(
@@ -150,7 +150,7 @@ async def delete_user(
# [/DEF:delete_user:Function]
# [DEF:list_roles:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lists all available roles.
# @RETURN: List[RoleSchema] - List of roles.
# @RELATION: CALLS -> backend.src.models.auth.Role
@@ -164,7 +164,7 @@ async def list_roles(
# [/DEF:list_roles:Function]
# [DEF:create_role:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Creates a new system role with associated permissions.
# @PRE: Role name must be unique.
# @POST: New Role record is created in auth.db.
@@ -202,7 +202,7 @@ async def create_role(
# [/DEF:create_role:Function]
# [DEF:update_role:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates an existing role's metadata and permissions.
# @PRE: role_id must be a valid existing role UUID.
# @POST: Role record is updated in auth.db.
@@ -247,7 +247,7 @@ async def update_role(
# [/DEF:update_role:Function]
# [DEF:delete_role:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Removes a role from the system.
# @PRE: role_id must be a valid existing role UUID.
# @POST: Role record is removed from auth.db.
@@ -274,7 +274,7 @@ async def delete_role(
# [/DEF:delete_role:Function]
# [DEF:list_permissions:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lists all available system permissions for assignment.
# @POST: Returns a list of all PermissionSchema objects.
# @PARAM: db (Session) - Auth database session.
@@ -300,7 +300,7 @@ async def list_permissions(
# [/DEF:list_permissions:Function]
# [DEF:list_ad_mappings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lists all AD Group to Role mappings.
@router.get("/ad-mappings", response_model=List[ADGroupMappingSchema])
async def list_ad_mappings(
@@ -312,7 +312,7 @@ async def list_ad_mappings(
# [/DEF:list_ad_mappings:Function]
# [DEF:create_ad_mapping:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Creates a new AD Group mapping.
@router.post("/ad-mappings", response_model=ADGroupMappingSchema)
async def create_ad_mapping(

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.assistant:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, assistant, chat, command, confirmation
# @PURPOSE: API routes for LLM assistant command parsing and safe execution orchestration.
# @LAYER: API
@@ -47,7 +47,7 @@ git_service = GitService()
# [DEF:AssistantMessageRequest:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Input payload for assistant message endpoint.
# @PRE: message length is within accepted bounds.
# @POST: Request object provides message text and optional conversation binding.
@@ -58,7 +58,7 @@ class AssistantMessageRequest(BaseModel):
# [DEF:AssistantAction:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: UI action descriptor returned with assistant responses.
# @PRE: type and label are provided by orchestration logic.
# @POST: Action can be rendered as button on frontend.
@@ -70,7 +70,7 @@ class AssistantAction(BaseModel):
# [DEF:AssistantMessageResponse:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Output payload contract for assistant interaction endpoints.
# @PRE: Response includes deterministic state and text.
# @POST: Payload may include task_id/confirmation_id/actions for UI follow-up.
@@ -88,7 +88,7 @@ class AssistantMessageResponse(BaseModel):
# [DEF:ConfirmationRecord:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: In-memory confirmation token model for risky operation dispatch.
# @PRE: intent/dispatch/user_id are populated at confirmation request time.
# @POST: Record tracks lifecycle state and expiry timestamp.
@@ -125,7 +125,7 @@ INTENT_PERMISSION_CHECKS: Dict[str, List[Tuple[str, str]]] = {
# [DEF:_append_history:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Append conversation message to in-memory history buffer.
# @PRE: user_id and conversation_id identify target conversation bucket.
# @POST: Message entry is appended to CONVERSATIONS key list.
@@ -157,7 +157,7 @@ def _append_history(
# [DEF:_persist_message:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist assistant/user message record to database.
# @PRE: db session is writable and message payload is serializable.
# @POST: Message row is committed or persistence failure is logged.
@@ -193,7 +193,7 @@ def _persist_message(
# [DEF:_audit:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Append in-memory audit record for assistant decision trace.
# @PRE: payload describes decision/outcome fields.
# @POST: ASSISTANT_AUDIT list for user contains new timestamped entry.
@@ -206,7 +206,7 @@ def _audit(user_id: str, payload: Dict[str, Any]):
# [DEF:_persist_audit:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist structured assistant audit payload in database.
# @PRE: db session is writable and payload is JSON-serializable.
# @POST: Audit row is committed or failure is logged with rollback.
@@ -230,7 +230,7 @@ def _persist_audit(db: Session, user_id: str, payload: Dict[str, Any], conversat
# [DEF:_persist_confirmation:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist confirmation token record to database.
# @PRE: record contains id/user/intent/dispatch/expiry fields.
# @POST: Confirmation row exists in persistent storage.
@@ -256,7 +256,7 @@ def _persist_confirmation(db: Session, record: ConfirmationRecord):
# [DEF:_update_confirmation_state:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Update persistent confirmation token lifecycle state.
# @PRE: confirmation_id references existing row.
# @POST: State and consumed_at fields are updated when applicable.
@@ -276,7 +276,7 @@ def _update_confirmation_state(db: Session, confirmation_id: str, state: str):
# [DEF:_load_confirmation_from_db:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Load confirmation token from database into in-memory model.
# @PRE: confirmation_id may or may not exist in storage.
# @POST: Returns ConfirmationRecord when found, otherwise None.
@@ -302,7 +302,7 @@ def _load_confirmation_from_db(db: Session, confirmation_id: str) -> Optional[Co
# [DEF:_ensure_conversation:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve active conversation id in memory or create a new one.
# @PRE: user_id identifies current actor.
# @POST: Returns stable conversation id and updates USER_ACTIVE_CONVERSATION.
@@ -322,7 +322,7 @@ def _ensure_conversation(user_id: str, conversation_id: Optional[str]) -> str:
# [DEF:_resolve_or_create_conversation:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve active conversation using explicit id, memory cache, or persisted history.
# @PRE: user_id and db session are available.
# @POST: Returns conversation id and updates USER_ACTIVE_CONVERSATION cache.
@@ -352,7 +352,7 @@ def _resolve_or_create_conversation(user_id: str, conversation_id: Optional[str]
# [DEF:_cleanup_history_ttl:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Enforce assistant message retention window by deleting expired rows and in-memory records.
# @PRE: db session is available and user_id references current actor scope.
# @POST: Messages older than ASSISTANT_MESSAGE_TTL_DAYS are removed from persistence and memory mirrors.
@@ -390,7 +390,7 @@ def _cleanup_history_ttl(db: Session, user_id: str):
# [DEF:_is_conversation_archived:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Determine archived state for a conversation based on last update timestamp.
# @PRE: updated_at can be null for empty conversations.
# @POST: Returns True when conversation inactivity exceeds archive threshold.
@@ -403,7 +403,7 @@ def _is_conversation_archived(updated_at: Optional[datetime]) -> bool:
# [DEF:_coerce_query_bool:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Normalize bool-like query values for compatibility in direct handler invocations/tests.
# @PRE: value may be bool, string, or FastAPI Query metadata object.
# @POST: Returns deterministic boolean flag.
@@ -417,7 +417,7 @@ def _coerce_query_bool(value: Any) -> bool:
# [DEF:_extract_id:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Extract first regex match group from text by ordered pattern list.
# @PRE: patterns contain at least one capture group.
# @POST: Returns first matched token or None.
@@ -431,7 +431,7 @@ def _extract_id(text: str, patterns: List[str]) -> Optional[str]:
# [DEF:_resolve_env_id:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve environment identifier/name token to canonical environment id.
# @PRE: config_manager provides environment list.
# @POST: Returns matched environment id or None.
@@ -449,7 +449,7 @@ def _resolve_env_id(token: Optional[str], config_manager: ConfigManager) -> Opti
# [DEF:_is_production_env:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Determine whether environment token resolves to production-like target.
# @PRE: config_manager provides environments or token text is provided.
# @POST: Returns True for production/prod synonyms, else False.
@@ -467,7 +467,7 @@ def _is_production_env(token: Optional[str], config_manager: ConfigManager) -> b
# [DEF:_resolve_provider_id:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve provider token to provider id with active/default fallback.
# @PRE: db session can load provider list through LLMProviderService.
# @POST: Returns provider id or None when no providers configured.
@@ -503,7 +503,7 @@ def _resolve_provider_id(
# [DEF:_get_default_environment_id:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve default environment id from settings or first configured environment.
# @PRE: config_manager returns environments list.
# @POST: Returns default environment id or None when environment list is empty.
@@ -525,7 +525,7 @@ def _get_default_environment_id(config_manager: ConfigManager) -> Optional[str]:
# [DEF:_resolve_dashboard_id_by_ref:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard id by title or slug reference in selected environment.
# @PRE: dashboard_ref is a non-empty string-like token.
# @POST: Returns dashboard id when uniquely matched, otherwise None.
@@ -568,7 +568,7 @@ def _resolve_dashboard_id_by_ref(
# [DEF:_resolve_dashboard_id_entity:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard id from intent entities using numeric id or dashboard_ref fallback.
# @PRE: entities may contain dashboard_id as int/str and optional dashboard_ref.
# @POST: Returns resolved dashboard id or None when ambiguous/unresolvable.
@@ -600,7 +600,7 @@ def _resolve_dashboard_id_entity(
# [DEF:_get_environment_name_by_id:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve human-readable environment name by id.
# @PRE: environment id may be None.
# @POST: Returns matching environment name or fallback id.
@@ -613,7 +613,7 @@ def _get_environment_name_by_id(env_id: Optional[str], config_manager: ConfigMan
# [DEF:_extract_result_deep_links:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Build deep-link actions to verify task result from assistant chat.
# @PRE: task object is available.
# @POST: Returns zero or more assistant actions for dashboard open/diff.
@@ -670,7 +670,7 @@ def _extract_result_deep_links(task: Any, config_manager: ConfigManager) -> List
# [DEF:_build_task_observability_summary:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Build compact textual summary for completed tasks to reduce "black box" effect.
# @PRE: task may contain plugin-specific result payload.
# @POST: Returns non-empty summary line for known task types or empty string fallback.
@@ -734,7 +734,7 @@ def _build_task_observability_summary(task: Any, config_manager: ConfigManager)
# [DEF:_parse_command:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Deterministically parse RU/EN command text into intent payload.
# @PRE: message contains raw user text and config manager resolves environments.
# @POST: Returns intent dict with domain/operation/entities/confidence/risk fields.
@@ -928,7 +928,7 @@ def _parse_command(message: str, config_manager: ConfigManager) -> Dict[str, Any
# [DEF:_check_any_permission:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Validate user against alternative permission checks (logical OR).
# @PRE: checks list contains resource-action tuples.
# @POST: Returns on first successful permission; raises 403-like HTTPException otherwise.
@@ -946,7 +946,7 @@ def _check_any_permission(current_user: User, checks: List[Tuple[str, str]]):
# [DEF:_has_any_permission:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Check whether user has at least one permission tuple from the provided list.
# @PRE: current_user and checks list are valid.
# @POST: Returns True when at least one permission check passes.
@@ -960,7 +960,7 @@ def _has_any_permission(current_user: User, checks: List[Tuple[str, str]]) -> bo
# [DEF:_build_tool_catalog:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Build current-user tool catalog for LLM planner with operation contracts and defaults.
# @PRE: current_user is authenticated; config/db are available.
# @POST: Returns list of executable tools filtered by permission and runtime availability.
@@ -1084,7 +1084,7 @@ def _build_tool_catalog(current_user: User, config_manager: ConfigManager, db: S
# [DEF:_coerce_intent_entities:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Normalize intent entity value types from LLM output to route-compatible values.
# @PRE: intent contains entities dict or missing entities.
# @POST: Returned intent has numeric ids coerced where possible and string values stripped.
@@ -1109,7 +1109,7 @@ _SAFE_OPS = {"show_capabilities", "get_task_status", "get_health_summary"}
# [DEF:_confirmation_summary:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Build human-readable confirmation prompt for an intent before execution.
# @PRE: intent contains operation and entities fields.
# @POST: Returns descriptive Russian-language text ending with confirmation prompt.
@@ -1205,7 +1205,7 @@ async def _async_confirmation_summary(intent: Dict[str, Any], config_manager: Co
# [DEF:_clarification_text_for_intent:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Convert technical missing-parameter errors into user-facing clarification prompts.
# @PRE: state was classified as needs_clarification for current intent/error combination.
# @POST: Returned text is human-readable and actionable for target operation.
@@ -1229,7 +1229,7 @@ def _clarification_text_for_intent(intent: Optional[Dict[str, Any]], detail_text
# [DEF:_plan_intent_with_llm:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Use active LLM provider to select best tool/operation from dynamic catalog.
# @PRE: tools list contains allowed operations for current user.
# @POST: Returns normalized intent dict when planning succeeds; otherwise None.
@@ -1340,7 +1340,7 @@ async def _plan_intent_with_llm(
# [DEF:_authorize_intent:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Validate user permissions for parsed intent before confirmation/dispatch.
# @PRE: intent.operation is present for known assistant command domains.
# @POST: Returns if authorized; raises HTTPException(403) when denied.
@@ -1352,7 +1352,7 @@ def _authorize_intent(intent: Dict[str, Any], current_user: User):
# [DEF:_dispatch_intent:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Execute parsed assistant intent via existing task/plugin/git services.
# @PRE: intent operation is known and actor permissions are validated per operation.
# @POST: Returns response text, optional task id, and UI actions for follow-up.
@@ -1674,7 +1674,7 @@ async def _dispatch_intent(
@router.post("/messages", response_model=AssistantMessageResponse)
# [DEF:send_message:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Parse assistant command, enforce safety gates, and dispatch executable intent.
# @PRE: Authenticated user is available and message text is non-empty.
# @POST: Response state is one of clarification/confirmation/started/success/denied/failed.
@@ -1844,7 +1844,7 @@ async def send_message(
@router.post("/confirmations/{confirmation_id}/confirm", response_model=AssistantMessageResponse)
# [DEF:confirm_operation:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Execute previously requested risky operation after explicit user confirmation.
# @PRE: confirmation_id exists, belongs to current user, is pending, and not expired.
# @POST: Confirmation state becomes consumed and operation result is persisted in history.
@@ -1911,7 +1911,7 @@ async def confirm_operation(
@router.post("/confirmations/{confirmation_id}/cancel", response_model=AssistantMessageResponse)
# [DEF:cancel_operation:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Cancel pending risky operation and mark confirmation token as cancelled.
# @PRE: confirmation_id exists, belongs to current user, and is still pending.
# @POST: Confirmation becomes cancelled and cannot be executed anymore.
@@ -1968,7 +1968,7 @@ async def cancel_operation(
# [DEF:list_conversations:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return paginated conversation list for current user with archived flag and last message preview.
# @PRE: Authenticated user context and valid pagination params.
# @POST: Conversations are grouped by conversation_id sorted by latest activity descending.
@@ -2056,7 +2056,7 @@ async def list_conversations(
# [DEF:delete_conversation:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Soft-delete or hard-delete a conversation and clear its in-memory trace.
# @PRE: conversation_id belongs to current_user.
# @POST: Conversation records are removed from DB and CONVERSATIONS cache.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.clean_release:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, clean-release, candidate-preparation, compliance
# @PURPOSE: Expose clean release endpoints for candidate preparation and subsequent compliance flow.
# @LAYER: API

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.clean_release_v2:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, clean-release, v2, headless
# @PURPOSE: Redesigned clean release API for headless candidate lifecycle.
# @LAYER: API

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.dashboards:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: api, dashboards, resources, hub
# @PURPOSE: API endpoints for the Dashboard Hub - listing dashboards with Git and task status
# @LAYER: API
@@ -66,7 +66,7 @@ from ...services.resource_service import ResourceService
router = APIRouter(prefix="/api/dashboards", tags=["Dashboards"])
# [DEF:GitStatus:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for dashboard Git synchronization status.
class GitStatus(BaseModel):
branch: Optional[str] = None
@@ -76,7 +76,7 @@ class GitStatus(BaseModel):
# [/DEF:GitStatus:DataClass]
# [DEF:LastTask:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for the most recent background task associated with a dashboard.
class LastTask(BaseModel):
task_id: Optional[str] = None
@@ -88,7 +88,7 @@ class LastTask(BaseModel):
# [/DEF:LastTask:DataClass]
# [DEF:DashboardItem:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO representing a single dashboard with projected metadata.
class DashboardItem(BaseModel):
id: int
@@ -104,7 +104,7 @@ class DashboardItem(BaseModel):
# [/DEF:DashboardItem:DataClass]
# [DEF:EffectiveProfileFilter:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Metadata about applied profile filters for UI context.
class EffectiveProfileFilter(BaseModel):
applied: bool
@@ -117,7 +117,7 @@ class EffectiveProfileFilter(BaseModel):
# [/DEF:EffectiveProfileFilter:DataClass]
# [DEF:DashboardsResponse:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Envelope DTO for paginated dashboards list.
class DashboardsResponse(BaseModel):
dashboards: List[DashboardItem]
@@ -129,7 +129,7 @@ class DashboardsResponse(BaseModel):
# [/DEF:DashboardsResponse:DataClass]
# [DEF:DashboardChartItem:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for a chart linked to a dashboard.
class DashboardChartItem(BaseModel):
id: int
@@ -141,7 +141,7 @@ class DashboardChartItem(BaseModel):
# [/DEF:DashboardChartItem:DataClass]
# [DEF:DashboardDatasetItem:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for a dataset associated with a dashboard.
class DashboardDatasetItem(BaseModel):
id: int
@@ -153,7 +153,7 @@ class DashboardDatasetItem(BaseModel):
# [/DEF:DashboardDatasetItem:DataClass]
# [DEF:DashboardDetailResponse:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Detailed dashboard metadata including children.
class DashboardDetailResponse(BaseModel):
id: int
@@ -170,7 +170,7 @@ class DashboardDetailResponse(BaseModel):
# [/DEF:DashboardDetailResponse:DataClass]
# [DEF:DashboardTaskHistoryItem:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Individual history record entry.
class DashboardTaskHistoryItem(BaseModel):
id: str
@@ -184,7 +184,7 @@ class DashboardTaskHistoryItem(BaseModel):
# [/DEF:DashboardTaskHistoryItem:DataClass]
# [DEF:DashboardTaskHistoryResponse:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Collection DTO for task history.
class DashboardTaskHistoryResponse(BaseModel):
dashboard_id: int
@@ -192,7 +192,7 @@ class DashboardTaskHistoryResponse(BaseModel):
# [/DEF:DashboardTaskHistoryResponse:DataClass]
# [DEF:DatabaseMapping:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for cross-environment database ID mapping.
class DatabaseMapping(BaseModel):
source_db: str
@@ -203,7 +203,7 @@ class DatabaseMapping(BaseModel):
# [/DEF:DatabaseMapping:DataClass]
# [DEF:DatabaseMappingsResponse:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Wrapper for database mappings.
class DatabaseMappingsResponse(BaseModel):
mappings: List[DatabaseMapping]
@@ -211,7 +211,7 @@ class DatabaseMappingsResponse(BaseModel):
# [DEF:_find_dashboard_id_by_slug:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard numeric ID by slug using Superset list endpoint.
# @PRE: `dashboard_slug` is non-empty.
# @POST: Returns dashboard ID when found, otherwise None.
@@ -239,7 +239,7 @@ def _find_dashboard_id_by_slug(
# [DEF:_resolve_dashboard_id_from_ref:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard ID from slug-first reference with numeric fallback.
# @PRE: `dashboard_ref` is provided in route path.
# @POST: Returns a valid dashboard ID or raises HTTPException(404).
@@ -264,7 +264,7 @@ def _resolve_dashboard_id_from_ref(
# [DEF:_find_dashboard_id_by_slug_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard numeric ID by slug using async Superset list endpoint.
# @PRE: dashboard_slug is non-empty.
# @POST: Returns dashboard ID when found, otherwise None.
@@ -292,7 +292,7 @@ async def _find_dashboard_id_by_slug_async(
# [DEF:_resolve_dashboard_id_from_ref_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard ID from slug-first reference using async Superset client.
# @PRE: dashboard_ref is provided in route path.
# @POST: Returns valid dashboard ID or raises HTTPException(404).
@@ -316,7 +316,7 @@ async def _resolve_dashboard_id_from_ref_async(
# [DEF:_normalize_filter_values:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Normalize query filter values to lower-cased non-empty tokens.
# @PRE: values may be None or list of strings.
# @POST: Returns trimmed normalized list preserving input order.
@@ -333,7 +333,7 @@ def _normalize_filter_values(values: Optional[List[str]]) -> List[str]:
# [DEF:_dashboard_git_filter_value:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Build comparable git status token for dashboards filtering.
# @PRE: dashboard payload may contain git_status or None.
# @POST: Returns one of ok|diff|no_repo|error|pending.
@@ -353,7 +353,7 @@ def _dashboard_git_filter_value(dashboard: Dict[str, Any]) -> str:
# [/DEF:_dashboard_git_filter_value:Function]
# [DEF:_normalize_actor_alias_token:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Normalize actor alias token to comparable trim+lower text.
# @PRE: value can be scalar/None.
# @POST: Returns normalized token or None.
@@ -364,7 +364,7 @@ def _normalize_actor_alias_token(value: Any) -> Optional[str]:
# [DEF:_normalize_owner_display_token:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Project owner payload value into stable display string for API response contracts.
# @PRE: owner can be scalar, dict or None.
# @POST: Returns trimmed non-empty owner display token or None.
@@ -391,7 +391,7 @@ def _normalize_owner_display_token(owner: Any) -> Optional[str]:
# [DEF:_normalize_dashboard_owner_values:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Normalize dashboard owners payload to optional list of display strings.
# @PRE: owners payload can be None, scalar, or list with mixed values.
# @POST: Returns deduplicated owner labels preserving order, or None when absent.
@@ -416,7 +416,7 @@ def _normalize_dashboard_owner_values(owners: Any) -> Optional[List[str]]:
# [DEF:_project_dashboard_response_items:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Project dashboard payloads to response-contract-safe shape.
# @PRE: dashboards is a list of dict-like dashboard payloads.
# @POST: Returned items satisfy DashboardItem owners=list[str]|None contract.
@@ -433,7 +433,7 @@ def _project_dashboard_response_items(dashboards: List[Dict[str, Any]]) -> List[
# [DEF:_resolve_profile_actor_aliases:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve stable actor aliases for profile filtering without per-dashboard detail fan-out.
# @PRE: bound username is available and env is valid.
# @POST: Returns at least normalized username; may include Superset display-name alias.
@@ -498,7 +498,7 @@ def _resolve_profile_actor_aliases(env: Any, bound_username: str) -> List[str]:
# [DEF:_matches_dashboard_actor_aliases:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Apply profile actor matching against multiple aliases (username + optional display name).
# @PRE: actor_aliases contains normalized non-empty tokens.
# @POST: Returns True when any alias matches owners OR modified_by.
@@ -520,7 +520,7 @@ def _matches_dashboard_actor_aliases(
# [DEF:get_dashboards:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch list of dashboards from a specific environment with Git status and last task status
# @PRE: env_id must be a valid environment ID
# @PRE: page must be >= 1 if provided
@@ -823,7 +823,7 @@ async def get_dashboards(
# [/DEF:get_dashboards:Function]
# [DEF:get_database_mappings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get database mapping suggestions between source and target environments
# @PRE: User has permission plugin:migration:read
# @PRE: source_env_id and target_env_id are valid environment IDs
@@ -879,7 +879,7 @@ async def get_database_mappings(
# [/DEF:get_database_mappings:Function]
# [DEF:get_dashboard_detail:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch detailed dashboard info with related charts and datasets
# @PRE: env_id must be valid and dashboard ref (slug or id) must exist
# @POST: Returns dashboard detail payload for overview page
@@ -917,7 +917,7 @@ async def get_dashboard_detail(
# [DEF:_task_matches_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Checks whether task params are tied to a specific dashboard and environment.
# @PRE: task-like object exposes plugin_id and params fields.
# @POST: Returns True only for supported task plugins tied to dashboard_id (+optional env_id).
@@ -951,7 +951,7 @@ def _task_matches_dashboard(task: Any, dashboard_id: int, env_id: Optional[str])
# [DEF:get_dashboard_tasks_history:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Returns history of backup and LLM validation tasks for a dashboard.
# @PRE: dashboard ref (slug or id) is valid.
# @POST: Response contains sorted task history (newest first).
@@ -1038,7 +1038,7 @@ async def get_dashboard_tasks_history(
# [DEF:get_dashboard_thumbnail:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Proxies Superset dashboard thumbnail with cache support.
# @PRE: env_id must exist.
# @POST: Returns image bytes or 202 when thumbnail is being prepared by Superset.
@@ -1132,7 +1132,7 @@ async def get_dashboard_thumbnail(
# [/DEF:get_dashboard_thumbnail:Function]
# [DEF:MigrateRequest:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for dashboard migration requests.
class MigrateRequest(BaseModel):
source_env_id: str = Field(..., description="Source environment ID")
@@ -1143,14 +1143,14 @@ class MigrateRequest(BaseModel):
# [/DEF:MigrateRequest:DataClass]
# [DEF:TaskResponse:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for async task ID return.
class TaskResponse(BaseModel):
task_id: str
# [/DEF:TaskResponse:DataClass]
# [DEF:migrate_dashboards:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Trigger bulk migration of dashboards from source to target environment
# @PRE: User has permission plugin:migration:execute
# @PRE: source_env_id and target_env_id are valid environment IDs
@@ -1211,7 +1211,7 @@ async def migrate_dashboards(
# [/DEF:migrate_dashboards:Function]
# [DEF:BackupRequest:DataClass]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: DTO for dashboard backup requests.
class BackupRequest(BaseModel):
env_id: str = Field(..., description="Environment ID")
@@ -1220,7 +1220,7 @@ class BackupRequest(BaseModel):
# [/DEF:BackupRequest:DataClass]
# [DEF:backup_dashboards:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Trigger bulk backup of dashboards with optional cron schedule
# @PRE: User has permission plugin:backup:execute
# @PRE: env_id is a valid environment ID

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.datasets:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, datasets, resources, hub
# @PURPOSE: API endpoints for the Dataset Hub - listing datasets with mapping progress
# @LAYER: API
@@ -22,7 +22,7 @@ from ...core.superset_client import SupersetClient
router = APIRouter(prefix="/api/datasets", tags=["Datasets"])
# [DEF:MappedFields:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: DTO for dataset mapping progress statistics
class MappedFields(BaseModel):
total: int
@@ -30,7 +30,7 @@ class MappedFields(BaseModel):
# [/DEF:MappedFields:DataClass]
# [DEF:LastTask:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: DTO for the most recent task associated with a dataset
class LastTask(BaseModel):
task_id: Optional[str] = None
@@ -38,7 +38,7 @@ class LastTask(BaseModel):
# [/DEF:LastTask:DataClass]
# [DEF:DatasetItem:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Summary DTO for a dataset in the hub listing
class DatasetItem(BaseModel):
id: int
@@ -53,7 +53,7 @@ class DatasetItem(BaseModel):
# [/DEF:DatasetItem:DataClass]
# [DEF:LinkedDashboard:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: DTO for a dashboard linked to a dataset
class LinkedDashboard(BaseModel):
id: int
@@ -62,7 +62,7 @@ class LinkedDashboard(BaseModel):
# [/DEF:LinkedDashboard:DataClass]
# [DEF:DatasetColumn:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: DTO for a single dataset column's metadata
class DatasetColumn(BaseModel):
id: int
@@ -74,7 +74,7 @@ class DatasetColumn(BaseModel):
# [/DEF:DatasetColumn:DataClass]
# [DEF:DatasetDetailResponse:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Detailed DTO for a dataset including columns and links
class DatasetDetailResponse(BaseModel):
id: int
@@ -96,7 +96,7 @@ class DatasetDetailResponse(BaseModel):
# [/DEF:DatasetDetailResponse:DataClass]
# [DEF:DatasetsResponse:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Paginated response DTO for dataset listings
class DatasetsResponse(BaseModel):
datasets: List[DatasetItem]
@@ -107,14 +107,14 @@ class DatasetsResponse(BaseModel):
# [/DEF:DatasetsResponse:DataClass]
# [DEF:TaskResponse:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Response DTO containing a task ID for tracking
class TaskResponse(BaseModel):
task_id: str
# [/DEF:TaskResponse:DataClass]
# [DEF:get_dataset_ids:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch list of all dataset IDs from a specific environment (without pagination)
# @PRE: env_id must be a valid environment ID
# @POST: Returns a list of all dataset IDs
@@ -166,7 +166,7 @@ async def get_dataset_ids(
# [/DEF:get_dataset_ids:Function]
# [DEF:get_datasets:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch list of datasets from a specific environment with mapping progress
# @PRE: env_id must be a valid environment ID
# @PRE: page must be >= 1 if provided
@@ -246,7 +246,7 @@ async def get_datasets(
# [/DEF:get_datasets:Function]
# [DEF:MapColumnsRequest:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Request DTO for initiating column mapping
class MapColumnsRequest(BaseModel):
env_id: str = Field(..., description="Environment ID")
@@ -257,7 +257,7 @@ class MapColumnsRequest(BaseModel):
# [/DEF:MapColumnsRequest:DataClass]
# [DEF:map_columns:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Trigger bulk column mapping for datasets
# @PRE: User has permission plugin:mapper:execute
# @PRE: env_id is a valid environment ID
@@ -319,7 +319,7 @@ async def map_columns(
# [/DEF:map_columns:Function]
# [DEF:GenerateDocsRequest:DataClass]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Request DTO for initiating documentation generation
class GenerateDocsRequest(BaseModel):
env_id: str = Field(..., description="Environment ID")
@@ -329,7 +329,7 @@ class GenerateDocsRequest(BaseModel):
# [/DEF:GenerateDocsRequest:DataClass]
# [DEF:generate_docs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Trigger bulk documentation generation for datasets
# @PRE: User has permission plugin:llm_analysis:execute
# @PRE: env_id is a valid environment ID
@@ -385,7 +385,7 @@ async def generate_docs(
# [/DEF:generate_docs:Function]
# [DEF:get_dataset_detail:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get detailed dataset information including columns and linked dashboards
# @PRE: env_id is a valid environment ID
# @PRE: dataset_id is a valid dataset ID

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.environments:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, environments, superset, databases
# @PURPOSE: API endpoints for listing environments and their databases.
# @LAYER: API

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.git:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: git, routes, api, fastapi, repository, deployment
# @PURPOSE: Provides FastAPI endpoints for Git integration operations.
# @LAYER: API
@@ -48,7 +48,7 @@ MAX_REPOSITORY_STATUS_BATCH = 50
# [DEF:_build_no_repo_status_payload:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build a consistent status payload for dashboards without initialized repositories.
# @PRE: None.
# @POST: Returns a stable payload compatible with frontend repository status parsing.
@@ -73,7 +73,7 @@ def _build_no_repo_status_payload() -> dict:
# [DEF:_handle_unexpected_git_route_error:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Convert unexpected route-level exceptions to stable 500 API responses.
# @PRE: `error` is a non-HTTPException instance.
# @POST: Raises HTTPException(500) with route-specific context.
@@ -86,7 +86,7 @@ def _handle_unexpected_git_route_error(route_name: str, error: Exception) -> Non
# [DEF:_resolve_repository_status:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve repository status for one dashboard with graceful NO_REPO semantics.
# @PRE: `dashboard_id` is a valid integer.
# @POST: Returns standard status payload or `NO_REPO` payload when repository path is absent.
@@ -113,7 +113,7 @@ def _resolve_repository_status(dashboard_id: int) -> dict:
# [DEF:_get_git_config_or_404:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve GitServerConfig by id or raise 404.
# @PRE: db session is available.
# @POST: Returns GitServerConfig model.
@@ -126,7 +126,7 @@ def _get_git_config_or_404(db: Session, config_id: str) -> GitServerConfig:
# [DEF:_find_dashboard_id_by_slug:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard numeric ID by slug in a specific environment.
# @PRE: dashboard_slug is non-empty.
# @POST: Returns dashboard ID or None when not found.
@@ -153,7 +153,7 @@ def _find_dashboard_id_by_slug(
# [DEF:_resolve_dashboard_id_from_ref:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard ID from slug-or-id reference for Git routes.
# @PRE: dashboard_ref is provided; env_id is required for slug values.
# @POST: Returns numeric dashboard ID or raises HTTPException.
@@ -188,7 +188,7 @@ def _resolve_dashboard_id_from_ref(
# [DEF:_find_dashboard_id_by_slug_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard numeric ID by slug asynchronously for hot-path Git routes.
# @PRE: dashboard_slug is non-empty.
# @POST: Returns dashboard ID or None when not found.
@@ -215,7 +215,7 @@ async def _find_dashboard_id_by_slug_async(
# [DEF:_resolve_dashboard_id_from_ref_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve dashboard ID asynchronously from slug-or-id reference for hot Git routes.
# @PRE: dashboard_ref is provided; env_id is required for slug values.
# @POST: Returns numeric dashboard ID or raises HTTPException.
@@ -254,7 +254,7 @@ async def _resolve_dashboard_id_from_ref_async(
# [DEF:_resolve_repo_key_from_ref:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve repository folder key with slug-first strategy and deterministic fallback.
# @PRE: dashboard_id is resolved and valid.
# @POST: Returns safe key to be used in local repository path.
@@ -287,7 +287,7 @@ def _resolve_repo_key_from_ref(
# [DEF:_sanitize_optional_identity_value:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Normalize optional identity value into trimmed string or None.
# @PRE: value may be None or blank.
# @POST: Returns sanitized value suitable for git identity configuration.
@@ -301,7 +301,7 @@ def _sanitize_optional_identity_value(value: Optional[str]) -> Optional[str]:
# [DEF:_resolve_current_user_git_identity:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve configured Git username/email from current user's profile preferences.
# @PRE: `db` may be stubbed in tests; `current_user` may be absent for direct handler invocations.
# @POST: Returns tuple(username, email) only when both values are configured.
@@ -343,7 +343,7 @@ def _resolve_current_user_git_identity(
# [DEF:_apply_git_identity_from_profile:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Apply user-scoped Git identity to repository-local config before write/pull operations.
# @PRE: dashboard_id is resolved; db/current_user may be missing in direct test invocation context.
# @POST: git_service.configure_identity is called only when identity and method are available.
@@ -367,7 +367,7 @@ def _apply_git_identity_from_profile(
# [DEF:get_git_configs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: List all configured Git servers.
# @PRE: Database session `db` is available.
# @POST: Returns a list of all GitServerConfig objects from the database.
@@ -388,7 +388,7 @@ async def get_git_configs(
# [/DEF:get_git_configs:Function]
# [DEF:create_git_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Register a new Git server configuration.
# @PRE: `config` contains valid GitServerConfigCreate data.
# @POST: A new GitServerConfig record is created in the database.
@@ -410,7 +410,7 @@ async def create_git_config(
# [/DEF:create_git_config:Function]
# [DEF:update_git_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Update an existing Git server configuration.
# @PRE: `config_id` corresponds to an existing configuration.
# @POST: The configuration record is updated in the database.
@@ -445,7 +445,7 @@ async def update_git_config(
# [/DEF:update_git_config:Function]
# [DEF:delete_git_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Remove a Git server configuration.
# @PRE: `config_id` corresponds to an existing configuration.
# @POST: The configuration record is removed from the database.
@@ -467,7 +467,7 @@ async def delete_git_config(
# [/DEF:delete_git_config:Function]
# [DEF:test_git_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Validate connection to a Git server using provided credentials.
# @PRE: `config` contains provider, url, and pat.
# @POST: Returns success if the connection is validated via GitService.
@@ -499,7 +499,7 @@ async def test_git_config(
# [DEF:list_gitea_repositories:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: List repositories in Gitea for a saved Gitea config.
# @PRE: config_id exists and provider is GITEA.
# @POST: Returns repositories visible to PAT user.
@@ -530,7 +530,7 @@ async def list_gitea_repositories(
# [DEF:create_gitea_repository:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Create a repository in Gitea for a saved Gitea config.
# @PRE: config_id exists and provider is GITEA.
# @POST: Returns created repository payload.
@@ -567,7 +567,7 @@ async def create_gitea_repository(
# [DEF:create_remote_repository:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Create repository on remote Git server using selected provider config.
# @PRE: config_id exists and PAT has creation permissions.
# @POST: Returns normalized remote repository payload.
@@ -628,7 +628,7 @@ async def create_remote_repository(
# [DEF:delete_gitea_repository:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Delete repository in Gitea for a saved Gitea config.
# @PRE: config_id exists and provider is GITEA.
# @POST: Target repository is deleted on Gitea.
@@ -654,7 +654,7 @@ async def delete_gitea_repository(
# [/DEF:delete_gitea_repository:Function]
# [DEF:init_repository:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Link a dashboard to a Git repository and perform initial clone/init.
# @PRE: `dashboard_ref` exists and `init_data` contains valid config_id and remote_url.
# @POST: Repository is initialized on disk and a GitRepository record is saved in DB.
@@ -712,7 +712,7 @@ async def init_repository(
# [/DEF:init_repository:Function]
# [DEF:get_repository_binding:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return repository binding with provider metadata for selected dashboard.
# @PRE: `dashboard_ref` resolves to a valid dashboard and repository is initialized.
# @POST: Returns dashboard repository binding and linked provider.
@@ -747,7 +747,7 @@ async def get_repository_binding(
# [/DEF:get_repository_binding:Function]
# [DEF:delete_repository:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Delete local repository workspace and DB binding for selected dashboard.
# @PRE: `dashboard_ref` resolves to a valid dashboard.
# @POST: Repository files and binding record are removed when present.
@@ -772,7 +772,7 @@ async def delete_repository(
# [/DEF:delete_repository:Function]
# [DEF:get_branches:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: List all branches for a dashboard's repository.
# @PRE: Repository for `dashboard_ref` is initialized.
# @POST: Returns a list of branches from the local repository.
@@ -796,7 +796,7 @@ async def get_branches(
# [/DEF:get_branches:Function]
# [DEF:create_branch:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Create a new branch in the dashboard's repository.
# @PRE: `dashboard_ref` repository exists and `branch_data` has name and from_branch.
# @POST: A new branch is created in the local repository.
@@ -825,7 +825,7 @@ async def create_branch(
# [/DEF:create_branch:Function]
# [DEF:checkout_branch:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Switch the dashboard's repository to a specific branch.
# @PRE: `dashboard_ref` repository exists and branch `checkout_data.name` exists.
# @POST: The local repository HEAD is moved to the specified branch.
@@ -851,7 +851,7 @@ async def checkout_branch(
# [/DEF:checkout_branch:Function]
# [DEF:commit_changes:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Stage and commit changes in the dashboard's repository.
# @PRE: `dashboard_ref` repository exists and `commit_data` has message and files.
# @POST: Specified files are staged and a new commit is created.
@@ -880,7 +880,7 @@ async def commit_changes(
# [/DEF:commit_changes:Function]
# [DEF:push_changes:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Push local commits to the remote repository.
# @PRE: `dashboard_ref` repository exists and has a remote configured.
# @POST: Local commits are pushed to the remote repository.
@@ -904,7 +904,7 @@ async def push_changes(
# [/DEF:push_changes:Function]
# [DEF:pull_changes:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Pull changes from the remote repository.
# @PRE: `dashboard_ref` repository exists and has a remote configured.
# @POST: Remote changes are fetched and merged into the local branch.
@@ -952,7 +952,7 @@ async def pull_changes(
# [/DEF:pull_changes:Function]
# [DEF:get_merge_status:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return unfinished-merge status for repository (web-only recovery support).
# @PRE: `dashboard_ref` resolves to a valid dashboard repository.
# @POST: Returns merge status payload.
@@ -975,7 +975,7 @@ async def get_merge_status(
# [DEF:get_merge_conflicts:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return conflicted files with mine/theirs previews for web conflict resolver.
# @PRE: `dashboard_ref` resolves to a valid dashboard repository.
# @POST: Returns conflict file list.
@@ -998,7 +998,7 @@ async def get_merge_conflicts(
# [DEF:resolve_merge_conflicts:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Apply mine/theirs/manual conflict resolutions from WebUI and stage files.
# @PRE: `dashboard_ref` resolves; request contains at least one resolution item.
# @POST: Resolved files are staged in index.
@@ -1026,7 +1026,7 @@ async def resolve_merge_conflicts(
# [DEF:abort_merge:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Abort unfinished merge from WebUI flow.
# @PRE: `dashboard_ref` resolves to repository.
# @POST: Merge operation is aborted or reports no active merge.
@@ -1049,7 +1049,7 @@ async def abort_merge(
# [DEF:continue_merge:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Finalize unfinished merge from WebUI flow.
# @PRE: All conflicts are resolved and staged.
# @POST: Merge commit is created.
@@ -1073,7 +1073,7 @@ async def continue_merge(
# [DEF:sync_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Sync dashboard state from Superset to Git using the GitPlugin.
# @PRE: `dashboard_ref` is valid; GitPlugin is available.
# @POST: Dashboard YAMLs are exported from Superset and committed to Git.
@@ -1105,7 +1105,7 @@ async def sync_dashboard(
# [DEF:promote_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Promote changes between branches via MR or direct merge.
# @PRE: dashboard repository is initialized and Git config is valid.
# @POST: Returns promotion result metadata.
@@ -1208,7 +1208,7 @@ async def promote_dashboard(
# [/DEF:promote_dashboard:Function]
# [DEF:get_environments:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: List all deployment environments.
# @PRE: Config manager is accessible.
# @POST: Returns a list of DeploymentEnvironmentSchema objects.
@@ -1231,7 +1231,7 @@ async def get_environments(
# [/DEF:get_environments:Function]
# [DEF:deploy_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Deploy dashboard from Git to a target environment.
# @PRE: `dashboard_ref` and `deploy_data.environment_id` are valid.
# @POST: Dashboard YAMLs are read from Git and imported into the target Superset.
@@ -1262,7 +1262,7 @@ async def deploy_dashboard(
# [/DEF:deploy_dashboard:Function]
# [DEF:get_history:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: View commit history for a dashboard's repository.
# @PRE: `dashboard_ref` repository exists.
# @POST: Returns a list of recent commits from the repository.
@@ -1288,7 +1288,7 @@ async def get_history(
# [/DEF:get_history:Function]
# [DEF:get_repository_status:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get current Git status for a dashboard repository.
# @PRE: `dashboard_ref` resolves to a valid dashboard.
# @POST: Returns repository status; if repo is not initialized, returns `NO_REPO` payload.
@@ -1313,7 +1313,7 @@ async def get_repository_status(
# [DEF:get_repository_status_batch:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get Git statuses for multiple dashboard repositories in one request.
# @PRE: `request.dashboard_ids` is provided.
# @POST: Returns `statuses` map where each key is dashboard ID and value is repository status payload.
@@ -1357,7 +1357,7 @@ async def get_repository_status_batch(
# [/DEF:get_repository_status_batch:Function]
# [DEF:get_repository_diff:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get Git diff for a dashboard repository.
# @PRE: `dashboard_ref` repository exists.
# @POST: Returns the diff text for the specified file or all changes.
@@ -1386,7 +1386,7 @@ async def get_repository_diff(
# [/DEF:get_repository_diff:Function]
# [DEF:generate_commit_message:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Generate a suggested commit message using LLM.
# @PRE: Repository for `dashboard_ref` is initialized.
# @POST: Returns a suggested commit message string.

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.git_schemas:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: git, schemas, pydantic, api, contracts
# @PURPOSE: Defines Pydantic models for the Git integration API layer.
# @LAYER: API
@@ -14,7 +14,7 @@ from datetime import datetime
from src.models.git import GitProvider, GitStatus, SyncStatus
# [DEF:GitServerConfigBase:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Base schema for Git server configuration attributes.
class GitServerConfigBase(BaseModel):
name: str = Field(..., description="Display name for the Git server")

View File

@@ -1,5 +1,5 @@
# [DEF:health_router:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: health, monitoring, dashboards
# @PURPOSE: API endpoints for dashboard health monitoring and status aggregation.
# @LAYER: UI/API

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/api/routes/llm.py:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, routes, llm
# @PURPOSE: API routes for LLM provider configuration and management.
# @LAYER: UI (API)

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.mappings:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: api, mappings, database, fuzzy-matching
# @PURPOSE: API endpoints for managing database mappings and getting suggestions.
# @LAYER: API

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.api.routes.migration:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: api, migration, dashboards, sync, dry-run
# @PURPOSE: HTTP contract layer for migration orchestration, settings, dry-run, and mapping sync endpoints.
# @LAYER: Infra
@@ -38,7 +38,7 @@ from ...models.mapping import ResourceMapping
router = APIRouter(prefix="/api", tags=["migration"])
# [DEF:get_dashboards:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch dashboard metadata from a requested environment for migration selection UI.
# @PRE: env_id is provided and exists in configured environments.
# @POST: Returns List[DashboardMetadata] for the resolved environment; emits HTTP_404 when environment is absent.
@@ -66,7 +66,7 @@ async def get_dashboards(
# [/DEF:get_dashboards:Function]
# [DEF:execute_migration:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Validate migration selection and enqueue asynchronous migration task execution.
# @PRE: DashboardSelection payload is valid and both source/target environments exist.
# @POST: Returns {"task_id": str, "message": str} when task creation succeeds; emits HTTP_400/HTTP_500 on failure.
@@ -108,7 +108,7 @@ async def execute_migration(
# [DEF:dry_run_migration:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Build pre-flight migration diff and risk summary without mutating target systems.
# @PRE: DashboardSelection is valid, source and target environments exist, differ, and selected_ids is non-empty.
# @POST: Returns deterministic dry-run payload; emits HTTP_400 for guard violations and HTTP_500 for orchestrator value errors.
@@ -160,7 +160,7 @@ async def dry_run_migration(
# [/DEF:dry_run_migration:Function]
# [DEF:get_migration_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Read and return configured migration synchronization cron expression.
# @PRE: Configuration store is available and requester has READ permission.
# @POST: Returns {"cron": str} reflecting current persisted settings value.
@@ -178,7 +178,7 @@ async def get_migration_settings(
# [/DEF:get_migration_settings:Function]
# [DEF:update_migration_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Validate and persist migration synchronization cron expression update.
# @PRE: Payload includes "cron" key and requester has WRITE permission.
# @POST: Returns {"cron": str, "status": "updated"} and persists updated cron value.
@@ -204,7 +204,7 @@ async def update_migration_settings(
# [/DEF:update_migration_settings:Function]
# [DEF:get_resource_mappings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch synchronized resource mappings with optional filters and pagination for migration mappings view.
# @PRE: skip>=0, 1<=limit<=500, DB session is active, requester has READ permission.
# @POST: Returns {"items": [...], "total": int} where items reflect applied filters and pagination.
@@ -255,7 +255,7 @@ async def get_resource_mappings(
# [/DEF:get_resource_mappings:Function]
# [DEF:trigger_sync_now:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Trigger immediate ID synchronization for every configured environment.
# @PRE: At least one environment is configured and requester has EXECUTE permission.
# @POST: Returns sync summary with synced/failed counts after attempting all environments.

View File

@@ -1,32 +1,32 @@
# [DEF:PluginsRouter:Module]
# @TIER: STANDARD
# @SEMANTICS: api, router, plugins, list
# @PURPOSE: Defines the FastAPI router for plugin-related endpoints, allowing clients to list available plugins.
# @LAYER: UI (API)
# @RELATION: Depends on the PluginLoader and PluginConfig. It is included by the main app.
from typing import List
from fastapi import APIRouter, Depends
from ...core.plugin_base import PluginConfig
from ...dependencies import get_plugin_loader, has_permission
from ...core.logger import belief_scope
router = APIRouter()
# [DEF:list_plugins:Function]
# @PURPOSE: Retrieve a list of all available plugins.
# @PRE: plugin_loader is injected via Depends.
# @POST: Returns a list of PluginConfig objects.
# @RETURN: List[PluginConfig] - List of registered plugins.
@router.get("", response_model=List[PluginConfig])
async def list_plugins(
plugin_loader = Depends(get_plugin_loader),
_ = Depends(has_permission("plugins", "READ"))
):
with belief_scope("list_plugins"):
"""
Retrieve a list of all available plugins.
"""
return plugin_loader.get_all_plugin_configs()
# [/DEF:list_plugins:Function]
# [DEF:PluginsRouter:Module]
# @COMPLEXITY: 3
# @SEMANTICS: api, router, plugins, list
# @PURPOSE: Defines the FastAPI router for plugin-related endpoints, allowing clients to list available plugins.
# @LAYER: UI (API)
# @RELATION: Depends on the PluginLoader and PluginConfig. It is included by the main app.
from typing import List
from fastapi import APIRouter, Depends
from ...core.plugin_base import PluginConfig
from ...dependencies import get_plugin_loader, has_permission
from ...core.logger import belief_scope
router = APIRouter()
# [DEF:list_plugins:Function]
# @PURPOSE: Retrieve a list of all available plugins.
# @PRE: plugin_loader is injected via Depends.
# @POST: Returns a list of PluginConfig objects.
# @RETURN: List[PluginConfig] - List of registered plugins.
@router.get("", response_model=List[PluginConfig])
async def list_plugins(
plugin_loader = Depends(get_plugin_loader),
_ = Depends(has_permission("plugins", "READ"))
):
with belief_scope("list_plugins"):
"""
Retrieve a list of all available plugins.
"""
return plugin_loader.get_all_plugin_configs()
# [/DEF:list_plugins:Function]
# [/DEF:PluginsRouter:Module]

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.api.routes.profile:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: api, profile, preferences, self-service, account-lookup
# @PURPOSE: Exposes self-scoped profile preference endpoints and environment-based Superset account lookup.
# @LAYER: API

View File

@@ -1,5 +1,5 @@
# [DEF:ReportsRouter:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: api, reports, list, detail, pagination, filters
# @PURPOSE: FastAPI router for unified task report list and detail retrieval endpoints.
# @LAYER: UI (API)
@@ -29,7 +29,7 @@ router = APIRouter(prefix="/api/reports", tags=["Reports"])
# [DEF:_parse_csv_enum_list:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Parse comma-separated query value into enum list.
# @PRE: raw may be None/empty or comma-separated values.
# @POST: Returns enum list or raises HTTP 400 with deterministic machine-readable payload.
@@ -64,7 +64,7 @@ def _parse_csv_enum_list(raw: Optional[str], enum_cls, field_name: str) -> List:
# [DEF:list_reports:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return paginated unified reports list.
# @PRE: authenticated/authorized request and validated query params.
# @POST: returns {items,total,page,page_size,has_next,applied_filters}.
@@ -131,7 +131,7 @@ async def list_reports(
# [DEF:get_report_detail:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return one normalized report detail with diagnostics and next actions.
# @PRE: authenticated/authorized request and existing report_id.
# @POST: returns normalized detail envelope or 404 when report is not found.

View File

@@ -1,6 +1,6 @@
# [DEF:SettingsRouter:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: settings, api, router, fastapi
# @PURPOSE: Provides API endpoints for managing application settings and Superset environments.
# @LAYER: UI (API)
@@ -29,7 +29,7 @@ from sqlalchemy.orm import Session
# [/SECTION]
# [DEF:LoggingConfigResponse:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Response model for logging configuration with current task log level.
# @SEMANTICS: logging, config, response
class LoggingConfigResponse(BaseModel):
@@ -42,7 +42,7 @@ router = APIRouter()
# [DEF:_normalize_superset_env_url:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Canonicalize Superset environment URL to base host/path without trailing /api/v1.
# @PRE: raw_url can be empty.
# @POST: Returns normalized base URL.
@@ -55,7 +55,7 @@ def _normalize_superset_env_url(raw_url: str) -> str:
# [DEF:_validate_superset_connection_fast:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Run lightweight Superset connectivity validation without full pagination scan.
# @PRE: env contains valid URL and credentials.
# @POST: Raises on auth/API failures; returns None on success.
@@ -74,7 +74,7 @@ def _validate_superset_connection_fast(env: Environment) -> None:
# [/DEF:_validate_superset_connection_fast:Function]
# [DEF:get_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves all application settings.
# @PRE: Config manager is available.
# @POST: Returns masked AppConfig.
@@ -96,7 +96,7 @@ async def get_settings(
# [/DEF:get_settings:Function]
# [DEF:update_global_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates global application settings.
# @PRE: New settings are provided.
# @POST: Global settings are updated.
@@ -116,7 +116,7 @@ async def update_global_settings(
# [/DEF:update_global_settings:Function]
# [DEF:get_storage_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves storage-specific settings.
# @RETURN: StorageConfig - The storage configuration.
@router.get("/storage", response_model=StorageConfig)
@@ -129,7 +129,7 @@ async def get_storage_settings(
# [/DEF:get_storage_settings:Function]
# [DEF:update_storage_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates storage-specific settings.
# @PARAM: storage (StorageConfig) - The new storage settings.
# @POST: Storage settings are updated and saved.
@@ -152,7 +152,7 @@ async def update_storage_settings(
# [/DEF:update_storage_settings:Function]
# [DEF:get_environments:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lists all configured Superset environments.
# @PRE: Config manager is available.
# @POST: Returns list of environments.
@@ -172,7 +172,7 @@ async def get_environments(
# [/DEF:get_environments:Function]
# [DEF:add_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Adds a new Superset environment.
# @PRE: Environment data is valid and reachable.
# @POST: Environment is added to config.
@@ -200,7 +200,7 @@ async def add_environment(
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates an existing Superset environment.
# @PRE: ID and valid environment data are provided.
# @POST: Environment is updated in config.
@@ -238,7 +238,7 @@ async def update_environment(
# [/DEF:update_environment:Function]
# [DEF:delete_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Deletes a Superset environment.
# @PRE: ID is provided.
# @POST: Environment is removed from config.
@@ -255,7 +255,7 @@ async def delete_environment(
# [/DEF:delete_environment:Function]
# [DEF:test_environment_connection:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Tests the connection to a Superset environment.
# @PRE: ID is provided.
# @POST: Returns success or error status.
@@ -285,7 +285,7 @@ async def test_environment_connection(
# [/DEF:test_environment_connection:Function]
# [DEF:get_logging_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves current logging configuration.
# @PRE: Config manager is available.
# @POST: Returns logging configuration.
@@ -305,7 +305,7 @@ async def get_logging_config(
# [/DEF:get_logging_config:Function]
# [DEF:update_logging_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates logging configuration.
# @PRE: New logging config is provided.
# @POST: Logging configuration is updated and saved.
@@ -333,7 +333,7 @@ async def update_logging_config(
# [/DEF:update_logging_config:Function]
# [DEF:ConsolidatedSettingsResponse:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Response model for consolidated application settings.
class ConsolidatedSettingsResponse(BaseModel):
environments: List[dict]
@@ -346,7 +346,7 @@ class ConsolidatedSettingsResponse(BaseModel):
# [/DEF:ConsolidatedSettingsResponse:Class]
# [DEF:get_consolidated_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves all settings categories in a single call
# @PRE: Config manager is available.
# @POST: Returns all consolidated settings.
@@ -400,7 +400,7 @@ async def get_consolidated_settings(
# [/DEF:get_consolidated_settings:Function]
# [DEF:update_consolidated_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Bulk update application settings from the consolidated view.
# @PRE: User has admin permissions, config is valid.
# @POST: Settings are updated and saved via ConfigManager.
@@ -446,7 +446,7 @@ async def update_consolidated_settings(
# [/DEF:update_consolidated_settings:Function]
# [DEF:get_validation_policies:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lists all validation policies.
# @RETURN: List[ValidationPolicyResponse] - List of policies.
@router.get("/automation/policies", response_model=List[ValidationPolicyResponse])
@@ -459,7 +459,7 @@ async def get_validation_policies(
# [/DEF:get_validation_policies:Function]
# [DEF:create_validation_policy:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Creates a new validation policy.
# @PARAM: policy (ValidationPolicyCreate) - The policy data.
# @RETURN: ValidationPolicyResponse - The created policy.
@@ -478,7 +478,7 @@ async def create_validation_policy(
# [/DEF:create_validation_policy:Function]
# [DEF:update_validation_policy:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Updates an existing validation policy.
# @PARAM: id (str) - The ID of the policy to update.
# @PARAM: policy (ValidationPolicyUpdate) - The updated policy data.
@@ -505,7 +505,7 @@ async def update_validation_policy(
# [/DEF:update_validation_policy:Function]
# [DEF:delete_validation_policy:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Deletes a validation policy.
# @PARAM: id (str) - The ID of the policy to delete.
@router.delete("/automation/policies/{id}")

View File

@@ -1,6 +1,6 @@
# [DEF:storage_routes:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: storage, files, upload, download, backup, repository
# @PURPOSE: API endpoints for file storage management (backups and repositories).
# @LAYER: API
@@ -22,7 +22,7 @@ from ...core.logger import belief_scope
router = APIRouter(tags=["storage"])
# [DEF:list_files:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: List all files and directories in the storage system.
#
# @PRE: None.
@@ -49,7 +49,7 @@ async def list_files(
# [/DEF:list_files:Function]
# [DEF:upload_file:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Upload a file to the storage system.
#
# @PRE: category must be a valid FileCategory.
@@ -83,7 +83,7 @@ async def upload_file(
# [/DEF:upload_file:Function]
# [DEF:delete_file:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Delete a specific file or directory.
#
# @PRE: category must be a valid FileCategory.
@@ -116,7 +116,7 @@ async def delete_file(
# [/DEF:delete_file:Function]
# [DEF:download_file:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieve a file for download.
#
# @PRE: category must be a valid FileCategory.
@@ -149,7 +149,7 @@ async def download_file(
# [/DEF:download_file:Function]
# [DEF:get_file_by_path:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieve a file by validated absolute/relative path under storage root.
#
# @PRE: path must resolve under configured storage root.

View File

@@ -1,5 +1,5 @@
# [DEF:TasksRouter:Module]
# @TIER: STANDARD
# @COMPLEXITY: 4
# @SEMANTICS: api, router, tasks, create, list, get, logs
# @PURPOSE: Defines the FastAPI router for task-related endpoints, allowing clients to create, list, and get the status of tasks.
# @LAYER: UI (API)
@@ -43,7 +43,7 @@ class ResumeTaskRequest(BaseModel):
passwords: Dict[str, str]
# [DEF:create_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Create and start a new task for a given plugin.
# @PARAM: request (CreateTaskRequest) - The request body containing plugin_id and params.
# @PARAM: task_manager (TaskManager) - The task manager instance.
@@ -107,7 +107,7 @@ async def create_task(
# [/DEF:create_task:Function]
# [DEF:list_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieve a list of tasks with pagination and optional status filter.
# @PARAM: limit (int) - Maximum number of tasks to return.
# @PARAM: offset (int) - Number of tasks to skip.
@@ -147,7 +147,7 @@ async def list_tasks(
# [/DEF:list_tasks:Function]
# [DEF:get_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieve the details of a specific task.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
@@ -168,7 +168,7 @@ async def get_task(
# [/DEF:get_task:Function]
# [DEF:get_task_logs:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve logs for a specific task with optional filtering.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: level (Optional[str]) - Filter by log level (DEBUG, INFO, WARNING, ERROR).
@@ -214,7 +214,7 @@ async def get_task_logs(
# [/DEF:get_task_logs:Function]
# [DEF:get_task_log_stats:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get statistics about logs for a task (counts by level and source).
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
@@ -235,7 +235,7 @@ async def get_task_log_stats(
# [/DEF:get_task_log_stats:Function]
# [DEF:get_task_log_sources:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get unique sources for a task's logs.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
@@ -256,7 +256,7 @@ async def get_task_log_sources(
# [/DEF:get_task_log_sources:Function]
# [DEF:resolve_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve a task that is awaiting mapping.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: request (ResolveTaskRequest) - The resolution parameters.
@@ -280,7 +280,7 @@ async def resolve_task(
# [/DEF:resolve_task:Function]
# [DEF:resume_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resume a task that is awaiting input (e.g., passwords).
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: request (ResumeTaskRequest) - The input (passwords).
@@ -304,7 +304,7 @@ async def resume_task(
# [/DEF:resume_task:Function]
# [DEF:clear_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Clear tasks matching the status filter.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @PARAM: task_manager (TaskManager) - The task manager instance.

View File

@@ -1,5 +1,5 @@
# [DEF:AppModule:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: app, main, entrypoint, fastapi
# @PURPOSE: The main entry point for the FastAPI application. It initializes the app, configures CORS, sets up dependencies, includes API routers, and defines the WebSocket endpoint for log streaming.
# @LAYER: UI (API)
@@ -32,7 +32,7 @@ from .api.routes import plugins, tasks, settings, environments, mappings, migrat
from .api import auth
# [DEF:App:Global]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: app, fastapi, instance
# @PURPOSE: The global FastAPI application instance.
app = FastAPI(
@@ -43,7 +43,7 @@ app = FastAPI(
# [/DEF:App:Global]
# [DEF:startup_event:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Handles application startup tasks, such as starting the scheduler.
# @PRE: None.
# @POST: Scheduler is started.
@@ -57,7 +57,7 @@ async def startup_event():
# [/DEF:startup_event:Function]
# [DEF:shutdown_event:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Handles application shutdown tasks, such as stopping the scheduler.
# @PRE: None.
# @POST: Scheduler is stopped.
@@ -84,7 +84,7 @@ app.add_middleware(
# [DEF:network_error_handler:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Global exception handler for NetworkError.
# @PRE: request is a FastAPI Request object.
# @POST: Returns 503 HTTP Exception.
@@ -101,7 +101,7 @@ async def network_error_handler(request: Request, exc: NetworkError):
# [/DEF:network_error_handler:Function]
# [DEF:log_requests:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Middleware to log incoming HTTP requests and their response status.
# @PRE: request is a FastAPI Request object.
# @POST: Logs request and response details.
@@ -153,14 +153,14 @@ app.include_router(health.router)
# [DEF:api.include_routers:Action]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Registers all API routers with the FastAPI application.
# @LAYER: API
# @SEMANTICS: routes, registration, api
# [/DEF:api.include_routers:Action]
# [DEF:websocket_endpoint:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Provides a WebSocket endpoint for real-time log streaming of a task with server-side filtering.
# @PRE: task_id must be a valid task ID.
# @POST: WebSocket connection is managed and logs are streamed until disconnect.
@@ -280,7 +280,7 @@ async def websocket_endpoint(
# [/DEF:websocket_endpoint:Function]
# [DEF:StaticFiles:Mount]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: static, frontend, spa
# @PURPOSE: Mounts the frontend build directory to serve static assets.
frontend_path = project_root / "frontend" / "build"
@@ -288,7 +288,7 @@ if frontend_path.exists():
app.mount("/_app", StaticFiles(directory=str(frontend_path / "_app")), name="static")
# [DEF:serve_spa:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Serves the SPA frontend for any path not matched by API routes.
# @PRE: frontend_path exists.
# @POST: Returns the requested file or index.html.
@@ -309,7 +309,7 @@ if frontend_path.exists():
# [/DEF:serve_spa:Function]
else:
# [DEF:read_root:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: A simple root endpoint to confirm that the API is running when frontend is missing.
# @PRE: None.
# @POST: Returns a JSON message indicating API status.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.__tests__.test_config_manager_compat:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: config-manager, compatibility, payload, tests
# @PURPOSE: Verifies ConfigManager compatibility wrappers preserve legacy payload sections.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.__tests__.test_superset_profile_lookup:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, superset, profile, lookup, fallback, sorting
# @PURPOSE: Verifies Superset profile lookup adapter payload normalization and fallback error precedence.
# @LAYER: Domain

View File

@@ -3,7 +3,7 @@ from datetime import time, date, datetime, timedelta
from src.core.scheduler import ThrottledSchedulerConfigurator
# [DEF:test_throttled_scheduler:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for ThrottledSchedulerConfigurator distribution logic.
def test_calculate_schedule_even_distribution():

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.async_superset_client:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: superset, async, client, httpx, dashboards, datasets
# @PURPOSE: Async Superset client for dashboard hot-path requests without blocking FastAPI event loop.
# @LAYER: Core
@@ -26,14 +26,14 @@ from .utils.async_network import AsyncAPIClient
# [DEF:backend.src.core.async_superset_client.AsyncSupersetClient:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Async sibling of SupersetClient for dashboard read paths.
# @RELATION: [INHERITS] ->[backend.src.core.superset_client.SupersetClient]
# @RELATION: [DEPENDS_ON] ->[backend.src.core.utils.async_network.AsyncAPIClient]
# @RELATION: [CALLS] ->[backend.src.core.utils.async_network.AsyncAPIClient.request]
class AsyncSupersetClient(SupersetClient):
# [DEF:backend.src.core.async_superset_client.AsyncSupersetClient.__init__:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Initialize async Superset client with AsyncAPIClient transport.
# @PRE: env is valid Environment instance.
# @POST: Client uses async network transport and inherited projection helpers.
@@ -55,7 +55,7 @@ class AsyncSupersetClient(SupersetClient):
# [/DEF:backend.src.core.async_superset_client.AsyncSupersetClient.__init__:Function]
# [DEF:backend.src.core.async_superset_client.AsyncSupersetClient.aclose:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Close async transport resources.
# @POST: Underlying AsyncAPIClient is closed.
# @SIDE_EFFECT: Closes network sockets.
@@ -64,7 +64,7 @@ class AsyncSupersetClient(SupersetClient):
# [/DEF:backend.src.core.async_superset_client.AsyncSupersetClient.aclose:Function]
# [DEF:backend.src.core.async_superset_client.AsyncSupersetClient.get_dashboards_page_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch one dashboards page asynchronously.
# @POST: Returns total count and page result list.
# @DATA_CONTRACT: Input[query: Optional[Dict]] -> Output[Tuple[int, List[Dict]]]
@@ -99,7 +99,7 @@ class AsyncSupersetClient(SupersetClient):
# [/DEF:get_dashboards_page_async:Function]
# [DEF:get_dashboard_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch one dashboard payload asynchronously.
# @POST: Returns raw dashboard payload from Superset API.
# @DATA_CONTRACT: Input[dashboard_id: int] -> Output[Dict]
@@ -110,7 +110,7 @@ class AsyncSupersetClient(SupersetClient):
# [/DEF:get_dashboard_async:Function]
# [DEF:get_chart_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch one chart payload asynchronously.
# @POST: Returns raw chart payload from Superset API.
# @DATA_CONTRACT: Input[chart_id: int] -> Output[Dict]
@@ -121,7 +121,7 @@ class AsyncSupersetClient(SupersetClient):
# [/DEF:get_chart_async:Function]
# [DEF:get_dashboard_detail_async:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch dashboard detail asynchronously with concurrent charts/datasets requests.
# @POST: Returns dashboard detail payload for overview page.
# @DATA_CONTRACT: Input[dashboard_id: int] -> Output[Dict]

View File

@@ -1,5 +1,5 @@
# [DEF:test_auth:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for authentication module
# @LAYER: Domain
# @RELATION: VERIFIES -> src.core.auth

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.auth.jwt:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: jwt, token, session, auth
# @PURPOSE: JWT token generation and validation logic.
# @LAYER: Core

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.auth.logger:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: auth, logger, audit, security
# @PURPOSE: Audit logging for security-related events.
# @LAYER: Core

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.auth.repository:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: auth, repository, database, user, role, permission
# @PURPOSE: Data access layer for authentication and user preference entities.
# @LAYER: Domain
@@ -25,12 +25,12 @@ from ..logger import belief_scope, logger
# [/SECTION]
# [DEF:AuthRepository:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Encapsulates database operations for authentication-related entities.
# @RELATION: [DEPENDS_ON] ->[sqlalchemy.orm.Session]
class AuthRepository:
# [DEF:__init__:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Bind repository instance to an existing SQLAlchemy session.
# @PRE: db is an initialized sqlalchemy.orm.Session instance.
# @POST: self.db points to the provided session and is used by all repository methods.
@@ -48,7 +48,7 @@ class AuthRepository:
# [/DEF:__init__:Function]
# [DEF:get_user_by_username:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve a user entity by unique username.
# @PRE: username is a non-empty str and self.db is a valid open Session.
# @POST: Returns matching User entity when present, otherwise None.
@@ -75,7 +75,7 @@ class AuthRepository:
# [/DEF:get_user_by_username:Function]
# [DEF:get_user_by_id:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve a user entity by identifier.
# @PRE: user_id is a non-empty str and self.db is a valid open Session.
# @POST: Returns matching User entity when present, otherwise None.
@@ -97,7 +97,7 @@ class AuthRepository:
# [/DEF:get_user_by_id:Function]
# [DEF:get_role_by_name:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve a role entity by role name.
# @PRE: name is a non-empty str and self.db is a valid open Session.
# @POST: Returns matching Role entity when present, otherwise None.
@@ -109,7 +109,7 @@ class AuthRepository:
# [/DEF:get_role_by_name:Function]
# [DEF:update_last_login:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Update last_login timestamp for the provided user entity.
# @PRE: user is a managed User instance and self.db is a valid open Session.
# @POST: user.last_login is set to current UTC timestamp and transaction is committed.
@@ -129,7 +129,7 @@ class AuthRepository:
# [/DEF:update_last_login:Function]
# [DEF:get_role_by_id:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve a role entity by identifier.
# @PRE: role_id is a non-empty str and self.db is a valid open Session.
# @POST: Returns matching Role entity when present, otherwise None.
@@ -141,7 +141,7 @@ class AuthRepository:
# [/DEF:get_role_by_id:Function]
# [DEF:get_permission_by_id:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve a permission entity by identifier.
# @PRE: perm_id is a non-empty str and self.db is a valid open Session.
# @POST: Returns matching Permission entity when present, otherwise None.
@@ -153,7 +153,7 @@ class AuthRepository:
# [/DEF:get_permission_by_id:Function]
# [DEF:get_permission_by_resource_action:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve a permission entity by resource and action pair.
# @PRE: resource and action are non-empty str values; self.db is a valid open Session.
# @POST: Returns matching Permission entity when present, otherwise None.
@@ -168,7 +168,7 @@ class AuthRepository:
# [/DEF:get_permission_by_resource_action:Function]
# [DEF:get_user_dashboard_preference:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Retrieve dashboard preference entity owned by specified user.
# @PRE: user_id is a non-empty str and self.db is a valid open Session.
# @POST: Returns matching UserDashboardPreference entity when present, otherwise None.
@@ -184,7 +184,7 @@ class AuthRepository:
# [/DEF:get_user_dashboard_preference:Function]
# [DEF:save_user_dashboard_preference:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Persist dashboard preference entity and return refreshed persistent row.
# @PRE: preference is a valid UserDashboardPreference entity and self.db is a valid open Session.
# @POST: preference is committed to DB, refreshed from DB state, and returned.
@@ -207,7 +207,7 @@ class AuthRepository:
# [/DEF:save_user_dashboard_preference:Function]
# [DEF:list_permissions:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: List all permission entities available in storage.
# @PRE: self.db is a valid open Session.
# @POST: Returns list containing all Permission entities visible to the session.

View File

@@ -1,6 +1,6 @@
# [DEF:ConfigManagerModule:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: config, manager, persistence, migration, postgresql
# @PURPOSE: Manages application configuration persistence in DB with one-time migration from legacy JSON.
# @LAYER: Domain
@@ -29,7 +29,7 @@ from .logger import logger, configure_logger, belief_scope
# [DEF:ConfigManager:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Handles application configuration load, validation, mutation, and persistence lifecycle.
class ConfigManager:
# [DEF:__init__:Function]
@@ -60,7 +60,7 @@ class ConfigManager:
# [/DEF:__init__:Function]
# [DEF:_default_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Build default application configuration fallback.
# @PRE: None.
# @POST: Returns valid AppConfig with empty environments and default storage settings.
@@ -75,7 +75,7 @@ class ConfigManager:
# [/DEF:_default_config:Function]
# [DEF:_sync_raw_payload_from_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Merge typed AppConfig state into raw payload while preserving unsupported legacy sections.
# @PRE: self.config is initialized as AppConfig.
# @POST: self.raw_payload contains AppConfig fields refreshed from self.config.
@@ -90,7 +90,7 @@ class ConfigManager:
# [/DEF:_sync_raw_payload_from_config:Function]
# [DEF:_load_from_legacy_file:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Load legacy JSON configuration for migration fallback path.
# @PRE: self.config_path is initialized.
# @POST: Returns AppConfig from file payload or safe default.
@@ -116,7 +116,7 @@ class ConfigManager:
# [/DEF:_load_from_legacy_file:Function]
# [DEF:_get_record:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve global configuration record from DB.
# @PRE: session is an active SQLAlchemy Session.
# @POST: Returns record when present, otherwise None.
@@ -128,7 +128,7 @@ class ConfigManager:
# [/DEF:_get_record:Function]
# [DEF:_load_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Load configuration from DB or perform one-time migration from legacy JSON.
# @PRE: SessionLocal factory is available and AppConfigRecord schema is accessible.
# @POST: Returns valid AppConfig and closes opened DB session.
@@ -160,7 +160,7 @@ class ConfigManager:
# [/DEF:_load_config:Function]
# [DEF:_save_config_to_db:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist provided AppConfig into the global DB configuration record.
# @PRE: config is AppConfig; session is either None or an active Session.
# @POST: Global DB record payload equals config.model_dump() when commit succeeds.
@@ -195,7 +195,7 @@ class ConfigManager:
# [/DEF:_save_config_to_db:Function]
# [DEF:save:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist current in-memory configuration state.
# @PRE: self.config is initialized.
# @POST: Current self.config is written to DB global record.
@@ -207,7 +207,7 @@ class ConfigManager:
# [/DEF:save:Function]
# [DEF:get_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return current in-memory configuration snapshot.
# @PRE: self.config is initialized.
# @POST: Returns AppConfig reference stored in manager.
@@ -219,7 +219,7 @@ class ConfigManager:
# [/DEF:get_config:Function]
# [DEF:get_payload:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return full persisted payload including sections outside typed AppConfig schema.
# @PRE: Manager state is initialized.
# @POST: Returns dict payload with current AppConfig fields synchronized.
@@ -231,7 +231,7 @@ class ConfigManager:
# [/DEF:get_payload:Function]
# [DEF:save_config:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist configuration provided either as typed AppConfig or raw payload dict.
# @PRE: config is AppConfig or dict compatible with AppConfig core schema.
# @POST: self.config and self.raw_payload are synchronized and persisted to DB.
@@ -253,7 +253,7 @@ class ConfigManager:
# [/DEF:save_config:Function]
# [DEF:update_global_settings:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Replace global settings and persist the resulting configuration.
# @PRE: settings is GlobalSettings.
# @POST: self.config.settings equals provided settings and DB state is updated.
@@ -272,7 +272,7 @@ class ConfigManager:
# [/DEF:update_global_settings:Function]
# [DEF:validate_path:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Validate that path exists and is writable, creating it when absent.
# @PRE: path is a string path candidate.
# @POST: Returns (True, msg) for writable path, else (False, reason).
@@ -294,7 +294,7 @@ class ConfigManager:
# [/DEF:validate_path:Function]
# [DEF:get_environments:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return all configured environments.
# @PRE: self.config is initialized.
# @POST: Returns list of Environment models from current configuration.
@@ -306,7 +306,7 @@ class ConfigManager:
# [/DEF:get_environments:Function]
# [DEF:has_environments:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Check whether at least one environment exists in configuration.
# @PRE: self.config is initialized.
# @POST: Returns True iff environment list length is greater than zero.
@@ -318,7 +318,7 @@ class ConfigManager:
# [/DEF:has_environments:Function]
# [DEF:get_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve a configured environment by identifier.
# @PRE: env_id is string identifier.
# @POST: Returns matching Environment when found; otherwise None.
@@ -333,7 +333,7 @@ class ConfigManager:
# [/DEF:get_environment:Function]
# [DEF:add_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Upsert environment by id into configuration and persist.
# @PRE: env is Environment.
# @POST: Configuration contains provided env id with new payload persisted.
@@ -352,7 +352,7 @@ class ConfigManager:
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Update existing environment by id and preserve masked password placeholder behavior.
# @PRE: env_id is non-empty string and updated_env is Environment.
# @POST: Returns True and persists update when target exists; else returns False.
@@ -382,7 +382,7 @@ class ConfigManager:
# [/DEF:update_environment:Function]
# [DEF:delete_environment:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Delete environment by id and persist when deletion occurs.
# @PRE: env_id is non-empty string.
# @POST: Environment is removed when present; otherwise configuration is unchanged.

View File

@@ -1,93 +1,93 @@
# [DEF:ConfigModels:Module]
# @TIER: STANDARD
# @SEMANTICS: config, models, pydantic
# @PURPOSE: Defines the data models for application configuration using Pydantic.
# @LAYER: Core
# @RELATION: READS_FROM -> app_configurations (database)
# @RELATION: USED_BY -> ConfigManager
from pydantic import BaseModel, Field
from typing import List, Optional
from ..models.storage import StorageConfig
from ..services.llm_prompt_templates import (
DEFAULT_LLM_ASSISTANT_SETTINGS,
DEFAULT_LLM_PROMPTS,
DEFAULT_LLM_PROVIDER_BINDINGS,
)
# [DEF:Schedule:DataClass]
# @PURPOSE: Represents a backup schedule configuration.
class Schedule(BaseModel):
enabled: bool = False
cron_expression: str = "0 0 * * *" # Default: daily at midnight
# [/DEF:Schedule:DataClass]
# [DEF:Environment:DataClass]
# @PURPOSE: Represents a Superset environment configuration.
class Environment(BaseModel):
id: str
name: str
url: str
username: str
password: str # Will be masked in UI
stage: str = Field(default="DEV", pattern="^(DEV|PREPROD|PROD)$")
verify_ssl: bool = True
timeout: int = 30
is_default: bool = False
is_production: bool = False
backup_schedule: Schedule = Field(default_factory=Schedule)
# [/DEF:Environment:DataClass]
# [DEF:LoggingConfig:DataClass]
# @PURPOSE: Defines the configuration for the application's logging system.
class LoggingConfig(BaseModel):
level: str = "INFO"
task_log_level: str = "INFO" # Minimum level for task-specific logs (DEBUG, INFO, WARNING, ERROR)
file_path: Optional[str] = None
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
# [/DEF:LoggingConfig:DataClass]
# [DEF:CleanReleaseConfig:DataClass]
# @PURPOSE: Configuration for clean release compliance subsystem.
class CleanReleaseConfig(BaseModel):
active_policy_id: Optional[str] = None
active_registry_id: Optional[str] = None
# [/DEF:CleanReleaseConfig:DataClass]
# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
class GlobalSettings(BaseModel):
storage: StorageConfig = Field(default_factory=StorageConfig)
clean_release: CleanReleaseConfig = Field(default_factory=CleanReleaseConfig)
default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig)
connections: List[dict] = []
llm: dict = Field(
default_factory=lambda: {
"providers": [],
"default_provider": "",
"prompts": dict(DEFAULT_LLM_PROMPTS),
"provider_bindings": dict(DEFAULT_LLM_PROVIDER_BINDINGS),
**dict(DEFAULT_LLM_ASSISTANT_SETTINGS),
}
)
# Task retention settings
task_retention_days: int = 30
task_retention_limit: int = 100
pagination_limit: int = 10
# Migration sync settings
migration_sync_cron: str = "0 2 * * *"
# [/DEF:GlobalSettings:DataClass]
# [DEF:AppConfig:DataClass]
# @PURPOSE: The root configuration model containing all application settings.
class AppConfig(BaseModel):
environments: List[Environment] = []
settings: GlobalSettings
# [/DEF:AppConfig:DataClass]
# [/DEF:ConfigModels:Module]
# [DEF:backend.src.core.config_models:Module]
# @COMPLEXITY: 3
# @SEMANTICS: config, models, pydantic
# @PURPOSE: Defines the data models for application configuration using Pydantic.
# @LAYER: Core
# @RELATION: READS_FROM -> app_configurations (database)
# @RELATION: USED_BY -> ConfigManager
from pydantic import BaseModel, Field
from typing import List, Optional
from ..models.storage import StorageConfig
from ..services.llm_prompt_templates import (
DEFAULT_LLM_ASSISTANT_SETTINGS,
DEFAULT_LLM_PROMPTS,
DEFAULT_LLM_PROVIDER_BINDINGS,
)
# [DEF:Schedule:DataClass]
# @PURPOSE: Represents a backup schedule configuration.
class Schedule(BaseModel):
enabled: bool = False
cron_expression: str = "0 0 * * *" # Default: daily at midnight
# [/DEF:Schedule:DataClass]
# [DEF:backend.src.core.config_models.Environment:DataClass]
# @PURPOSE: Represents a Superset environment configuration.
class Environment(BaseModel):
id: str
name: str
url: str
username: str
password: str # Will be masked in UI
stage: str = Field(default="DEV", pattern="^(DEV|PREPROD|PROD)$")
verify_ssl: bool = True
timeout: int = 30
is_default: bool = False
is_production: bool = False
backup_schedule: Schedule = Field(default_factory=Schedule)
# [/DEF:backend.src.core.config_models.Environment:DataClass]
# [DEF:LoggingConfig:DataClass]
# @PURPOSE: Defines the configuration for the application's logging system.
class LoggingConfig(BaseModel):
level: str = "INFO"
task_log_level: str = "INFO" # Minimum level for task-specific logs (DEBUG, INFO, WARNING, ERROR)
file_path: Optional[str] = None
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
# [/DEF:LoggingConfig:DataClass]
# [DEF:CleanReleaseConfig:DataClass]
# @PURPOSE: Configuration for clean release compliance subsystem.
class CleanReleaseConfig(BaseModel):
active_policy_id: Optional[str] = None
active_registry_id: Optional[str] = None
# [/DEF:CleanReleaseConfig:DataClass]
# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
class GlobalSettings(BaseModel):
storage: StorageConfig = Field(default_factory=StorageConfig)
clean_release: CleanReleaseConfig = Field(default_factory=CleanReleaseConfig)
default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig)
connections: List[dict] = []
llm: dict = Field(
default_factory=lambda: {
"providers": [],
"default_provider": "",
"prompts": dict(DEFAULT_LLM_PROMPTS),
"provider_bindings": dict(DEFAULT_LLM_PROVIDER_BINDINGS),
**dict(DEFAULT_LLM_ASSISTANT_SETTINGS),
}
)
# Task retention settings
task_retention_days: int = 30
task_retention_limit: int = 100
pagination_limit: int = 10
# Migration sync settings
migration_sync_cron: str = "0 2 * * *"
# [/DEF:GlobalSettings:DataClass]
# [DEF:AppConfig:DataClass]
# @PURPOSE: The root configuration model containing all application settings.
class AppConfig(BaseModel):
environments: List[Environment] = []
settings: GlobalSettings
# [/DEF:AppConfig:DataClass]
# [/DEF:ConfigModels:Module]

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.database:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: database, postgresql, sqlalchemy, session, persistence
# @PURPOSE: Configures database connection and session management (PostgreSQL-first).
# @LAYER: Core
@@ -31,13 +31,13 @@ from pathlib import Path
# [/SECTION]
# [DEF:BASE_DIR:Variable]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Base directory for the backend.
BASE_DIR = Path(__file__).resolve().parent.parent.parent
# [/DEF:BASE_DIR:Variable]
# [DEF:DATABASE_URL:Constant]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: URL for the main application database.
DEFAULT_POSTGRES_URL = os.getenv(
"POSTGRES_URL",
@@ -47,20 +47,20 @@ DATABASE_URL = os.getenv("DATABASE_URL", DEFAULT_POSTGRES_URL)
# [/DEF:DATABASE_URL:Constant]
# [DEF:TASKS_DATABASE_URL:Constant]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: URL for the tasks execution database.
# Defaults to DATABASE_URL to keep task logs in the same PostgreSQL instance.
TASKS_DATABASE_URL = os.getenv("TASKS_DATABASE_URL", DATABASE_URL)
# [/DEF:TASKS_DATABASE_URL:Constant]
# [DEF:AUTH_DATABASE_URL:Constant]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: URL for the authentication database.
AUTH_DATABASE_URL = os.getenv("AUTH_DATABASE_URL", auth_config.AUTH_DATABASE_URL)
# [/DEF:AUTH_DATABASE_URL:Constant]
# [DEF:engine:Variable]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: SQLAlchemy engine for mappings database.
# @SIDE_EFFECT: Creates database engine and manages connection pool.
def _build_engine(db_url: str):
@@ -73,40 +73,40 @@ engine = _build_engine(DATABASE_URL)
# [/DEF:engine:Variable]
# [DEF:tasks_engine:Variable]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: SQLAlchemy engine for tasks database.
tasks_engine = _build_engine(TASKS_DATABASE_URL)
# [/DEF:tasks_engine:Variable]
# [DEF:auth_engine:Variable]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: SQLAlchemy engine for authentication database.
auth_engine = _build_engine(AUTH_DATABASE_URL)
# [/DEF:auth_engine:Variable]
# [DEF:SessionLocal:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: A session factory for the main mappings database.
# @PRE: engine is initialized.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# [/DEF:SessionLocal:Class]
# [DEF:TasksSessionLocal:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: A session factory for the tasks execution database.
# @PRE: tasks_engine is initialized.
TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_engine)
# [/DEF:TasksSessionLocal:Class]
# [DEF:AuthSessionLocal:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: A session factory for the authentication database.
# @PRE: auth_engine is initialized.
AuthSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=auth_engine)
# [/DEF:AuthSessionLocal:Class]
# [DEF:_ensure_user_dashboard_preferences_columns:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Applies additive schema upgrades for user_dashboard_preferences table.
# @PRE: bind_engine points to application database where profile table is stored.
# @POST: Missing columns are added without data loss.
@@ -173,7 +173,7 @@ def _ensure_user_dashboard_preferences_columns(bind_engine):
# [DEF:_ensure_user_dashboard_preferences_health_columns:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Applies additive schema upgrades for user_dashboard_preferences table (health fields).
def _ensure_user_dashboard_preferences_health_columns(bind_engine):
with belief_scope("_ensure_user_dashboard_preferences_health_columns"):
@@ -217,7 +217,7 @@ def _ensure_user_dashboard_preferences_health_columns(bind_engine):
# [DEF:_ensure_llm_validation_results_columns:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Applies additive schema upgrades for llm_validation_results table.
def _ensure_llm_validation_results_columns(bind_engine):
with belief_scope("_ensure_llm_validation_results_columns"):
@@ -257,7 +257,7 @@ def _ensure_llm_validation_results_columns(bind_engine):
# [DEF:_ensure_git_server_configs_columns:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Applies additive schema upgrades for git_server_configs table.
# @PRE: bind_engine points to application database.
# @POST: Missing columns are added without data loss.
@@ -295,7 +295,7 @@ def _ensure_git_server_configs_columns(bind_engine):
# [DEF:ensure_connection_configs_table:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Ensures the external connection registry table exists in the main database.
# @PRE: bind_engine points to the application database.
# @POST: connection_configs table exists without dropping existing data.
@@ -313,7 +313,7 @@ def ensure_connection_configs_table(bind_engine):
# [DEF:init_db:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Initializes the database by creating all tables.
# @PRE: engine, tasks_engine and auth_engine are initialized.
# @POST: Database tables created in all databases.
@@ -331,7 +331,7 @@ def init_db():
# [/DEF:init_db:Function]
# [DEF:get_db:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Dependency for getting a database session.
# @PRE: SessionLocal is initialized.
# @POST: Session is closed after use.
@@ -346,7 +346,7 @@ def get_db():
# [/DEF:get_db:Function]
# [DEF:get_tasks_db:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Dependency for getting a tasks database session.
# @PRE: TasksSessionLocal is initialized.
# @POST: Session is closed after use.
@@ -361,7 +361,7 @@ def get_tasks_db():
# [/DEF:get_tasks_db:Function]
# [DEF:get_auth_db:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Dependency for getting an authentication database session.
# @PRE: AuthSessionLocal is initialized.
# @POST: Session is closed after use.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.encryption_key:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: encryption, key, bootstrap, environment, startup
# @PURPOSE: Resolve and persist the Fernet encryption key required by runtime services.
# @LAYER: Infra

View File

@@ -1,5 +1,5 @@
# [DEF:test_logger:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for logger module
# @LAYER: Infra
# @RELATION: VERIFIES -> src.core.logger

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.mapping_service:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: mapping, ids, synchronization, environments, cross-filters
# @PURPOSE: Service for tracking and synchronizing Superset Resource IDs (UUID <-> Integer ID)
# @LAYER: Core
@@ -21,7 +21,7 @@ from src.core.logger import logger, belief_scope
# [/SECTION]
# [DEF:IdMappingService:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Service handling the cataloging and retrieval of remote Superset Integer IDs.
#
# @TEST_CONTRACT: IdMappingServiceModel ->

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.migration.__init__:Module]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: migration, package, exports
# @PURPOSE: Namespace package for migration pre-flight orchestration components.
# @LAYER: Core

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.migration.archive_parser:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: migration, zip, parser, yaml, metadata
# @PURPOSE: Parse Superset export ZIP archives into normalized object catalogs for diffing.
# @LAYER: Core

View File

@@ -1,12 +1,12 @@
# [DEF:backend.src.core.migration.dry_run_orchestrator:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: migration, dry_run, diff, risk, superset
# @PURPOSE: Compute pre-flight migration diff and risk scoring without apply.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
# @RELATION: DEPENDS_ON -> backend.src.core.migration_engine
# @RELATION: DEPENDS_ON -> backend.src.core.migration.archive_parser
# @RELATION: DEPENDS_ON -> backend.src.core.migration.risk_assessor
# @RELATION: DEPENDS_ON ->[backend.src.core.superset_client.SupersetClient]
# @RELATION: DEPENDS_ON ->[backend.src.core.migration_engine.MigrationEngine]
# @RELATION: DEPENDS_ON ->[backend.src.core.migration.archive_parser.MigrationArchiveParser]
# @RELATION: DEPENDS_ON ->[backend.src.core.migration.risk_assessor]
# @INVARIANT: Dry run is informative only and must not mutate target environment.
from datetime import datetime, timezone

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.migration.risk_assessor:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: migration, dry_run, risk, scoring, preflight
# @PURPOSE: Compute deterministic migration risk items and aggregate score for dry-run reporting.
# @LAYER: Domain

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.migration_engine:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: migration, engine, zip, yaml, transformation, cross-filter, id-mapping
# @PURPOSE: Transforms Superset export ZIP archives while preserving archive integrity and patching mapped identifiers.
# @LAYER: Domain

View File

@@ -1,192 +1,192 @@
import importlib.util
import os
import sys # Added this line
from typing import Dict, List, Optional
from .plugin_base import PluginBase, PluginConfig
from .logger import belief_scope
# [DEF:PluginLoader:Class]
# @TIER: STANDARD
# @SEMANTICS: plugin, loader, dynamic, import
# @PURPOSE: Scans a specified directory for Python modules, dynamically loads them, and registers any classes that are valid implementations of the PluginBase interface.
# @LAYER: Core
# @RELATION: Depends on PluginBase. It is used by the main application to discover and manage available plugins.
class PluginLoader:
"""
Scans a directory for Python modules, loads them, and identifies classes
that inherit from PluginBase.
"""
# [DEF:__init__:Function]
# @PURPOSE: Initializes the PluginLoader with a directory to scan.
# @PRE: plugin_dir is a valid directory path.
# @POST: Plugins are loaded and registered.
# @PARAM: plugin_dir (str) - The directory containing plugin modules.
def __init__(self, plugin_dir: str):
with belief_scope("__init__"):
self.plugin_dir = plugin_dir
self._plugins: Dict[str, PluginBase] = {}
self._plugin_configs: Dict[str, PluginConfig] = {}
self._load_plugins()
# [/DEF:__init__:Function]
# [DEF:_load_plugins:Function]
# @PURPOSE: Scans the plugin directory and loads all valid plugins.
# @PRE: plugin_dir exists or can be created.
# @POST: _load_module is called for each .py file.
def _load_plugins(self):
with belief_scope("_load_plugins"):
"""
Scans the plugin directory, imports modules, and registers valid plugins.
"""
if not os.path.exists(self.plugin_dir):
os.makedirs(self.plugin_dir)
# Add the plugin directory's parent to sys.path to enable relative imports within plugins
# This assumes plugin_dir is something like 'backend/src/plugins'
# and we want 'backend/src' to be on the path for 'from ..core...' imports
plugin_parent_dir = os.path.abspath(os.path.join(self.plugin_dir, os.pardir))
if plugin_parent_dir not in sys.path:
sys.path.insert(0, plugin_parent_dir)
for filename in os.listdir(self.plugin_dir):
file_path = os.path.join(self.plugin_dir, filename)
# Handle directory-based plugins (packages)
if os.path.isdir(file_path):
init_file = os.path.join(file_path, "__init__.py")
if os.path.exists(init_file):
self._load_module(filename, init_file)
continue
# Handle single-file plugins
if filename.endswith(".py") and filename != "__init__.py":
module_name = filename[:-3]
self._load_module(module_name, file_path)
# [/DEF:_load_plugins:Function]
# [DEF:_load_module:Function]
# @PURPOSE: Loads a single Python module and discovers PluginBase implementations.
# @PRE: module_name and file_path are valid.
# @POST: Plugin classes are instantiated and registered.
# @PARAM: module_name (str) - The name of the module.
# @PARAM: file_path (str) - The path to the module file.
def _load_module(self, module_name: str, file_path: str):
with belief_scope("_load_module"):
"""
Loads a single Python module and extracts PluginBase subclasses.
"""
import importlib.util
import os
import sys # Added this line
from typing import Dict, List, Optional
from .plugin_base import PluginBase, PluginConfig
from .logger import belief_scope
# [DEF:PluginLoader:Class]
# @COMPLEXITY: 3
# @SEMANTICS: plugin, loader, dynamic, import
# @PURPOSE: Scans a specified directory for Python modules, dynamically loads them, and registers any classes that are valid implementations of the PluginBase interface.
# @LAYER: Core
# @RELATION: Depends on PluginBase. It is used by the main application to discover and manage available plugins.
class PluginLoader:
"""
Scans a directory for Python modules, loads them, and identifies classes
that inherit from PluginBase.
"""
# [DEF:__init__:Function]
# @PURPOSE: Initializes the PluginLoader with a directory to scan.
# @PRE: plugin_dir is a valid directory path.
# @POST: Plugins are loaded and registered.
# @PARAM: plugin_dir (str) - The directory containing plugin modules.
def __init__(self, plugin_dir: str):
with belief_scope("__init__"):
self.plugin_dir = plugin_dir
self._plugins: Dict[str, PluginBase] = {}
self._plugin_configs: Dict[str, PluginConfig] = {}
self._load_plugins()
# [/DEF:__init__:Function]
# [DEF:_load_plugins:Function]
# @PURPOSE: Scans the plugin directory and loads all valid plugins.
# @PRE: plugin_dir exists or can be created.
# @POST: _load_module is called for each .py file.
def _load_plugins(self):
with belief_scope("_load_plugins"):
"""
Scans the plugin directory, imports modules, and registers valid plugins.
"""
if not os.path.exists(self.plugin_dir):
os.makedirs(self.plugin_dir)
# Add the plugin directory's parent to sys.path to enable relative imports within plugins
# This assumes plugin_dir is something like 'backend/src/plugins'
# and we want 'backend/src' to be on the path for 'from ..core...' imports
plugin_parent_dir = os.path.abspath(os.path.join(self.plugin_dir, os.pardir))
if plugin_parent_dir not in sys.path:
sys.path.insert(0, plugin_parent_dir)
for filename in os.listdir(self.plugin_dir):
file_path = os.path.join(self.plugin_dir, filename)
# Handle directory-based plugins (packages)
if os.path.isdir(file_path):
init_file = os.path.join(file_path, "__init__.py")
if os.path.exists(init_file):
self._load_module(filename, init_file)
continue
# Handle single-file plugins
if filename.endswith(".py") and filename != "__init__.py":
module_name = filename[:-3]
self._load_module(module_name, file_path)
# [/DEF:_load_plugins:Function]
# [DEF:_load_module:Function]
# @PURPOSE: Loads a single Python module and discovers PluginBase implementations.
# @PRE: module_name and file_path are valid.
# @POST: Plugin classes are instantiated and registered.
# @PARAM: module_name (str) - The name of the module.
# @PARAM: file_path (str) - The path to the module file.
def _load_module(self, module_name: str, file_path: str):
with belief_scope("_load_module"):
"""
Loads a single Python module and extracts PluginBase subclasses.
"""
# All runtime code is imported through the canonical `src` package root.
package_name = f"src.plugins.{module_name}"
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
spec = importlib.util.spec_from_file_location(package_name, file_path)
if spec is None or spec.loader is None:
print(f"Could not load module spec for {package_name}") # Replace with proper logging
return
module = importlib.util.module_from_spec(spec)
try:
spec.loader.exec_module(module)
except Exception as e:
print(f"Error loading plugin module {module_name}: {e}") # Replace with proper logging
return
for attribute_name in dir(module):
attribute = getattr(module, attribute_name)
if (
isinstance(attribute, type)
and issubclass(attribute, PluginBase)
and attribute is not PluginBase
):
try:
plugin_instance = attribute()
self._register_plugin(plugin_instance)
except Exception as e:
print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}") # Replace with proper logging
# [/DEF:_load_module:Function]
# [DEF:_register_plugin:Function]
# @PURPOSE: Registers a PluginBase instance and its configuration.
# @PRE: plugin_instance is a valid implementation of PluginBase.
# @POST: Plugin is added to _plugins and _plugin_configs.
# @PARAM: plugin_instance (PluginBase) - The plugin instance to register.
def _register_plugin(self, plugin_instance: PluginBase):
with belief_scope("_register_plugin"):
"""
Registers a valid plugin instance.
"""
plugin_id = plugin_instance.id
if plugin_id in self._plugins:
print(f"Warning: Duplicate plugin ID '{plugin_id}' found. Skipping.") # Replace with proper logging
return
try:
schema = plugin_instance.get_schema()
# Basic validation to ensure it's a dictionary
if not isinstance(schema, dict):
raise TypeError("get_schema() must return a dictionary.")
plugin_config = PluginConfig(
id=plugin_instance.id,
name=plugin_instance.name,
description=plugin_instance.description,
version=plugin_instance.version,
ui_route=plugin_instance.ui_route,
schema=schema,
)
# The following line is commented out because it requires a schema to be passed to validate against.
# The schema provided by the plugin is the one being validated, not the data.
# validate(instance={}, schema=schema)
self._plugins[plugin_id] = plugin_instance
self._plugin_configs[plugin_id] = plugin_config
from ..core.logger import logger
logger.info(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")
except Exception as e:
from ..core.logger import logger
logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
# [/DEF:_register_plugin:Function]
# [DEF:get_plugin:Function]
# @PURPOSE: Retrieves a loaded plugin instance by its ID.
# @PRE: plugin_id is a string.
# @POST: Returns plugin instance or None.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: Optional[PluginBase] - The plugin instance if found, otherwise None.
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
with belief_scope("get_plugin"):
"""
Returns a loaded plugin instance by its ID.
"""
return self._plugins.get(plugin_id)
# [/DEF:get_plugin:Function]
# [DEF:get_all_plugin_configs:Function]
# @PURPOSE: Returns a list of all registered plugin configurations.
# @PRE: None.
# @POST: Returns list of all PluginConfig objects.
# @RETURN: List[PluginConfig] - A list of plugin configurations.
def get_all_plugin_configs(self) -> List[PluginConfig]:
    with belief_scope("get_all_plugin_configs"):
        """
        Return every registered plugin configuration as a fresh list.
        """
        configs = self._plugin_configs
        return [*configs.values()]
# [/DEF:get_all_plugin_configs:Function]
# [DEF:has_plugin:Function]
# @PURPOSE: Checks if a plugin with the given ID is registered.
# @PRE: plugin_id is a string.
# @POST: Returns True if plugin exists.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: bool - True if the plugin is registered, False otherwise.
def has_plugin(self, plugin_id: str) -> bool:
    with belief_scope("has_plugin"):
        """
        Report whether a plugin with *plugin_id* has been registered.
        """
        known = self._plugins
        return plugin_id in known
# [/DEF:has_plugin:Function]
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
spec = importlib.util.spec_from_file_location(package_name, file_path)
if spec is None or spec.loader is None:
print(f"Could not load module spec for {package_name}") # Replace with proper logging
return
module = importlib.util.module_from_spec(spec)
try:
spec.loader.exec_module(module)
except Exception as e:
print(f"Error loading plugin module {module_name}: {e}") # Replace with proper logging
return
for attribute_name in dir(module):
attribute = getattr(module, attribute_name)
if (
isinstance(attribute, type)
and issubclass(attribute, PluginBase)
and attribute is not PluginBase
):
try:
plugin_instance = attribute()
self._register_plugin(plugin_instance)
except Exception as e:
print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}") # Replace with proper logging
# [/DEF:_load_module:Function]
# [DEF:_register_plugin:Function]
# @PURPOSE: Registers a PluginBase instance and its configuration.
# @PRE: plugin_instance is a valid implementation of PluginBase.
# @POST: Plugin is added to _plugins and _plugin_configs.
# @PARAM: plugin_instance (PluginBase) - The plugin instance to register.
def _register_plugin(self, plugin_instance: PluginBase):
    with belief_scope("_register_plugin"):
        """
        Validate and register a plugin instance.

        Skips plugins whose ID is already registered. On success the
        instance is stored in ``_plugins`` and its derived PluginConfig
        in ``_plugin_configs``; validation failures are logged and the
        plugin is not registered.
        """
        # Hoisted single local import instead of duplicating it in both
        # the success and failure branches.
        from ..core.logger import logger

        plugin_id = plugin_instance.id
        if plugin_id in self._plugins:
            # Proper logging instead of print(), resolving the old
            # "Replace with proper logging" TODO; the level conveys the
            # "Warning:" prefix the print carried.
            logger.warning(f"Duplicate plugin ID '{plugin_id}' found. Skipping.")
            return
        try:
            schema = plugin_instance.get_schema()
            # Basic validation: get_schema() must return a mapping.
            if not isinstance(schema, dict):
                raise TypeError("get_schema() must return a dictionary.")
            plugin_config = PluginConfig(
                id=plugin_instance.id,
                name=plugin_instance.name,
                description=plugin_instance.description,
                version=plugin_instance.version,
                ui_route=plugin_instance.ui_route,
                schema=schema,
            )
            # jsonschema validation is intentionally omitted: the
            # plugin's schema is the artifact being registered here,
            # not data to be checked against another schema.
            # validate(instance={}, schema=schema)
            self._plugins[plugin_id] = plugin_instance
            self._plugin_configs[plugin_id] = plugin_config
            logger.info(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")
        except Exception as e:
            # Broad catch keeps one bad plugin from aborting discovery
            # of the remaining plugins.
            logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
# [/DEF:_register_plugin:Function]
# [DEF:get_plugin:Function]
# @PURPOSE: Retrieves a loaded plugin instance by its ID.
# @PRE: plugin_id is a string.
# @POST: Returns plugin instance or None.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: Optional[PluginBase] - The plugin instance if found, otherwise None.
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
    with belief_scope("get_plugin"):
        """
        Fetch the loaded plugin instance for *plugin_id*, if any.
        """
        return self._plugins.get(plugin_id, None)
# [/DEF:get_plugin:Function]
# [DEF:get_all_plugin_configs:Function]
# @PURPOSE: Returns a list of all registered plugin configurations.
# @PRE: None.
# @POST: Returns list of all PluginConfig objects.
# @RETURN: List[PluginConfig] - A list of plugin configurations.
def get_all_plugin_configs(self) -> List[PluginConfig]:
    with belief_scope("get_all_plugin_configs"):
        """
        Snapshot all loaded plugin configurations into a new list.
        """
        values = self._plugin_configs.values()
        return list(values)
# [/DEF:get_all_plugin_configs:Function]
# [DEF:has_plugin:Function]
# @PURPOSE: Checks if a plugin with the given ID is registered.
# @PRE: plugin_id is a string.
# @POST: Returns True if plugin exists.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: bool - True if the plugin is registered, False otherwise.
def has_plugin(self, plugin_id: str) -> bool:
    with belief_scope("has_plugin"):
        """
        True when a plugin with the given ID is currently loaded.
        """
        return plugin_id in self._plugins.keys()
# [/DEF:has_plugin:Function]
# [/DEF:PluginLoader:Class]

View File

@@ -1,5 +1,5 @@
# [DEF:SchedulerModule:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: scheduler, apscheduler, cron, backup
# @PURPOSE: Manages scheduled tasks using APScheduler.
# @LAYER: Core
@@ -18,7 +18,7 @@ from datetime import datetime, time, timedelta, date
# [/SECTION]
# [DEF:SchedulerService:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: scheduler, service, apscheduler
# @PURPOSE: Provides a service to manage scheduled backup tasks.
class SchedulerService:
@@ -123,7 +123,7 @@ class SchedulerService:
# [/DEF:SchedulerService:Class]
# [DEF:ThrottledSchedulerConfigurator:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: scheduler, throttling, distribution
# @PURPOSE: Distributes validation tasks evenly within an execution window.
class ThrottledSchedulerConfigurator:

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.superset_client:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: superset, api, client, rest, http, dashboard, dataset, import, export
# @PURPOSE: Предоставляет высокоуровневый клиент для взаимодействия с Superset REST API, инкапсулируя логику запросов, обработку ошибок и пагинацию.
# @LAYER: Core
@@ -25,13 +25,13 @@ from .config_models import Environment
# [/SECTION]
# [DEF:backend.src.core.superset_client.SupersetClient:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Класс-обёртка над Superset REST API, предоставляющий методы для работы с дашбордами и датасетами.
# @RELATION: [DEPENDS_ON] ->[backend.src.core.utils.network.APIClient]
# @RELATION: [DEPENDS_ON] ->[backend.src.core.config_models.Environment]
class SupersetClient:
# [DEF:backend.src.core.superset_client.SupersetClient.__init__:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Инициализирует клиент, проверяет конфигурацию и создает сетевой клиент.
# @PRE: `env` должен быть валидным объектом Environment.
# @POST: Атрибуты `env` и `network` созданы и готовы к работе.
@@ -60,7 +60,7 @@ class SupersetClient:
# [/DEF:__init__:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.authenticate:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Authenticates the client using the configured credentials.
# @PRE: self.network must be initialized with valid auth configuration.
# @POST: Client is authenticated and tokens are stored.
@@ -73,7 +73,7 @@ class SupersetClient:
@property
# [DEF:backend.src.core.superset_client.SupersetClient.headers:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Возвращает базовые HTTP-заголовки, используемые сетевым клиентом.
# @PRE: APIClient is initialized and authenticated.
# @POST: Returns a dictionary of HTTP headers.
@@ -85,7 +85,7 @@ class SupersetClient:
# [SECTION: DASHBOARD OPERATIONS]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dashboards:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Получает полный список дашбордов, автоматически обрабатывая пагинацию.
# @PRE: Client is authenticated.
# @POST: Returns a tuple with total count and list of dashboards.
@@ -119,7 +119,7 @@ class SupersetClient:
# [/DEF:get_dashboards:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dashboards_page:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches a single dashboards page from Superset without iterating all pages.
# @PRE: Client is authenticated.
# @POST: Returns total count and one page of dashboards.
@@ -156,7 +156,7 @@ class SupersetClient:
# [/DEF:get_dashboards_page:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dashboards_summary:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
# @PRE: Client is authenticated.
# @POST: Returns a list of dashboard metadata summaries.
@@ -241,7 +241,7 @@ class SupersetClient:
# [/DEF:get_dashboards_summary:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dashboards_summary_page:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches one page of dashboard metadata optimized for the grid.
# @PRE: page >= 1 and page_size > 0.
# @POST: Returns mapped summaries and total dashboard count.
@@ -314,7 +314,7 @@ class SupersetClient:
# [/DEF:get_dashboards_summary_page:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._extract_owner_labels:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Normalize dashboard owners payload to stable display labels.
# @PRE: owners payload can be scalar, object or list.
# @POST: Returns deduplicated non-empty owner labels preserving order.
@@ -342,7 +342,7 @@ class SupersetClient:
# [/DEF:_extract_owner_labels:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._extract_user_display:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Normalize user payload to a stable display name.
# @PRE: user payload can be string, dict or None.
# @POST: Returns compact non-empty display value or None.
@@ -371,7 +371,7 @@ class SupersetClient:
# [/DEF:_extract_user_display:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._sanitize_user_text:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Convert scalar value to non-empty user-facing text.
# @PRE: value can be any scalar type.
# @POST: Returns trimmed string or None.
@@ -385,7 +385,7 @@ class SupersetClient:
# [/DEF:_sanitize_user_text:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches a single dashboard by ID.
# @PRE: Client is authenticated and dashboard_id exists.
# @POST: Returns dashboard payload from Superset API.
@@ -398,7 +398,7 @@ class SupersetClient:
# [/DEF:get_dashboard:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_chart:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches a single chart by ID.
# @PRE: Client is authenticated and chart_id exists.
# @POST: Returns chart payload from Superset API.
@@ -411,7 +411,7 @@ class SupersetClient:
# [/DEF:get_chart:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dashboard_detail:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches detailed dashboard information including related charts and datasets.
# @PRE: Client is authenticated and dashboard_id exists.
# @POST: Returns dashboard metadata with charts and datasets lists.
@@ -606,7 +606,7 @@ class SupersetClient:
# [/DEF:get_dashboard_detail:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_charts:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches all charts with pagination support.
# @PRE: Client is authenticated.
# @POST: Returns total count and charts list.
@@ -626,7 +626,7 @@ class SupersetClient:
# [/DEF:get_charts:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._extract_chart_ids_from_layout:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Traverses dashboard layout metadata and extracts chart IDs from common keys.
# @PRE: payload can be dict/list/scalar.
# @POST: Returns a set of chart IDs found in nested structures.
@@ -659,7 +659,7 @@ class SupersetClient:
# [/DEF:_extract_chart_ids_from_layout:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.export_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Экспортирует дашборд в виде ZIP-архива.
# @PRE: dashboard_id must exist in Superset.
# @POST: Returns ZIP content and filename.
@@ -684,7 +684,7 @@ class SupersetClient:
# [/DEF:export_dashboard:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.import_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Импортирует дашборд из ZIP-файла.
# @PRE: file_name must be a valid ZIP dashboard export.
# @POST: Dashboard is imported or re-imported after deletion.
@@ -716,7 +716,7 @@ class SupersetClient:
# [/DEF:import_dashboard:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.delete_dashboard:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Удаляет дашборд по его ID или slug.
# @PRE: dashboard_id must exist.
# @POST: Dashboard is removed from Superset.
@@ -738,7 +738,7 @@ class SupersetClient:
# [SECTION: DATASET OPERATIONS]
# [DEF:backend.src.core.superset_client.SupersetClient.get_datasets:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Получает полный список датасетов, автоматически обрабатывая пагинацию.
# @PRE: Client is authenticated.
# @POST: Returns total count and list of datasets.
@@ -759,7 +759,7 @@ class SupersetClient:
# [/DEF:get_datasets:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_datasets_summary:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches dataset metadata optimized for the Dataset Hub grid.
# @PRE: Client is authenticated.
# @POST: Returns a list of dataset metadata summaries.
@@ -784,7 +784,7 @@ class SupersetClient:
# [/DEF:get_datasets_summary:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dataset_detail:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches detailed dataset information including columns and linked dashboards
# @PRE: Client is authenticated and dataset_id exists.
# @POST: Returns detailed dataset info with columns and linked dashboards.
@@ -897,7 +897,7 @@ class SupersetClient:
# [/DEF:get_dataset_detail:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_dataset:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Получает информацию о конкретном датасете по его ID.
# @PRE: dataset_id must exist.
# @POST: Returns dataset details.
@@ -913,7 +913,7 @@ class SupersetClient:
# [/DEF:get_dataset:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.update_dataset:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Обновляет данные датасета по его ID.
# @PRE: dataset_id must exist.
# @POST: Dataset is updated in Superset.
@@ -939,7 +939,7 @@ class SupersetClient:
# [SECTION: DATABASE OPERATIONS]
# [DEF:backend.src.core.superset_client.SupersetClient.get_databases:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Получает полный список баз данных.
# @PRE: Client is authenticated.
# @POST: Returns total count and list of databases.
@@ -962,7 +962,7 @@ class SupersetClient:
# [/DEF:get_databases:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_database:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Получает информацию о конкретной базе данных по её ID.
# @PRE: database_id must exist.
# @POST: Returns database details.
@@ -978,7 +978,7 @@ class SupersetClient:
# [/DEF:get_database:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_databases_summary:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
# @PRE: Client is authenticated.
# @POST: Returns list of database summaries.
@@ -999,7 +999,7 @@ class SupersetClient:
# [/DEF:get_databases_summary:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_database_by_uuid:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Find a database by its UUID.
# @PRE: db_uuid must be a valid UUID string.
# @POST: Returns database info or None.
@@ -1019,7 +1019,7 @@ class SupersetClient:
# [SECTION: HELPERS]
# [DEF:backend.src.core.superset_client.SupersetClient._resolve_target_id_for_delete:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Resolves a dashboard ID from either an ID or a slug.
# @PRE: Either dash_id or dash_slug should be provided.
# @POST: Returns the resolved ID or None.
@@ -1042,7 +1042,7 @@ class SupersetClient:
# [/DEF:_resolve_target_id_for_delete:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._do_import:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Performs the actual multipart upload for import.
# @PRE: file_name must be a path to an existing ZIP file.
# @POST: Returns the API response from the upload.
@@ -1064,7 +1064,7 @@ class SupersetClient:
# [/DEF:_do_import:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._validate_export_response:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Validates that the export response is a non-empty ZIP archive.
# @PRE: response must be a valid requests.Response object.
# @POST: Raises SupersetAPIError if validation fails.
@@ -1078,7 +1078,7 @@ class SupersetClient:
# [/DEF:_validate_export_response:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._resolve_export_filename:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Determines the filename for an exported dashboard.
# @PRE: response must contain Content-Disposition header or dashboard_id must be provided.
# @POST: Returns a sanitized filename string.
@@ -1094,7 +1094,7 @@ class SupersetClient:
# [/DEF:_resolve_export_filename:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._validate_query_params:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Ensures query parameters have default page and page_size.
# @PRE: query can be None or a dictionary.
# @POST: Returns a dictionary with at least page and page_size.
@@ -1107,7 +1107,7 @@ class SupersetClient:
# [/DEF:_validate_query_params:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._fetch_total_object_count:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Fetches the total number of items for a given endpoint.
# @PRE: endpoint must be a valid Superset API path.
# @POST: Returns the total count as an integer.
@@ -1122,7 +1122,7 @@ class SupersetClient:
# [/DEF:_fetch_total_object_count:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._fetch_all_pages:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Iterates through all pages to collect all data items.
# @PRE: pagination_options must contain base_query, total_count, and results_field.
# @POST: Returns a combined list of all items.
@@ -1132,7 +1132,7 @@ class SupersetClient:
# [/DEF:_fetch_all_pages:Function]
# [DEF:backend.src.core.superset_client.SupersetClient._validate_import_file:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Validates that the file to be imported is a valid ZIP with metadata.yaml.
# @PRE: zip_path must be a path to a file.
# @POST: Raises error if file is missing, not a ZIP, or missing metadata.
@@ -1149,7 +1149,7 @@ class SupersetClient:
# [/DEF:_validate_import_file:Function]
# [DEF:backend.src.core.superset_client.SupersetClient.get_all_resources:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Fetches all resources of a given type with id, uuid, and name columns.
# @PARAM: resource_type (str) - One of "chart", "dataset", "dashboard".
# @PRE: Client is authenticated. resource_type is valid.

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.core.superset_profile_lookup:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: superset, users, lookup, profile, pagination, normalization
# @PURPOSE: Provides environment-scoped Superset account lookup adapter with stable normalized output.
# @LAYER: Core
@@ -19,7 +19,7 @@ from .utils.network import APIClient, AuthenticationError, SupersetAPIError
# [DEF:SupersetAccountLookupAdapter:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Lookup Superset users and normalize candidates for profile binding.
class SupersetAccountLookupAdapter:
# [DEF:__init__:Function]

View File

@@ -1,9 +1,12 @@
# [DEF:TaskManagerPackage:Module]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: task, manager, package, exports
# @PURPOSE: Exports the public API of the task manager package.
# @LAYER: Core
# @RELATION: Aggregates models and manager.
# @RELATION: DEPENDS_ON ->[TaskManagerModels]
# @RELATION: DEPENDS_ON ->[TaskManagerModule]
# @RELATION: DEPENDS_ON ->[backend.src.core.task_manager.manager.TaskManager]
# @INVARIANT: Package exports stay aligned with manager and models contracts.
from .models import Task, TaskStatus, LogEntry
from .manager import TaskManager

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.core.task_manager.__tests__.test_context:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, task-context, background-tasks, sub-context
# @PURPOSE: Verify TaskContext preserves optional background task scheduler across sub-context creation.

View File

@@ -1,5 +1,5 @@
# [DEF:TaskCleanupModule:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: task, cleanup, retention, logs
# @PURPOSE: Implements task cleanup and retention policies, including associated logs.
# @LAYER: Core
@@ -12,7 +12,7 @@ from ..config_manager import ConfigManager
# [DEF:TaskCleanupService:Class]
# @PURPOSE: Provides methods to clean up old task records and their associated logs.
# @TIER: STANDARD
# @COMPLEXITY: 3
class TaskCleanupService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the cleanup service with dependencies.

View File

@@ -3,7 +3,7 @@
# @PURPOSE: Provides execution context passed to plugins during task execution.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> TaskLogger, USED_BY -> plugins
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Each TaskContext is bound to a single task execution.
# [SECTION: IMPORTS]
@@ -16,7 +16,7 @@ from ..logger import belief_scope
# [DEF:TaskContext:Class]
# @SEMANTICS: context, task, execution, plugin
# @PURPOSE: A container passed to plugin.execute() providing the logger and other task-specific utilities.
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: logger is always a valid TaskLogger instance.
# @UX_STATE: Idle -> Active -> Complete
#

View File

@@ -1,5 +1,5 @@
# [DEF:TaskManagerModule:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
# @LAYER: Core
@@ -38,7 +38,7 @@ from ..logger import logger, belief_scope, should_log_task_level
# [/SECTION]
# [DEF:TaskManager:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
# @INVARIANT: Task IDs are unique within the registry.
@@ -67,7 +67,7 @@ class TaskManager:
LOG_FLUSH_INTERVAL = 2.0
# [DEF:__init__:Function]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @PURPOSE: Initialize the TaskManager with dependencies.
# @PRE: plugin_loader is initialized.
# @POST: TaskManager is ready to accept tasks.
@@ -101,7 +101,7 @@ class TaskManager:
# [/DEF:__init__:Function]
# [DEF:_flusher_loop:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Background thread that periodically flushes log buffer to database.
# @PRE: TaskManager is initialized.
# @POST: Logs are batch-written to database every LOG_FLUSH_INTERVAL seconds.
@@ -113,7 +113,7 @@ class TaskManager:
# [/DEF:_flusher_loop:Function]
# [DEF:_flush_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Flush all buffered logs to the database.
# @PRE: None.
# @POST: All buffered logs are written to task_logs table.
@@ -140,7 +140,7 @@ class TaskManager:
# [/DEF:_flush_logs:Function]
# [DEF:_flush_task_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Flush logs for a specific task immediately.
# @PRE: task_id exists.
# @POST: Task's buffered logs are written to database.
@@ -159,7 +159,7 @@ class TaskManager:
# [/DEF:_flush_task_logs:Function]
# [DEF:create_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Creates and queues a new task for execution.
# @PRE: Plugin with plugin_id exists. Params are valid.
# @POST: Task is created, added to registry, and scheduled for execution.
@@ -189,7 +189,7 @@ class TaskManager:
# [/DEF:create_task:Function]
# [DEF:_run_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Internal method to execute a task with TaskContext support.
# @PRE: Task exists in registry.
# @POST: Task is executed, status updated to SUCCESS or FAILED.
@@ -257,7 +257,7 @@ class TaskManager:
# [/DEF:_run_task:Function]
# [DEF:resolve_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resumes a task that is awaiting mapping.
# @PRE: Task exists and is in AWAITING_MAPPING state.
# @POST: Task status updated to RUNNING, params updated, execution resumed.
@@ -282,7 +282,7 @@ class TaskManager:
# [/DEF:resolve_task:Function]
# [DEF:wait_for_resolution:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Pauses execution and waits for a resolution signal.
# @PRE: Task exists.
# @POST: Execution pauses until future is set.
@@ -305,7 +305,7 @@ class TaskManager:
# [/DEF:wait_for_resolution:Function]
# [DEF:wait_for_input:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Pauses execution and waits for user input.
# @PRE: Task exists.
# @POST: Execution pauses until future is set via resume_task_with_password.
@@ -327,7 +327,7 @@ class TaskManager:
# [/DEF:wait_for_input:Function]
# [DEF:get_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves a task by its ID.
# @PRE: task_id is a string.
# @POST: Returns Task object or None.
@@ -339,7 +339,7 @@ class TaskManager:
# [/DEF:get_task:Function]
# [DEF:get_all_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves all registered tasks.
# @PRE: None.
# @POST: Returns list of all Task objects.
@@ -350,7 +350,7 @@ class TaskManager:
# [/DEF:get_all_tasks:Function]
# [DEF:get_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves tasks with pagination and optional status filter.
# @PRE: limit and offset are non-negative integers.
# @POST: Returns a list of tasks sorted by start_time descending.
@@ -391,7 +391,7 @@ class TaskManager:
# [/DEF:get_tasks:Function]
# [DEF:get_task_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Retrieves logs for a specific task (from memory for running, persistence for completed).
# @PRE: task_id is a string.
# @POST: Returns list of LogEntry or TaskLog objects.
@@ -424,7 +424,7 @@ class TaskManager:
# [/DEF:get_task_logs:Function]
# [DEF:get_task_log_stats:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get statistics about logs for a task.
# @PRE: task_id is a valid task ID.
# @POST: Returns LogStats with counts by level and source.
@@ -436,7 +436,7 @@ class TaskManager:
# [/DEF:get_task_log_stats:Function]
# [DEF:get_task_log_sources:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get unique sources for a task's logs.
# @PRE: task_id is a valid task ID.
# @POST: Returns list of unique source strings.
@@ -448,7 +448,7 @@ class TaskManager:
# [/DEF:get_task_log_sources:Function]
# [DEF:_add_log:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Adds a log entry to a task buffer and notifies subscribers.
# @PRE: Task exists.
# @POST: Log added to buffer and pushed to queues (if level meets task_log_level filter).
@@ -501,7 +501,7 @@ class TaskManager:
# [/DEF:_add_log:Function]
# [DEF:subscribe_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Subscribes to real-time logs for a task.
# @PRE: task_id is a string.
# @POST: Returns an asyncio.Queue for log entries.
@@ -517,7 +517,7 @@ class TaskManager:
# [/DEF:subscribe_logs:Function]
# [DEF:unsubscribe_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unsubscribes from real-time logs for a task.
# @PRE: task_id is a string, queue is asyncio.Queue.
# @POST: Queue removed from subscribers.
@@ -533,7 +533,7 @@ class TaskManager:
# [/DEF:unsubscribe_logs:Function]
# [DEF:load_persisted_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Load persisted tasks using persistence service.
# @PRE: None.
# @POST: Persisted tasks loaded into self.tasks.
@@ -546,7 +546,7 @@ class TaskManager:
# [/DEF:load_persisted_tasks:Function]
# [DEF:await_input:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Transition a task to AWAITING_INPUT state with input request.
# @PRE: Task exists and is in RUNNING state.
# @POST: Task status changed to AWAITING_INPUT, input_request set, persisted.
@@ -569,7 +569,7 @@ class TaskManager:
# [/DEF:await_input:Function]
# [DEF:resume_task_with_password:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resume a task that is awaiting input with provided passwords.
# @PRE: Task exists and is in AWAITING_INPUT state.
# @POST: Task status changed to RUNNING, passwords injected, task resumed.
@@ -599,7 +599,7 @@ class TaskManager:
# [/DEF:resume_task_with_password:Function]
# [DEF:clear_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Clears tasks based on status filter (also deletes associated logs).
# @PRE: status is Optional[TaskStatus].
# @POST: Tasks matching filter (or all non-active) cleared from registry and database.

View File

@@ -1,5 +1,5 @@
# [DEF:TaskManagerModels:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: task, models, pydantic, enum, state
# @PURPOSE: Defines the data models and enumerations used by the Task Manager.
# @LAYER: Core
@@ -17,7 +17,7 @@ from pydantic import BaseModel, Field
# [/SECTION]
# [DEF:TaskStatus:Enum]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: task, status, state, enum
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
class TaskStatus(str, Enum):
@@ -32,7 +32,7 @@ class TaskStatus(str, Enum):
# [DEF:LogLevel:Enum]
# @SEMANTICS: log, level, severity, enum
# @PURPOSE: Defines the possible log levels for task logging.
# @TIER: STANDARD
# @COMPLEXITY: 3
class LogLevel(str, Enum):
DEBUG = "DEBUG"
INFO = "INFO"
@@ -43,7 +43,7 @@ class LogLevel(str, Enum):
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Each log entry has a unique timestamp and source.
#
# @TEST_CONTRACT: LogEntryModel ->
@@ -65,7 +65,7 @@ class LogEntry(BaseModel):
# [DEF:TaskLog:Class]
# @SEMANTICS: task, log, persistent, pydantic
# @PURPOSE: A Pydantic model representing a persisted log entry from the database.
# @TIER: STANDARD
# @COMPLEXITY: 3
# @RELATION: MAPS_TO -> TaskLogRecord
class TaskLog(BaseModel):
id: int
@@ -83,7 +83,7 @@ class TaskLog(BaseModel):
# [DEF:LogFilter:Class]
# @SEMANTICS: log, filter, query, pydantic
# @PURPOSE: Filter parameters for querying task logs.
# @TIER: STANDARD
# @COMPLEXITY: 3
class LogFilter(BaseModel):
level: Optional[str] = None # Filter by log level
source: Optional[str] = None # Filter by source component
@@ -95,7 +95,7 @@ class LogFilter(BaseModel):
# [DEF:LogStats:Class]
# @SEMANTICS: log, stats, aggregation, pydantic
# @PURPOSE: Statistics about log entries for a task.
# @TIER: STANDARD
# @COMPLEXITY: 3
class LogStats(BaseModel):
total_count: int
by_level: Dict[str, int] # {"INFO": 10, "ERROR": 2}
@@ -103,7 +103,7 @@ class LogStats(BaseModel):
# [/DEF:LogStats:Class]
# [DEF:Task:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: task, job, execution, state, pydantic
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
class Task(BaseModel):

View File

@@ -1,5 +1,5 @@
# [DEF:TaskPersistenceModule:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: persistence, sqlite, sqlalchemy, task, storage
# @PURPOSE: Handles the persistence of tasks using SQLAlchemy and the tasks.db database.
# @LAYER: Core
@@ -8,7 +8,7 @@
# @SIDE_EFFECT: Performs database I/O on tasks.db.
# @DATA_CONTRACT: Input[Task, LogEntry] -> Model[TaskRecord, TaskLogRecord]
# @RELATION: [USED_BY] ->[backend.src.core.task_manager.manager.TaskManager]
# @RELATION: [DEPENDS_ON] ->[backend.src.core.database.TasksSessionLocal]
# @RELATION: [DEPENDS_ON] ->[TasksSessionLocal]
# @INVARIANT: Database schema must match the TaskRecord model structure.
# [SECTION: IMPORTS]
@@ -26,12 +26,16 @@ from ..logger import logger, belief_scope
# [/SECTION]
# [DEF:TaskPersistenceService:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: persistence, service, database, sqlalchemy
# @PURPOSE: Provides methods to save and load tasks from the tasks.db database using SQLAlchemy.
# @RELATION: [DEPENDS_ON] ->[backend.src.core.database.TasksSessionLocal]
# @RELATION: [DEPENDS_ON] ->[backend.src.models.task.TaskRecord]
# @RELATION: [DEPENDS_ON] ->[backend.src.models.mapping.Environment]
# @PURPOSE: Provides methods to save, load, and delete task records in tasks.db using SQLAlchemy models.
# @PRE: TasksSessionLocal must provide an active SQLAlchemy session, Task inputs must expose id/plugin_id/status/params/result/logs fields, and TaskRecord plus Environment schemas must be available.
# @POST: Persist operations leave matching TaskRecord rows committed or rolled back without leaking sessions, load operations return reconstructed Task objects from stored TaskRecord rows, and delete operations remove only the addressed task rows.
# @SIDE_EFFECT: Opens SQLAlchemy sessions, reads and writes task_records rows, resolves environment foreign keys against environments, commits or rolls back transactions, and emits error logs on persistence failures.
# @DATA_CONTRACT: Input[Task | List[Task] | List[str] | Query(limit:int,status:Optional[TaskStatus])] -> Model[TaskRecord, Environment] -> Output[None | List[Task]]
# @RELATION: [DEPENDS_ON] ->[TasksSessionLocal]
# @RELATION: [DEPENDS_ON] ->[TaskRecord]
# @RELATION: [DEPENDS_ON] ->[Environment]
# @RELATION: [USED_BY] ->[backend.src.core.task_manager.manager.TaskManager]
# @INVARIANT: Persistence must handle potentially missing task fields natively.
#
@@ -50,7 +54,7 @@ from ..logger import logger, belief_scope
# @TEST_INVARIANT: accurate_round_trip -> verifies: [valid_task_persistence, load_corrupt_json_params]
class TaskPersistenceService:
# [DEF:_json_load_if_needed:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Safely load JSON strings from DB if necessary
# @PRE: value is an arbitrary database value
# @POST: Returns parsed JSON object, list, string, or primitive
@@ -73,7 +77,7 @@ class TaskPersistenceService:
# [/DEF:_json_load_if_needed:Function]
# [DEF:_parse_datetime:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Safely parse a datetime string from the database
# @PRE: value is an ISO string or datetime object
# @POST: Returns datetime object or None
@@ -91,7 +95,7 @@ class TaskPersistenceService:
# [/DEF:_parse_datetime:Function]
# [DEF:_resolve_environment_id:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Resolve environment id into existing environments.id value to satisfy FK constraints.
# @PRE: Session is active
# @POST: Returns existing environments.id or None when unresolved.
@@ -130,7 +134,7 @@ class TaskPersistenceService:
# [/DEF:_resolve_environment_id:Function]
# [DEF:__init__:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Initializes the persistence service.
# @PRE: None.
# @POST: Service is ready.
@@ -141,14 +145,14 @@ class TaskPersistenceService:
# [/DEF:__init__:Function]
# [DEF:persist_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persists or updates a single task in the database.
# @PRE: isinstance(task, Task)
# @POST: Task record created or updated in database.
# @PARAM: task (Task) - The task object to persist.
# @SIDE_EFFECT: Writes to task_records table in tasks.db
# @DATA_CONTRACT: Input[Task] -> Model[TaskRecord]
# @RELATION: [CALLS] ->[self._resolve_environment_id]
# @RELATION: [CALLS] ->[_resolve_environment_id]
def persist_task(self, task: Task) -> None:
with belief_scope("TaskPersistenceService.persist_task", f"task_id={task.id}"):
session: Session = TasksSessionLocal()
@@ -206,12 +210,12 @@ class TaskPersistenceService:
# [/DEF:persist_task:Function]
# [DEF:persist_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persists multiple tasks.
# @PRE: isinstance(tasks, list)
# @POST: All tasks in list are persisted.
# @PARAM: tasks (List[Task]) - The list of tasks to persist.
# @RELATION: [CALLS] ->[self.persist_task]
# @RELATION: [CALLS] ->[persist_task]
def persist_tasks(self, tasks: List[Task]) -> None:
with belief_scope("TaskPersistenceService.persist_tasks"):
for task in tasks:
@@ -219,7 +223,7 @@ class TaskPersistenceService:
# [/DEF:persist_tasks:Function]
# [DEF:load_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Loads tasks from the database.
# @PRE: limit is an integer.
# @POST: Returns list of Task objects.
@@ -227,8 +231,8 @@ class TaskPersistenceService:
# @PARAM: status (Optional[TaskStatus]) - Filter by status.
# @RETURN: List[Task] - The loaded tasks.
# @DATA_CONTRACT: Model[TaskRecord] -> Output[List[Task]]
# @RELATION: [CALLS] ->[self._json_load_if_needed]
# @RELATION: [CALLS] ->[self._parse_datetime]
# @RELATION: [CALLS] ->[_json_load_if_needed]
# @RELATION: [CALLS] ->[_parse_datetime]
def load_tasks(self, limit: int = 100, status: Optional[TaskStatus] = None) -> List[Task]:
with belief_scope("TaskPersistenceService.load_tasks"):
session: Session = TasksSessionLocal()
@@ -277,7 +281,7 @@ class TaskPersistenceService:
# [/DEF:load_tasks:Function]
# [DEF:delete_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Deletes specific tasks from the database.
# @PRE: task_ids is a list of strings.
# @POST: Specified task records deleted from database.
@@ -297,15 +301,18 @@ class TaskPersistenceService:
finally:
session.close()
# [/DEF:delete_tasks:Function]
# [/DEF:TaskPersistenceService:Class]
# [DEF:TaskLogPersistenceService:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: persistence, service, database, log, sqlalchemy
# @PURPOSE: Provides methods to save and query task logs from the task_logs table.
# @RELATION: [DEPENDS_ON] ->[backend.src.models.task.TaskLogRecord]
# @RELATION: [DEPENDS_ON] ->[backend.src.core.database.TasksSessionLocal]
# @PURPOSE: Provides methods to store, query, summarize, and delete task log rows in the task_logs table.
# @PRE: TasksSessionLocal must provide an active SQLAlchemy session, task_id inputs must identify task log rows, LogEntry batches must expose timestamp/level/source/message/metadata fields, and LogFilter inputs must provide pagination and filter attributes used by queries.
# @POST: add_logs commits all provided log entries or rolls back on failure, query methods return TaskLog or LogStats views reconstructed from TaskLogRecord rows, and delete methods remove only log rows matching the supplied task identifiers.
# @SIDE_EFFECT: Opens SQLAlchemy sessions, inserts, reads, aggregates, and deletes task_logs rows, serializes log metadata to JSON, commits or rolls back transactions, and emits error logs on persistence failures.
# @DATA_CONTRACT: Input[task_id:str, logs:List[LogEntry], log_filter:LogFilter, task_ids:List[str]] -> Model[TaskLogRecord] -> Output[None | List[TaskLog] | LogStats | List[str]]
# @RELATION: [DEPENDS_ON] ->[TaskLogRecord]
# @RELATION: [DEPENDS_ON] ->[TasksSessionLocal]
# @RELATION: [USED_BY] ->[backend.src.core.task_manager.manager.TaskManager]
# @INVARIANT: Log entries are batch-inserted for performance.
#
@@ -328,7 +335,7 @@ class TaskLogPersistenceService:
"""
# [DEF:__init__:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Initializes the TaskLogPersistenceService
# @PRE: config is provided or defaults are used
# @POST: Service is ready for log persistence
@@ -337,7 +344,7 @@ class TaskLogPersistenceService:
# [/DEF:__init__:Function]
# [DEF:add_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Batch insert log entries for a task.
# @PRE: logs is a list of LogEntry objects.
# @POST: All logs inserted into task_logs table.
@@ -370,7 +377,7 @@ class TaskLogPersistenceService:
# [/DEF:add_logs:Function]
# [DEF:get_logs:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Query logs for a task with filtering and pagination.
# @PRE: task_id is a valid task ID.
# @POST: Returns list of TaskLog objects matching filters.
@@ -424,7 +431,7 @@ class TaskLogPersistenceService:
# [/DEF:get_logs:Function]
# [DEF:get_log_stats:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get statistics about logs for a task.
# @PRE: task_id is a valid task ID.
# @POST: Returns LogStats with counts by level and source.
@@ -471,7 +478,7 @@ class TaskLogPersistenceService:
# [/DEF:get_log_stats:Function]
# [DEF:get_sources:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Get unique sources for a task's logs.
# @PRE: task_id is a valid task ID.
# @POST: Returns list of unique source strings.
@@ -492,7 +499,7 @@ class TaskLogPersistenceService:
# [/DEF:get_sources:Function]
# [DEF:delete_logs_for_task:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Delete all logs for a specific task.
# @PRE: task_id is a valid task ID.
# @POST: All logs for the task are deleted.
@@ -514,7 +521,7 @@ class TaskLogPersistenceService:
# [/DEF:delete_logs_for_task:Function]
# [DEF:delete_logs_for_tasks:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Delete all logs for multiple tasks.
# @PRE: task_ids is a list of task IDs.
# @POST: All logs for the tasks are deleted.
@@ -536,6 +543,5 @@ class TaskLogPersistenceService:
finally:
session.close()
# [/DEF:delete_logs_for_tasks:Function]
# [/DEF:TaskLogPersistenceService:Class]
# [/DEF:TaskPersistenceModule:Module]

View File

@@ -3,7 +3,7 @@
# @PURPOSE: Provides a dedicated logger for tasks with automatic source attribution.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> TaskManager, CALLS -> TaskManager._add_log
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Each TaskLogger instance is bound to a specific task_id and default source.
# [SECTION: IMPORTS]
@@ -13,7 +13,7 @@ from typing import Dict, Any, Optional, Callable
# [DEF:TaskLogger:Class]
# @SEMANTICS: logger, task, source, attribution
# @PURPOSE: A wrapper around TaskManager._add_log that carries task_id and source context.
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: All log calls include the task_id and source.
# @UX_STATE: Idle -> Logging -> (system records log)
#

View File

@@ -1,9 +1,13 @@
# [DEF:backend.src.core.utils.async_network:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: network, httpx, async, superset, authentication, cache
# @PURPOSE: Provides async Superset API client with shared auth-token cache to avoid per-request re-login.
# @LAYER: Infra
# @PRE: Config payloads contain a Superset base URL and authentication fields needed for login.
# @POST: Async network clients reuse cached auth tokens and expose stable async request/error translation flow.
# @SIDE_EFFECT: Performs upstream HTTP I/O and mutates process-local auth cache entries.
# @DATA_CONTRACT: Input[config: Dict[str, Any]] -> Output[authenticated async Superset HTTP interactions]
# @RELATION: DEPENDS_ON -> backend.src.core.utils.network.SupersetAuthCache
# @INVARIANT: Async client reuses cached auth tokens per environment credentials and invalidates on 401.
@@ -25,8 +29,8 @@ from .network import (
# [/SECTION]
# [DEF:AsyncAPIClient:Class]
# @TIER: STANDARD
# [DEF:backend.src.core.utils.async_network.AsyncAPIClient:Class]
# @COMPLEXITY: 3
# @PURPOSE: Async Superset API client backed by httpx.AsyncClient with shared auth cache.
# @RELATION: [DEPENDS_ON] ->[backend.src.core.utils.network.SupersetAuthCache]
# @RELATION: [CALLS] ->[backend.src.core.utils.network.SupersetAuthCache.get]
@@ -35,8 +39,8 @@ class AsyncAPIClient:
DEFAULT_TIMEOUT = 30
_auth_locks: Dict[tuple[str, str, bool], asyncio.Lock] = {}
# [DEF:__init__:Function]
# @TIER: STANDARD
# [DEF:backend.src.core.utils.async_network.AsyncAPIClient.__init__:Function]
# @COMPLEXITY: 3
# @PURPOSE: Initialize async API client for one environment.
# @PRE: config contains base_url and auth payload.
# @POST: Client is ready for async request/authentication flow.
@@ -61,8 +65,8 @@ class AsyncAPIClient:
# [/DEF:__init__:Function]
# [DEF:_normalize_base_url:Function]
# @TIER: TRIVIAL
# [DEF:backend.src.core.utils.async_network.AsyncAPIClient._normalize_base_url:Function]
# @COMPLEXITY: 1
# @PURPOSE: Normalize base URL for Superset API root construction.
# @POST: Returns canonical base URL without trailing slash and duplicate /api/v1 suffix.
def _normalize_base_url(self, raw_url: str) -> str:
@@ -73,7 +77,7 @@ class AsyncAPIClient:
# [/DEF:_normalize_base_url:Function]
# [DEF:_build_api_url:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Build full API URL from relative Superset endpoint.
# @POST: Returns absolute URL for upstream request.
def _build_api_url(self, endpoint: str) -> str:
@@ -88,7 +92,7 @@ class AsyncAPIClient:
# [/DEF:_build_api_url:Function]
# [DEF:_get_auth_lock:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Return per-cache-key async lock to serialize fresh login attempts.
# @POST: Returns stable asyncio.Lock instance.
@classmethod
@@ -102,7 +106,7 @@ class AsyncAPIClient:
# [/DEF:_get_auth_lock:Function]
# [DEF:authenticate:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Authenticate against Superset and cache access/csrf tokens.
# @POST: Client tokens are populated and reusable across requests.
# @SIDE_EFFECT: Performs network requests to Superset authentication endpoints.
@@ -162,7 +166,7 @@ class AsyncAPIClient:
# [/DEF:authenticate:Function]
# [DEF:get_headers:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Return authenticated Superset headers for async requests.
# @POST: Headers include Authorization and CSRF tokens.
# @RELATION: CALLS -> self.authenticate
@@ -178,7 +182,7 @@ class AsyncAPIClient:
# [/DEF:get_headers:Function]
# [DEF:request:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Perform one authenticated async Superset API request.
# @POST: Returns JSON payload or raw httpx.Response when raw_response=true.
# @SIDE_EFFECT: Performs network I/O.
@@ -215,7 +219,7 @@ class AsyncAPIClient:
# [/DEF:request:Function]
# [DEF:_handle_http_error:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Translate upstream HTTP errors into stable domain exceptions.
# @POST: Raises domain-specific exception for caller flow control.
# @DATA_CONTRACT: Input[httpx.HTTPStatusError] -> Exception
@@ -234,7 +238,7 @@ class AsyncAPIClient:
# [/DEF:_handle_http_error:Function]
# [DEF:_handle_network_error:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Translate generic httpx errors into NetworkError.
# @POST: Raises NetworkError with URL context.
# @DATA_CONTRACT: Input[httpx.HTTPError] -> NetworkError
@@ -250,7 +254,7 @@ class AsyncAPIClient:
# [/DEF:_handle_network_error:Function]
# [DEF:aclose:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Close underlying httpx client.
# @POST: Client resources are released.
# @SIDE_EFFECT: Closes network connections.

View File

@@ -1,5 +1,6 @@
# [DEF:backend.core.utils.network:Module]
# [DEF:network:Module]
#
# @COMPLEXITY: 3
# @SEMANTICS: network, http, client, api, requests, session, authentication
# @PURPOSE: Инкапсулирует низкоуровневую HTTP-логику для взаимодействия с Superset API, включая аутентификацию, управление сессией, retry-логику и обработку ошибок.
# @LAYER: Infra
@@ -22,9 +23,11 @@ from ..logger import logger as app_logger, belief_scope
# [/SECTION]
# [DEF:SupersetAPIError:Class]
# @COMPLEXITY: 1
# @PURPOSE: Base exception for all Superset API related errors.
class SupersetAPIError(Exception):
# [DEF:__init__:Function]
# @COMPLEXITY: 1
# @PURPOSE: Initializes the exception with a message and context.
# @PRE: message is a string, context is a dict.
# @POST: Exception is initialized with context.
@@ -36,9 +39,11 @@ class SupersetAPIError(Exception):
# [/DEF:SupersetAPIError:Class]
# [DEF:AuthenticationError:Class]
# @COMPLEXITY: 1
# @PURPOSE: Exception raised when authentication fails.
class AuthenticationError(SupersetAPIError):
# [DEF:__init__:Function]
# @COMPLEXITY: 1
# @PURPOSE: Initializes the authentication error.
# @PRE: message is a string, context is a dict.
# @POST: AuthenticationError is initialized.
@@ -77,7 +82,7 @@ class DashboardNotFoundError(SupersetAPIError):
# [DEF:NetworkError:Class]
# @PURPOSE: Exception raised when a network level error occurs.
class NetworkError(Exception):
# [DEF:__init__:Function]
# [DEF:network.APIClient.__init__:Function]
# @PURPOSE: Initializes the network error.
# @PRE: message is a string.
# @POST: NetworkError is initialized.
@@ -89,7 +94,7 @@ class NetworkError(Exception):
# [/DEF:NetworkError:Class]
# [DEF:SupersetAuthCache:Class]
# [DEF:network.SupersetAuthCache:Class]
# @PURPOSE: Process-local cache for Superset access/csrf tokens keyed by environment credentials.
# @PRE: base_url and username are stable strings.
# @POST: Cached entries expire automatically by TTL and can be reused across requests.
@@ -145,10 +150,10 @@ class SupersetAuthCache:
# [/DEF:SupersetAuthCache:Class]
# [DEF:APIClient:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Synchronous Superset API client with process-local auth token caching.
# @RELATION: DEPENDS_ON -> backend.src.core.utils.network.SupersetAuthCache
# @RELATION: DEPENDS_ON -> backend.src.core.logger.logger
# @RELATION: DEPENDS_ON -> network.SupersetAuthCache
# @RELATION: DEPENDS_ON -> logger
class APIClient:
DEFAULT_TIMEOUT = 30

View File

@@ -1,5 +1,5 @@
# [DEF:Dependencies:Module]
# @TIER: STANDARD
# [DEF:backend.src.dependencies:Module]
# @COMPLEXITY: 3
# @SEMANTICS: dependency, injection, singleton, factory, auth, jwt
# @PURPOSE: Manages creation and provision of shared application dependencies, such as PluginLoader and TaskManager, to avoid circular imports.
# @LAYER: Core
@@ -40,7 +40,7 @@ init_db()
config_manager = ConfigManager(config_path=str(config_path))
# [DEF:get_config_manager:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for ConfigManager.
# @PRE: Global config_manager must be initialized.
# @POST: Returns shared ConfigManager instance.
@@ -71,7 +71,7 @@ logger.info("ResourceService initialized")
# initialize them inside the dependency functions.
# [DEF:get_plugin_loader:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for PluginLoader.
# @PRE: Global plugin_loader must be initialized.
# @POST: Returns shared PluginLoader instance.
@@ -82,7 +82,7 @@ def get_plugin_loader() -> PluginLoader:
# [/DEF:get_plugin_loader:Function]
# [DEF:get_task_manager:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for TaskManager.
# @PRE: Global task_manager must be initialized.
# @POST: Returns shared TaskManager instance.
@@ -93,7 +93,7 @@ def get_task_manager() -> TaskManager:
# [/DEF:get_task_manager:Function]
# [DEF:get_scheduler_service:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for SchedulerService.
# @PRE: Global scheduler_service must be initialized.
# @POST: Returns shared SchedulerService instance.
@@ -104,7 +104,7 @@ def get_scheduler_service() -> SchedulerService:
# [/DEF:get_scheduler_service:Function]
# [DEF:get_resource_service:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for ResourceService.
# @PRE: Global resource_service must be initialized.
# @POST: Returns shared ResourceService instance.
@@ -115,7 +115,7 @@ def get_resource_service() -> ResourceService:
# [/DEF:get_resource_service:Function]
# [DEF:get_mapping_service:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for MappingService.
# @PRE: Global config_manager must be initialized.
# @POST: Returns new MappingService instance.
@@ -129,7 +129,7 @@ def get_mapping_service() -> MappingService:
_clean_release_repository = CleanReleaseRepository()
# [DEF:get_clean_release_repository:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Legacy compatibility shim for CleanReleaseRepository.
# @POST: Returns a shared CleanReleaseRepository instance.
def get_clean_release_repository() -> CleanReleaseRepository:
@@ -139,7 +139,7 @@ def get_clean_release_repository() -> CleanReleaseRepository:
# [DEF:get_clean_release_facade:Function]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Dependency injector for CleanReleaseFacade.
# @POST: Returns a facade instance with a fresh DB session.
def get_clean_release_facade(db = Depends(get_db)) -> CleanReleaseFacade:
@@ -168,13 +168,13 @@ def get_clean_release_facade(db = Depends(get_db)) -> CleanReleaseFacade:
# [/DEF:get_clean_release_facade:Function]
# [DEF:oauth2_scheme:Variable]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: OAuth2 password bearer scheme for token extraction.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login")
# [/DEF:oauth2_scheme:Variable]
# [DEF:get_current_user:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Dependency for retrieving currently authenticated user from a JWT.
# @PRE: JWT token provided in Authorization header.
# @POST: Returns User object if token is valid.
@@ -204,7 +204,7 @@ def get_current_user(token: str = Depends(oauth2_scheme), db = Depends(get_auth_
# [/DEF:get_current_user:Function]
# [DEF:has_permission:Function]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Dependency for checking if the current user has a specific permission.
# @PRE: User is authenticated.
# @POST: Returns True if user has permission.
@@ -234,4 +234,4 @@ def has_permission(resource: str, action: str):
return permission_checker
# [/DEF:has_permission:Function]
# [/DEF:Dependencies:Module]
# [/DEF:backend.src.dependencies:Module]

View File

@@ -1,5 +1,5 @@
# [DEF:test_models:Module]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Unit tests for data models
# @LAYER: Domain
# @RELATION: VERIFIES -> src.models

View File

@@ -1,5 +1,5 @@
# [DEF:test_report_models:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for report Pydantic models and their validators
# @LAYER: Domain
# @RELATION: TESTS -> backend.src.models.report

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.models.assistant:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: assistant, audit, confirmation, chat
# @PURPOSE: SQLAlchemy models for assistant audit trail and confirmation tokens.
# @LAYER: Domain
@@ -14,7 +14,7 @@ from .mapping import Base
# [DEF:AssistantAuditRecord:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Store audit decisions and outcomes produced by assistant command handling.
# @PRE: user_id must identify the actor for every record.
# @POST: Audit payload remains available for compliance and debugging.
@@ -33,7 +33,7 @@ class AssistantAuditRecord(Base):
# [DEF:AssistantMessageRecord:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist chat history entries for assistant conversations.
# @PRE: user_id, conversation_id, role and text must be present.
# @POST: Message row can be queried in chronological order.
@@ -54,7 +54,7 @@ class AssistantMessageRecord(Base):
# [DEF:AssistantConfirmationRecord:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Persist risky operation confirmation tokens with lifecycle state.
# @PRE: intent/dispatch and expiry timestamp must be provided.
# @POST: State transitions can be tracked and audited.

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.models.auth:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: auth, models, user, role, permission, sqlalchemy
# @PURPOSE: SQLAlchemy models for multi-user authentication and authorization.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.models.clean_release:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: clean-release, models, lifecycle, compliance, evidence, immutability
# @PURPOSE: Define canonical clean release domain entities and lifecycle guards.
# @LAYER: Domain

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.models.config:Module]
#
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: database, config, settings, sqlalchemy, notification
# @PURPOSE: Defines SQLAlchemy persistence models for application and notification configuration records.
# @LAYER: Domain

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.models.connection:Module]
#
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: database, connection, configuration, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for external database connection configurations.
# @LAYER: Domain
@@ -16,7 +16,7 @@ import uuid
# [/SECTION]
# [DEF:ConnectionConfig:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Stores credentials for external databases used for column mapping.
class ConnectionConfig(Base):
__tablename__ = "connection_configs"

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.models.dashboard:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: dashboard, model, metadata, migration
# @PURPOSE: Defines data models for dashboard metadata and selection.
# @LAYER: Model
@@ -9,7 +9,7 @@ from pydantic import BaseModel
from typing import List
# [DEF:DashboardMetadata:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Represents a dashboard available for migration.
class DashboardMetadata(BaseModel):
id: int
@@ -19,7 +19,7 @@ class DashboardMetadata(BaseModel):
# [/DEF:DashboardMetadata:Class]
# [DEF:DashboardSelection:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Represents the user's selection of dashboards to migrate.
class DashboardSelection(BaseModel):
selected_ids: List[int]

View File

@@ -1,5 +1,5 @@
# [DEF:GitModels:Module]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: git, models, sqlalchemy, database, schema
# @PURPOSE: Git-specific SQLAlchemy models for configuration and repository tracking.
# @LAYER: Model
@@ -27,7 +27,7 @@ class SyncStatus(str, enum.Enum):
CONFLICT = "CONFLICT"
# [DEF:GitServerConfig:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Configuration for a Git server connection.
class GitServerConfig(Base):
__tablename__ = "git_server_configs"
@@ -44,7 +44,7 @@ class GitServerConfig(Base):
# [/DEF:GitServerConfig:Class]
# [DEF:GitRepository:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Tracking for a local Git repository linked to a dashboard.
class GitRepository(Base):
__tablename__ = "git_repositories"
@@ -59,7 +59,7 @@ class GitRepository(Base):
# [/DEF:GitRepository:Class]
# [DEF:DeploymentEnvironment:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Target Superset environments for dashboard deployment.
class DeploymentEnvironment(Base):
__tablename__ = "deployment_environments"

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.models.llm:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: llm, models, sqlalchemy, persistence
# @PURPOSE: SQLAlchemy models for LLM provider configuration and validation results.
# @LAYER: Domain

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.models.mapping:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: database, mapping, environment, migration, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for environment metadata and database mappings using SQLAlchemy.
# @LAYER: Domain
@@ -20,7 +20,7 @@ import enum
Base = declarative_base()
# [DEF:ResourceType:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Enumeration of possible Superset resource types for ID mapping.
class ResourceType(str, enum.Enum):
CHART = "chart"
@@ -30,7 +30,7 @@ class ResourceType(str, enum.Enum):
# [DEF:MigrationStatus:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Enumeration of possible migration job statuses.
class MigrationStatus(enum.Enum):
PENDING = "PENDING"
@@ -41,7 +41,7 @@ class MigrationStatus(enum.Enum):
# [/DEF:MigrationStatus:Class]
# [DEF:Environment:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Represents a Superset instance environment.
class Environment(Base):
__tablename__ = "environments"
@@ -53,7 +53,7 @@ class Environment(Base):
# [/DEF:Environment:Class]
# [DEF:DatabaseMapping:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Represents a mapping between source and target databases.
class DatabaseMapping(Base):
__tablename__ = "database_mappings"
@@ -69,7 +69,7 @@ class DatabaseMapping(Base):
# [/DEF:DatabaseMapping:Class]
# [DEF:MigrationJob:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 2
# @PURPOSE: Represents a single migration execution job.
class MigrationJob(Base):
__tablename__ = "migration_jobs"
@@ -83,7 +83,7 @@ class MigrationJob(Base):
# [/DEF:MigrationJob:Class]
# [DEF:ResourceMapping:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Maps a universal UUID for a resource to its actual ID on a specific environment.
# @TEST_DATA: resource_mapping_record -> {'environment_id': 'prod-env-1', 'resource_type': 'chart', 'uuid': '123e4567-e89b-12d3-a456-426614174000', 'remote_integer_id': '42'}
class ResourceMapping(Base):

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.models.profile:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: profile, preferences, persistence, user, dashboard-filter, git, ui-preferences, sqlalchemy
# @PURPOSE: Defines persistent per-user profile settings for dashboard filter, Git identity/token, and UX preferences.
# @LAYER: Domain
@@ -20,7 +20,7 @@ from .mapping import Base
# [DEF:UserDashboardPreference:Class]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Stores Superset username binding and default "my dashboards" toggle for one authenticated user.
class UserDashboardPreference(Base):
__tablename__ = "user_dashboard_preferences"

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.models.report:Module]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @SEMANTICS: reports, models, pydantic, normalization, pagination
# @PURPOSE: Canonical report schemas for unified task reporting across heterogeneous task types.
# @LAYER: Domain
@@ -20,7 +20,7 @@ from pydantic import BaseModel, Field, field_validator, model_validator
# [DEF:TaskType:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Must contain valid generic task type mappings.
# @SEMANTICS: enum, type, task
# @PURPOSE: Supported normalized task report types.
@@ -35,7 +35,7 @@ class TaskType(str, Enum):
# [DEF:ReportStatus:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: TaskStatus enum mapping logic holds.
# @SEMANTICS: enum, status, task
# @PURPOSE: Supported normalized report status values.
@@ -48,7 +48,7 @@ class ReportStatus(str, Enum):
# [DEF:ErrorContext:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: The properties accurately describe error state.
# @SEMANTICS: error, context, payload
# @PURPOSE: Error and recovery context for failed/partial reports.
@@ -73,7 +73,7 @@ class ErrorContext(BaseModel):
# [DEF:TaskReport:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Must represent canonical task record attributes.
# @SEMANTICS: report, model, summary
# @PURPOSE: Canonical normalized report envelope for one task execution.
@@ -130,7 +130,7 @@ class TaskReport(BaseModel):
# [DEF:ReportQuery:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Time and pagination queries are mutually consistent.
# @SEMANTICS: query, filter, search
# @PURPOSE: Query object for server-side report filtering, sorting, and pagination.
@@ -188,7 +188,7 @@ class ReportQuery(BaseModel):
# [DEF:ReportCollection:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Represents paginated data correctly.
# @SEMANTICS: collection, pagination
# @PURPOSE: Paginated collection of normalized task reports.
@@ -213,7 +213,7 @@ class ReportCollection(BaseModel):
# [DEF:ReportDetailView:Class]
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @INVARIANT: Incorporates a report and logs correctly.
# @SEMANTICS: view, detail, logs
# @PURPOSE: Detailed report representation including diagnostics and recovery actions.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.models.storage:Module]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: storage, file, model, pydantic
# @PURPOSE: Data models for the storage system.
# @LAYER: Domain
@@ -10,7 +10,7 @@ from typing import Optional
from pydantic import BaseModel, Field
# [DEF:FileCategory:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Enumeration of supported file categories in the storage system.
class FileCategory(str, Enum):
BACKUP = "backups"
@@ -18,7 +18,7 @@ class FileCategory(str, Enum):
# [/DEF:FileCategory:Class]
# [DEF:StorageConfig:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Configuration model for the storage system, defining paths and naming patterns.
class StorageConfig(BaseModel):
root_path: str = Field(default="backups", description="Absolute path to the storage root directory.")
@@ -30,7 +30,7 @@ class StorageConfig(BaseModel):
# [/DEF:StorageConfig:Class]
# [DEF:StoredFile:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Data model representing metadata for a file stored in the system.
class StoredFile(BaseModel):
name: str = Field(..., description="Name of the file (including extension).")

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.models.task:Module]
#
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @SEMANTICS: database, task, record, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for task execution records.
# @LAYER: Domain
@@ -16,7 +16,7 @@ import uuid
# [/SECTION]
# [DEF:TaskRecord:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Represents a persistent record of a task execution.
class TaskRecord(Base):
__tablename__ = "task_records"
@@ -36,7 +36,7 @@ class TaskRecord(Base):
# [DEF:TaskLogRecord:Class]
# @PURPOSE: Represents a single persistent log entry for a task.
# @TIER: CRITICAL
# @COMPLEXITY: 5
# @RELATION: DEPENDS_ON -> TaskRecord
# @INVARIANT: Each log entry belongs to exactly one task.
#

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/plugins/git/llm_extension:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: git, llm, commit
# @PURPOSE: LLM-based extensions for the Git plugin, specifically for commit message generation.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/plugins/llm_analysis/__init__.py:Module]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Initialize the LLM Analysis plugin package.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.plugins.llm_analysis.__tests__.test_client_headers:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, llm-client, openrouter, headers
# @PURPOSE: Verify OpenRouter client initialization includes provider-specific headers.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.plugins.llm_analysis.__tests__.test_screenshot_service:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, screenshot-service, navigation, timeout-regression
# @PURPOSE: Protect dashboard screenshot navigation from brittle networkidle waits.

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.plugins.llm_analysis.__tests__.test_service:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: tests, llm-analysis, fallback, provider-error, unknown-status
# @PURPOSE: Verify LLM analysis transport/provider failures do not masquerade as dashboard FAIL results.

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/plugins/llm_analysis/models.py:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: pydantic, models, llm
# @PURPOSE: Define Pydantic models for LLM Analysis plugin.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/plugins/llm_analysis/plugin.py:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: plugin, llm, analysis, documentation
# @PURPOSE: Implements DashboardValidationPlugin and DocumentationPlugin.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/plugins/llm_analysis/scheduler.py:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: scheduler, task, automation
# @PURPOSE: Provides helper functions to schedule LLM-based validation tasks.
# @LAYER: Domain

View File

@@ -1,5 +1,5 @@
# [DEF:backend/src/plugins/llm_analysis/service.py:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: service, llm, screenshot, playwright, openai
# @PURPOSE: Services for LLM interaction and dashboard screenshots.
# @LAYER: Domain

View File

@@ -1,384 +1,384 @@
# [DEF:MigrationPlugin:Module]
# @TIER: CRITICAL
# @SEMANTICS: migration, superset, automation, dashboard, plugin, transformation
# @PURPOSE: Orchestrates export, DB-mapping transformation, and import of Superset dashboards across environments.
# @LAYER: App
# @RELATION: IMPLEMENTS -> PluginBase
# @RELATION: DEPENDS_ON -> SupersetClient
# @RELATION: DEPENDS_ON -> MigrationEngine
# @RELATION: DEPENDS_ON -> IdMappingService
# @RELATION: USES -> TaskContext
# @INVARIANT: Dashboards must never be imported with unmapped/source DB connections to prevent data leaks or cross-environment pollution.
# NOTE(review): indentation was flattened in this rendering; the block structure
# below was reconstructed from Python syntax — confirm against the original file.
from typing import Dict, Any, Optional
import re
from ..core.plugin_base import PluginBase
from ..core.logger import belief_scope, logger as app_logger
from ..core.superset_client import SupersetClient
from ..core.utils.fileio import create_temp_file
from ..dependencies import get_config_manager
from ..core.migration_engine import MigrationEngine
from ..core.database import SessionLocal
from ..models.mapping import DatabaseMapping, Environment
from ..core.mapping_service import IdMappingService
from ..core.task_manager.context import TaskContext
# [DEF:MigrationPlugin:Class]
# @PURPOSE: Implementation of the migration plugin workflow and transformation orchestration.
# @PRE: Plugin loader must register this instance.
# @POST: Provides migration UI schema and executes atomic dashboard transfers.
# @TEST_FIXTURE: superset_export_zip -> file:backend/tests/fixtures/migration/dashboard_export.zip
# @TEST_FIXTURE: db_mapping_payload -> INLINE_JSON: {"db_mappings": {"source_uuid_1": "target_uuid_2"}}
# @TEST_FIXTURE: password_inject_payload -> INLINE_JSON: {"passwords": {"PostgreSQL": "secret123"}}
# @TEST_INVARIANT: strict_db_isolation -> VERIFIED_BY: [successful_dashboard_transfer, missing_mapping_resolution]
class MigrationPlugin(PluginBase):
    """
    A plugin to migrate Superset dashboards between environments.
    """
    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the migration plugin.
    # @PRE: None.
    # @POST: Returns stable string "superset-migration".
    # @RETURN: str
    def id(self) -> str:
        with belief_scope("MigrationPlugin.id"):
            return "superset-migration"
    # [/DEF:id:Function]
    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the plugin.
    # @PRE: None.
    # @POST: Returns "Superset Dashboard Migration".
    # @RETURN: str
    def name(self) -> str:
        with belief_scope("MigrationPlugin.name"):
            return "Superset Dashboard Migration"
    # [/DEF:name:Function]
    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns the semantic description of the plugin.
    # @PRE: None.
    # @POST: Returns description string.
    # @RETURN: str
    def description(self) -> str:
        with belief_scope("MigrationPlugin.description"):
            return "Migrates dashboards between Superset environments."
    # [/DEF:description:Function]
    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the semantic version of the migration plugin.
    # @PRE: None.
    # @POST: Returns "1.0.0".
    # @RETURN: str
    def version(self) -> str:
        with belief_scope("MigrationPlugin.version"):
            return "1.0.0"
    # [/DEF:version:Function]
    @property
    # [DEF:ui_route:Function]
    # @PURPOSE: Returns the frontend routing anchor for the plugin.
    # @PRE: None.
    # @POST: Returns "/migration".
    # @RETURN: str
    def ui_route(self) -> str:
        with belief_scope("MigrationPlugin.ui_route"):
            return "/migration"
    # [/DEF:ui_route:Function]
    # [DEF:get_schema:Function]
    # @PURPOSE: Generates the JSON Schema for the plugin execution form dynamically.
    # @PRE: ConfigManager is accessible and environments are defined.
    # @POST: Returns a JSON Schema dict matching current system environments.
    # @RETURN: Dict[str, Any]
    def get_schema(self) -> Dict[str, Any]:
        with belief_scope("MigrationPlugin.get_schema"):
            app_logger.reason("Generating migration UI schema")
            config_manager = get_config_manager()
            envs = [e.name for e in config_manager.get_environments()]
            # Fall back to a static ["dev", "prod"] enum when no environments are configured.
            schema = {
                "type": "object",
                "properties": {
                    "from_env": {
                        "type": "string",
                        "title": "Source Environment",
                        "description": "The environment to migrate from.",
                        "enum": envs if envs else ["dev", "prod"],
                    },
                    "to_env": {
                        "type": "string",
                        "title": "Target Environment",
                        "description": "The environment to migrate to.",
                        "enum": envs if envs else ["dev", "prod"],
                    },
                    "dashboard_regex": {
                        "type": "string",
                        "title": "Dashboard Regex",
                        "description": "A regular expression to filter dashboards to migrate.",
                    },
                    "replace_db_config": {
                        "type": "boolean",
                        "title": "Replace DB Config",
                        "description": "Whether to replace the database configuration.",
                        "default": False,
                    },
                    "from_db_id": {
                        "type": "integer",
                        "title": "Source DB ID",
                        "description": "The ID of the source database to replace (if replacing).",
                    },
                    "to_db_id": {
                        "type": "integer",
                        "title": "Target DB ID",
                        "description": "The ID of the target database to replace with (if replacing).",
                    },
                },
                "required": ["from_env", "to_env", "dashboard_regex"],
            }
            app_logger.reflect("Schema generated successfully", extra={"environments_count": len(envs)})
            return schema
    # [/DEF:get_schema:Function]
    # [DEF:execute:Function]
    # @PURPOSE: Orchestrates the dashboard migration pipeline including extraction, AST mutation, and ingestion.
    # @PARAM: params (Dict[str, Any]) - Extracted parameters from UI/API execution request.
    # @PARAM: context (Optional[TaskContext]) - Dependency injected TaskContext for IO tracing.
    # @PRE: Source and target environments must resolve. Matching dashboards must exist.
    # @POST: Dashboard ZIP bundles are transformed and imported. ID mappings are synchronized.
    # @SIDE_EFFECT: Creates temp files, mutates target Superset state, blocks on user input (passwords/mappings).
    # @TEST_CONTRACT: Dict[str, Any] -> Dict[str, Any]
    # @TEST_SCENARIO: successful_dashboard_transfer -> ZIP is downloaded, DB mappings applied via AST, target import succeeds.
    # @TEST_SCENARIO: missing_password_injection -> Target import fails on auth, TaskManager pauses for user input, retries with password successfully.
    # @TEST_SCENARIO: empty_selection -> Returns NO_MATCHES gracefully when regex finds zero dashboards.
    # @TEST_EDGE: missing_env_field -> [ValueError: Could not resolve source or target environment]
    # @TEST_EDGE: invalid_regex_pattern -> [Regex compilation exception is thrown or caught gracefully]
    # @TEST_EDGE: target_api_timeout -> [Dashboard added to failed_dashboards, task concludes with PARTIAL_SUCCESS]
    async def execute(self, params: Dict[str, Any], context: Optional[TaskContext] = None):
        with belief_scope("MigrationPlugin.execute"):
            app_logger.reason("Evaluating migration task parameters", extra={"params": params})
            # Explicit IDs take precedence over env names; "_task_id" links this run
            # to the TaskManager for pause/resume interactions.
            source_env_id = params.get("source_env_id")
            target_env_id = params.get("target_env_id")
            selected_ids = params.get("selected_ids")
            from_env_name = params.get("from_env")
            to_env_name = params.get("to_env")
            dashboard_regex = params.get("dashboard_regex")
            replace_db_config = params.get("replace_db_config", False)
            fix_cross_filters = params.get("fix_cross_filters", True)
            task_id = params.get("_task_id")
            # Local import — presumably to avoid a circular dependency; confirm.
            from ..dependencies import get_task_manager
            tm = get_task_manager()
            log = context.logger if context else app_logger
            # NOTE(review): superset_log is assigned but not referenced below — confirm before removing.
            superset_log = log.with_source("superset_api") if context else log
            migration_log = log.with_source("migration") if context else log
            log.info("Starting migration task.")
            try:
                config_manager = get_config_manager()
                environments = config_manager.get_environments()
                # Resolve environments: prefer lookup by id, fall back to lookup by name.
                src_env = next((e for e in environments if e.id == source_env_id), None) if source_env_id else next((e for e in environments if e.name == from_env_name), None)
                tgt_env = next((e for e in environments if e.id == target_env_id), None) if target_env_id else next((e for e in environments if e.name == to_env_name), None)
                if not src_env or not tgt_env:
                    app_logger.explore("Environment resolution failed", extra={"src": source_env_id or from_env_name, "tgt": target_env_id or to_env_name})
                    raise ValueError(f"Could not resolve source or target environment. Source: {source_env_id or from_env_name}, Target: {target_env_id or to_env_name}")
                from_env_name = src_env.name
                to_env_name = tgt_env.name
                app_logger.reason("Environments resolved successfully", extra={"from": from_env_name, "to": to_env_name})
                # Canonical result envelope returned to the caller in all non-fatal paths.
                migration_result = {
                    "status": "SUCCESS",
                    "source_environment": from_env_name,
                    "target_environment": to_env_name,
                    "selected_dashboards": 0,
                    "migrated_dashboards": [],
                    "failed_dashboards": [],
                    "mapping_count": 0
                }
                from_c = SupersetClient(src_env)
                to_c = SupersetClient(tgt_env)
                if not from_c or not to_c:
                    raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
                _, all_dashboards = from_c.get_dashboards()
                # Selection Logic: explicit IDs win over regex; neither -> NO_SELECTION.
                if selected_ids:
                    dashboards_to_migrate = [d for d in all_dashboards if d["id"] in selected_ids]
                elif dashboard_regex:
                    regex_pattern = re.compile(str(dashboard_regex), re.IGNORECASE)
                    dashboards_to_migrate = [d for d in all_dashboards if regex_pattern.search(d.get("dashboard_title", ""))]
                else:
                    app_logger.explore("No deterministic selection criteria provided")
                    migration_result["status"] = "NO_SELECTION"
                    return migration_result
                if not dashboards_to_migrate:
                    app_logger.explore("Zero dashboards match selection criteria")
                    migration_result["status"] = "NO_MATCHES"
                    return migration_result
                migration_result["selected_dashboards"] = len(dashboards_to_migrate)
                # Database Mapping Resolution: request payload mappings override stored ones.
                db_mapping = params.get("db_mappings", {})
                if not isinstance(db_mapping, dict):
                    db_mapping = {}
                if replace_db_config:
                    app_logger.reason("Fetching environment DB mappings from catalog")
                    db = SessionLocal()
                    try:
                        src_env_db = db.query(Environment).filter(Environment.name == from_env_name).first()
                        tgt_env_db = db.query(Environment).filter(Environment.name == to_env_name).first()
                        if src_env_db and tgt_env_db:
                            stored_mappings = db.query(DatabaseMapping).filter(
                                DatabaseMapping.source_env_id == src_env_db.id,
                                DatabaseMapping.target_env_id == tgt_env_db.id
                            ).all()
                            stored_map_dict = {m.source_db_uuid: m.target_db_uuid for m in stored_mappings}
                            # Caller-supplied mappings take precedence over the stored catalog.
                            stored_map_dict.update(db_mapping)
                            db_mapping = stored_map_dict
                            log.info(f"Loaded {len(stored_mappings)} database mappings from database.")
                    finally:
                        db.close()
                migration_result["mapping_count"] = len(db_mapping)
                engine = MigrationEngine()
                # Migration Loop: each dashboard is exported, transformed, and imported independently;
                # one failure does not abort the remaining dashboards.
                for dash in dashboards_to_migrate:
                    dash_id, dash_slug, title = dash["id"], dash.get("slug"), dash["dashboard_title"]
                    app_logger.reason(f"Starting pipeline for dashboard '{title}'", extra={"dash_id": dash_id})
                    try:
                        exported_content, _ = from_c.export_dashboard(dash_id)
                        with create_temp_file(content=exported_content, dry_run=True, suffix=".zip") as tmp_zip_path:
                            with create_temp_file(suffix=".zip", dry_run=True) as tmp_new_zip:
                                success = engine.transform_zip(
                                    str(tmp_zip_path),
                                    str(tmp_new_zip),
                                    db_mapping,
                                    strip_databases=False,
                                    target_env_id=tgt_env.id if tgt_env else None,
                                    fix_cross_filters=fix_cross_filters
                                )
                                if not success and replace_db_config:
                                    # Transform blocked by a missing mapping: pause the task,
                                    # wait for the user to add mappings, then reload and retry.
                                    if task_id:
                                        app_logger.explore("Missing mapping blocks AST transform. Pausing task for user intervention.", extra={"task_id": task_id})
                                        await tm.wait_for_resolution(task_id)
                                        app_logger.reason("Task resumed, re-evaluating mapping states")
                                        db = SessionLocal()
                                        try:
                                            src_env_rt = db.query(Environment).filter(Environment.name == from_env_name).first()
                                            tgt_env_rt = db.query(Environment).filter(Environment.name == to_env_name).first()
                                            mappings = db.query(DatabaseMapping).filter(
                                                DatabaseMapping.source_env_id == src_env_rt.id,
                                                DatabaseMapping.target_env_id == tgt_env_rt.id
                                            ).all()
                                            db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
                                        finally:
                                            db.close()
                                        success = engine.transform_zip(
                                            str(tmp_zip_path),
                                            str(tmp_new_zip),
                                            db_mapping,
                                            strip_databases=False,
                                            target_env_id=tgt_env.id if tgt_env else None,
                                            fix_cross_filters=fix_cross_filters
                                        )
                                if success:
                                    app_logger.reason("Pushing transformed ZIP to target Superset")
                                    to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
                                    migration_result["migrated_dashboards"].append({"id": dash_id, "title": title})
                                    app_logger.reflect("Import successful", extra={"title": title})
                                else:
                                    app_logger.explore("Transformation strictly failed, bypassing ingestion")
                                    migration_log.error(f"Failed to transform ZIP for dashboard {title}")
                                    migration_result["failed_dashboards"].append({
                                        "id": dash_id, "title": title, "error": "Failed to transform ZIP"
                                    })
                    except Exception as exc:
                        error_msg = str(exc)
                        # Special case: Superset refuses import when a bundled database
                        # definition lacks a password — escalate to the UI and retry.
                        if "Must provide a password for the database" in error_msg:
                            db_name = "unknown"
                            # Try to extract the DB name from either known error format.
                            match = re.search(r"databases/([^.]+)\.yaml", error_msg)
                            if match:
                                db_name = match.group(1)
                            else:
                                match_alt = re.search(r"database '([^']+)'", error_msg)
                                if match_alt:
                                    db_name = match_alt.group(1)
                            app_logger.explore(f"Missing DB password detected during ingestion. Escalating to UI.", extra={"db_name": db_name})
                            if task_id:
                                tm.await_input(task_id, {
                                    "type": "database_password",
                                    "databases": [db_name],
                                    "error_message": error_msg
                                })
                                await tm.wait_for_input(task_id)
                                task = tm.get_task(task_id)
                                passwords = task.params.get("passwords", {})
                                if passwords:
                                    app_logger.reason(f"Retrying import for {title} with injected credentials")
                                    # NOTE(review): tmp_new_zip is referenced here after its with-block
                                    # may have exited; confirm create_temp_file(dry_run=True) leaves
                                    # the file on disk at this point.
                                    to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug, passwords=passwords)
                                    migration_result["migrated_dashboards"].append({"id": dash_id, "title": title})
                                    app_logger.reflect("Password injection unblocked import")
                                    # Scrub credentials from the persisted task params after use.
                                    if "passwords" in task.params:
                                        del task.params["passwords"]
                                    continue
                        app_logger.explore(f"Catastrophic dashboard ingestion failure: {exc}")
                        migration_result["failed_dashboards"].append({"id": dash_id, "title": title, "error": str(exc)})
                if migration_result["failed_dashboards"]:
                    migration_result["status"] = "PARTIAL_SUCCESS"
                # Post-Migration ID Mapping Synchronization (best-effort; failure only degrades mapping state).
                try:
                    app_logger.reason("Executing incremental ID catalog sync on target")
                    db_session = SessionLocal()
                    mapping_service = IdMappingService(db_session)
                    mapping_service.sync_environment(tgt_env.id, to_c, incremental=True)
                    db_session.close()
                    app_logger.reflect("Incremental catalog sync closed out cleanly")
                except Exception as sync_exc:
                    app_logger.explore(f"ID Mapping sync failed, mapping state might be degraded: {sync_exc}")
                app_logger.reflect("Migration cycle fully resolved", extra={"result": migration_result})
                return migration_result
            except Exception as e:
                app_logger.explore(f"Fatal plugin failure: {e}", exc_info=True)
                raise e
    # [/DEF:execute:Function]
# [/DEF:MigrationPlugin:Class]
# [DEF:MigrationPlugin:Module]
# @COMPLEXITY: 5
# @SEMANTICS: migration, superset, automation, dashboard, plugin, transformation
# @PURPOSE: Orchestrates export, DB-mapping transformation, and import of Superset dashboards across environments.
# @LAYER: App
# @RELATION: IMPLEMENTS -> PluginBase
# @RELATION: DEPENDS_ON -> SupersetClient
# @RELATION: DEPENDS_ON -> MigrationEngine
# @RELATION: DEPENDS_ON -> IdMappingService
# @RELATION: USES -> TaskContext
# @INVARIANT: Dashboards must never be imported with unmapped/source DB connections to prevent data leaks or cross-environment pollution.
# NOTE(review): indentation was flattened in this rendering; the block structure
# below was reconstructed from Python syntax — confirm against the original file.
from typing import Dict, Any, Optional
import re
from ..core.plugin_base import PluginBase
from ..core.logger import belief_scope, logger as app_logger
from ..core.superset_client import SupersetClient
from ..core.utils.fileio import create_temp_file
from ..dependencies import get_config_manager
from ..core.migration_engine import MigrationEngine
from ..core.database import SessionLocal
from ..models.mapping import DatabaseMapping, Environment
from ..core.mapping_service import IdMappingService
from ..core.task_manager.context import TaskContext
# [DEF:MigrationPlugin:Class]
# @PURPOSE: Implementation of the migration plugin workflow and transformation orchestration.
# @PRE: Plugin loader must register this instance.
# @POST: Provides migration UI schema and executes atomic dashboard transfers.
# @TEST_FIXTURE: superset_export_zip -> file:backend/tests/fixtures/migration/dashboard_export.zip
# @TEST_FIXTURE: db_mapping_payload -> INLINE_JSON: {"db_mappings": {"source_uuid_1": "target_uuid_2"}}
# @TEST_FIXTURE: password_inject_payload -> INLINE_JSON: {"passwords": {"PostgreSQL": "secret123"}}
# @TEST_INVARIANT: strict_db_isolation -> VERIFIED_BY: [successful_dashboard_transfer, missing_mapping_resolution]
class MigrationPlugin(PluginBase):
    """
    A plugin to migrate Superset dashboards between environments.
    """
    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the migration plugin.
    # @PRE: None.
    # @POST: Returns stable string "superset-migration".
    # @RETURN: str
    def id(self) -> str:
        with belief_scope("MigrationPlugin.id"):
            return "superset-migration"
    # [/DEF:id:Function]
    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the plugin.
    # @PRE: None.
    # @POST: Returns "Superset Dashboard Migration".
    # @RETURN: str
    def name(self) -> str:
        with belief_scope("MigrationPlugin.name"):
            return "Superset Dashboard Migration"
    # [/DEF:name:Function]
    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns the semantic description of the plugin.
    # @PRE: None.
    # @POST: Returns description string.
    # @RETURN: str
    def description(self) -> str:
        with belief_scope("MigrationPlugin.description"):
            return "Migrates dashboards between Superset environments."
    # [/DEF:description:Function]
    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the semantic version of the migration plugin.
    # @PRE: None.
    # @POST: Returns "1.0.0".
    # @RETURN: str
    def version(self) -> str:
        with belief_scope("MigrationPlugin.version"):
            return "1.0.0"
    # [/DEF:version:Function]
    @property
    # [DEF:ui_route:Function]
    # @PURPOSE: Returns the frontend routing anchor for the plugin.
    # @PRE: None.
    # @POST: Returns "/migration".
    # @RETURN: str
    def ui_route(self) -> str:
        with belief_scope("MigrationPlugin.ui_route"):
            return "/migration"
    # [/DEF:ui_route:Function]
    # [DEF:get_schema:Function]
    # @PURPOSE: Generates the JSON Schema for the plugin execution form dynamically.
    # @PRE: ConfigManager is accessible and environments are defined.
    # @POST: Returns a JSON Schema dict matching current system environments.
    # @RETURN: Dict[str, Any]
    def get_schema(self) -> Dict[str, Any]:
        with belief_scope("MigrationPlugin.get_schema"):
            app_logger.reason("Generating migration UI schema")
            config_manager = get_config_manager()
            envs = [e.name for e in config_manager.get_environments()]
            # Fall back to a static ["dev", "prod"] enum when no environments are configured.
            schema = {
                "type": "object",
                "properties": {
                    "from_env": {
                        "type": "string",
                        "title": "Source Environment",
                        "description": "The environment to migrate from.",
                        "enum": envs if envs else ["dev", "prod"],
                    },
                    "to_env": {
                        "type": "string",
                        "title": "Target Environment",
                        "description": "The environment to migrate to.",
                        "enum": envs if envs else ["dev", "prod"],
                    },
                    "dashboard_regex": {
                        "type": "string",
                        "title": "Dashboard Regex",
                        "description": "A regular expression to filter dashboards to migrate.",
                    },
                    "replace_db_config": {
                        "type": "boolean",
                        "title": "Replace DB Config",
                        "description": "Whether to replace the database configuration.",
                        "default": False,
                    },
                    "from_db_id": {
                        "type": "integer",
                        "title": "Source DB ID",
                        "description": "The ID of the source database to replace (if replacing).",
                    },
                    "to_db_id": {
                        "type": "integer",
                        "title": "Target DB ID",
                        "description": "The ID of the target database to replace with (if replacing).",
                    },
                },
                "required": ["from_env", "to_env", "dashboard_regex"],
            }
            app_logger.reflect("Schema generated successfully", extra={"environments_count": len(envs)})
            return schema
    # [/DEF:get_schema:Function]
    # [DEF:execute:Function]
    # @PURPOSE: Orchestrates the dashboard migration pipeline including extraction, AST mutation, and ingestion.
    # @PARAM: params (Dict[str, Any]) - Extracted parameters from UI/API execution request.
    # @PARAM: context (Optional[TaskContext]) - Dependency injected TaskContext for IO tracing.
    # @PRE: Source and target environments must resolve. Matching dashboards must exist.
    # @POST: Dashboard ZIP bundles are transformed and imported. ID mappings are synchronized.
    # @SIDE_EFFECT: Creates temp files, mutates target Superset state, blocks on user input (passwords/mappings).
    # @TEST_CONTRACT: Dict[str, Any] -> Dict[str, Any]
    # @TEST_SCENARIO: successful_dashboard_transfer -> ZIP is downloaded, DB mappings applied via AST, target import succeeds.
    # @TEST_SCENARIO: missing_password_injection -> Target import fails on auth, TaskManager pauses for user input, retries with password successfully.
    # @TEST_SCENARIO: empty_selection -> Returns NO_MATCHES gracefully when regex finds zero dashboards.
    # @TEST_EDGE: missing_env_field -> [ValueError: Could not resolve source or target environment]
    # @TEST_EDGE: invalid_regex_pattern -> [Regex compilation exception is thrown or caught gracefully]
    # @TEST_EDGE: target_api_timeout -> [Dashboard added to failed_dashboards, task concludes with PARTIAL_SUCCESS]
    async def execute(self, params: Dict[str, Any], context: Optional[TaskContext] = None):
        with belief_scope("MigrationPlugin.execute"):
            app_logger.reason("Evaluating migration task parameters", extra={"params": params})
            # Explicit IDs take precedence over env names; "_task_id" links this run
            # to the TaskManager for pause/resume interactions.
            source_env_id = params.get("source_env_id")
            target_env_id = params.get("target_env_id")
            selected_ids = params.get("selected_ids")
            from_env_name = params.get("from_env")
            to_env_name = params.get("to_env")
            dashboard_regex = params.get("dashboard_regex")
            replace_db_config = params.get("replace_db_config", False)
            fix_cross_filters = params.get("fix_cross_filters", True)
            task_id = params.get("_task_id")
            # Local import — presumably to avoid a circular dependency; confirm.
            from ..dependencies import get_task_manager
            tm = get_task_manager()
            log = context.logger if context else app_logger
            # NOTE(review): superset_log is assigned but not referenced below — confirm before removing.
            superset_log = log.with_source("superset_api") if context else log
            migration_log = log.with_source("migration") if context else log
            log.info("Starting migration task.")
            try:
                config_manager = get_config_manager()
                environments = config_manager.get_environments()
                # Resolve environments: prefer lookup by id, fall back to lookup by name.
                src_env = next((e for e in environments if e.id == source_env_id), None) if source_env_id else next((e for e in environments if e.name == from_env_name), None)
                tgt_env = next((e for e in environments if e.id == target_env_id), None) if target_env_id else next((e for e in environments if e.name == to_env_name), None)
                if not src_env or not tgt_env:
                    app_logger.explore("Environment resolution failed", extra={"src": source_env_id or from_env_name, "tgt": target_env_id or to_env_name})
                    raise ValueError(f"Could not resolve source or target environment. Source: {source_env_id or from_env_name}, Target: {target_env_id or to_env_name}")
                from_env_name = src_env.name
                to_env_name = tgt_env.name
                app_logger.reason("Environments resolved successfully", extra={"from": from_env_name, "to": to_env_name})
                # Canonical result envelope returned to the caller in all non-fatal paths.
                migration_result = {
                    "status": "SUCCESS",
                    "source_environment": from_env_name,
                    "target_environment": to_env_name,
                    "selected_dashboards": 0,
                    "migrated_dashboards": [],
                    "failed_dashboards": [],
                    "mapping_count": 0
                }
                from_c = SupersetClient(src_env)
                to_c = SupersetClient(tgt_env)
                if not from_c or not to_c:
                    raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
                _, all_dashboards = from_c.get_dashboards()
                # Selection Logic: explicit IDs win over regex; neither -> NO_SELECTION.
                if selected_ids:
                    dashboards_to_migrate = [d for d in all_dashboards if d["id"] in selected_ids]
                elif dashboard_regex:
                    regex_pattern = re.compile(str(dashboard_regex), re.IGNORECASE)
                    dashboards_to_migrate = [d for d in all_dashboards if regex_pattern.search(d.get("dashboard_title", ""))]
                else:
                    app_logger.explore("No deterministic selection criteria provided")
                    migration_result["status"] = "NO_SELECTION"
                    return migration_result
                if not dashboards_to_migrate:
                    app_logger.explore("Zero dashboards match selection criteria")
                    migration_result["status"] = "NO_MATCHES"
                    return migration_result
                migration_result["selected_dashboards"] = len(dashboards_to_migrate)
                # Database Mapping Resolution: request payload mappings override stored ones.
                db_mapping = params.get("db_mappings", {})
                if not isinstance(db_mapping, dict):
                    db_mapping = {}
                if replace_db_config:
                    app_logger.reason("Fetching environment DB mappings from catalog")
                    db = SessionLocal()
                    try:
                        src_env_db = db.query(Environment).filter(Environment.name == from_env_name).first()
                        tgt_env_db = db.query(Environment).filter(Environment.name == to_env_name).first()
                        if src_env_db and tgt_env_db:
                            stored_mappings = db.query(DatabaseMapping).filter(
                                DatabaseMapping.source_env_id == src_env_db.id,
                                DatabaseMapping.target_env_id == tgt_env_db.id
                            ).all()
                            stored_map_dict = {m.source_db_uuid: m.target_db_uuid for m in stored_mappings}
                            # Caller-supplied mappings take precedence over the stored catalog.
                            stored_map_dict.update(db_mapping)
                            db_mapping = stored_map_dict
                            log.info(f"Loaded {len(stored_mappings)} database mappings from database.")
                    finally:
                        db.close()
                migration_result["mapping_count"] = len(db_mapping)
                engine = MigrationEngine()
                # Migration Loop: each dashboard is exported, transformed, and imported independently;
                # one failure does not abort the remaining dashboards.
                for dash in dashboards_to_migrate:
                    dash_id, dash_slug, title = dash["id"], dash.get("slug"), dash["dashboard_title"]
                    app_logger.reason(f"Starting pipeline for dashboard '{title}'", extra={"dash_id": dash_id})
                    try:
                        exported_content, _ = from_c.export_dashboard(dash_id)
                        with create_temp_file(content=exported_content, dry_run=True, suffix=".zip") as tmp_zip_path:
                            with create_temp_file(suffix=".zip", dry_run=True) as tmp_new_zip:
                                success = engine.transform_zip(
                                    str(tmp_zip_path),
                                    str(tmp_new_zip),
                                    db_mapping,
                                    strip_databases=False,
                                    target_env_id=tgt_env.id if tgt_env else None,
                                    fix_cross_filters=fix_cross_filters
                                )
                                if not success and replace_db_config:
                                    # Transform blocked by a missing mapping: pause the task,
                                    # wait for the user to add mappings, then reload and retry.
                                    if task_id:
                                        app_logger.explore("Missing mapping blocks AST transform. Pausing task for user intervention.", extra={"task_id": task_id})
                                        await tm.wait_for_resolution(task_id)
                                        app_logger.reason("Task resumed, re-evaluating mapping states")
                                        db = SessionLocal()
                                        try:
                                            src_env_rt = db.query(Environment).filter(Environment.name == from_env_name).first()
                                            tgt_env_rt = db.query(Environment).filter(Environment.name == to_env_name).first()
                                            mappings = db.query(DatabaseMapping).filter(
                                                DatabaseMapping.source_env_id == src_env_rt.id,
                                                DatabaseMapping.target_env_id == tgt_env_rt.id
                                            ).all()
                                            db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
                                        finally:
                                            db.close()
                                        success = engine.transform_zip(
                                            str(tmp_zip_path),
                                            str(tmp_new_zip),
                                            db_mapping,
                                            strip_databases=False,
                                            target_env_id=tgt_env.id if tgt_env else None,
                                            fix_cross_filters=fix_cross_filters
                                        )
                                if success:
                                    app_logger.reason("Pushing transformed ZIP to target Superset")
                                    to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
                                    migration_result["migrated_dashboards"].append({"id": dash_id, "title": title})
                                    app_logger.reflect("Import successful", extra={"title": title})
                                else:
                                    app_logger.explore("Transformation strictly failed, bypassing ingestion")
                                    migration_log.error(f"Failed to transform ZIP for dashboard {title}")
                                    migration_result["failed_dashboards"].append({
                                        "id": dash_id, "title": title, "error": "Failed to transform ZIP"
                                    })
                    except Exception as exc:
                        error_msg = str(exc)
                        # Special case: Superset refuses import when a bundled database
                        # definition lacks a password — escalate to the UI and retry.
                        if "Must provide a password for the database" in error_msg:
                            db_name = "unknown"
                            # Try to extract the DB name from either known error format.
                            match = re.search(r"databases/([^.]+)\.yaml", error_msg)
                            if match:
                                db_name = match.group(1)
                            else:
                                match_alt = re.search(r"database '([^']+)'", error_msg)
                                if match_alt:
                                    db_name = match_alt.group(1)
                            app_logger.explore(f"Missing DB password detected during ingestion. Escalating to UI.", extra={"db_name": db_name})
                            if task_id:
                                tm.await_input(task_id, {
                                    "type": "database_password",
                                    "databases": [db_name],
                                    "error_message": error_msg
                                })
                                await tm.wait_for_input(task_id)
                                task = tm.get_task(task_id)
                                passwords = task.params.get("passwords", {})
                                if passwords:
                                    app_logger.reason(f"Retrying import for {title} with injected credentials")
                                    # NOTE(review): tmp_new_zip is referenced here after its with-block
                                    # may have exited; confirm create_temp_file(dry_run=True) leaves
                                    # the file on disk at this point.
                                    to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug, passwords=passwords)
                                    migration_result["migrated_dashboards"].append({"id": dash_id, "title": title})
                                    app_logger.reflect("Password injection unblocked import")
                                    # Scrub credentials from the persisted task params after use.
                                    if "passwords" in task.params:
                                        del task.params["passwords"]
                                    continue
                        app_logger.explore(f"Catastrophic dashboard ingestion failure: {exc}")
                        migration_result["failed_dashboards"].append({"id": dash_id, "title": title, "error": str(exc)})
                if migration_result["failed_dashboards"]:
                    migration_result["status"] = "PARTIAL_SUCCESS"
                # Post-Migration ID Mapping Synchronization (best-effort; failure only degrades mapping state).
                try:
                    app_logger.reason("Executing incremental ID catalog sync on target")
                    db_session = SessionLocal()
                    mapping_service = IdMappingService(db_session)
                    mapping_service.sync_environment(tgt_env.id, to_c, incremental=True)
                    db_session.close()
                    app_logger.reflect("Incremental catalog sync closed out cleanly")
                except Exception as sync_exc:
                    app_logger.explore(f"ID Mapping sync failed, mapping state might be degraded: {sync_exc}")
                app_logger.reflect("Migration cycle fully resolved", extra={"result": migration_result})
                return migration_result
            except Exception as e:
                app_logger.explore(f"Fatal plugin failure: {e}", exc_info=True)
                raise e
    # [/DEF:execute:Function]
# [/DEF:MigrationPlugin:Class]
# [/DEF:MigrationPlugin:Module]

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.schemas.__tests__.test_settings_and_health_schemas:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @PURPOSE: Regression tests for settings and health schema contracts updated in 026 fix batch.
import pytest

View File

@@ -1,6 +1,6 @@
# [DEF:backend.src.schemas.auth:Module]
#
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: auth, schemas, pydantic, user, token
# @PURPOSE: Pydantic schemas for authentication requests and responses.
# @LAYER: API
@@ -15,7 +15,7 @@ from datetime import datetime
# [/SECTION]
# [DEF:Token:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Represents a JWT access token response.
class Token(BaseModel):
access_token: str
@@ -23,7 +23,7 @@ class Token(BaseModel):
# [/DEF:Token:Class]
# [DEF:TokenData:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Represents the data encoded in a JWT token.
class TokenData(BaseModel):
username: Optional[str] = None
@@ -31,7 +31,7 @@ class TokenData(BaseModel):
# [/DEF:TokenData:Class]
# [DEF:PermissionSchema:Class]
# @TIER: TRIVIAL
# @COMPLEXITY: 1
# @PURPOSE: Represents a permission in API responses.
class PermissionSchema(BaseModel):
id: Optional[str] = None

View File

@@ -1,5 +1,5 @@
# [DEF:backend.src.schemas.health:Module]
# @TIER: STANDARD
# @COMPLEXITY: 3
# @SEMANTICS: health, schemas, pydantic
# @PURPOSE: Pydantic schemas for dashboard health summary.
# @LAYER: Domain

Some files were not shown because too many files have changed in this diff Show More