From 80ce8fe1504a750c4473a547de3576742605ee7b Mon Sep 17 00:00:00 2001 From: busya Date: Fri, 20 Mar 2026 20:01:58 +0300 Subject: [PATCH] semantics --- .ai/structure/MODULE_MAP.md | 1587 ------ .ai/structure/PROJECT_MAP.md | 4299 ----------------- .kilo/agent/subagent-orchestrator.md | 2 +- backend/src/api/routes/migration.py | 214 +- .../test_superset_preview_pipeline.py | 97 +- backend/src/core/async_superset_client.py | 391 +- backend/src/core/database.py | 75 +- backend/src/core/superset_client.py | 1045 ++-- .../core/utils/superset_context_extractor.py | 310 +- backend/src/models/filter_state.py | 4 +- .../services/dataset_review/orchestrator.py | 286 +- frontend/src/lib/api.js | 1 + 12 files changed, 1734 insertions(+), 6577 deletions(-) delete mode 100644 .ai/structure/MODULE_MAP.md delete mode 100644 .ai/structure/PROJECT_MAP.md diff --git a/.ai/structure/MODULE_MAP.md b/.ai/structure/MODULE_MAP.md deleted file mode 100644 index f7ee076b..00000000 --- a/.ai/structure/MODULE_MAP.md +++ /dev/null @@ -1,1587 +0,0 @@ -# Module Map - -> High-level module structure for AI Context. Generated automatically. - -**Generated:** 2026-02-25T20:19:23.587354 - -## Summary - -- **Total Modules:** 77 -- **Total Entities:** 1811 - -## Module Hierarchy - -### πŸ“ `backend/` - -- πŸ—οΈ **Layers:** Unknown, Utility -- πŸ“Š **Tiers:** STANDARD: 2, TRIVIAL: 2 -- πŸ“„ **Files:** 2 -- πŸ“¦ **Entities:** 4 - -**Key Entities:** - - - πŸ“¦ **backend.delete_running_tasks** (Module) - - Script to delete tasks with RUNNING status from the database... - - πŸ“¦ **test_auth_debug** (Module) `[TRIVIAL]` - - Auto-generated module for backend/test_auth_debug.py - - ### πŸ“ `src/` - - - πŸ—οΈ **Layers:** API, Core, UI (API) - - πŸ“Š **Tiers:** CRITICAL: 2, STANDARD: 19, TRIVIAL: 2 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 23 - - **Key Entities:** - - - πŸ“¦ **AppModule** (Module) `[CRITICAL]` - - The main entry point for the FastAPI application. It initial... 
- - πŸ“¦ **Dependencies** (Module) - - Manages creation and provision of shared application depende... - - ### πŸ“ `api/` - - - πŸ—οΈ **Layers:** API - - πŸ“Š **Tiers:** STANDARD: 7 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 7 - - **Key Entities:** - - - πŸ“¦ **backend.src.api.auth** (Module) - - Authentication API endpoints. - - ### πŸ“ `routes/` - - - πŸ—οΈ **Layers:** API, UI (API) - - πŸ“Š **Tiers:** CRITICAL: 2, STANDARD: 187, TRIVIAL: 5 - - πŸ“„ **Files:** 17 - - πŸ“¦ **Entities:** 194 - - **Key Entities:** - - - β„‚ **AssistantAction** (Class) `[TRIVIAL]` - - UI action descriptor returned with assistant responses. - - β„‚ **AssistantMessageRequest** (Class) `[TRIVIAL]` - - Input payload for assistant message endpoint. - - β„‚ **AssistantMessageResponse** (Class) - - Output payload contract for assistant interaction endpoints. - - β„‚ **BranchCheckout** (Class) - - Schema for branch checkout requests. - - β„‚ **BranchCreate** (Class) - - Schema for branch creation requests. - - β„‚ **BranchSchema** (Class) - - Schema for representing a Git branch metadata. - - β„‚ **CommitCreate** (Class) - - Schema for staging and committing changes. - - β„‚ **CommitSchema** (Class) - - Schema for representing Git commit details. - - β„‚ **ConfirmationRecord** (Class) - - In-memory confirmation token model for risky operation dispa... - - β„‚ **ConflictResolution** (Class) - - Schema for resolving merge conflicts. 
- - **Dependencies:** - - - πŸ”— DEPENDS_ON -> ConfigManager - - πŸ”— DEPENDS_ON -> ConfigModels - - πŸ”— DEPENDS_ON -> backend.src.core.database - - πŸ”— DEPENDS_ON -> backend.src.core.superset_client - - πŸ”— DEPENDS_ON -> backend.src.core.task_manager - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** API, Domain (Tests), UI (API Tests) - - πŸ“Š **Tiers:** CRITICAL: 3, STANDARD: 36, TRIVIAL: 98 - - πŸ“„ **Files:** 8 - - πŸ“¦ **Entities:** 137 - - **Key Entities:** - - - β„‚ **_FakeConfigManager** (Class) `[TRIVIAL]` - - Provide deterministic environment aliases required by intent... - - β„‚ **_FakeConfigManager** (Class) `[TRIVIAL]` - - Environment config fixture with dev/prod aliases for parser ... - - β„‚ **_FakeDb** (Class) `[TRIVIAL]` - - In-memory session substitute for assistant route persistence... - - β„‚ **_FakeDb** (Class) `[TRIVIAL]` - - In-memory fake database implementing subset of Session inter... - - β„‚ **_FakeQuery** (Class) `[TRIVIAL]` - - Minimal chainable query object for fake DB interactions. - - β„‚ **_FakeQuery** (Class) `[TRIVIAL]` - - Minimal chainable query object for fake SQLAlchemy-like DB b... - - β„‚ **_FakeTask** (Class) `[TRIVIAL]` - - Lightweight task model used for assistant authz tests. - - β„‚ **_FakeTask** (Class) `[TRIVIAL]` - - Lightweight task stub used by assistant API tests. - - β„‚ **_FakeTaskManager** (Class) `[TRIVIAL]` - - Minimal task manager for deterministic operation creation an... - - β„‚ **_FakeTaskManager** (Class) `[TRIVIAL]` - - Minimal async-compatible TaskManager fixture for determinist... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.src.api.routes.assistant - - ### πŸ“ `core/` - - - πŸ—οΈ **Layers:** Core - - πŸ“Š **Tiers:** CRITICAL: 2, STANDARD: 125, TRIVIAL: 8 - - πŸ“„ **Files:** 10 - - πŸ“¦ **Entities:** 135 - - **Key Entities:** - - - β„‚ **AuthSessionLocal** (Class) `[TRIVIAL]` - - A session factory for the authentication database. 
- - β„‚ **BeliefFormatter** (Class) - - Custom logging formatter that adds belief state prefixes to ... - - β„‚ **ConfigManager** (Class) - - A class to handle application configuration persistence and ... - - β„‚ **IdMappingService** (Class) `[CRITICAL]` - - Service handling the cataloging and retrieval of remote Supe... - - β„‚ **LogEntry** (Class) - - A Pydantic model representing a single, structured log entry... - - β„‚ **MigrationEngine** (Class) - - Engine for transforming Superset export ZIPs. - - β„‚ **PluginBase** (Class) - - Defines the abstract base class that all plugins must implem... - - β„‚ **PluginConfig** (Class) - - A Pydantic model used to represent the validated configurati... - - β„‚ **PluginLoader** (Class) - - Scans a specified directory for Python modules, dynamically ... - - β„‚ **SchedulerService** (Class) - - Provides a service to manage scheduled backup tasks. - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> AppConfigRecord - - πŸ”— DEPENDS_ON -> ConfigModels - - πŸ”— DEPENDS_ON -> PyYAML - - πŸ”— DEPENDS_ON -> backend.src.core.auth.config - - πŸ”— DEPENDS_ON -> backend.src.core.logger - - ### πŸ“ `auth/` - - - πŸ—οΈ **Layers:** Core - - πŸ“Š **Tiers:** STANDARD: 26 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 26 - - **Key Entities:** - - - β„‚ **AuthConfig** (Class) - - Holds authentication-related settings. - - β„‚ **AuthRepository** (Class) - - Encapsulates database operations for authentication. - - πŸ“¦ **backend.src.core.auth.config** (Module) - - Centralized configuration for authentication and authorizati... - - πŸ“¦ **backend.src.core.auth.jwt** (Module) - - JWT token generation and validation logic. - - πŸ“¦ **backend.src.core.auth.logger** (Module) - - Audit logging for security-related events. - - πŸ“¦ **backend.src.core.auth.oauth** (Module) - - ADFS OIDC configuration and client using Authlib. - - πŸ“¦ **backend.src.core.auth.repository** (Module) - - Data access layer for authentication-related entities. 
- - πŸ“¦ **backend.src.core.auth.security** (Module) - - Utility for password hashing and verification using Passlib. - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> authlib - - πŸ”— DEPENDS_ON -> jose - - πŸ”— DEPENDS_ON -> passlib - - πŸ”— DEPENDS_ON -> pydantic - - πŸ”— DEPENDS_ON -> sqlalchemy - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Domain - - πŸ“Š **Tiers:** STANDARD: 1, TRIVIAL: 9 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 10 - - **Key Entities:** - - - πŸ“¦ **test_auth** (Module) - - Unit tests for authentication module - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Infra - - πŸ“Š **Tiers:** STANDARD: 11, TRIVIAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 12 - - **Key Entities:** - - - πŸ“¦ **test_logger** (Module) - - Unit tests for logger module - - ### πŸ“ `task_manager/` - - - πŸ—οΈ **Layers:** Core - - πŸ“Š **Tiers:** CRITICAL: 10, STANDARD: 63, TRIVIAL: 5 - - πŸ“„ **Files:** 7 - - πŸ“¦ **Entities:** 78 - - **Key Entities:** - - - β„‚ **LogEntry** (Class) `[CRITICAL]` - - A Pydantic model representing a single, structured log entry... - - β„‚ **LogFilter** (Class) - - Filter parameters for querying task logs. - - β„‚ **LogStats** (Class) - - Statistics about log entries for a task. - - β„‚ **Task** (Class) - - A Pydantic model representing a single execution instance of... - - β„‚ **TaskCleanupService** (Class) - - Provides methods to clean up old task records and their asso... - - β„‚ **TaskContext** (Class) `[CRITICAL]` - - A container passed to plugin.execute() providing the logger ... - - β„‚ **TaskLog** (Class) - - A Pydantic model representing a persisted log entry from the... - - β„‚ **TaskLogPersistenceService** (Class) `[CRITICAL]` - - Provides methods to save and query task logs from the task_l... - - β„‚ **TaskLogger** (Class) `[CRITICAL]` - - A wrapper around TaskManager._add_log that carries task_id a... - - β„‚ **TaskManager** (Class) `[CRITICAL]` - - Manages the lifecycle of tasks, including their creation, ex... 
- - **Dependencies:** - - - πŸ”— DEPENDS_ON -> TaskLogRecord - - πŸ”— DEPENDS_ON -> TaskLogger, USED_BY -> plugins - - πŸ”— DEPENDS_ON -> TaskManager, CALLS -> TaskManager._add_log - - ### πŸ“ `utils/` - - - πŸ—οΈ **Layers:** Core, Domain, Infra - - πŸ“Š **Tiers:** STANDARD: 48, TRIVIAL: 1 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 49 - - **Key Entities:** - - - β„‚ **APIClient** (Class) - - Π˜Π½ΠΊΠ°ΠΏΡΡƒΠ»ΠΈΡ€ΡƒΠ΅Ρ‚ HTTP-Π»ΠΎΠ³ΠΈΠΊΡƒ для Ρ€Π°Π±ΠΎΡ‚Ρ‹ с API, Π²ΠΊΠ»ΡŽΡ‡Π°Ρ сСссии, ... - - β„‚ **AuthenticationError** (Class) - - Exception raised when authentication fails. - - β„‚ **DashboardNotFoundError** (Class) - - Exception raised when a dashboard cannot be found. - - β„‚ **DatasetMapper** (Class) - - Класс для ΠΌΠ΅ΠΏΠΏΠΈΠ½Π³Π° ΠΈ обновлСния verbose_map Π² датасСтах Supe... - - β„‚ **InvalidZipFormatError** (Class) - - Exception raised when a file is not a valid ZIP archive. - - β„‚ **NetworkError** (Class) - - Exception raised when a network level error occurs. - - β„‚ **PermissionDeniedError** (Class) - - Exception raised when access is denied. - - β„‚ **SupersetAPIError** (Class) - - Base exception for all Superset API related errors. - - πŸ“¦ **backend.core.utils.dataset_mapper** (Module) - - Π­Ρ‚ΠΎΡ‚ ΠΌΠΎΠ΄ΡƒΠ»ΡŒ ΠΎΡ‚Π²Π΅Ρ‡Π°Π΅Ρ‚ Π·Π° ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠ΅ ΠΌΠ΅Ρ‚Π°Π΄Π°Π½Π½Ρ‹Ρ… (verbose_map) ... - - πŸ“¦ **backend.core.utils.fileio** (Module) - - ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ Π½Π°Π±ΠΎΡ€ ΡƒΡ‚ΠΈΠ»ΠΈΡ‚ для управлСния Ρ„Π°ΠΉΠ»ΠΎΠ²Ρ‹ΠΌΠΈ опСрация... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.core.superset_client - - πŸ”— DEPENDS_ON -> backend.src.core.logger - - πŸ”— DEPENDS_ON -> pandas - - πŸ”— DEPENDS_ON -> psycopg2 - - πŸ”— DEPENDS_ON -> pyyaml - - ### πŸ“ `models/` - - - πŸ—οΈ **Layers:** Domain, Model - - πŸ“Š **Tiers:** CRITICAL: 9, STANDARD: 22, TRIVIAL: 22 - - πŸ“„ **Files:** 11 - - πŸ“¦ **Entities:** 53 - - **Key Entities:** - - - β„‚ **ADGroupMapping** (Class) - - Maps an Active Directory group to a local System Role. 
- - β„‚ **AppConfigRecord** (Class) - - Stores the single source of truth for application configurat... - - β„‚ **AssistantAuditRecord** (Class) - - Store audit decisions and outcomes produced by assistant com... - - β„‚ **AssistantConfirmationRecord** (Class) - - Persist risky operation confirmation tokens with lifecycle s... - - β„‚ **AssistantMessageRecord** (Class) - - Persist chat history entries for assistant conversations. - - β„‚ **ConnectionConfig** (Class) `[TRIVIAL]` - - Stores credentials for external databases used for column ma... - - β„‚ **DashboardMetadata** (Class) `[TRIVIAL]` - - Represents a dashboard available for migration. - - β„‚ **DashboardSelection** (Class) `[TRIVIAL]` - - Represents the user's selection of dashboards to migrate. - - β„‚ **DatabaseMapping** (Class) - - Represents a mapping between source and target databases. - - β„‚ **DeploymentEnvironment** (Class) `[TRIVIAL]` - - Target Superset environments for dashboard deployment. - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> Role - - πŸ”— DEPENDS_ON -> TaskRecord - - πŸ”— DEPENDS_ON -> backend.src.core.task_manager.models - - πŸ”— DEPENDS_ON -> backend.src.models.mapping - - πŸ”— DEPENDS_ON -> sqlalchemy - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Domain - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 1, TRIVIAL: 27 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 29 - - **Key Entities:** - - - πŸ“¦ **test_models** (Module) `[TRIVIAL]` - - Unit tests for data models - - πŸ“¦ **test_report_models** (Module) `[CRITICAL]` - - Unit tests for report Pydantic models and their validators - - ### πŸ“ `plugins/` - - - πŸ—οΈ **Layers:** App, Plugin, Plugins - - πŸ“Š **Tiers:** STANDARD: 63 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 63 - - **Key Entities:** - - - β„‚ **BackupPlugin** (Class) - - Implementation of the backup plugin logic. - - β„‚ **DebugPlugin** (Class) - - Plugin for system diagnostics and debugging. 
- - β„‚ **GitPlugin** (Class) - - РСализация ΠΏΠ»Π°Π³ΠΈΠ½Π° Git Integration для управлСния вСрсиями Π΄... - - β„‚ **MapperPlugin** (Class) - - Plugin for mapping dataset columns verbose names. - - β„‚ **MigrationPlugin** (Class) - - Implementation of the migration plugin logic. - - β„‚ **SearchPlugin** (Class) - - Plugin for searching text patterns in Superset datasets. - - πŸ“¦ **BackupPlugin** (Module) - - A plugin that provides functionality to back up Superset das... - - πŸ“¦ **DebugPluginModule** (Module) - - Implements a plugin for system diagnostics and debugging Sup... - - πŸ“¦ **MapperPluginModule** (Module) - - Implements a plugin for mapping dataset columns using extern... - - πŸ“¦ **MigrationPlugin** (Module) - - A plugin that provides functionality to migrate Superset das... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> superset_tool.client - - πŸ”— DEPENDS_ON -> superset_tool.utils - - πŸ”— IMPLEMENTS -> PluginBase - - ### πŸ“ `git/` - - - πŸ—οΈ **Layers:** Unknown - - πŸ“Š **Tiers:** STANDARD: 2, TRIVIAL: 2 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 4 - - **Key Entities:** - - - β„‚ **GitLLMExtension** (Class) - - Provides LLM capabilities to the Git plugin. - - πŸ“¦ **llm_extension** (Module) `[TRIVIAL]` - - Auto-generated module for backend/src/plugins/git/llm_extens... - - ### πŸ“ `llm_analysis/` - - - πŸ—οΈ **Layers:** Unknown - - πŸ“Š **Tiers:** STANDARD: 19, TRIVIAL: 24 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 43 - - **Key Entities:** - - - β„‚ **DashboardValidationPlugin** (Class) - - Plugin for automated dashboard health analysis using LLMs. - - β„‚ **DetectedIssue** (Class) - - Model for a single issue detected during validation. - - β„‚ **DocumentationPlugin** (Class) - - Plugin for automated dataset documentation using LLMs. - - β„‚ **LLMClient** (Class) - - Wrapper for LLM provider APIs. - - β„‚ **LLMProviderConfig** (Class) - - Configuration for an LLM provider. 
- - β„‚ **LLMProviderType** (Class) - - Enum for supported LLM providers. - - β„‚ **ScreenshotService** (Class) - - Handles capturing screenshots of Superset dashboards. - - β„‚ **ValidationResult** (Class) - - Model for dashboard validation result. - - β„‚ **ValidationStatus** (Class) - - Enum for dashboard validation status. - - πŸ“¦ **plugin** (Module) `[TRIVIAL]` - - Auto-generated module for backend/src/plugins/llm_analysis/p... - - **Dependencies:** - - - πŸ”— IMPLEMENTS -> backend.src.core.plugin_base.PluginBase - - ### πŸ“ `storage/` - - - πŸ—οΈ **Layers:** App - - πŸ“Š **Tiers:** STANDARD: 18 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 18 - - **Key Entities:** - - - β„‚ **StoragePlugin** (Class) - - Implementation of the storage management plugin. - - πŸ“¦ **StoragePlugin** (Module) - - Provides core filesystem operations for managing backups and... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.src.models.storage - - πŸ”— IMPLEMENTS -> PluginBase - - ### πŸ“ `schemas/` - - - πŸ—οΈ **Layers:** API - - πŸ“Š **Tiers:** STANDARD: 10, TRIVIAL: 3 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 13 - - **Key Entities:** - - - β„‚ **ADGroupMappingCreate** (Class) - - Schema for creating an AD Group mapping. - - β„‚ **ADGroupMappingSchema** (Class) - - Represents an AD Group to Role mapping in API responses. - - β„‚ **PermissionSchema** (Class) `[TRIVIAL]` - - Represents a permission in API responses. - - β„‚ **RoleCreate** (Class) - - Schema for creating a new role. - - β„‚ **RoleSchema** (Class) - - Represents a role in API responses. - - β„‚ **RoleUpdate** (Class) - - Schema for updating an existing role. - - β„‚ **Token** (Class) `[TRIVIAL]` - - Represents a JWT access token response. - - β„‚ **TokenData** (Class) `[TRIVIAL]` - - Represents the data encoded in a JWT token. - - β„‚ **User** (Class) - - Schema for user data in API responses. - - β„‚ **UserBase** (Class) - - Base schema for user data. 
- - **Dependencies:** - - - πŸ”— DEPENDS_ON -> pydantic - - ### πŸ“ `scripts/` - - - πŸ—οΈ **Layers:** Scripts, Unknown - - πŸ“Š **Tiers:** STANDARD: 26, TRIVIAL: 2 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 28 - - **Key Entities:** - - - πŸ“¦ **backend.src.scripts.create_admin** (Module) - - CLI tool for creating the initial admin user. - - πŸ“¦ **backend.src.scripts.init_auth_db** (Module) - - Initializes the auth database and creates the necessary tabl... - - πŸ“¦ **backend.src.scripts.migrate_sqlite_to_postgres** (Module) - - Migrates legacy config and task history from SQLite/file sto... - - πŸ“¦ **backend.src.scripts.seed_permissions** (Module) - - Populates the auth database with initial system permissions. - - πŸ“¦ **backend.src.scripts.seed_superset_load_test** (Module) - - Creates randomized load-test data in Superset by cloning cha... - - πŸ“¦ **test_dataset_dashboard_relations** (Module) `[TRIVIAL]` - - Auto-generated module for backend/src/scripts/test_dataset_d... - - ### πŸ“ `services/` - - - πŸ—οΈ **Layers:** Core, Domain, Service - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 58, TRIVIAL: 5 - - πŸ“„ **Files:** 7 - - πŸ“¦ **Entities:** 64 - - **Key Entities:** - - - β„‚ **AuthService** (Class) - - Provides high-level authentication services. - - β„‚ **EncryptionManager** (Class) `[CRITICAL]` - - Handles encryption and decryption of sensitive data like API... - - β„‚ **GitService** (Class) - - Wrapper for GitPython operations with semantic logging and e... - - β„‚ **LLMProviderService** (Class) - - Service to manage LLM provider lifecycle. - - β„‚ **MappingService** (Class) - - Service for handling database mapping logic. - - β„‚ **ResourceService** (Class) - - Provides centralized access to resource data with enhanced m... - - πŸ“¦ **backend.src.services** (Module) - - Package initialization for services module - - πŸ“¦ **backend.src.services.auth_service** (Module) - - Orchestrates authentication business logic. 
- - πŸ“¦ **backend.src.services.git_service** (Module) - - Core Git logic using GitPython to manage dashboard repositor... - - πŸ“¦ **backend.src.services.llm_prompt_templates** (Module) - - Provide default LLM prompt templates and normalization helpe... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.src.core.config_manager - - πŸ”— DEPENDS_ON -> backend.src.core.database - - πŸ”— DEPENDS_ON -> backend.src.core.superset_client - - πŸ”— DEPENDS_ON -> backend.src.core.task_manager - - πŸ”— DEPENDS_ON -> backend.src.core.utils.matching - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Domain, Domain Tests, Service - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 21, TRIVIAL: 7 - - πŸ“„ **Files:** 3 - - πŸ“¦ **Entities:** 29 - - **Key Entities:** - - - β„‚ **TestEncryptionManager** (Class) - - Validate EncryptionManager encrypt/decrypt roundtrip, unique... - - πŸ“¦ **backend.src.services.__tests__.test_llm_prompt_templates** (Module) - - Validate normalization and rendering behavior for configurab... - - πŸ“¦ **backend.src.services.__tests__.test_resource_service** (Module) - - Unit tests for ResourceService - - πŸ“¦ **test_encryption_manager** (Module) `[CRITICAL]` - - Unit tests for EncryptionManager encrypt/decrypt functionali... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.src.services.llm_prompt_templates - - ### πŸ“ `reports/` - - - πŸ—οΈ **Layers:** Domain - - πŸ“Š **Tiers:** CRITICAL: 5, STANDARD: 15 - - πŸ“„ **Files:** 3 - - πŸ“¦ **Entities:** 20 - - **Key Entities:** - - - β„‚ **ReportsService** (Class) `[CRITICAL]` - - Service layer for list/detail report retrieval and normaliza... - - πŸ“¦ **backend.src.services.reports.normalizer** (Module) `[CRITICAL]` - - Convert task manager task objects into canonical unified Tas... - - πŸ“¦ **backend.src.services.reports.report_service** (Module) `[CRITICAL]` - - Aggregate, normalize, filter, and paginate task reports for ... 
- - πŸ“¦ **backend.src.services.reports.type_profiles** (Module) `[CRITICAL]` - - Deterministic mapping of plugin/task identifiers to canonica... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.src.core.task_manager.manager.TaskManager - - πŸ”— DEPENDS_ON -> backend.src.core.task_manager.models.Task - - πŸ”— DEPENDS_ON -> backend.src.models.report - - πŸ”— DEPENDS_ON -> backend.src.models.report.TaskType - - πŸ”— DEPENDS_ON -> backend.src.services.reports.normalizer - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Domain, Domain (Tests) - - πŸ“Š **Tiers:** CRITICAL: 2, TRIVIAL: 19 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 21 - - **Key Entities:** - - - πŸ“¦ **backend.tests.test_report_normalizer** (Module) `[CRITICAL]` - - Validate unknown task type fallback and partial payload norm... - - πŸ“¦ **test_report_service** (Module) `[CRITICAL]` - - Unit tests for ReportsService list/detail operations - - ### πŸ“ `tests/` - - - πŸ—οΈ **Layers:** Core, Domain (Tests), Test, Unknown - - πŸ“Š **Tiers:** CRITICAL: 6, STANDARD: 80, TRIVIAL: 57 - - πŸ“„ **Files:** 10 - - πŸ“¦ **Entities:** 143 - - **Key Entities:** - - - β„‚ **TestLogPersistence** (Class) `[CRITICAL]` - - Test suite for TaskLogPersistenceService. - - β„‚ **TestTaskContext** (Class) - - Test suite for TaskContext. - - β„‚ **TestTaskLogger** (Class) - - Test suite for TaskLogger. - - β„‚ **TestTaskPersistenceHelpers** (Class) `[CRITICAL]` - - Test suite for TaskPersistenceService static helper methods. - - β„‚ **TestTaskPersistenceService** (Class) `[CRITICAL]` - - Test suite for TaskPersistenceService CRUD operations. - - πŸ“¦ **backend.tests.test_dashboards_api** (Module) - - Contract-driven tests for Dashboard Hub API - - πŸ“¦ **test_auth** (Module) `[TRIVIAL]` - - Auto-generated module for backend/tests/test_auth.py - - πŸ“¦ **test_log_persistence** (Module) `[CRITICAL]` - - Unit tests for TaskLogPersistenceService. 
- - πŸ“¦ **test_resource_hubs** (Module) `[TRIVIAL]` - - Auto-generated module for backend/tests/test_resource_hubs.p... - - πŸ“¦ **test_smoke_plugins** (Module) `[TRIVIAL]` - - Auto-generated module for backend/tests/test_smoke_plugins.p... - - ### πŸ“ `core/` - - - πŸ—οΈ **Layers:** Domain, Unknown - - πŸ“Š **Tiers:** STANDARD: 2, TRIVIAL: 31 - - πŸ“„ **Files:** 3 - - πŸ“¦ **Entities:** 33 - - **Key Entities:** - - - πŸ“¦ **backend.tests.core.test_mapping_service** (Module) - - Unit tests for the IdMappingService matching UUIDs to intege... - - πŸ“¦ **backend.tests.core.test_migration_engine** (Module) - - Unit tests for MigrationEngine's cross-filter patching algor... - - πŸ“¦ **test_defensive_guards** (Module) `[TRIVIAL]` - - Auto-generated module for backend/tests/core/test_defensive_... - - ### πŸ“ `components/` - - - πŸ—οΈ **Layers:** Component, Feature, UI, UI -->, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 49, TRIVIAL: 4 - - πŸ“„ **Files:** 13 - - πŸ“¦ **Entities:** 54 - - **Key Entities:** - - - 🧩 **DashboardGrid** (Component) - - Displays a grid of dashboards with selection and pagination. - - 🧩 **DynamicForm** (Component) - - Generates a form dynamically based on a JSON schema. - - 🧩 **EnvSelector** (Component) - - Provides a UI component for selecting source and target envi... - - 🧩 **Footer** (Component) `[TRIVIAL]` - - Displays the application footer with copyright information. - - 🧩 **MappingTable** (Component) - - Displays and allows editing of database mappings. - - 🧩 **MissingMappingModal** (Component) - - Prompts the user to provide a database mapping when one is m... - - 🧩 **Navbar** (Component) - - Main navigation bar for the application. - - 🧩 **PasswordPrompt** (Component) - - A modal component to prompt the user for database passwords ... - - 🧩 **TaskHistory** (Component) - - Displays a list of recent tasks with their status and allows... 
- - 🧩 **TaskList** (Component) - - Displays a list of tasks with their status and execution det... - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** UI (Tests) - - πŸ“Š **Tiers:** CRITICAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - **Key Entities:** - - - πŸ“¦ **frontend.src.components.__tests__.task_log_viewer** (Module) `[CRITICAL]` - - Unit tests for TaskLogViewer component by mounting it and ob... - - ### πŸ“ `auth/` - - - πŸ—οΈ **Layers:** Component - - πŸ“Š **Tiers:** TRIVIAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - **Key Entities:** - - - 🧩 **ProtectedRoute** (Component) `[TRIVIAL]` - - Wraps content to ensure only authenticated users can access ... - - ### πŸ“ `git/` - - - πŸ—οΈ **Layers:** Component - - πŸ“Š **Tiers:** STANDARD: 26 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 26 - - **Key Entities:** - - - 🧩 **BranchSelector** (Component) - - UI для Π²Ρ‹Π±ΠΎΡ€Π° ΠΈ создания Π²Π΅Ρ‚ΠΎΠΊ Git. - - 🧩 **CommitHistory** (Component) - - Displays the commit history for a specific dashboard. - - 🧩 **CommitModal** (Component) - - МодальноС ΠΎΠΊΠ½ΠΎ для создания ΠΊΠΎΠΌΠΌΠΈΡ‚Π° с просмотром ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΉ (... - - 🧩 **ConflictResolver** (Component) - - UI for resolving merge conflicts (Keep Mine / Keep Theirs). - - 🧩 **DeploymentModal** (Component) - - Modal for deploying a dashboard to a target environment. - - 🧩 **GitManager** (Component) - - Π¦Π΅Π½Ρ‚Ρ€Π°Π»ΡŒΠ½Ρ‹ΠΉ ΠΊΠΎΠΌΠΏΠΎΠ½Π΅Π½Ρ‚ для управлСния Git-опСрациями ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½... - - ### πŸ“ `llm/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** STANDARD: 2, TRIVIAL: 11 - - πŸ“„ **Files:** 3 - - πŸ“¦ **Entities:** 13 - - **Key Entities:** - - - 🧩 **DocPreview** (Component) - - UI component for previewing generated dataset documentation ... - - 🧩 **ProviderConfig** (Component) - - UI form for managing LLM provider configurations. - - πŸ“¦ **DocPreview** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/llm/DocPre... 
- - πŸ“¦ **ProviderConfig** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/llm/Provid... - - πŸ“¦ **ValidationReport** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/llm/Valida... - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** UI Tests - - πŸ“Š **Tiers:** STANDARD: 2 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 2 - - **Key Entities:** - - - πŸ“¦ **frontend.src.components.llm.__tests__.provider_config_integration** (Module) - - Protect edit-button interaction contract in LLM provider set... - - ### πŸ“ `storage/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** STANDARD: 7 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 7 - - **Key Entities:** - - - 🧩 **FileList** (Component) - - Displays a table of files with metadata and actions. - - 🧩 **FileUpload** (Component) - - Provides a form for uploading files to a specific category. - - ### πŸ“ `tasks/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** STANDARD: 4, TRIVIAL: 12 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 16 - - **Key Entities:** - - - 🧩 **LogEntryRow** (Component) - - Renders a single log entry with stacked layout optimized for... - - 🧩 **LogFilterBar** (Component) - - Compact filter toolbar for logs β€” level, source, and text se... - - 🧩 **TaskLogPanel** (Component) - - Combines log filtering and display into a single cohesive da... - - πŸ“¦ **LogFilterBar** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/tasks/LogF... - - πŸ“¦ **TaskLogPanel** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/tasks/Task... - - πŸ“¦ **TaskResultPanel** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/tasks/Task... 
- - ### πŸ“ `tools/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** STANDARD: 14, TRIVIAL: 2 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 16 - - **Key Entities:** - - - 🧩 **ConnectionForm** (Component) - - UI component for creating a new database connection configur... - - 🧩 **ConnectionList** (Component) - - UI component for listing and deleting saved database connect... - - 🧩 **DebugTool** (Component) - - UI component for system diagnostics and debugging API respon... - - 🧩 **MapperTool** (Component) - - UI component for mapping dataset column verbose names using ... - - πŸ“¦ **MapperTool** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/components/tools/Mapp... - - ### πŸ“ `lib/` - - - πŸ—οΈ **Layers:** Infra, Infra-API, UI, UI-State - - πŸ“Š **Tiers:** STANDARD: 20, TRIVIAL: 3 - - πŸ“„ **Files:** 5 - - πŸ“¦ **Entities:** 23 - - **Key Entities:** - - - 🧩 **Counter** (Component) `[TRIVIAL]` - - Simple counter demo component - - πŸ“¦ **Utils** (Module) `[TRIVIAL]` - - General utility functions (class merging) - - πŸ“¦ **api_module** (Module) - - Handles all communication with the backend API. - - πŸ“¦ **stores_module** (Module) - - Global state management using Svelte stores. - - πŸ“¦ **toasts_module** (Module) - - Manages toast notifications using a Svelte writable store. - - ### πŸ“ `api/` - - - πŸ—οΈ **Layers:** Infra, Infra-API - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 10 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 11 - - **Key Entities:** - - - πŸ“¦ **frontend.src.lib.api.assistant** (Module) - - API client wrapper for assistant chat, confirmation actions,... - - πŸ“¦ **frontend.src.lib.api.reports** (Module) `[CRITICAL]` - - Wrapper-based reports API client for list/detail retrieval w... 
- - **Dependencies:** - - - πŸ”— DEPENDS_ON -> [DEF:api_module] - - πŸ”— DEPENDS_ON -> frontend.src.lib.api.api_module - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Infra (Tests) - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 3 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 4 - - **Key Entities:** - - - β„‚ **TestBuildReportQueryString** (Class) - - Validate query string construction from filter options. - - β„‚ **TestGetReportsAsync** (Class) - - Validate getReports and getReportDetail with mocked api.fetc... - - β„‚ **TestNormalizeApiError** (Class) - - Validate error normalization for UI-state mapping. - - πŸ“¦ **frontend.src.lib.api.__tests__.reports_api** (Module) `[CRITICAL]` - - Unit tests for reports API client functions: query string bu... - - ### πŸ“ `auth/` - - - πŸ—οΈ **Layers:** Feature - - πŸ“Š **Tiers:** STANDARD: 7 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 7 - - **Key Entities:** - - - πŸ—„οΈ **authStore** (Store) - - Manages the global authentication state on the frontend. - - ### πŸ“ `assistant/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 12, TRIVIAL: 4 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 17 - - **Key Entities:** - - - 🧩 **AssistantChatPanel** (Component) `[CRITICAL]` - - Slide-out assistant chat panel for natural language command ... - - πŸ“¦ **AssistantChatPanel** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/assist... - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** UI Tests - - πŸ“Š **Tiers:** STANDARD: 5 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 5 - - **Key Entities:** - - - πŸ“¦ **frontend.src.lib.components.assistant.__tests__.assistant_chat_integration** (Module) - - Contract-level integration checks for assistant chat panel i... - - πŸ“¦ **frontend.src.lib.components.assistant.__tests__.assistant_confirmation_integration** (Module) - - Validate confirm/cancel UX contract bindings in assistant ch... 
- - ### πŸ“ `layout/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 3, STANDARD: 5, TRIVIAL: 26 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 34 - - **Key Entities:** - - - 🧩 **Breadcrumbs** (Component) - - Display page hierarchy navigation - - 🧩 **Sidebar** (Component) `[CRITICAL]` - - Persistent left sidebar with resource categories navigation - - 🧩 **TaskDrawer** (Component) `[CRITICAL]` - - Global task drawer for monitoring background operations - - 🧩 **TopNavbar** (Component) `[CRITICAL]` - - Unified top navigation bar with Logo, Search, Activity, and ... - - πŸ“¦ **Breadcrumbs** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/layout... - - πŸ“¦ **Sidebar** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/layout... - - πŸ“¦ **TaskDrawer** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/layout... - - πŸ“¦ **TopNavbar** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/layout... - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Unknown - - πŸ“Š **Tiers:** TRIVIAL: 3 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 3 - - **Key Entities:** - - - πŸ“¦ **test_breadcrumbs.svelte** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/layout... - - ### πŸ“ `reports/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 4, STANDARD: 1, TRIVIAL: 10 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 15 - - **Key Entities:** - - - 🧩 **ReportCard** (Component) `[CRITICAL]` - - Render one report with explicit textual type label and profi... - - 🧩 **ReportDetailPanel** (Component) `[CRITICAL]` - - Display detailed report context with diagnostics and actiona... - - 🧩 **ReportsList** (Component) `[CRITICAL]` - - Render unified list of normalized reports with canonical min... - - πŸ“¦ **ReportCard** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/report... 
- - πŸ“¦ **ReportDetailPanel** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/report... - - πŸ“¦ **ReportsList** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/components/report... - - πŸ“¦ **frontend.src.lib.components.reports.reportTypeProfiles** (Module) `[CRITICAL]` - - Deterministic mapping from report task_type to visual profil... - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> frontend/src/lib/i18n/index.ts - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** UI, UI (Tests) - - πŸ“Š **Tiers:** CRITICAL: 5, STANDARD: 1, TRIVIAL: 4 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 10 - - **Key Entities:** - - - πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_card.ux** (Module) `[CRITICAL]` - - Test UX states and transitions for ReportCard component - - πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_detail.integration** (Module) `[CRITICAL]` - - Validate detail-panel behavior for failed reports and recove... - - πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_detail.ux** (Module) `[CRITICAL]` - - Test UX states and recovery for ReportDetailPanel component - - πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_type_profiles** (Module) `[CRITICAL]` - - Validate report type profile mapping and unknown fallback be... - - πŸ“¦ **frontend.src.lib.components.reports.__tests__.reports_filter_performance** (Module) - - Guard test for report filter responsiveness on moderate in-m... - - πŸ“¦ **frontend.src.lib.components.reports.__tests__.reports_page.integration** (Module) `[CRITICAL]` - - Integration-style checks for unified mixed-type reports rend... - - ### πŸ“ `fixtures/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** STANDARD: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - **Key Entities:** - - - πŸ“¦ **reports.fixtures** (Module) - - Shared frontend fixtures for unified reports states. 
- - ### πŸ“ `i18n/` - - - πŸ—οΈ **Layers:** Infra - - πŸ“Š **Tiers:** STANDARD: 4 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 4 - - **Key Entities:** - - - πŸ“¦ **i18n** (Module) - - Determines the starting locale. - - πŸ—„οΈ **locale** (Store) - - Holds the current active locale string. - - πŸ—„οΈ **t** (Store) - - Derived store providing the translation dictionary. - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> locales/en.json - - πŸ”— DEPENDS_ON -> locales/ru.json - - ### πŸ“ `stores/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 7, TRIVIAL: 12 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 20 - - **Key Entities:** - - - πŸ“¦ **sidebar** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/stores/sidebar.js - - πŸ“¦ **taskDrawer** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/lib/stores/taskDrawer... - - πŸ—„οΈ **activity** (Store) - - Track active task count for navbar indicator - - πŸ—„οΈ **assistantChat** (Store) - - Control assistant chat panel visibility and active conversat... - - πŸ—„οΈ **sidebar** (Store) - - Manage sidebar visibility and navigation state - - πŸ—„οΈ **taskDrawer** (Store) `[CRITICAL]` - - Manage Task Drawer visibility and resource-to-task mapping - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> WebSocket connection, taskDrawer store - - ### πŸ“ `__tests__/` - - - πŸ—οΈ **Layers:** Domain (Tests), UI, UI Tests - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 10 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 11 - - **Key Entities:** - - - πŸ“¦ **frontend.src.lib.stores.__tests__.assistantChat** (Module) - - Validate assistant chat store visibility and conversation bi... 
- - πŸ“¦ **frontend.src.lib.stores.__tests__.sidebar** (Module) - - Unit tests for sidebar store - - πŸ“¦ **frontend.src.lib.stores.__tests__.test_activity** (Module) - - Unit tests for activity store - - πŸ“¦ **frontend.src.lib.stores.__tests__.test_sidebar** (Module) - - Unit tests for sidebar store - - πŸ“¦ **frontend.src.lib.stores.__tests__.test_taskDrawer** (Module) `[CRITICAL]` - - Unit tests for task drawer store - - πŸ“¦ **setupTests** (Module) - - Global test setup with mocks for SvelteKit modules - - **Dependencies:** - - - πŸ”— DEPENDS_ON -> assistantChatStore - - πŸ”— DEPENDS_ON -> frontend.src.lib.stores.taskDrawer - - ### πŸ“ `mocks/` - - - πŸ—οΈ **Layers:** UI (Tests) - - πŸ“Š **Tiers:** STANDARD: 4 - - πŸ“„ **Files:** 4 - - πŸ“¦ **Entities:** 4 - - **Key Entities:** - - - πŸ“¦ **mock_env_public** (Module) - - Mock for $env/static/public SvelteKit module in vitest - - ### πŸ“ `ui/` - - - πŸ—οΈ **Layers:** Atom - - πŸ“Š **Tiers:** TRIVIAL: 7 - - πŸ“„ **Files:** 7 - - πŸ“¦ **Entities:** 7 - - **Key Entities:** - - - 🧩 **Button** (Component) `[TRIVIAL]` - - Define component interface and default values (Svelte 5 Rune... - - 🧩 **Card** (Component) `[TRIVIAL]` - - Standardized container with padding and elevation. - - 🧩 **Input** (Component) `[TRIVIAL]` - - Standardized text input component with label and error handl... - - 🧩 **LanguageSwitcher** (Component) `[TRIVIAL]` - - Dropdown component to switch between supported languages. - - 🧩 **PageHeader** (Component) `[TRIVIAL]` - - Standardized page header with title and action area. - - 🧩 **Select** (Component) `[TRIVIAL]` - - Standardized dropdown selection component. - - πŸ“¦ **ui** (Module) `[TRIVIAL]` - - Central export point for standardized UI components. 
- - ### πŸ“ `utils/` - - - πŸ—οΈ **Layers:** Infra - - πŸ“Š **Tiers:** TRIVIAL: 2 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 2 - - **Key Entities:** - - - πŸ“¦ **Debounce** (Module) `[TRIVIAL]` - - Debounce utility for limiting function execution rate - - ### πŸ“ `pages/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** STANDARD: 11 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 11 - - **Key Entities:** - - - 🧩 **Dashboard** (Component) - - Displays the list of available plugins and allows selecting ... - - 🧩 **Settings** (Component) - - The main settings page for the application, allowing managem... - - ### πŸ“ `routes/` - - - πŸ—οΈ **Layers:** Infra, UI - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 3, TRIVIAL: 1 - - πŸ“„ **Files:** 5 - - πŸ“¦ **Entities:** 5 - - **Key Entities:** - - - πŸ“¦ **RootLayoutConfig** (Module) `[TRIVIAL]` - - Root layout configuration (SPA mode) - - πŸ“¦ **layout** (Module) - - Bind global layout shell and conditional login/full-app rend... - - ### πŸ“ `roles/` - - - πŸ—οΈ **Layers:** Domain - - πŸ“Š **Tiers:** STANDARD: 6 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 6 - - **Key Entities:** - - - 🧩 **AdminRolesPage** (Component) - - UI for managing system roles and their permissions. - - ### πŸ“ `settings/` - - - πŸ—οΈ **Layers:** Feature - - πŸ“Š **Tiers:** STANDARD: 5 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 5 - - **Key Entities:** - - - 🧩 **AdminSettingsPage** (Component) - - UI for configuring Active Directory Group to local Role mapp... - - ### πŸ“ `llm/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** STANDARD: 1, TRIVIAL: 5 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 6 - - **Key Entities:** - - - 🧩 **LLMSettingsPage** (Component) - - Admin settings page for LLM provider configuration. - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/admin/settings... 
- - ### πŸ“ `users/` - - - πŸ—οΈ **Layers:** Feature - - πŸ“Š **Tiers:** STANDARD: 6 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 6 - - **Key Entities:** - - - 🧩 **AdminUsersPage** (Component) - - UI for managing system users and their roles. - - ### πŸ“ `dashboards/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, TRIVIAL: 37 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 38 - - **Key Entities:** - - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/dashboards/+pa... - - ### πŸ“ `[id]/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, TRIVIAL: 5 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 6 - - **Key Entities:** - - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/dashboards/[id... - - ### πŸ“ `datasets/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, TRIVIAL: 17 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 18 - - **Key Entities:** - - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/datasets/+page... - - ### πŸ“ `[id]/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, TRIVIAL: 6 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 7 - - **Key Entities:** - - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/datasets/[id]/... - - ### πŸ“ `git/` - - - πŸ—οΈ **Layers:** Page - - πŸ“Š **Tiers:** STANDARD: 3 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 3 - - **Key Entities:** - - - 🧩 **GitDashboardPage** (Component) - - Dashboard management page for Git integration. - - ### πŸ“ `login/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** STANDARD: 3 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 3 - - **Key Entities:** - - - 🧩 **LoginPage** (Component) - - Provides the user interface for local and ADFS authenticatio... 
- - ### πŸ“ `migration/` - - - πŸ—οΈ **Layers:** Page - - πŸ“Š **Tiers:** STANDARD: 10 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 10 - - **Key Entities:** - - - 🧩 **DashboardSelectionSection** (Component) - - 🧩 **MigrationDashboard** (Component) - - Main dashboard for configuring and starting migrations. - - ### πŸ“ `mappings/` - - - πŸ—οΈ **Layers:** Page - - πŸ“Š **Tiers:** STANDARD: 4 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 4 - - **Key Entities:** - - - 🧩 **MappingManagement** (Component) - - Page for managing database mappings between environments. - - ### πŸ“ `reports/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, TRIVIAL: 7 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 8 - - **Key Entities:** - - - 🧩 **UnifiedReportsPage** (Component) `[CRITICAL]` - - Unified reports page with filtering and resilient UX states ... - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/reports/+page.... - - ### πŸ“ `settings/` - - - πŸ—οΈ **Layers:** UI, Unknown - - πŸ“Š **Tiers:** CRITICAL: 1, STANDARD: 1, TRIVIAL: 23 - - πŸ“„ **Files:** 2 - - πŸ“¦ **Entities:** 25 - - **Key Entities:** - - - πŸ“¦ **+page** (Module) `[TRIVIAL]` - - Auto-generated module for frontend/src/routes/settings/+page... - - ### πŸ“ `connections/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** STANDARD: 2 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 2 - - **Key Entities:** - - - 🧩 **ConnectionsSettingsPage** (Component) - - Page for managing database connection configurations. - - ### πŸ“ `git/` - - - πŸ—οΈ **Layers:** Page - - πŸ“Š **Tiers:** STANDARD: 5 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 5 - - **Key Entities:** - - - 🧩 **GitSettingsPage** (Component) - - Manage Git server configurations for dashboard versioning. 
- - ### πŸ“ `storage/` - - - πŸ—οΈ **Layers:** Page - - πŸ“Š **Tiers:** TRIVIAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - ### πŸ“ `repos/` - - - πŸ“Š **Tiers:** STANDARD: 3 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 3 - - ### πŸ“ `debug/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** TRIVIAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - **Key Entities:** - - - 🧩 **DebugPage** (Component) `[TRIVIAL]` - - Page for system diagnostics and debugging. - - ### πŸ“ `mapper/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** TRIVIAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - **Key Entities:** - - - 🧩 **MapperPage** (Component) `[TRIVIAL]` - - Page for the dataset column mapper tool. - - ### πŸ“ `storage/` - - - πŸ—οΈ **Layers:** UI - - πŸ“Š **Tiers:** STANDARD: 5 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 5 - - **Key Entities:** - - - 🧩 **StoragePage** (Component) - - Main page for file storage management. - - ### πŸ“ `services/` - - - πŸ—οΈ **Layers:** Service - - πŸ“Š **Tiers:** STANDARD: 33 - - πŸ“„ **Files:** 6 - - πŸ“¦ **Entities:** 33 - - **Key Entities:** - - - πŸ“¦ **GitServiceClient** (Module) - - API client for Git operations, managing the communication be... - - πŸ“¦ **adminService** (Module) - - Service for Admin-related API calls (User and Role managemen... - - πŸ“¦ **storageService** (Module) - - Frontend API client for file storage management. 
- - **Dependencies:** - - - πŸ”— DEPENDS_ON -> frontend.src.lib.api - - ### πŸ“ `types/` - - - πŸ—οΈ **Layers:** Domain - - πŸ“Š **Tiers:** TRIVIAL: 1 - - πŸ“„ **Files:** 1 - - πŸ“¦ **Entities:** 1 - - **Key Entities:** - - - πŸ“¦ **DashboardTypes** (Module) `[TRIVIAL]` - - TypeScript interfaces for Dashboard entities - -### πŸ“ `root/` - -- πŸ—οΈ **Layers:** DevOps/Tooling, Domain, Unknown -- πŸ“Š **Tiers:** CRITICAL: 14, STANDARD: 24, TRIVIAL: 10 -- πŸ“„ **Files:** 3 -- πŸ“¦ **Entities:** 48 - -**Key Entities:** - - - β„‚ **ComplianceIssue** (Class) `[TRIVIAL]` - - Represents a single compliance issue with severity. - - β„‚ **ReportsService** (Class) `[CRITICAL]` - - Service layer for list/detail report retrieval and normaliza... - - β„‚ **SemanticEntity** (Class) `[CRITICAL]` - - Represents a code entity (Module, Function, Component) found... - - β„‚ **SemanticMapGenerator** (Class) `[CRITICAL]` - - Orchestrates the mapping process with tier-based validation. - - β„‚ **Severity** (Class) `[TRIVIAL]` - - Severity levels for compliance issues. - - β„‚ **Tier** (Class) `[TRIVIAL]` - - Enumeration of semantic tiers defining validation strictness... - - πŸ“¦ **backend.src.services.reports.report_service** (Module) `[CRITICAL]` - - Aggregate, normalize, filter, and paginate task reports for ... - - πŸ“¦ **generate_semantic_map** (Module) - - Scans the codebase to generate a Semantic Map, Module Map, a... 
- - πŸ“¦ **test_analyze** (Module) `[TRIVIAL]` - - Auto-generated module for test_analyze.py - -**Dependencies:** - - - πŸ”— DEPENDS_ON -> backend.src.core.task_manager.manager.TaskManager - - πŸ”— DEPENDS_ON -> backend.src.models.report - - πŸ”— DEPENDS_ON -> backend.src.services.reports.normalizer - -## Cross-Module Dependencies - -```mermaid -graph TD - api-->|USES|backend - api-->|USES|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|USES|backend - routes-->|USES|backend - routes-->|CALLS|backend - routes-->|CALLS|backend - routes-->|CALLS|backend - routes-->|CALLS|backend - routes-->|CALLS|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - routes-->|DEPENDS_ON|backend - __tests__-->|TESTS|backend - __tests__-->|TESTS|backend - __tests__-->|TESTS|backend - __tests__-->|TESTS|backend - __tests__-->|DEPENDS_ON|backend - __tests__-->|DEPENDS_ON|backend - __tests__-->|VERIFIES|backend - core-->|USES|backend - core-->|USES|backend - core-->|DEPENDS_ON|backend - core-->|DEPENDS_ON|backend - core-->|DEPENDS_ON|backend - core-->|DEPENDS_ON|backend - auth-->|USES|backend - auth-->|USES|backend - auth-->|USES|backend - auth-->|USES|backend - utils-->|DEPENDS_ON|backend - utils-->|DEPENDS_ON|backend - utils-->|DEPENDS_ON|backend - models-->|INHERITS_FROM|backend - models-->|DEPENDS_ON|backend - models-->|DEPENDS_ON|backend - models-->|USED_BY|backend - models-->|INHERITS_FROM|backend - __tests__-->|TESTS|backend - llm_analysis-->|IMPLEMENTS|backend - llm_analysis-->|IMPLEMENTS|backend - 
storage-->|DEPENDS_ON|backend - scripts-->|USES|backend - scripts-->|USES|backend - scripts-->|READS_FROM|backend - scripts-->|READS_FROM|backend - scripts-->|USES|backend - scripts-->|USES|backend - scripts-->|CALLS|backend - scripts-->|USES|backend - scripts-->|USES|backend - scripts-->|USES|backend - services-->|DEPENDS_ON|backend - services-->|DEPENDS_ON|backend - services-->|DEPENDS_ON|backend - services-->|DEPENDS_ON|backend - services-->|DEPENDS_ON|backend - services-->|DEPENDS_ON|backend - services-->|USES|backend - services-->|USES|backend - services-->|USES|backend - services-->|DEPENDS_ON|backend - services-->|DEPENDS_ON|backend - __tests__-->|TESTS|backend - __tests__-->|DEPENDS_ON|backend - __tests__-->|TESTS|backend - reports-->|DEPENDS_ON|backend - reports-->|DEPENDS_ON|backend - reports-->|DEPENDS_ON|backend - reports-->|DEPENDS_ON|backend - reports-->|DEPENDS_ON|backend - reports-->|DEPENDS_ON|backend - reports-->|DEPENDS_ON|backend - __tests__-->|TESTS|backend - __tests__-->|TESTS|backend - tests-->|TESTS|backend - tests-->|TESTS|backend - core-->|VERIFIES|backend - core-->|VERIFIES|backend - __tests__-->|VERIFIES|components - __tests__-->|VERIFIES|lib - __tests__-->|VERIFIES|lib - reports-->|DEPENDS_ON|lib - __tests__-->|TESTS|routes - __tests__-->|TESTS|routes - __tests__-->|TESTS|lib - __tests__-->|TESTS|lib - __tests__-->|TESTS|lib - __tests__-->|TESTS|routes - root-->|DEPENDS_ON|backend - root-->|DEPENDS_ON|backend - root-->|DEPENDS_ON|backend -``` diff --git a/.ai/structure/PROJECT_MAP.md b/.ai/structure/PROJECT_MAP.md deleted file mode 100644 index 348a6cd7..00000000 --- a/.ai/structure/PROJECT_MAP.md +++ /dev/null @@ -1,4299 +0,0 @@ -# Project Semantic Map - -> Compressed view for AI Context. Generated automatically. - -- πŸ“¦ **backend.src.services.reports.report_service** (`Module`) `[CRITICAL]` - - πŸ“ Aggregate, normalize, filter, and paginate task reports for unified list/detail API use cases. 
- - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: List responses are deterministic and include applied filter echo metadata. - - πŸ”— DEPENDS_ON -> `backend.src.core.task_manager.manager.TaskManager` - - πŸ”— DEPENDS_ON -> `backend.src.models.report` - - πŸ”— DEPENDS_ON -> `backend.src.services.reports.normalizer` - - β„‚ **ReportsService** (`Class`) `[CRITICAL]` - - πŸ“ Service layer for list/detail report retrieval and normalization. - - πŸ”’ Invariant: Service methods are read-only over task history source. - - Ζ’ **__init__** (`Function`) `[CRITICAL]` - - πŸ“ Initialize service with TaskManager dependency. - - πŸ”’ Invariant: Constructor performs no task mutations. - - Ζ’ **_load_normalized_reports** (`Function`) - - πŸ“ Build normalized reports from all available tasks. - - πŸ”’ Invariant: Every returned item is a TaskReport. - - Ζ’ **_to_utc_datetime** (`Function`) - - πŸ“ Normalize naive/aware datetime values to UTC-aware datetime for safe comparisons. - - πŸ”’ Invariant: Naive datetimes are interpreted as UTC to preserve deterministic ordering/filtering. - - Ζ’ **_datetime_sort_key** (`Function`) - - πŸ“ Produce stable numeric sort key for report timestamps. - - πŸ”’ Invariant: Mixed naive/aware datetimes never raise TypeError. - - Ζ’ **_matches_query** (`Function`) - - πŸ“ Apply query filtering to a report. - - πŸ”’ Invariant: Filter evaluation is side-effect free. - - Ζ’ **_sort_reports** (`Function`) - - πŸ“ Sort reports deterministically according to query settings. - - πŸ”’ Invariant: Sorting criteria are deterministic for equal input. - - Ζ’ **list_reports** (`Function`) - - πŸ“ Return filtered, sorted, paginated report collection. - - Ζ’ **get_report_detail** (`Function`) - - πŸ“ Return one normalized report with timeline/diagnostics/next actions. 
- - Ζ’ **print_entity** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **test_analyze** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for test_analyze.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **print_issues** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **generate_semantic_map** (`Module`) - - πŸ“ Scans the codebase to generate a Semantic Map, Module Map, and Compliance Report based on the System Standard. - - πŸ—οΈ Layer: DevOps/Tooling - - πŸ”’ Invariant: All DEF anchors must have matching closing anchors; TIER determines validation strictness. - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Mock init for self-containment. - - Ζ’ **__enter__** (`Function`) `[TRIVIAL]` - - πŸ“ Mock enter. - - Ζ’ **__exit__** (`Function`) `[TRIVIAL]` - - πŸ“ Mock exit. - - β„‚ **Tier** (`Class`) `[TRIVIAL]` - - πŸ“ Enumeration of semantic tiers defining validation strictness. - - β„‚ **Severity** (`Class`) `[TRIVIAL]` - - πŸ“ Severity levels for compliance issues. - - β„‚ **ComplianceIssue** (`Class`) `[TRIVIAL]` - - πŸ“ Represents a single compliance issue with severity. - - β„‚ **SemanticEntity** (`Class`) `[CRITICAL]` - - πŸ“ Represents a code entity (Module, Function, Component) found during parsing. - - πŸ”’ Invariant: start_line is always set; end_line is set upon closure; tier defaults to STANDARD. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes a new SemanticEntity instance. - - Ζ’ **get_tier** (`Function`) - - πŸ“ Returns the tier of the entity, defaulting to STANDARD. - - Ζ’ **to_dict** (`Function`) - - πŸ“ Serializes the entity to a dictionary for JSON output. - - Ζ’ **validate** (`Function`) `[CRITICAL]` - - πŸ“ Checks for semantic compliance based on TIER requirements. - - Ζ’ **get_score** (`Function`) - - πŸ“ Calculates a compliance score (0.0 to 1.0) based on tier requirements. - - Ζ’ **get_patterns** (`Function`) - - πŸ“ Returns regex patterns for a specific language. 
- - Ζ’ **extract_svelte_props** (`Function`) - - πŸ“ Extracts props from Svelte component script section. - - Ζ’ **extract_svelte_events** (`Function`) - - πŸ“ Extracts dispatched events from Svelte component. - - Ζ’ **extract_data_flow** (`Function`) - - πŸ“ Extracts store subscriptions and data flow from Svelte component. - - Ζ’ **parse_file** (`Function`) `[CRITICAL]` - - πŸ“ Parses a single file to extract semantic entities with tier awareness and enhanced Svelte analysis. - - πŸ”’ Invariant: Every opened anchor must have a matching closing anchor for valid compliance. - - β„‚ **SemanticMapGenerator** (`Class`) `[CRITICAL]` - - πŸ“ Orchestrates the mapping process with tier-based validation. - - πŸ”’ Invariant: All entities are validated according to their TIER requirements. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the generator with a root directory. - - Ζ’ **_load_gitignore** (`Function`) - - πŸ“ Loads patterns from .gitignore file. - - Ζ’ **_is_ignored** (`Function`) - - πŸ“ Checks if a path should be ignored based on .gitignore or hardcoded defaults. - - Ζ’ **run** (`Function`) `[CRITICAL]` - - πŸ“ Main execution flow. - - πŸ”— CALLS -> `_walk_and_parse` - - πŸ”— CALLS -> `_generate_artifacts` - - Ζ’ **_walk_and_parse** (`Function`) `[CRITICAL]` - - πŸ“ Recursively walks directories and triggers parsing. - - Ζ’ **_process_file_results** (`Function`) - - πŸ“ Validates entities and calculates file scores with tier awareness. - - Ζ’ **validate_recursive** (`Function`) - - πŸ“ Calculate score and determine module's max tier for weighted global score - - Ζ’ **_generate_artifacts** (`Function`) `[CRITICAL]` - - πŸ“ Writes output files with tier-based compliance data. - - Ζ’ **_generate_report** (`Function`) `[CRITICAL]` - - πŸ“ Generates the Markdown compliance report with severity levels. - - Ζ’ **_collect_issues** (`Function`) - - πŸ“ Helper to collect issues for a specific file from the entity tree. 
- - Ζ’ **_generate_compressed_map** (`Function`) `[CRITICAL]` - - πŸ“ Generates the token-optimized project map with enhanced Svelte details. - - Ζ’ **_write_entity_md** (`Function`) `[CRITICAL]` - - πŸ“ Recursive helper to write entity tree to Markdown with tier badges and enhanced details. - - Ζ’ **_generate_module_map** (`Function`) `[CRITICAL]` - - πŸ“ Generates a module-centric map grouping entities by directory structure. - - Ζ’ **_get_module_path** (`Function`) - - πŸ“ Extracts the module path from a file path. - - Ζ’ **_collect_all_entities** (`Function`) - - πŸ“ Flattens entity tree for easier grouping. - - Ζ’ **to_dict** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **DashboardTypes** (`Module`) `[TRIVIAL]` - - πŸ“ TypeScript interfaces for Dashboard entities - - πŸ—οΈ Layer: Domain -- 🧩 **Counter** (`Component`) `[TRIVIAL]` - - πŸ“ Simple counter demo component - - πŸ—οΈ Layer: UI - - ➑️ WRITES_TO `state` -- πŸ“¦ **stores_module** (`Module`) - - πŸ“ Global state management using Svelte stores. - - πŸ—οΈ Layer: UI-State - - πŸ“¦ **plugins** (`Data`) - - πŸ“ Store for the list of available plugins. - - πŸ“¦ **tasks** (`Data`) - - πŸ“ Store for the list of tasks. - - πŸ“¦ **selectedPlugin** (`Data`) - - πŸ“ Store for the currently selected plugin. - - πŸ“¦ **selectedTask** (`Data`) - - πŸ“ Store for the currently selected task. - - πŸ“¦ **currentPage** (`Data`) - - πŸ“ Store for the current page. - - πŸ“¦ **taskLogs** (`Data`) - - πŸ“ Store for the logs of the currently selected task. - - Ζ’ **fetchPlugins** (`Function`) - - πŸ“ Fetches plugins from the API and updates the plugins store. - - Ζ’ **fetchTasks** (`Function`) - - πŸ“ Fetches tasks from the API and updates the tasks store. -- πŸ“¦ **toasts_module** (`Module`) - - πŸ“ Manages toast notifications using a Svelte writable store. - - πŸ—οΈ Layer: UI-State - - πŸ“¦ **toasts** (`Data`) - - πŸ“ Writable store containing the list of active toasts. 
- - Ζ’ **addToast** (`Function`) - - πŸ“ Adds a new toast message. - - Ζ’ **removeToast** (`Function`) - - πŸ“ Removes a toast message by ID. -- πŸ“¦ **api_module** (`Module`) - - πŸ“ Handles all communication with the backend API. - - πŸ—οΈ Layer: Infra-API - - Ζ’ **getWsUrl** (`Function`) - - πŸ“ Returns the WebSocket URL for a specific task, with fallback logic. - - Ζ’ **getAuthHeaders** (`Function`) - - πŸ“ Returns headers with Authorization if token exists. - - Ζ’ **fetchApi** (`Function`) - - πŸ“ Generic GET request wrapper. - - Ζ’ **postApi** (`Function`) - - πŸ“ Generic POST request wrapper. - - Ζ’ **requestApi** (`Function`) - - πŸ“ Generic request wrapper. - - πŸ“¦ **api** (`Data`) - - πŸ“ API client object with specific methods. -- πŸ“¦ **Utils** (`Module`) `[TRIVIAL]` - - πŸ“ General utility functions (class merging) - - πŸ—οΈ Layer: Infra - - Ζ’ **cn** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ—„οΈ **authStore** (`Store`) - - πŸ“ Manages the global authentication state on the frontend. - - πŸ—οΈ Layer: Feature - - πŸ“¦ **AuthState** (`Interface`) - - πŸ“ Defines the structure of the authentication state. - - Ζ’ **createAuthStore** (`Function`) - - πŸ“ Creates and configures the auth store with helper methods. - - Ζ’ **setToken** (`Function`) - - πŸ“ Updates the store with a new JWT token. - - Ζ’ **setUser** (`Function`) - - πŸ“ Sets the current user profile data. - - Ζ’ **logout** (`Function`) - - πŸ“ Clears authentication state and storage. - - Ζ’ **setLoading** (`Function`) - - πŸ“ Updates the loading state. -- πŸ“¦ **Debounce** (`Module`) `[TRIVIAL]` - - πŸ“ Debounce utility for limiting function execution rate - - πŸ—οΈ Layer: Infra - - Ζ’ **debounce** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ—„οΈ **assistantChat** (`Store`) - - πŸ“ Control assistant chat panel visibility and active conversation binding. 
- - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: conversationId persists while panel toggles unless explicitly reset. - - Ζ’ **toggleAssistantChat** (`Function`) - - πŸ“ Toggle assistant panel visibility. - - Ζ’ **openAssistantChat** (`Function`) - - πŸ“ Open assistant panel. - - Ζ’ **closeAssistantChat** (`Function`) - - πŸ“ Close assistant panel. - - Ζ’ **setAssistantConversationId** (`Function`) - - πŸ“ Bind current conversation id in UI state. -- πŸ—„οΈ **taskDrawer** (`Store`) `[CRITICAL]` - - πŸ“ Manage Task Drawer visibility and resource-to-task mapping - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: resourceTaskMap always reflects current task associations -- πŸ“¦ **taskDrawer** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/stores/taskDrawer.js - - πŸ—οΈ Layer: Unknown - - Ζ’ **openDrawerForTask** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **openDrawer** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **closeDrawer** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **updateResourceTask** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getTaskForResource** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ—„οΈ **sidebar** (`Store`) - - πŸ“ Manage sidebar visibility and navigation state - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: isExpanded state is always synced with localStorage -- πŸ“¦ **sidebar** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/stores/sidebar.js - - πŸ—οΈ Layer: Unknown - - Ζ’ **toggleSidebar** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **setActiveItem** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **setMobileOpen** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **closeMobile** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **toggleMobileSidebar** 
(`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ—„οΈ **activity** (`Store`) - - πŸ“ Track active task count for navbar indicator - - πŸ—οΈ Layer: UI - - πŸ”— DEPENDS_ON -> `WebSocket connection, taskDrawer store` -- πŸ“¦ **frontend.src.lib.stores.__tests__.test_sidebar** (`Module`) - - πŸ“ Unit tests for sidebar store - - πŸ—οΈ Layer: UI -- πŸ“¦ **frontend.src.lib.stores.__tests__.sidebar** (`Module`) - - πŸ“ Unit tests for sidebar store - - πŸ—οΈ Layer: Domain (Tests) - - πŸ”’ Invariant: Sidebar store transitions must be deterministic across desktop/mobile toggles. - - Ζ’ **test_sidebar_initial_state** (`Function`) - - πŸ“ Verify initial sidebar store values when no persisted state is available. - - Ζ’ **test_toggleSidebar** (`Function`) - - πŸ“ Verify desktop sidebar expansion toggles deterministically. - - Ζ’ **test_setActiveItem** (`Function`) - - Ζ’ **test_mobile_functions** (`Function`) -- πŸ“¦ **frontend.src.lib.stores.__tests__.test_activity** (`Module`) - - πŸ“ Unit tests for activity store - - πŸ—οΈ Layer: UI - - πŸ”— DEPENDS_ON -> `frontend.src.lib.stores.taskDrawer` -- πŸ“¦ **setupTests** (`Module`) - - πŸ“ Global test setup with mocks for SvelteKit modules - - πŸ—οΈ Layer: UI -- πŸ“¦ **frontend.src.lib.stores.__tests__.test_taskDrawer** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for task drawer store - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Store state transitions remain deterministic for open/close and task-status mapping. -- πŸ“¦ **frontend.src.lib.stores.__tests__.assistantChat** (`Module`) - - πŸ“ Validate assistant chat store visibility and conversation binding transitions. - - πŸ—οΈ Layer: UI Tests - - πŸ”’ Invariant: Each test starts from default closed state. - - πŸ”— DEPENDS_ON -> `assistantChatStore` - - Ζ’ **assistantChatStore_tests** (`Function`) - - πŸ“ Group store unit scenarios for assistant panel behavior. 
-- πŸ“¦ **navigation** (`Mock`) - - πŸ“ Mock for $app/navigation in tests -- πŸ“¦ **stores** (`Mock`) - - πŸ“ Mock for $app/stores in tests -- πŸ“¦ **environment** (`Mock`) - - πŸ“ Mock for $app/environment in tests -- πŸ“¦ **mock_env_public** (`Module`) - - πŸ“ Mock for $env/static/public SvelteKit module in vitest - - πŸ—οΈ Layer: UI (Tests) -- πŸ“¦ **frontend.src.lib.api.reports** (`Module`) `[CRITICAL]` - - πŸ“ Wrapper-based reports API client for list/detail retrieval without direct native fetch usage. - - πŸ—οΈ Layer: Infra - - πŸ”’ Invariant: Uses existing api wrapper methods and returns structured errors for UI-state mapping. - - πŸ”— DEPENDS_ON -> `[DEF:api_module]` - - Ζ’ **buildReportQueryString** (`Function`) - - πŸ“ Build query string for reports list endpoint from filter options. - - Ζ’ **normalizeApiError** (`Function`) - - πŸ“ Convert unknown API exceptions into deterministic UI-consumable error objects. - - Ζ’ **getReports** (`Function`) - - πŸ“ Fetch unified report list using existing request wrapper. - - Ζ’ **getReportDetail** (`Function`) - - πŸ“ Fetch one report detail by report_id. -- πŸ“¦ **frontend.src.lib.api.assistant** (`Module`) - - πŸ“ API client wrapper for assistant chat, confirmation actions, and history retrieval. - - πŸ—οΈ Layer: Infra-API - - πŸ”’ Invariant: All assistant requests must use requestApi wrapper (no native fetch). - - πŸ”— DEPENDS_ON -> `frontend.src.lib.api.api_module` - - Ζ’ **sendAssistantMessage** (`Function`) - - πŸ“ Send a user message to assistant orchestrator endpoint. - - Ζ’ **confirmAssistantOperation** (`Function`) - - πŸ“ Confirm a pending risky assistant operation. - - Ζ’ **cancelAssistantOperation** (`Function`) - - πŸ“ Cancel a pending risky assistant operation. - - Ζ’ **getAssistantHistory** (`Function`) - - πŸ“ Retrieve paginated assistant conversation history. 
- - Ζ’ **getAssistantConversations** (`Function`) - - πŸ“ Retrieve paginated conversation list for assistant sidebar/history switcher. -- πŸ“¦ **frontend.src.lib.api.__tests__.reports_api** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for reports API client functions: query string building, error normalization, and fetch wrappers. - - πŸ—οΈ Layer: Infra (Tests) - - πŸ”’ Invariant: Pure functions produce deterministic output. Async wrappers propagate structured errors. - - β„‚ **TestBuildReportQueryString** (`Class`) - - πŸ“ Validate query string construction from filter options. - - β„‚ **TestNormalizeApiError** (`Class`) - - πŸ“ Validate error normalization for UI-state mapping. - - β„‚ **TestGetReportsAsync** (`Class`) - - πŸ“ Validate getReports and getReportDetail with mocked api.fetchApi. -- 🧩 **Select** (`Component`) `[TRIVIAL]` - - πŸ“ Standardized dropdown selection component. - - πŸ—οΈ Layer: Atom - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `bindable` - - ➑️ WRITES_TO `props` -- πŸ“¦ **ui** (`Module`) `[TRIVIAL]` - - πŸ“ Central export point for standardized UI components. - - πŸ—οΈ Layer: Atom - - πŸ”’ Invariant: All components exported here must follow Semantic Protocol. -- 🧩 **PageHeader** (`Component`) `[TRIVIAL]` - - πŸ“ Standardized page header with title and action area. - - πŸ—οΈ Layer: Atom - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` -- 🧩 **Card** (`Component`) `[TRIVIAL]` - - πŸ“ Standardized container with padding and elevation. - - πŸ—οΈ Layer: Atom - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` -- 🧩 **Button** (`Component`) `[TRIVIAL]` - - πŸ“ Define component interface and default values (Svelte 5 Runes). - - πŸ—οΈ Layer: Atom - - πŸ”’ Invariant: Supports accessible labels and keyboard navigation. - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` -- 🧩 **Input** (`Component`) `[TRIVIAL]` - - πŸ“ Standardized text input component with label and error handling. 
- - πŸ—οΈ Layer: Atom - - πŸ”’ Invariant: Consistent spacing and focus states. - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `bindable` - - ➑️ WRITES_TO `props` -- 🧩 **LanguageSwitcher** (`Component`) `[TRIVIAL]` - - πŸ“ Dropdown component to switch between supported languages. - - πŸ—οΈ Layer: Atom - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `locale` -- πŸ“¦ **i18n** (`Module`) - - πŸ“ Determines the starting locale. - - πŸ—οΈ Layer: Infra - - πŸ”’ Invariant: Persistence is handled via LocalStorage. - - πŸ”— DEPENDS_ON -> `locales/ru.json` - - πŸ”— DEPENDS_ON -> `locales/en.json` - - πŸ—„οΈ **locale** (`Store`) - - πŸ“ Holds the current active locale string. - - πŸ—„οΈ **t** (`Store`) - - πŸ“ Derived store providing the translation dictionary. - - Ζ’ **_** (`Function`) - - πŸ“ Get translation by key path. -- 🧩 **AssistantChatPanel** (`Component`) `[CRITICAL]` - - πŸ“ Slide-out assistant chat panel for natural language command execution and task tracking. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Risky operations are executed only through explicit confirm action. - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `assistantChatStore` - - Ζ’ **loadHistory** (`Function`) - - πŸ“ Load current conversation history when panel becomes visible. - - Ζ’ **loadConversations** (`Function`) - - πŸ“ Load paginated conversation summaries for quick switching UI. - - Ζ’ **loadOlderMessages** (`Function`) - - πŸ“ Lazy-load older messages for active conversation when user scrolls to top. - - Ζ’ **appendLocalUserMessage** (`Function`) - - πŸ“ Add optimistic local user message before backend response. - - Ζ’ **appendAssistantResponse** (`Function`) - - πŸ“ Normalize and append assistant response payload to chat list. - - Ζ’ **handleSend** (`Function`) - - πŸ“ Submit user command to assistant orchestration API. - - Ζ’ **selectConversation** (`Function`) - - πŸ“ Switch active chat context to selected conversation item. 
- - Ζ’ **startNewConversation** (`Function`) - - πŸ“ Create local empty chat context that will be persisted on first message. - - Ζ’ **handleAction** (`Function`) - - πŸ“ Execute assistant action button behavior (open task/reports, confirm, cancel). - - Ζ’ **handleKeydown** (`Function`) - - πŸ“ Submit command by Enter while preserving multiline input with Shift+Enter. - - Ζ’ **stateClass** (`Function`) - - πŸ“ Map assistant state to visual badge style class. - - Ζ’ **handleHistoryScroll** (`Function`) - - πŸ“ Trigger lazy history fetch when user scroll reaches top boundary. -- πŸ“¦ **AssistantChatPanel** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/assistant/AssistantChatPanel.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **buildConversationTitle** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **setConversationFilter** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **formatConversationTime** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.lib.components.assistant.__tests__.assistant_confirmation_integration** (`Module`) - - πŸ“ Validate confirm/cancel UX contract bindings in assistant chat panel source. - - πŸ—οΈ Layer: UI Tests - - πŸ”’ Invariant: Confirm/cancel action handling must remain explicit and confirmation-id bound. - - Ζ’ **assistant_confirmation_contract_tests** (`Function`) - - πŸ“ Assert that confirmation UX flow and API bindings are preserved in chat panel. -- πŸ“¦ **frontend.src.lib.components.assistant.__tests__.assistant_chat_integration** (`Module`) - - πŸ“ Contract-level integration checks for assistant chat panel implementation and localization wiring. - - πŸ—οΈ Layer: UI Tests - - πŸ”’ Invariant: Critical assistant UX states and action hooks remain present in component source. - - Ζ’ **readJson** (`Function`) - - πŸ“ Read and parse JSON fixture file from disk. 
- - Ζ’ **assistant_chat_contract_tests** (`Function`) - - πŸ“ Validate assistant chat component contract and locale integration without DOM runtime dependency. -- 🧩 **ReportCard** (`Component`) `[CRITICAL]` - - πŸ“ Render one report with explicit textual type label and profile-driven visual variant. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Unknown task type always uses fallback profile. - - ⚑ Events: select - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `derived` -- πŸ“¦ **ReportCard** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/reports/ReportCard.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **getStatusClass** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getStatusLabel** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **formatDate** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **onSelect** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **ReportsList** (`Component`) `[CRITICAL]` - - πŸ“ Render unified list of normalized reports with canonical minimum fields. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Every rendered row shows task_type label, status, summary, and updated_at. - - ⚑ Events: select - - ➑️ WRITES_TO `props` -- πŸ“¦ **ReportsList** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/reports/ReportsList.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleSelect** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.lib.components.reports.reportTypeProfiles** (`Module`) `[CRITICAL]` - - πŸ“ Deterministic mapping from report task_type to visual profile with one fallback. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Unknown type always resolves to fallback profile. 
- - πŸ”— DEPENDS_ON -> `frontend/src/lib/i18n/index.ts` - - Ζ’ **getReportTypeProfile** (`Function`) - - πŸ“ Resolve visual profile by task type with guaranteed fallback. -- 🧩 **ReportDetailPanel** (`Component`) `[CRITICAL]` - - πŸ“ Display detailed report context with diagnostics and actionable recovery guidance. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Failed/partial reports surface actionable hints when available. - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` - - ⬅️ READS_FROM `t` -- πŸ“¦ **ReportDetailPanel** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/reports/ReportDetailPanel.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **notProvided** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **formatDate** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.lib.components.reports.__tests__.reports_filter_performance** (`Module`) - - πŸ“ Guard test for report filter responsiveness on moderate in-memory dataset. - - πŸ—οΈ Layer: UI (Tests) - - Ζ’ **applyFilters** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **makeDataset** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.lib.components.reports.__tests__.reports_page.integration** (`Module`) `[CRITICAL]` - - πŸ“ Integration-style checks for unified mixed-type reports rendering expectations. - - πŸ—οΈ Layer: UI (Tests) - - πŸ”’ Invariant: Mixed fixture includes all supported report types in one list. - - Ζ’ **collectVisibleTypeLabels** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_type_profiles** (`Module`) `[CRITICAL]` - - πŸ“ Validate report type profile mapping and unknown fallback behavior. - - πŸ—οΈ Layer: UI (Tests) - - πŸ”’ Invariant: Unknown task_type always resolves to the fallback profile. 
-- πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_card.ux** (`Module`) `[CRITICAL]` - - πŸ“ Test UX states and transitions for ReportCard component - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Each test asserts at least one observable UX contract outcome. -- πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_detail.ux** (`Module`) `[CRITICAL]` - - πŸ“ Test UX states and recovery for ReportDetailPanel component - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Detail UX tests keep placeholder-safe rendering and recovery visibility verifiable. -- πŸ“¦ **frontend.src.lib.components.reports.__tests__.report_detail.integration** (`Module`) `[CRITICAL]` - - πŸ“ Validate detail-panel behavior for failed reports and recovery guidance visibility. - - πŸ—οΈ Layer: UI (Tests) - - πŸ”’ Invariant: Failed report detail exposes actionable next actions when available. - - Ζ’ **buildFailedDetailFixture** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **reports.fixtures** (`Module`) - - πŸ“ Shared frontend fixtures for unified reports states. 
- - πŸ—οΈ Layer: UI -- 🧩 **Sidebar** (`Component`) `[CRITICAL]` - - πŸ“ Persistent left sidebar with resource categories navigation - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always shows active category and item - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `t` -- πŸ“¦ **Sidebar** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/layout/Sidebar.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **buildCategories** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleItemClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleCategoryToggle** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSubItemClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleToggleClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleOverlayClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **TopNavbar** (`Component`) `[CRITICAL]` - - πŸ“ Unified top navigation bar with Logo, Search, Activity, and User menu - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always visible on non-login pages - - ⚑ Events: activityClick - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `sidebarStore` -- πŸ“¦ **TopNavbar** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/layout/TopNavbar.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **toggleUserMenu** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **closeUserMenu** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleLogout** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleActivityClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleAssistantClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSearchFocus** (`Function`) 
`[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSearchBlur** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleDocumentClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleHamburgerClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **Breadcrumbs** (`Component`) - - πŸ“ Display page hierarchy navigation - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always shows current page path - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` -- πŸ“¦ **Breadcrumbs** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/layout/Breadcrumbs.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **getBreadcrumbs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **formatBreadcrumbLabel** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getCrumbMeta** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **TaskDrawer** (`Component`) `[CRITICAL]` - - πŸ“ Global task drawer for monitoring background operations - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Drawer shows logs for active task or remains closed - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `taskDrawerStore` - - ➑️ WRITES_TO `taskDrawerStore` - - Ζ’ **disconnectWebSocket** (`Function`) - - πŸ“ Disconnects the active WebSocket connection - - Ζ’ **loadRecentTasks** (`Function`) - - πŸ“ Load recent tasks for list mode display - - Ζ’ **selectTask** (`Function`) - - πŸ“ Select a task from list to view details - - Ζ’ **goBackToList** (`Function`) - - πŸ“ Return to task list view from task details -- πŸ“¦ **TaskDrawer** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/layout/TaskDrawer.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleClose** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **goToReportsPage** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected 
function (orphan) - - Ζ’ **handleGlobalKeydown** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **connectWebSocket** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **test_breadcrumbs.svelte** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/lib/components/layout/__tests__/test_breadcrumbs.svelte.js - - πŸ—οΈ Layer: Unknown - - Ζ’ **getBreadcrumbs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **formatBreadcrumbLabel** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **ErrorPage** (`Page`) - - πŸ“ Global error page displaying HTTP status and messages - - πŸ—οΈ Layer: UI -- πŸ“¦ **RootLayoutConfig** (`Module`) `[TRIVIAL]` - - πŸ“ Root layout configuration (SPA mode) - - πŸ—οΈ Layer: Infra -- πŸ“¦ **HomePage** (`Page`) `[CRITICAL]` - - πŸ“ Redirect to Dashboard Hub as per UX requirements - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always redirects to /dashboards -- Ζ’ **load** (`Function`) - - πŸ“ Loads initial plugin data for the dashboard. -- πŸ“¦ **layout** (`Module`) - - πŸ“ Bind global layout shell and conditional login/full-app rendering. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Login route bypasses shell; all other routes are wrapped by ProtectedRoute. 
-- πŸ“¦ **DatasetHub** (`Page`) `[CRITICAL]` - - πŸ“ Dataset Hub - Dedicated hub for datasets with mapping progress - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always shows environment selector and dataset grid -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/datasets/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **loadEnvironments** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadDatasets** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleEnvChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSearch** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handlePageChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handlePageSizeChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **updateSelectionState** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleCheckboxChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSelectAll** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSelectVisible** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleAction** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleBulkMapColumns** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleBulkGenerateDocs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleTaskStatusClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getTaskStatusIcon** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getMappingProgressClass** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **DatasetDetail** (`Page`) `[CRITICAL]` - - πŸ“ Dataset Detail View - Shows detailed dataset information 
with columns, SQL, and linked dashboards - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always shows dataset details when loaded -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/datasets/[id]/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **loadDatasetDetail** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **navigateToDashboard** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **goBack** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getColumnTypeClass** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getMappingProgress** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **UnifiedReportsPage** (`Component`) `[CRITICAL]` - - πŸ“ Unified reports page with filtering and resilient UX states for mixed task types. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: List state remains deterministic for active filter set. - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/reports/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **buildQuery** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadReports** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **hasActiveFilters** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **clearFilters** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **onFilterChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **onSelectReport** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **LoginPage** (`Component`) - - πŸ“ Provides the user interface for local and ADFS authentication. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Shows both local login form and ADFS SSO button. 
- - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - Ζ’ **handleLogin** (`Function`) - - πŸ“ Submits the local login form to the backend. - - Ζ’ **handleADFSLogin** (`Function`) - - πŸ“ Redirects the user to the ADFS login endpoint. -- πŸ“¦ **StorageIndexPage** (`Page`) `[TRIVIAL]` - - πŸ“ Redirect to the backups page as the default storage view. - - πŸ—οΈ Layer: Page - - πŸ”’ Invariant: Always redirects to /storage/backups. -- πŸ“¦ **StorageReposPage** (`Page`) - - Ζ’ **fetchEnvironments** (`Function`) - - πŸ“ Fetches the list of available environments. - - Ζ’ **fetchDashboards** (`Function`) - - πŸ“ Fetches dashboards for a specific environment. -- πŸ“¦ **DashboardHub** (`Page`) `[CRITICAL]` - - πŸ“ Dashboard Hub - Central hub for managing dashboards with Git status and task actions - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always shows environment selector and dashboard grid -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/dashboards/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleDocumentClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadEnvironments** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadDashboards** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleEnvChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSearch** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handlePageChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handlePageSizeChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **updateSelectionState** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleCheckboxChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSelectAll** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected 
function (orphan) - - Ζ’ **handleSelectVisible** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **toggleActionDropdown** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **closeActionDropdown** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleAction** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleValidate** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleTargetEnvChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadDatabases** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleMappingUpdate** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadDbMappings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleBulkMigrate** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleBulkBackup** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleTaskStatusClick** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **navigateToDashboardDetail** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getStatusBadgeClass** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **isGitBusy** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **setGitBusy** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **ensureGitConfigs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **updateDashboardGitState** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **refreshDashboardGitState** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleGitInit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleGitSync** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected 
function (orphan) - - Ζ’ **handleGitCommit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleGitPull** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleGitPush** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getTaskStatusIcon** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getPaginationRange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **DashboardDetail** (`Page`) `[CRITICAL]` - - πŸ“ Dashboard Detail View - Overview of charts and datasets linked to a dashboard - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Shows dashboard metadata, charts, and datasets for selected environment -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/dashboards/[id]/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **loadDashboardDetail** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **goBack** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **openDataset** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **formatDate** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **AdminRolesPage** (`Component`) - - πŸ“ UI for managing system roles and their permissions. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Only accessible by users with Admin role. - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` - - Ζ’ **loadData** (`Function`) - - πŸ“ Fetches roles and available permissions. - - Ζ’ **openCreateModal** (`Function`) - - πŸ“ Initializes state for creating a new role. - - Ζ’ **openEditModal** (`Function`) - - πŸ“ Initializes state for editing an existing role. - - Ζ’ **handleSaveRole** (`Function`) - - πŸ“ Submits role data (create or update). - - Ζ’ **handleDeleteRole** (`Function`) - - πŸ“ Deletes a role after confirmation. 
-- 🧩 **AdminUsersPage** (`Component`) - - πŸ“ UI for managing system users and their roles. - - πŸ—οΈ Layer: Feature - - πŸ”’ Invariant: Only accessible by users with "admin:users" permission. - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` - - Ζ’ **loadData** (`Function`) - - πŸ“ Fetches users and roles from the backend. - - Ζ’ **openCreateModal** (`Function`) - - πŸ“ Prepares the form for creating a new user. - - Ζ’ **openEditModal** (`Function`) - - πŸ“ Prepares the form for editing an existing user. - - Ζ’ **handleSaveUser** (`Function`) - - πŸ“ Submits user data to the backend (create or update). - - Ζ’ **handleDeleteUser** (`Function`) - - πŸ“ Deletes a user after confirmation. -- 🧩 **AdminSettingsPage** (`Component`) - - πŸ“ UI for configuring Active Directory Group to local Role mappings for ADFS SSO and logging settings. - - πŸ—οΈ Layer: Feature - - πŸ”’ Invariant: Only accessible by users with "admin:settings" permission. - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` - - Ζ’ **loadData** (`Function`) - - πŸ“ Fetches AD mappings and roles from the backend to populate the UI. - - Ζ’ **handleCreateMapping** (`Function`) - - πŸ“ Submits a new AD Group to Role mapping to the backend. - - Ζ’ **loadLoggingConfig** (`Function`) - - πŸ“ Fetches current logging configuration from the backend. - - Ζ’ **saveLoggingConfig** (`Function`) - - πŸ“ Saves logging configuration to the backend. -- 🧩 **LLMSettingsPage** (`Component`) - - πŸ“ Admin settings page for LLM provider configuration. 
- - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/admin/settings/llm/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **isMultimodalModel** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getProviderById** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **fetchProviders** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **saveSettings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **MigrationDashboard** (`Component`) - - πŸ“ Main dashboard for configuring and starting migrations. - - πŸ—οΈ Layer: Page - - πŸ”’ Invariant: Migration cannot start without source and target environments. - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `selectedTask` - - ➑️ WRITES_TO `selectedTask` - - Ζ’ **fetchEnvironments** (`Function`) - - πŸ“ Fetches the list of environments from the API. - - Ζ’ **fetchDashboards** (`Function`) - - πŸ“ Fetches dashboards for the selected source environment. - - Ζ’ **fetchDatabases** (`Function`) - - πŸ“ Fetches databases from both environments and gets suggestions. - - Ζ’ **handleMappingUpdate** (`Function`) - - πŸ“ Saves a mapping to the backend. - - Ζ’ **handleViewLogs** (`Function`) - - πŸ“ Opens the log viewer for a specific task. - - Ζ’ **handlePasswordPrompt** (`Function`) - - πŸ“ Reactive logic to show password prompt when a task is awaiting input. - - Ζ’ **handleResumeMigration** (`Function`) - - πŸ“ Resumes a migration task with provided passwords. - - Ζ’ **startMigration** (`Function`) - - πŸ“ Starts the migration process. - - 🧩 **DashboardSelectionSection** (`Component`) -- 🧩 **MappingManagement** (`Component`) - - πŸ“ Page for managing database mappings between environments. - - πŸ—οΈ Layer: Page - - πŸ”’ Invariant: Mappings are saved to the backend for persistence. 
- - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` - - Ζ’ **fetchEnvironments** (`Function`) - - πŸ“ Fetches the list of environments. - - Ζ’ **fetchDatabases** (`Function`) - - πŸ“ Fetches databases from both environments and gets suggestions. - - Ζ’ **handleUpdate** (`Function`) - - πŸ“ Saves a mapping to the backend. -- 🧩 **StoragePage** (`Component`) - - πŸ“ Main page for file storage management. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always displays tabs for Backups and Repositories. - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `page` - - Ζ’ **loadFiles** (`Function`) - - πŸ“ Fetches the list of files from the server. - - Ζ’ **handleDelete** (`Function`) - - πŸ“ Handles the file deletion process. - - Ζ’ **handleNavigate** (`Function`) - - πŸ“ Updates the current path and reloads files when navigating into a directory. - - Ζ’ **navigateUp** (`Function`) - - πŸ“ Navigates one level up in the directory structure. -- 🧩 **MapperPage** (`Component`) `[TRIVIAL]` - - πŸ“ Page for the dataset column mapper tool. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` -- 🧩 **DebugPage** (`Component`) `[TRIVIAL]` - - πŸ“ Page for system diagnostics and debugging. 
- - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` -- πŸ“¦ **SettingsPage** (`Page`) `[CRITICAL]` - - πŸ“ Consolidated Settings Page - All settings in one place with tabbed navigation - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Always shows tabbed interface with all settings categories -- πŸ“¦ **+page** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/routes/settings/+page.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **normalizeTab** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **readTabFromUrl** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **writeTabToUrl** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadSettings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **normalizeLlmSettings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **isMultimodalModel** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **getProviderById** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **isDashboardValidationBindingValid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleTabChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadMigrationSettings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **loadMappingsPage** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **onMappingsSearchInput** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **onMappingsFilterChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **goToMappingsPage** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **saveMigrationSettings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **triggerSyncNow** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSave** 
(`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleTestEnv** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **editEnv** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **resetEnvForm** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleAddOrUpdateEnv** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleDeleteEnv** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- Ζ’ **load** (`Function`) - - πŸ“ Loads application settings and environment list. -- 🧩 **ConnectionsSettingsPage** (`Component`) - - πŸ“ Page for managing database connection configurations. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - Ζ’ **handleSuccess** (`Function`) - - πŸ“ Refreshes the connection list after a successful creation. -- 🧩 **GitSettingsPage** (`Component`) - - πŸ“ Manage Git server configurations for dashboard versioning. - - πŸ—οΈ Layer: Page - - πŸ”’ Invariant: All configurations must be validated via connection test. - - ⬅️ READS_FROM `lib` - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` - - Ζ’ **loadConfigs** (`Function`) - - πŸ“ Fetches existing git configurations. - - Ζ’ **handleTest** (`Function`) - - πŸ“ Tests connection to a git server with current form data. - - Ζ’ **handleSave** (`Function`) - - πŸ“ Saves a new git configuration. - - Ζ’ **handleDelete** (`Function`) - - πŸ“ Deletes a git configuration by ID. -- 🧩 **GitDashboardPage** (`Component`) - - πŸ“ Dashboard management page for Git integration. - - πŸ—οΈ Layer: Page - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `t` - - Ζ’ **fetchEnvironments** (`Function`) - - πŸ“ Fetches the list of deployment environments from the API. - - Ζ’ **fetchDashboards** (`Function`) - - πŸ“ Fetches dashboards for a specific environment. -- 🧩 **Dashboard** (`Component`) - - πŸ“ Displays the list of available plugins and allows selecting one. 
- - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `plugins` - - Ζ’ **onMount** (`Function`) - - πŸ“ Fetch plugins when the component mounts. - - Ζ’ **selectPlugin** (`Function`) - - πŸ“ Selects a plugin to display its form. -- 🧩 **Settings** (`Component`) - - πŸ“ The main settings page for the application, allowing management of environments and global settings. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Settings changes must be saved to the backend. - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` - - Ζ’ **loadSettings** (`Function`) - - πŸ“ Loads settings from the backend. - - Ζ’ **handleSaveGlobal** (`Function`) - - πŸ“ Saves global settings to the backend. - - Ζ’ **handleAddOrUpdateEnv** (`Function`) - - πŸ“ Adds or updates an environment. - - Ζ’ **handleDeleteEnv** (`Function`) - - πŸ“ Deletes an environment. - - Ζ’ **handleTestEnv** (`Function`) - - πŸ“ Tests the connection to an environment. - - Ζ’ **editEnv** (`Function`) - - πŸ“ Sets the form to edit an existing environment. - - Ζ’ **resetEnvForm** (`Function`) - - πŸ“ Resets the environment form. -- Ζ’ **getConnections** (`Function`) - - πŸ“ Fetch a list of saved connections. -- Ζ’ **createConnection** (`Function`) - - πŸ“ Create a new connection configuration. -- Ζ’ **deleteConnection** (`Function`) - - πŸ“ Delete a connection configuration. -- πŸ“¦ **GitServiceClient** (`Module`) - - πŸ“ API client for Git operations, managing the communication between frontend and backend. - - πŸ—οΈ Layer: Service - - πŸ“¦ **gitService** (`Action`) - - πŸ“ Retrieves the diff for specific files or the whole repository. -- Ζ’ **runTask** (`Function`) - - πŸ“ Start a new task for a given plugin. -- Ζ’ **getTaskStatus** (`Function`) - - πŸ“ Fetch details for a specific task (to poll status or get result). -- πŸ“¦ **adminService** (`Module`) - - πŸ“ Service for Admin-related API calls (User and Role management). 
- - πŸ—οΈ Layer: Service - - πŸ”’ Invariant: All requests must include valid Admin JWT token (handled by api client). - - πŸ”— DEPENDS_ON -> `frontend.src.lib.api` - - Ζ’ **getUsers** (`Function`) - - πŸ“ Fetches all registered users from the backend. - - Ζ’ **createUser** (`Function`) - - πŸ“ Creates a new local user. - - Ζ’ **getRoles** (`Function`) - - πŸ“ Fetches all available system roles. - - Ζ’ **getADGroupMappings** (`Function`) - - πŸ“ Fetches mappings between AD groups and local roles. - - Ζ’ **createADGroupMapping** (`Function`) - - πŸ“ Creates or updates an AD group to Role mapping. - - Ζ’ **updateUser** (`Function`) - - πŸ“ Updates an existing user. - - Ζ’ **deleteUser** (`Function`) - - πŸ“ Deletes a user. - - Ζ’ **createRole** (`Function`) - - πŸ“ Creates a new role. - - Ζ’ **updateRole** (`Function`) - - πŸ“ Updates an existing role. - - Ζ’ **deleteRole** (`Function`) - - πŸ“ Deletes a role. - - Ζ’ **getPermissions** (`Function`) - - πŸ“ Fetches all available permissions. - - Ζ’ **getLoggingConfig** (`Function`) - - πŸ“ Fetches current logging configuration. - - Ζ’ **updateLoggingConfig** (`Function`) - - πŸ“ Updates logging configuration. -- Ζ’ **getTasks** (`Function`) - - πŸ“ Fetch a list of tasks with pagination and optional status filter. -- Ζ’ **getTask** (`Function`) - - πŸ“ Fetch details for a specific task. -- Ζ’ **getTaskLogs** (`Function`) - - πŸ“ Fetch logs for a specific task. -- Ζ’ **resumeTask** (`Function`) - - πŸ“ Resume a task that is awaiting input (e.g., passwords). -- Ζ’ **resolveTask** (`Function`) - - πŸ“ Resolve a task that is awaiting mapping. -- Ζ’ **clearTasks** (`Function`) - - πŸ“ Clear tasks based on status. -- πŸ“¦ **storageService** (`Module`) - - πŸ“ Frontend API client for file storage management. - - πŸ—οΈ Layer: Service - - Ζ’ **getStorageAuthHeaders** (`Function`) - - πŸ“ Returns headers with Authorization for storage API calls. 
- - Ζ’ **listFiles** (`Function`) - - πŸ“ Fetches the list of files for a given category and subpath. - - Ζ’ **uploadFile** (`Function`) - - πŸ“ Uploads a file to the storage system. - - Ζ’ **deleteFile** (`Function`) - - πŸ“ Deletes a file or directory from storage. - - Ζ’ **downloadFileUrl** (`Function`) - - πŸ“ Returns the URL for downloading a file. -- 🧩 **PasswordPrompt** (`Component`) - - πŸ“ A modal component to prompt the user for database passwords when a migration task is paused. - - πŸ—οΈ Layer: UI - - ⚑ Events: cancel, resume - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `effect` - - Ζ’ **handleSubmit** (`Function`) - - πŸ“ Validates and dispatches the passwords to resume the task. - - Ζ’ **handleCancel** (`Function`) - - πŸ“ Cancels the password prompt. -- 🧩 **MappingTable** (`Component`) - - πŸ“ Displays and allows editing of database mappings. - - πŸ—οΈ Layer: Feature - - πŸ”’ Invariant: Each source database can be mapped to one target database. - - ⚑ Events: update - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `t` - - Ζ’ **updateMapping** (`Function`) - - πŸ“ Updates a mapping for a specific source database. - - Ζ’ **getSuggestion** (`Function`) - - πŸ“ Finds a suggestion for a source database. -- 🧩 **TaskLogViewer** (`Component`) `[CRITICAL]` - - πŸ“ Displays detailed logs for a specific task inline or in a modal using TaskLogPanel. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Real-time logs are always appended without duplicates. 
- - ⚑ Events: close - - ➑️ WRITES_TO `bindable` - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - πŸ“¦ **handleRealTimeLogs** (`Action`) - - πŸ“ Sync real-time logs to the current log list - - Ζ’ **fetchLogs** (`Function`) - - πŸ“ Fetches logs for a given task ID - - Ζ’ **handleFilterChange** (`Function`) - - πŸ“ Updates filter conditions for the log viewer - - Ζ’ **handleRefresh** (`Function`) - - πŸ“ Refreshes the logs by polling the API - - 🧩 **showInline** (`Component`) - - πŸ“ Shows inline logs - - πŸ—οΈ Layer: UI - - 🧩 **showModal** (`Component`) - - πŸ“ Shows modal logs - - πŸ—οΈ Layer: UI -- 🧩 **Footer** (`Component`) `[TRIVIAL]` - - πŸ“ Displays the application footer with copyright information. - - πŸ—οΈ Layer: UI -- 🧩 **MissingMappingModal** (`Component`) - - πŸ“ Prompts the user to provide a database mapping when one is missing during migration. - - πŸ—οΈ Layer: Feature - - πŸ”’ Invariant: Modal blocks migration progress until resolved or cancelled. - - ⚑ Events: cancel, resolve - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - Ζ’ **resolve** (`Function`) - - πŸ“ Dispatches the resolution event with the selected mapping. - - Ζ’ **cancel** (`Function`) - - πŸ“ Cancels the mapping resolution modal. -- 🧩 **DashboardGrid** (`Component`) - - πŸ“ Displays a grid of dashboards with selection and pagination. - - πŸ—οΈ Layer: Component - - πŸ”’ Invariant: Selected IDs must be a subset of available dashboards. - - ⚑ Events: selectionChanged - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ➑️ WRITES_TO `derived` - - Ζ’ **handleValidate** (`Function`) - - πŸ“ Triggers dashboard validation task. - - Ζ’ **handleSort** (`Function`) - - πŸ“ Toggles sort direction or changes sort column. - - Ζ’ **handleSelectionChange** (`Function`) - - πŸ“ Handles individual checkbox changes. - - Ζ’ **handleSelectAll** (`Function`) - - πŸ“ Handles select all checkbox. - - Ζ’ **goToPage** (`Function`) - - πŸ“ Changes current page. 
- - Ζ’ **openGit** (`Function`) - - πŸ“ Opens the Git management modal for a dashboard. -- 🧩 **Navbar** (`Component`) - - πŸ“ Main navigation bar for the application. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `lib` - - ➑️ WRITES_TO `page` -- πŸ“¦ **Navbar** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/Navbar.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleLogout** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **TaskHistory** (`Component`) - - πŸ“ Displays a list of recent tasks with their status and allows selecting them for viewing logs. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `selectedTask` - - ➑️ WRITES_TO `selectedTask` - - ⬅️ READS_FROM `t` - - Ζ’ **fetchTasks** (`Function`) - - πŸ“ Fetches the list of recent tasks from the API. - - Ζ’ **clearTasks** (`Function`) - - πŸ“ Clears tasks from the history, optionally filtered by status. - - Ζ’ **selectTask** (`Function`) - - πŸ“ Selects a task and fetches its full details. - - Ζ’ **getStatusColor** (`Function`) - - πŸ“ Returns the CSS color class for a given task status. - - Ζ’ **onMount** (`Function`) - - πŸ“ Initializes the component by fetching tasks and starting polling. - - Ζ’ **onDestroy** (`Function`) - - πŸ“ Cleans up the polling interval when the component is destroyed. -- 🧩 **Toast** (`Component`) `[TRIVIAL]` - - πŸ“ Displays transient notifications (toasts) in the bottom-right corner. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `toasts` -- 🧩 **TaskRunner** (`Component`) - - πŸ“ Connects to a WebSocket to display real-time logs for a running task with filtering support. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `t` - - ⬅️ READS_FROM `selectedTask` - - ➑️ WRITES_TO `t` - - Ζ’ **connect** (`Function`) - - πŸ“ Establishes WebSocket connection with exponential backoff and filter parameters. - - Ζ’ **handleFilterChange** (`Function`) - - πŸ“ Handles filter changes and reconnects WebSocket with new parameters. 
- - Ζ’ **fetchTargetDatabases** (`Function`) - - πŸ“ Fetches available databases from target environment for mapping. - - Ζ’ **handleMappingResolve** (`Function`) - - πŸ“ Resolves missing database mapping and continues migration. - - Ζ’ **handlePasswordResume** (`Function`) - - πŸ“ Submits passwords and resumes paused migration task. - - Ζ’ **startDataTimeout** (`Function`) - - πŸ“ Starts timeout timer to detect idle connection. - - Ζ’ **resetDataTimeout** (`Function`) - - πŸ“ Resets data timeout timer when new data arrives. - - Ζ’ **onMount** (`Function`) - - πŸ“ Initializes WebSocket connection when component mounts. - - Ζ’ **onDestroy** (`Function`) -- 🧩 **TaskList** (`Component`) - - πŸ“ Displays a list of tasks with their status and execution details. - - πŸ—οΈ Layer: Component - - ⚑ Events: select - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` - - Ζ’ **getStatusColor** (`Function`) - - πŸ“ Returns the CSS color class for a given task status. - - Ζ’ **formatTime** (`Function`) - - πŸ“ Formats a date string using date-fns. - - Ζ’ **handleTaskClick** (`Function`) - - πŸ“ Dispatches a select event when a task is clicked. -- 🧩 **DynamicForm** (`Component`) - - πŸ“ Generates a form dynamically based on a JSON schema. - - πŸ—οΈ Layer: UI - - ⚑ Events: submit - - ➑️ WRITES_TO `props` - - Ζ’ **handleSubmit** (`Function`) - - πŸ“ Dispatches the submit event with the form data. - - Ζ’ **initializeForm** (`Function`) - - πŸ“ Initialize form data with default values from the schema. -- 🧩 **EnvSelector** (`Component`) - - πŸ“ Provides a UI component for selecting source and target environments. - - πŸ—οΈ Layer: Feature - - πŸ”’ Invariant: Source and target environments must be selectable from the list of configured environments. - - ⚑ Events: change - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `t` - - Ζ’ **handleSelect** (`Function`) - - πŸ“ Dispatches the selection change event. 
-- 🧩 **ProtectedRoute** (`Component`) `[TRIVIAL]` - - πŸ“ Wraps content to ensure only authenticated users can access it. - - πŸ—οΈ Layer: Component - - πŸ”’ Invariant: Redirects to /login if user is not authenticated. - - ⬅️ READS_FROM `app` - - ⬅️ READS_FROM `auth` -- 🧩 **TaskLogPanel** (`Component`) - - πŸ“ Combines log filtering and display into a single cohesive dark-themed panel. - - πŸ—οΈ Layer: UI - - πŸ”’ Invariant: Must always display logs in chronological order and respect auto-scroll preference. - - ⚑ Events: filterChange - - ➑️ WRITES_TO `bindable` - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` -- πŸ“¦ **TaskLogPanel** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/tasks/TaskLogPanel.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **filterLogs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleFilterChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **scrollToBottom** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **toggleAutoScroll** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **LogFilterBar** (`Component`) - - πŸ“ Compact filter toolbar for logs β€” level, source, and text search in a single dense row. 
- - πŸ—οΈ Layer: UI - - ➑️ WRITES_TO `bindable` - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `derived` -- πŸ“¦ **LogFilterBar** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/tasks/LogFilterBar.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleLevelChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSourceChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSearchChange** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **clearFilters** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **LogEntryRow** (`Component`) - - πŸ“ Renders a single log entry with stacked layout optimized for narrow drawer panels. - - πŸ—οΈ Layer: UI - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `derived` - - Ζ’ **formatTime** (`Function`) - - πŸ“ Format ISO timestamp to HH:MM:SS */ -- πŸ“¦ **TaskResultPanel** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/tasks/TaskResultPanel.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **statusColor** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.components.__tests__.task_log_viewer** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for TaskLogViewer component by mounting it and observing the DOM. - - πŸ—οΈ Layer: UI (Tests) - - πŸ”’ Invariant: Duplicate logs are never appended. Polling only active for in-progress tasks. -- 🧩 **FileList** (`Component`) - - πŸ“ Displays a table of files with metadata and actions. - - πŸ—οΈ Layer: UI - - ⚑ Events: delete, navigate - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` - - Ζ’ **isDirectory** (`Function`) - - πŸ“ Checks if a file object represents a directory. - - Ζ’ **formatSize** (`Function`) - - πŸ“ Formats file size in bytes into a human-readable string. - - Ζ’ **formatDate** (`Function`) - - πŸ“ Formats an ISO date string into a localized readable format. 
-- 🧩 **FileUpload** (`Component`) - - πŸ“ Provides a form for uploading files to a specific category. - - πŸ—οΈ Layer: UI - - ⚑ Events: uploaded - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `t` - - Ζ’ **handleUpload** (`Function`) - - πŸ“ Handles the file upload process. - - Ζ’ **handleDrop** (`Function`) - - πŸ“ Handles the file drop event for drag-and-drop. -- 🧩 **ConnectionForm** (`Component`) - - πŸ“ UI component for creating a new database connection configuration. - - πŸ—οΈ Layer: UI - - ⚑ Events: success - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` - - Ζ’ **handleSubmit** (`Function`) - - πŸ“ Submits the connection form to the backend. - - Ζ’ **resetForm** (`Function`) - - πŸ“ Resets the connection form fields to their default values. -- 🧩 **ConnectionList** (`Component`) - - πŸ“ UI component for listing and deleting saved database connection configurations. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` - - Ζ’ **fetchConnections** (`Function`) - - πŸ“ Fetches the list of connections from the backend. - - Ζ’ **handleDelete** (`Function`) - - πŸ“ Deletes a connection configuration. -- 🧩 **MapperTool** (`Component`) - - πŸ“ UI component for mapping dataset column verbose names using the MapperPlugin. - - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` - - Ζ’ **fetchData** (`Function`) - - πŸ“ Fetches environments and saved connections. - - Ζ’ **handleRunMapper** (`Function`) - - πŸ“ Triggers the MapperPlugin task. - - Ζ’ **handleGenerateDocs** (`Function`) - - πŸ“ Triggers the LLM Documentation task. -- πŸ“¦ **MapperTool** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/tools/MapperTool.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleApplyDoc** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **DebugTool** (`Component`) - - πŸ“ UI component for system diagnostics and debugging API responses. 
- - πŸ—οΈ Layer: UI - - ⬅️ READS_FROM `t` - - ➑️ WRITES_TO `t` - - Ζ’ **fetchEnvironments** (`Function`) - - πŸ“ Fetches available environments. - - Ζ’ **handleRunDebug** (`Function`) - - πŸ“ Triggers the debug task. - - Ζ’ **startPolling** (`Function`) - - πŸ“ Polls for task completion. -- 🧩 **CommitHistory** (`Component`) - - πŸ“ Displays the commit history for a specific dashboard. - - πŸ—οΈ Layer: Component - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `t` - - Ζ’ **onMount** (`Function`) - - πŸ“ Load history when component is mounted. - - Ζ’ **loadHistory** (`Function`) - - πŸ“ Fetch commit history from the backend. -- 🧩 **DeploymentModal** (`Component`) - - πŸ“ Modal for deploying a dashboard to a target environment. - - πŸ—οΈ Layer: Component - - πŸ”’ Invariant: Cannot deploy without a selected environment. - - ⚑ Events: deploy - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `effect` - - πŸ“¦ **loadStatus** (`Watcher`) - - Ζ’ **loadEnvironments** (`Function`) - - πŸ“ Fetch available environments from API. - - Ζ’ **handleDeploy** (`Function`) - - πŸ“ Trigger deployment to selected environment. -- 🧩 **ConflictResolver** (`Component`) - - πŸ“ UI for resolving merge conflicts (Keep Mine / Keep Theirs). - - πŸ—οΈ Layer: Component - - πŸ”’ Invariant: User must resolve all conflicts before saving. - - ⚑ Events: resolve - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - Ζ’ **resolve** (`Function`) - - πŸ“ Set resolution strategy for a file. - - Ζ’ **handleSave** (`Function`) - - πŸ“ Validate and submit resolutions. -- 🧩 **CommitModal** (`Component`) - - πŸ“ МодальноС ΠΎΠΊΠ½ΠΎ для создания ΠΊΠΎΠΌΠΌΠΈΡ‚Π° с просмотром ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΉ (diff). - - πŸ—οΈ Layer: Component - - ⚑ Events: commit - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `t` - - Ζ’ **handleGenerateMessage** (`Function`) - - πŸ“ Generates a commit message using LLM. 
- - Ζ’ **loadStatus** (`Function`) - - πŸ“ Loads the current repository status and diff. - - Ζ’ **handleCommit** (`Function`) - - πŸ“ Creates a commit with the given message. -- 🧩 **BranchSelector** (`Component`) - - πŸ“ UI for selecting and creating Git branches. - - πŸ—οΈ Layer: Component - - ⚑ Events: change - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `t` - - Ζ’ **onMount** (`Function`) - - πŸ“ Load branches when component is mounted. - - Ζ’ **loadBranches** (`Function`) - - πŸ“ Loads the list of branches for a dashboard. - - Ζ’ **handleSelect** (`Function`) - - πŸ“ Handles branch selection from dropdown. - - Ζ’ **handleCheckout** (`Function`) - - πŸ“ Checks out the selected branch. - - Ζ’ **handleCreate** (`Function`) - - πŸ“ Creates a new branch. -- 🧩 **GitManager** (`Component`) - - πŸ“ Central component for managing Git operations of a specific dashboard. - - πŸ—οΈ Layer: Component - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `state` - - ⬅️ READS_FROM `t` - - Ζ’ **checkStatus** (`Function`) - - πŸ“ Checks whether the repository is initialized for this dashboard. - - Ζ’ **handleInit** (`Function`) - - πŸ“ Initializes the repository for a dashboard. - - Ζ’ **handleSync** (`Function`) - - πŸ“ Synchronizes the Superset state with the local Git repository. - - Ζ’ **handlePush** (`Function`) - - πŸ“ Pushes local commits to the remote repository. - - Ζ’ **handlePull** (`Function`) - - πŸ“ Pulls changes from the remote repository. -- 🧩 **DocPreview** (`Component`) - - πŸ“ UI component for previewing generated dataset documentation before saving. 
- - πŸ—οΈ Layer: UI - - ➑️ WRITES_TO `props` - - ➑️ WRITES_TO `derived` - - ➑️ WRITES_TO `state` -- πŸ“¦ **DocPreview** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/llm/DocPreview.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **handleSave** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- 🧩 **ProviderConfig** (`Component`) - - πŸ“ UI form for managing LLM provider configurations. - - πŸ—οΈ Layer: UI - - πŸ“₯ Props: providers: any, onSave: any - - ➑️ WRITES_TO `t` - - ⬅️ READS_FROM `t` -- πŸ“¦ **ProviderConfig** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/llm/ProviderConfig.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **isMultimodalModel** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **resetForm** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleEdit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **testConnection** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **handleSubmit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **toggleActive** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **ValidationReport** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for frontend/src/components/llm/ValidationReport.svelte - - πŸ—οΈ Layer: Unknown - - Ζ’ **getStatusColor** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **frontend.src.components.llm.__tests__.provider_config_integration** (`Module`) - - πŸ“ Protect edit-button interaction contract in LLM provider settings UI. - - πŸ—οΈ Layer: UI Tests - - πŸ”’ Invariant: Edit action keeps explicit click handler and opens normalized edit form. - - Ζ’ **provider_config_edit_contract_tests** (`Function`) - - πŸ“ Validate edit button handler wiring and normalized edit form state mapping. 
-- πŸ“¦ **test_auth_debug** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/test_auth_debug.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **main** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.delete_running_tasks** (`Module`) - - πŸ“ Script to delete tasks with RUNNING status from the database. - - πŸ—οΈ Layer: Utility - - Ζ’ **delete_running_tasks** (`Function`) - - πŸ“ Delete all tasks with RUNNING status from the database. -- πŸ“¦ **AppModule** (`Module`) `[CRITICAL]` - - πŸ“ The main entry point for the FastAPI application. It initializes the app, configures CORS, sets up dependencies, includes API routers, and defines the WebSocket endpoint for log streaming. - - πŸ—οΈ Layer: UI (API) - - πŸ”’ Invariant: All WebSocket connections must be properly cleaned up on disconnect. - - πŸ“¦ **App** (`Global`) - - πŸ“ The global FastAPI application instance. - - Ζ’ **startup_event** (`Function`) - - πŸ“ Handles application startup tasks, such as starting the scheduler. - - Ζ’ **shutdown_event** (`Function`) - - πŸ“ Handles application shutdown tasks, such as stopping the scheduler. - - Ζ’ **network_error_handler** (`Function`) - - πŸ“ Global exception handler for NetworkError. - - Ζ’ **log_requests** (`Function`) - - πŸ“ Middleware to log incoming HTTP requests and their response status. - - πŸ“¦ **api.include_routers** (`Action`) - - πŸ“ Registers all API routers with the FastAPI application. - - πŸ—οΈ Layer: API - - Ζ’ **websocket_endpoint** (`Function`) `[CRITICAL]` - - πŸ“ Provides a WebSocket endpoint for real-time log streaming of a task with server-side filtering. - - πŸ“¦ **StaticFiles** (`Mount`) - - πŸ“ Mounts the frontend build directory to serve static assets. - - Ζ’ **serve_spa** (`Function`) - - πŸ“ Serves the SPA frontend for any path not matched by API routes. - - Ζ’ **read_root** (`Function`) - - πŸ“ A simple root endpoint to confirm that the API is running when frontend is missing. 
- - Ζ’ **matches_filters** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **Dependencies** (`Module`) - - πŸ“ Manages creation and provision of shared application dependencies, such as PluginLoader and TaskManager, to avoid circular imports. - - πŸ—οΈ Layer: Core - - Ζ’ **get_config_manager** (`Function`) - - πŸ“ Dependency injector for ConfigManager. - - Ζ’ **get_plugin_loader** (`Function`) - - πŸ“ Dependency injector for PluginLoader. - - Ζ’ **get_task_manager** (`Function`) - - πŸ“ Dependency injector for TaskManager. - - Ζ’ **get_scheduler_service** (`Function`) - - πŸ“ Dependency injector for SchedulerService. - - Ζ’ **get_resource_service** (`Function`) - - πŸ“ Dependency injector for ResourceService. - - Ζ’ **get_mapping_service** (`Function`) - - πŸ“ Dependency injector for MappingService. - - πŸ“¦ **oauth2_scheme** (`Variable`) - - πŸ“ OAuth2 password bearer scheme for token extraction. - - Ζ’ **get_current_user** (`Function`) - - πŸ“ Dependency for retrieving currently authenticated user from a JWT. - - Ζ’ **has_permission** (`Function`) - - πŸ“ Dependency for checking if the current user has a specific permission. - - Ζ’ **permission_checker** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.scripts.seed_superset_load_test** (`Module`) - - πŸ“ Creates randomized load-test data in Superset by cloning chart configurations and creating dashboards in target environments. - - πŸ—οΈ Layer: Scripts - - πŸ”’ Invariant: Created chart and dashboard names are globally unique for one script run. - - Ζ’ **_parse_args** (`Function`) - - πŸ“ Parses CLI arguments for load-test data generation. - - Ζ’ **_extract_result_payload** (`Function`) - - πŸ“ Normalizes Superset API payloads that may be wrapped in `result`. - - Ζ’ **_extract_created_id** (`Function`) - - πŸ“ Extracts object ID from create/update API response. 
- - Ζ’ **_generate_unique_name** (`Function`) - - πŸ“ Generates globally unique random names for charts/dashboards. - - Ζ’ **_resolve_target_envs** (`Function`) - - πŸ“ Resolves requested environment IDs from configuration. - - Ζ’ **_build_chart_template_pool** (`Function`) - - πŸ“ Builds a pool of source chart templates to clone in one environment. - - Ζ’ **seed_superset_load_data** (`Function`) - - πŸ“ Creates dashboards and cloned charts for load testing across target environments. - - Ζ’ **main** (`Function`) - - πŸ“ CLI entrypoint for Superset load-test data seeding. -- πŸ“¦ **test_dataset_dashboard_relations** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/src/scripts/test_dataset_dashboard_relations.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **test_dashboard_dataset_relations** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.scripts.migrate_sqlite_to_postgres** (`Module`) - - πŸ“ Migrates legacy config and task history from SQLite/file storage to PostgreSQL. - - πŸ—οΈ Layer: Scripts - - πŸ”’ Invariant: Script is idempotent for task_records and app_configurations. - - πŸ“¦ **Constants** (`Section`) - - Ζ’ **_json_load_if_needed** (`Function`) - - πŸ“ Parses JSON-like values from SQLite TEXT/JSON columns to Python objects. - - Ζ’ **_find_legacy_config_path** (`Function`) - - πŸ“ Resolves the existing legacy config.json path from candidates. - - Ζ’ **_connect_sqlite** (`Function`) - - πŸ“ Opens a SQLite connection with row factory. - - Ζ’ **_ensure_target_schema** (`Function`) - - πŸ“ Ensures required PostgreSQL tables exist before migration. - - Ζ’ **_migrate_config** (`Function`) - - πŸ“ Migrates legacy config.json into app_configurations(global). - - Ζ’ **_migrate_tasks_and_logs** (`Function`) - - πŸ“ Migrates task_records and task_logs from SQLite into PostgreSQL. - - Ζ’ **run_migration** (`Function`) - - πŸ“ Orchestrates migration from SQLite/file to PostgreSQL. 
- - Ζ’ **main** (`Function`) - - πŸ“ CLI entrypoint. -- πŸ“¦ **backend.src.scripts.seed_permissions** (`Module`) - - πŸ“ Populates the auth database with initial system permissions. - - πŸ—οΈ Layer: Scripts - - πŸ”’ Invariant: Safe to run multiple times (idempotent). - - πŸ“¦ **INITIAL_PERMISSIONS** (`Constant`) - - Ζ’ **seed_permissions** (`Function`) - - πŸ“ Inserts missing permissions into the database. -- πŸ“¦ **backend.src.scripts.init_auth_db** (`Module`) - - πŸ“ Initializes the auth database and creates the necessary tables. - - πŸ—οΈ Layer: Scripts - - πŸ”’ Invariant: Safe to run multiple times (idempotent). - - πŸ”— CALLS -> `backend.src.core.database.init_db` - - Ζ’ **run_init** (`Function`) - - πŸ“ Main entry point for the initialization script. -- πŸ“¦ **backend.src.scripts.create_admin** (`Module`) - - πŸ“ CLI tool for creating the initial admin user. - - πŸ—οΈ Layer: Scripts - - πŸ”’ Invariant: Admin user must have the "Admin" role. - - Ζ’ **create_admin** (`Function`) - - πŸ“ Creates an admin user and necessary roles/permissions. -- πŸ“¦ **backend.src.schemas.auth** (`Module`) - - πŸ“ Pydantic schemas for authentication requests and responses. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: Sensitive fields like password must not be included in response schemas. - - πŸ”— DEPENDS_ON -> `pydantic` - - β„‚ **Token** (`Class`) `[TRIVIAL]` - - πŸ“ Represents a JWT access token response. - - β„‚ **TokenData** (`Class`) `[TRIVIAL]` - - πŸ“ Represents the data encoded in a JWT token. - - β„‚ **PermissionSchema** (`Class`) `[TRIVIAL]` - - πŸ“ Represents a permission in API responses. - - β„‚ **RoleSchema** (`Class`) - - πŸ“ Represents a role in API responses. - - β„‚ **RoleCreate** (`Class`) - - πŸ“ Schema for creating a new role. - - β„‚ **RoleUpdate** (`Class`) - - πŸ“ Schema for updating an existing role. - - β„‚ **ADGroupMappingSchema** (`Class`) - - πŸ“ Represents an AD Group to Role mapping in API responses. 
- - β„‚ **ADGroupMappingCreate** (`Class`) - - πŸ“ Schema for creating an AD Group mapping. - - β„‚ **UserBase** (`Class`) - - πŸ“ Base schema for user data. - - β„‚ **UserCreate** (`Class`) - - πŸ“ Schema for creating a new user. - - β„‚ **UserUpdate** (`Class`) - - πŸ“ Schema for updating an existing user. - - β„‚ **User** (`Class`) - - πŸ“ Schema for user data in API responses. -- πŸ“¦ **backend.src.core.superset_client** (`Module`) - - πŸ“ ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ высокоуровнСвый ΠΊΠ»ΠΈΠ΅Π½Ρ‚ для взаимодСйствия с Superset REST API, инкапсулируя Π»ΠΎΠ³ΠΈΠΊΡƒ запросов, ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΡƒ ошибок ΠΈ ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΡŽ. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: All network operations must use the internal APIClient instance. - - β„‚ **SupersetClient** (`Class`) - - πŸ“ Класс-ΠΎΠ±Ρ‘Ρ€Ρ‚ΠΊΠ° Π½Π°Π΄ Superset REST API, ΠΏΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΡŽΡ‰ΠΈΠΉ ΠΌΠ΅Ρ‚ΠΎΠ΄Ρ‹ для Ρ€Π°Π±ΠΎΡ‚Ρ‹ с Π΄Π°ΡˆΠ±ΠΎΡ€Π΄Π°ΠΌΠΈ ΠΈ датасСтами. - - Ζ’ **__init__** (`Function`) - - πŸ“ Π˜Π½ΠΈΡ†ΠΈΠ°Π»ΠΈΠ·ΠΈΡ€ΡƒΠ΅Ρ‚ ΠΊΠ»ΠΈΠ΅Π½Ρ‚, провСряСт ΠΊΠΎΠ½Ρ„ΠΈΠ³ΡƒΡ€Π°Ρ†ΠΈΡŽ ΠΈ создаСт сСтСвой ΠΊΠ»ΠΈΠ΅Π½Ρ‚. - - Ζ’ **authenticate** (`Function`) - - πŸ“ Authenticates the client using the configured credentials. - - Ζ’ **headers** (`Function`) - - πŸ“ Π’ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅Ρ‚ Π±Π°Π·ΠΎΠ²Ρ‹Π΅ HTTP-Π·Π°Π³ΠΎΠ»ΠΎΠ²ΠΊΠΈ, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌΡ‹Π΅ сСтСвым ΠΊΠ»ΠΈΠ΅Π½Ρ‚ΠΎΠΌ. - - Ζ’ **get_dashboards** (`Function`) - - πŸ“ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΏΠΎΠ»Π½Ρ‹ΠΉ список Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ΠΎΠ², автоматичСски обрабатывая ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΡŽ. - - Ζ’ **get_dashboards_summary** (`Function`) - - πŸ“ Fetches dashboard metadata optimized for the grid. - - Ζ’ **get_dashboard** (`Function`) - - πŸ“ Fetches a single dashboard by ID. - - Ζ’ **get_chart** (`Function`) - - πŸ“ Fetches a single chart by ID. - - Ζ’ **get_dashboard_detail** (`Function`) - - πŸ“ Fetches detailed dashboard information including related charts and datasets. 
 - - Ζ’ **_extract_chart_ids_from_layout** (`Function`) - - πŸ“ Traverses dashboard layout metadata and extracts chart IDs from common keys. - - Ζ’ **export_dashboard** (`Function`) - - πŸ“ ЭкспортируСт Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ Π² Π²ΠΈΠ΄Π΅ ZIP-Π°Ρ€Ρ…ΠΈΠ²Π°. - - Ζ’ **import_dashboard** (`Function`) - - πŸ“ Π˜ΠΌΠΏΠΎΡ€Ρ‚ΠΈΡ€ΡƒΠ΅Ρ‚ Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ ΠΈΠ· ZIP-Ρ„Π°ΠΉΠ»Π°. - - Ζ’ **delete_dashboard** (`Function`) - - πŸ“ УдаляСт Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ ΠΏΠΎ Π΅Π³ΠΎ ID ΠΈΠ»ΠΈ slug. - - Ζ’ **get_datasets** (`Function`) - - πŸ“ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΏΠΎΠ»Π½Ρ‹ΠΉ список датасСтов, автоматичСски обрабатывая ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΡŽ. - - Ζ’ **get_datasets_summary** (`Function`) - - πŸ“ Fetches dataset metadata optimized for the Dataset Hub grid. - - Ζ’ **get_dataset_detail** (`Function`) - - πŸ“ Fetches detailed dataset information, including columns and linked dashboards. - - πŸ”— CALLS -> `self.get_dataset` - - πŸ”— CALLS -> `self.network.request` (for related_objects) - - Ζ’ **get_dataset** (`Function`) - - πŸ“ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡŽ ΠΎ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΌ датасСтС ΠΏΠΎ Π΅Π³ΠΎ ID. - - Ζ’ **update_dataset** (`Function`) - - πŸ“ ΠžΠ±Π½ΠΎΠ²Π»ΡΠ΅Ρ‚ Π΄Π°Π½Π½Ρ‹Π΅ датасСта ΠΏΠΎ Π΅Π³ΠΎ ID. - - Ζ’ **get_databases** (`Function`) - - πŸ“ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΏΠΎΠ»Π½Ρ‹ΠΉ список Π±Π°Π· Π΄Π°Π½Π½Ρ‹Ρ…. - - Ζ’ **get_database** (`Function`) - - πŸ“ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡŽ ΠΎ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Π±Π°Π·Π΅ Π΄Π°Π½Π½Ρ‹Ρ… ΠΏΠΎ Π΅Ρ‘ ID. - - Ζ’ **get_databases_summary** (`Function`) - - πŸ“ Fetches a summary of databases including uuid, name, and engine. - - Ζ’ **get_database_by_uuid** (`Function`) - - πŸ“ Finds a database by its UUID. - - Ζ’ **_resolve_target_id_for_delete** (`Function`) - - πŸ“ Resolves a dashboard ID from either an ID or a slug. - - Ζ’ **_do_import** (`Function`) - - πŸ“ Performs the actual multipart upload for import. - - Ζ’ **_validate_export_response** (`Function`) - - πŸ“ Validates that the export response is a non-empty ZIP archive.
- - Ζ’ **_resolve_export_filename** (`Function`) - - πŸ“ Determines the filename for an exported dashboard. - - Ζ’ **_validate_query_params** (`Function`) - - πŸ“ Ensures query parameters have default page and page_size. - - Ζ’ **_fetch_total_object_count** (`Function`) - - πŸ“ Fetches the total number of items for a given endpoint. - - Ζ’ **_fetch_all_pages** (`Function`) - - πŸ“ Iterates through all pages to collect all data items. - - Ζ’ **_validate_import_file** (`Function`) - - πŸ“ Validates that the file to be imported is a valid ZIP with metadata.yaml. - - Ζ’ **get_all_resources** (`Function`) - - πŸ“ Fetches all resources of a given type with id, uuid, and name columns. - - Ζ’ **extract_dataset_id_from_form_data** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **walk** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **as_bool** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **ConfigManagerModule** (`Module`) - - πŸ“ Manages application configuration persisted in database with one-time migration from JSON. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Configuration must always be valid according to AppConfig model. - - πŸ”— DEPENDS_ON -> `ConfigModels` - - πŸ”— DEPENDS_ON -> `AppConfigRecord` - - πŸ”— CALLS -> `logger` - - β„‚ **ConfigManager** (`Class`) - - πŸ“ A class to handle application configuration persistence and management. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the ConfigManager. - - Ζ’ **_default_config** (`Function`) - - πŸ“ Returns default application configuration. - - Ζ’ **_load_from_legacy_file** (`Function`) - - πŸ“ Loads legacy configuration from config.json for migration fallback. - - Ζ’ **_get_record** (`Function`) - - πŸ“ Loads config record from DB. - - Ζ’ **_load_config** (`Function`) - - πŸ“ Loads the configuration from DB or performs one-time migration from JSON file. 
- - Ζ’ **_save_config_to_db** (`Function`) - - πŸ“ Saves the provided configuration object to DB. - - Ζ’ **save** (`Function`) - - πŸ“ Saves the current configuration state to DB. - - Ζ’ **get_config** (`Function`) - - πŸ“ Returns the current configuration. - - Ζ’ **update_global_settings** (`Function`) - - πŸ“ Updates the global settings and persists the change. - - Ζ’ **validate_path** (`Function`) - - πŸ“ Validates if a path exists and is writable. - - Ζ’ **get_environments** (`Function`) - - πŸ“ Returns the list of configured environments. - - Ζ’ **has_environments** (`Function`) - - πŸ“ Checks if at least one environment is configured. - - Ζ’ **get_environment** (`Function`) - - πŸ“ Returns a single environment by ID. - - Ζ’ **add_environment** (`Function`) - - πŸ“ Adds a new environment to the configuration. - - Ζ’ **update_environment** (`Function`) - - πŸ“ Updates an existing environment. - - Ζ’ **delete_environment** (`Function`) - - πŸ“ Deletes an environment by ID. -- πŸ“¦ **SchedulerModule** (`Module`) - - πŸ“ Manages scheduled tasks using APScheduler. - - πŸ—οΈ Layer: Core - - β„‚ **SchedulerService** (`Class`) - - πŸ“ Provides a service to manage scheduled backup tasks. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the scheduler service with task and config managers. - - Ζ’ **start** (`Function`) - - πŸ“ Starts the background scheduler and loads initial schedules. - - Ζ’ **stop** (`Function`) - - πŸ“ Stops the background scheduler. - - Ζ’ **load_schedules** (`Function`) - - πŸ“ Loads backup schedules from configuration and registers them. - - Ζ’ **add_backup_job** (`Function`) - - πŸ“ Adds a scheduled backup job for an environment. - - Ζ’ **_trigger_backup** (`Function`) - - πŸ“ Triggered by the scheduler to start a backup task. -- πŸ“¦ **ConfigModels** (`Module`) - - πŸ“ Defines the data models for application configuration using Pydantic. 
- - πŸ—οΈ Layer: Core - - πŸ“¦ **Schedule** (`DataClass`) - - πŸ“ Represents a backup schedule configuration. - - πŸ“¦ **Environment** (`DataClass`) - - πŸ“ Represents a Superset environment configuration. - - πŸ“¦ **LoggingConfig** (`DataClass`) - - πŸ“ Defines the configuration for the application's logging system. - - πŸ“¦ **GlobalSettings** (`DataClass`) - - πŸ“ Represents global application settings. - - πŸ“¦ **AppConfig** (`DataClass`) - - πŸ“ The root configuration model containing all application settings. -- πŸ“¦ **backend.src.core.database** (`Module`) - - πŸ“ Configures database connection and session management (PostgreSQL-first). - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: A single engine instance is used for the entire application. - - πŸ”— DEPENDS_ON -> `sqlalchemy` - - πŸ”— DEPENDS_ON -> `backend.src.models.mapping` - - πŸ”— DEPENDS_ON -> `backend.src.core.auth.config` - - πŸ“¦ **BASE_DIR** (`Variable`) - - πŸ“ Base directory for the backend. - - πŸ“¦ **DATABASE_URL** (`Constant`) - - πŸ“ URL for the main application database. - - πŸ“¦ **TASKS_DATABASE_URL** (`Constant`) - - πŸ“ URL for the tasks execution database. - - πŸ“¦ **AUTH_DATABASE_URL** (`Constant`) - - πŸ“ URL for the authentication database. - - πŸ“¦ **engine** (`Variable`) - - πŸ“ SQLAlchemy engine for mappings database. - - πŸ“¦ **tasks_engine** (`Variable`) - - πŸ“ SQLAlchemy engine for tasks database. - - πŸ“¦ **auth_engine** (`Variable`) - - πŸ“ SQLAlchemy engine for authentication database. - - β„‚ **SessionLocal** (`Class`) `[TRIVIAL]` - - πŸ“ A session factory for the main mappings database. - - β„‚ **TasksSessionLocal** (`Class`) `[TRIVIAL]` - - πŸ“ A session factory for the tasks execution database. - - β„‚ **AuthSessionLocal** (`Class`) `[TRIVIAL]` - - πŸ“ A session factory for the authentication database. - - Ζ’ **init_db** (`Function`) - - πŸ“ Initializes the database by creating all tables. 
- - Ζ’ **get_db** (`Function`) - - πŸ“ Dependency for getting a database session. - - Ζ’ **get_tasks_db** (`Function`) - - πŸ“ Dependency for getting a tasks database session. - - Ζ’ **get_auth_db** (`Function`) - - πŸ“ Dependency for getting an authentication database session. - - Ζ’ **_build_engine** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **LoggerModule** (`Module`) - - πŸ“ Configures the application's logging system, including a custom handler for buffering logs and streaming them over WebSockets. - - πŸ—οΈ Layer: Core - - β„‚ **BeliefFormatter** (`Class`) - - πŸ“ Custom logging formatter that adds belief state prefixes to log messages. - - Ζ’ **format** (`Function`) - - πŸ“ Formats the log record, adding belief state context if available. - - β„‚ **LogEntry** (`Class`) - - πŸ“ A Pydantic model representing a single, structured log entry. This is a re-definition for consistency, as it's also defined in task_manager.py. - - Ζ’ **belief_scope** (`Function`) - - πŸ“ Context manager for structured Belief State logging. - - Ζ’ **configure_logger** (`Function`) - - πŸ“ Configures the logger with the provided logging settings. - - Ζ’ **get_task_log_level** (`Function`) - - πŸ“ Returns the current task log level filter. - - Ζ’ **should_log_task_level** (`Function`) - - πŸ“ Checks if a log level should be recorded based on task_log_level setting. - - β„‚ **WebSocketLogHandler** (`Class`) - - πŸ“ A custom logging handler that captures log records into a buffer. It is designed to be extended for real-time log streaming over WebSockets. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the handler with a fixed-capacity buffer. - - Ζ’ **emit** (`Function`) - - πŸ“ Captures a log record, formats it, and stores it in the buffer. - - Ζ’ **get_recent_logs** (`Function`) - - πŸ“ Returns a list of recent log entries from the buffer. 
- - πŸ“¦ **Logger** (`Global`) - - πŸ“ The global logger instance for the application, configured with both a console handler and the custom WebSocket handler. - - Ζ’ **believed** (`Function`) - - πŸ“ A decorator that wraps a function in a belief scope. - - Ζ’ **decorator** (`Function`) - - πŸ“ Internal decorator for belief scope. - - Ζ’ **explore** (`Function`) - - πŸ“ Logs an EXPLORE message (Van der Waals force) for searching, alternatives, and hypotheses. - - Ζ’ **reason** (`Function`) - - πŸ“ Logs a REASON message (Covalent bond) for strict deduction and core logic. - - Ζ’ **reflect** (`Function`) - - πŸ“ Logs a REFLECT message (Hydrogen bond) for self-check and structural validation. -- β„‚ **PluginLoader** (`Class`) - - πŸ“ Scans a specified directory for Python modules, dynamically loads them, and registers any classes that are valid implementations of the PluginBase interface. - - πŸ—οΈ Layer: Core - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the PluginLoader with a directory to scan. - - Ζ’ **_load_plugins** (`Function`) - - πŸ“ Scans the plugin directory and loads all valid plugins. - - Ζ’ **_load_module** (`Function`) - - πŸ“ Loads a single Python module and discovers PluginBase implementations. - - Ζ’ **_register_plugin** (`Function`) - - πŸ“ Registers a PluginBase instance and its configuration. - - Ζ’ **get_plugin** (`Function`) - - πŸ“ Retrieves a loaded plugin instance by its ID. - - Ζ’ **get_all_plugin_configs** (`Function`) - - πŸ“ Returns a list of all registered plugin configurations. - - Ζ’ **has_plugin** (`Function`) - - πŸ“ Checks if a plugin with the given ID is registered. -- πŸ“¦ **backend.src.core.migration_engine** (`Module`) - - πŸ“ Handles the interception and transformation of Superset asset ZIP archives. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: ZIP structure must be preserved after transformation. 
- - πŸ”— DEPENDS_ON -> `PyYAML` - - β„‚ **MigrationEngine** (`Class`) - - πŸ“ Engine for transforming Superset export ZIPs. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the migration engine with optional ID mapping service. - - Ζ’ **transform_zip** (`Function`) - - πŸ“ Extracts ZIP, replaces database UUIDs in YAMLs, patches cross-filters, and re-packages. - - Ζ’ **_transform_yaml** (`Function`) - - πŸ“ Replaces database_uuid in a single YAML file. - - Ζ’ **_extract_chart_uuids_from_archive** (`Function`) - - πŸ“ Scans the unpacked ZIP to map local exported integer IDs back to their UUIDs. - - Ζ’ **_patch_dashboard_metadata** (`Function`) - - πŸ“ Replaces integer IDs in json_metadata. -- β„‚ **PluginBase** (`Class`) - - πŸ“ Defines the abstract base class that all plugins must implement to be recognized by the system. It enforces a common structure for plugin metadata and execution. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: All plugins MUST inherit from this class. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the plugin. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a brief description of the plugin. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the plugin. - - Ζ’ **required_permission** (`Function`) - - πŸ“ Returns the required permission string to execute this plugin. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the plugin's UI, if applicable. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for the plugin's input parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes the plugin's core logic. -- β„‚ **PluginConfig** (`Class`) - - πŸ“ A Pydantic model used to represent the validated configuration and metadata of a loaded plugin. This object is what gets exposed to the API layer. 
- - πŸ—οΈ Layer: Core -- πŸ“¦ **backend.src.core.mapping_service** (`Module`) `[CRITICAL]` - - πŸ“ Service for tracking and synchronizing Superset Resource IDs (UUID <-> Integer ID) - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: sync_environment must handle remote API failures gracefully. - - πŸ”— DEPENDS_ON -> `backend.src.models.mapping (ResourceMapping, ResourceType)` - - πŸ”— DEPENDS_ON -> `backend.src.core.logger` - - β„‚ **IdMappingService** (`Class`) `[CRITICAL]` - - πŸ“ Service handling the cataloging and retrieval of remote Superset Integer IDs. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the mapping service. - - Ζ’ **start_scheduler** (`Function`) - - πŸ“ Starts the background scheduler with a given cron string. - - Ζ’ **sync_environment** (`Function`) - - πŸ“ Fully synchronizes mapping for a specific environment. - - Ζ’ **get_remote_id** (`Function`) - - πŸ“ Retrieves the remote integer ID for a given universal UUID. - - Ζ’ **get_remote_ids_batch** (`Function`) - - πŸ“ Retrieves remote integer IDs for a list of universal UUIDs efficiently. - - Ζ’ **sync_all** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.core.auth.config** (`Module`) - - πŸ“ Centralized configuration for authentication and authorization. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: All sensitive configuration must have defaults or be loaded from environment. - - πŸ”— DEPENDS_ON -> `pydantic` - - β„‚ **AuthConfig** (`Class`) - - πŸ“ Holds authentication-related settings. - - πŸ“¦ **auth_config** (`Variable`) - - πŸ“ Singleton instance of AuthConfig. -- πŸ“¦ **backend.src.core.auth.jwt** (`Module`) - - πŸ“ JWT token generation and validation logic. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Tokens must include expiration time and user identifier. - - πŸ”— DEPENDS_ON -> `jose` - - Ζ’ **create_access_token** (`Function`) - - πŸ“ Generates a new JWT access token. 
- - Ζ’ **decode_token** (`Function`) - - πŸ“ Decodes and validates a JWT token. -- πŸ“¦ **backend.src.core.auth.oauth** (`Module`) - - πŸ“ ADFS OIDC configuration and client using Authlib. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Must use secure OIDC flows. - - πŸ”— DEPENDS_ON -> `authlib` - - πŸ“¦ **oauth** (`Variable`) - - πŸ“ Global Authlib OAuth registry. - - Ζ’ **register_adfs** (`Function`) - - πŸ“ Registers the ADFS OIDC client. - - Ζ’ **is_adfs_configured** (`Function`) - - πŸ“ Checks if ADFS is properly configured. -- πŸ“¦ **backend.src.core.auth.logger** (`Module`) - - πŸ“ Audit logging for security-related events. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Must not log sensitive data like passwords or full tokens. - - Ζ’ **log_security_event** (`Function`) - - πŸ“ Logs a security-related event for audit trails. -- πŸ“¦ **backend.src.core.auth.security** (`Module`) - - πŸ“ Utility for password hashing and verification using Passlib. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Uses bcrypt for hashing with standard work factor. - - πŸ”— DEPENDS_ON -> `passlib` - - Ζ’ **verify_password** (`Function`) - - πŸ“ Verifies a plain password against a hashed password. - - Ζ’ **get_password_hash** (`Function`) - - πŸ“ Generates a bcrypt hash for a plain password. -- πŸ“¦ **backend.src.core.auth.repository** (`Module`) - - πŸ“ Data access layer for authentication-related entities. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: All database operations must be performed within a session. - - πŸ”— DEPENDS_ON -> `sqlalchemy` - - β„‚ **AuthRepository** (`Class`) - - πŸ“ Encapsulates database operations for authentication. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the repository with a database session. - - Ζ’ **get_user_by_username** (`Function`) - - πŸ“ Retrieves a user by their username. - - Ζ’ **get_user_by_id** (`Function`) - - πŸ“ Retrieves a user by their unique ID. 
- - Ζ’ **get_role_by_name** (`Function`) - - πŸ“ Retrieves a role by its name. - - Ζ’ **update_last_login** (`Function`) - - πŸ“ Updates the last_login timestamp for a user. - - Ζ’ **get_role_by_id** (`Function`) - - πŸ“ Retrieves a role by its unique ID. - - Ζ’ **get_permission_by_id** (`Function`) - - πŸ“ Retrieves a permission by its unique ID. - - Ζ’ **get_permission_by_resource_action** (`Function`) - - πŸ“ Retrieves a permission by resource and action. - - Ζ’ **list_permissions** (`Function`) - - πŸ“ Lists all available permissions. -- πŸ“¦ **test_auth** (`Module`) - - πŸ“ Unit tests for authentication module - - πŸ—οΈ Layer: Domain - - Ζ’ **db_session** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **auth_service** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **auth_repo** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_authenticate_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_session** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_role_permission_association** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_user_role_association** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_ad_group_mapping** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.core.utils.fileio** (`Module`) - - πŸ“ ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ Π½Π°Π±ΠΎΡ€ ΡƒΡ‚ΠΈΠ»ΠΈΡ‚ для управлСния Ρ„Π°ΠΉΠ»ΠΎΠ²Ρ‹ΠΌΠΈ опСрациями, Π²ΠΊΠ»ΡŽΡ‡Π°Ρ Ρ€Π°Π±ΠΎΡ‚Ρƒ с Π²Ρ€Π΅ΠΌΠ΅Π½Π½Ρ‹ΠΌΠΈ Ρ„Π°ΠΉΠ»Π°ΠΌΠΈ, Π°Ρ€Ρ…ΠΈΠ²Π°ΠΌΠΈ ZIP, Ρ„Π°ΠΉΠ»Π°ΠΌΠΈ YAML ΠΈ очистку Π΄ΠΈΡ€Π΅ΠΊΡ‚ΠΎΡ€ΠΈΠΉ. 
- - πŸ—οΈ Layer: Infra - - πŸ”— DEPENDS_ON -> `backend.src.core.logger` - - πŸ”— DEPENDS_ON -> `pyyaml` - - β„‚ **InvalidZipFormatError** (`Class`) - - πŸ“ Exception raised when a file is not a valid ZIP archive. - - Ζ’ **create_temp_file** (`Function`) - - πŸ“ ΠšΠΎΠ½Ρ‚Π΅ΠΊΡΡ‚Π½Ρ‹ΠΉ ΠΌΠ΅Π½Π΅Π΄ΠΆΠ΅Ρ€ для создания Π²Ρ€Π΅ΠΌΠ΅Π½Π½ΠΎΠ³ΠΎ Ρ„Π°ΠΉΠ»Π° ΠΈΠ»ΠΈ Π΄ΠΈΡ€Π΅ΠΊΡ‚ΠΎΡ€ΠΈΠΈ с Π³Π°Ρ€Π°Π½Ρ‚ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΌ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠ΅ΠΌ. - - Ζ’ **remove_empty_directories** (`Function`) - - πŸ“ РСкурсивно удаляСт всС пустыС ΠΏΠΎΠ΄Π΄ΠΈΡ€Π΅ΠΊΡ‚ΠΎΡ€ΠΈΠΈ, начиная с ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠ³ΠΎ ΠΏΡƒΡ‚ΠΈ. - - Ζ’ **read_dashboard_from_disk** (`Function`) - - πŸ“ Π§ΠΈΡ‚Π°Π΅Ρ‚ Π±ΠΈΠ½Π°Ρ€Π½ΠΎΠ΅ содСрТимоС Ρ„Π°ΠΉΠ»Π° с диска. - - Ζ’ **calculate_crc32** (`Function`) - - πŸ“ ВычисляСт ΠΊΠΎΠ½Ρ‚Ρ€ΠΎΠ»ΡŒΠ½ΡƒΡŽ сумму CRC32 для Ρ„Π°ΠΉΠ»Π°. - - πŸ“¦ **RetentionPolicy** (`DataClass`) - - πŸ“ ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅Ρ‚ ΠΏΠΎΠ»ΠΈΡ‚ΠΈΠΊΡƒ хранСния для Π°Ρ€Ρ…ΠΈΠ²ΠΎΠ² (Π΅ΠΆΠ΅Π΄Π½Π΅Π²Π½Ρ‹Π΅, Π΅ΠΆΠ΅Π½Π΅Π΄Π΅Π»ΡŒΠ½Ρ‹Π΅, СТСмСсячныС). - - Ζ’ **archive_exports** (`Function`) - - πŸ“ УправляСт Π°Ρ€Ρ…ΠΈΠ²ΠΎΠΌ экспортированных Ρ„Π°ΠΉΠ»ΠΎΠ², примСняя ΠΏΠΎΠ»ΠΈΡ‚ΠΈΠΊΡƒ хранСния ΠΈ Π΄Π΅Π΄ΡƒΠΏΠ»ΠΈΠΊΠ°Ρ†ΠΈΡŽ. - - πŸ”— CALLS -> `apply_retention_policy` - - πŸ”— CALLS -> `calculate_crc32` - - Ζ’ **apply_retention_policy** (`Function`) - - πŸ“ (Helper) ΠŸΡ€ΠΈΠΌΠ΅Π½ΡΠ΅Ρ‚ ΠΏΠΎΠ»ΠΈΡ‚ΠΈΠΊΡƒ хранСния ΠΊ списку Ρ„Π°ΠΉΠ»ΠΎΠ², возвращая Ρ‚Π΅, Ρ‡Ρ‚ΠΎ Π½ΡƒΠΆΠ½ΠΎ ΡΠΎΡ…Ρ€Π°Π½ΠΈΡ‚ΡŒ. - - Ζ’ **save_and_unpack_dashboard** (`Function`) - - πŸ“ БохраняСт Π±ΠΈΠ½Π°Ρ€Π½ΠΎΠ΅ содСрТимоС ZIP-Π°Ρ€Ρ…ΠΈΠ²Π° Π½Π° диск ΠΈ ΠΎΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½ΠΎ распаковываСт Π΅Π³ΠΎ. - - Ζ’ **update_yamls** (`Function`) - - πŸ“ ΠžΠ±Π½ΠΎΠ²Π»ΡΠ΅Ρ‚ ΠΊΠΎΠ½Ρ„ΠΈΠ³ΡƒΡ€Π°Ρ†ΠΈΠΈ Π² YAML-Ρ„Π°ΠΉΠ»Π°Ρ…, замСняя значСния ΠΈΠ»ΠΈ примСняя regex. - - πŸ”— CALLS -> `_update_yaml_file` - - Ζ’ **_update_yaml_file** (`Function`) - - πŸ“ (Helper) ΠžΠ±Π½ΠΎΠ²Π»ΡΠ΅Ρ‚ ΠΎΠ΄ΠΈΠ½ YAML Ρ„Π°ΠΉΠ». 
- - Ζ’ **replacer** (`Function`) - - πŸ“ Ѐункция Π·Π°ΠΌΠ΅Π½Ρ‹, ΡΠΎΡ…Ρ€Π°Π½ΡΡŽΡ‰Π°Ρ ΠΊΠ°Π²Ρ‹Ρ‡ΠΊΠΈ Ссли ΠΎΠ½ΠΈ Π±Ρ‹Π»ΠΈ. - - Ζ’ **create_dashboard_export** (`Function`) - - πŸ“ Π‘ΠΎΠ·Π΄Π°Π΅Ρ‚ ZIP-Π°Ρ€Ρ…ΠΈΠ² ΠΈΠ· ΡƒΠΊΠ°Π·Π°Π½Π½Ρ‹Ρ… исходных ΠΏΡƒΡ‚Π΅ΠΉ. - - Ζ’ **sanitize_filename** (`Function`) - - πŸ“ ΠžΡ‡ΠΈΡ‰Π°Π΅Ρ‚ строку ΠΎΡ‚ символов, нСдопустимых Π² ΠΈΠΌΠ΅Π½Π°Ρ… Ρ„Π°ΠΉΠ»ΠΎΠ². - - Ζ’ **get_filename_from_headers** (`Function`) - - πŸ“ Π˜Π·Π²Π»Π΅ΠΊΠ°Π΅Ρ‚ имя Ρ„Π°ΠΉΠ»Π° ΠΈΠ· HTTP Π·Π°Π³ΠΎΠ»ΠΎΠ²ΠΊΠ° 'Content-Disposition'. - - Ζ’ **consolidate_archive_folders** (`Function`) - - πŸ“ ΠšΠΎΠ½ΡΠΎΠ»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ Π΄ΠΈΡ€Π΅ΠΊΡ‚ΠΎΡ€ΠΈΠΈ Π°Ρ€Ρ…ΠΈΠ²ΠΎΠ² Π½Π° основС ΠΎΠ±Ρ‰Π΅Π³ΠΎ слага Π² ΠΈΠΌΠ΅Π½ΠΈ. -- πŸ“¦ **backend.core.utils.network** (`Module`) - - πŸ“ Π˜Π½ΠΊΠ°ΠΏΡΡƒΠ»ΠΈΡ€ΡƒΠ΅Ρ‚ Π½ΠΈΠ·ΠΊΠΎΡƒΡ€ΠΎΠ²Π½Π΅Π²ΡƒΡŽ HTTP-Π»ΠΎΠ³ΠΈΠΊΡƒ для взаимодСйствия с Superset API, Π²ΠΊΠ»ΡŽΡ‡Π°Ρ Π°ΡƒΡ‚Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ†ΠΈΡŽ, ΡƒΠΏΡ€Π°Π²Π»Π΅Π½ΠΈΠ΅ сСссиСй, retry-Π»ΠΎΠ³ΠΈΠΊΡƒ ΠΈ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΡƒ ошибок. - - πŸ—οΈ Layer: Infra - - πŸ”— DEPENDS_ON -> `backend.src.core.logger` - - πŸ”— DEPENDS_ON -> `requests` - - β„‚ **SupersetAPIError** (`Class`) - - πŸ“ Base exception for all Superset API related errors. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the exception with a message and context. - - β„‚ **AuthenticationError** (`Class`) - - πŸ“ Exception raised when authentication fails. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the authentication error. - - β„‚ **PermissionDeniedError** (`Class`) - - πŸ“ Exception raised when access is denied. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the permission denied error. - - β„‚ **DashboardNotFoundError** (`Class`) - - πŸ“ Exception raised when a dashboard cannot be found. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the not found error with resource ID. - - β„‚ **NetworkError** (`Class`) - - πŸ“ Exception raised when a network level error occurs. 
- - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the network error. - - β„‚ **APIClient** (`Class`) - - πŸ“ Π˜Π½ΠΊΠ°ΠΏΡΡƒΠ»ΠΈΡ€ΡƒΠ΅Ρ‚ HTTP-Π»ΠΎΠ³ΠΈΠΊΡƒ для Ρ€Π°Π±ΠΎΡ‚Ρ‹ с API, Π²ΠΊΠ»ΡŽΡ‡Π°Ρ сСссии, Π°ΡƒΡ‚Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ†ΠΈΡŽ, ΠΈ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΡƒ запросов. - - Ζ’ **__init__** (`Function`) - - πŸ“ Π˜Π½ΠΈΡ†ΠΈΠ°Π»ΠΈΠ·ΠΈΡ€ΡƒΠ΅Ρ‚ API ΠΊΠ»ΠΈΠ΅Π½Ρ‚ с ΠΊΠΎΠ½Ρ„ΠΈΠ³ΡƒΡ€Π°Ρ†ΠΈΠ΅ΠΉ, сСссиСй ΠΈ Π»ΠΎΠ³Π³Π΅Ρ€ΠΎΠΌ. - - Ζ’ **_init_session** (`Function`) - - πŸ“ Π‘ΠΎΠ·Π΄Π°Π΅Ρ‚ ΠΈ настраиваСт `requests.Session` с retry-Π»ΠΎΠ³ΠΈΠΊΠΎΠΉ. - - Ζ’ **authenticate** (`Function`) - - πŸ“ ВыполняСт Π°ΡƒΡ‚Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ†ΠΈΡŽ Π² Superset API ΠΈ ΠΏΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ access ΠΈ CSRF Ρ‚ΠΎΠΊΠ΅Π½Ρ‹. - - Ζ’ **headers** (`Function`) - - πŸ“ Π’ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅Ρ‚ HTTP-Π·Π°Π³ΠΎΠ»ΠΎΠ²ΠΊΠΈ для Π°ΡƒΡ‚Π΅Π½Ρ‚ΠΈΡ„ΠΈΡ†ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Ρ… запросов. - - Ζ’ **request** (`Function`) - - πŸ“ ВыполняСт ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Ρ‹ΠΉ HTTP-запрос ΠΊ API. - - Ζ’ **_handle_http_error** (`Function`) - - πŸ“ (Helper) ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅Ρ‚ HTTP ошибки Π² кастомныС ΠΈΡΠΊΠ»ΡŽΡ‡Π΅Π½ΠΈΡ. - - Ζ’ **_handle_network_error** (`Function`) - - πŸ“ (Helper) ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅Ρ‚ сСтСвыС ошибки Π² `NetworkError`. - - Ζ’ **upload_file** (`Function`) - - πŸ“ Π—Π°Π³Ρ€ΡƒΠΆΠ°Π΅Ρ‚ Ρ„Π°ΠΉΠ» Π½Π° сСрвСр Ρ‡Π΅Ρ€Π΅Π· multipart/form-data. - - Ζ’ **_perform_upload** (`Function`) - - πŸ“ (Helper) ВыполняСт POST запрос с Ρ„Π°ΠΉΠ»ΠΎΠΌ. - - Ζ’ **fetch_paginated_count** (`Function`) - - πŸ“ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΎΠ±Ρ‰Π΅Π΅ количСство элСмСнтов для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ. - - Ζ’ **fetch_paginated_data** (`Function`) - - πŸ“ АвтоматичСски собираСт Π΄Π°Π½Π½Ρ‹Π΅ со всСх страниц ΠΏΠ°Π³ΠΈΠ½ΠΈΡ€ΠΎΠ²Π°Π½Π½ΠΎΠ³ΠΎ эндпоинта. - - Ζ’ **init_poolmanager** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.core.utils.matching** (`Module`) - - πŸ“ Provides utility functions for fuzzy matching database names. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Confidence scores are returned as floats between 0.0 and 1.0. 
- - πŸ”— DEPENDS_ON -> `rapidfuzz` - - Ζ’ **suggest_mappings** (`Function`) - - πŸ“ Suggests mappings between source and target databases using fuzzy matching. -- πŸ“¦ **backend.core.utils.dataset_mapper** (`Module`) - - πŸ“ Π­Ρ‚ΠΎΡ‚ ΠΌΠΎΠ΄ΡƒΠ»ΡŒ ΠΎΡ‚Π²Π΅Ρ‡Π°Π΅Ρ‚ Π·Π° ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠ΅ ΠΌΠ΅Ρ‚Π°Π΄Π°Π½Π½Ρ‹Ρ… (verbose_map) Π² датасСтах Superset, извлСкая ΠΈΡ… ΠΈΠ· PostgreSQL ΠΈΠ»ΠΈ XLSX-Ρ„Π°ΠΉΠ»ΠΎΠ². - - πŸ—οΈ Layer: Domain - - πŸ”— DEPENDS_ON -> `backend.core.superset_client` - - πŸ”— DEPENDS_ON -> `pandas` - - πŸ”— DEPENDS_ON -> `psycopg2` - - β„‚ **DatasetMapper** (`Class`) - - πŸ“ Класс для ΠΌΠ΅ΠΏΠΏΠΈΠ½Π³Π° ΠΈ обновлСния verbose_map Π² датасСтах Superset. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the mapper. - - Ζ’ **get_postgres_comments** (`Function`) - - πŸ“ Π˜Π·Π²Π»Π΅ΠΊΠ°Π΅Ρ‚ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΈ ΠΊ ΠΊΠΎΠ»ΠΎΠ½ΠΊΠ°ΠΌ ΠΈΠ· систСмного ΠΊΠ°Ρ‚Π°Π»ΠΎΠ³Π° PostgreSQL. - - Ζ’ **load_excel_mappings** (`Function`) - - πŸ“ Π—Π°Π³Ρ€ΡƒΠΆΠ°Π΅Ρ‚ ΠΌΠ΅ΠΏΠΏΠΈΠ½Π³ΠΈ 'column_name' -> 'column_comment' ΠΈΠ· XLSX Ρ„Π°ΠΉΠ»Π°. - - Ζ’ **run_mapping** (`Function`) - - πŸ“ Основная функция для выполнСния ΠΌΠ΅ΠΏΠΏΠΈΠ½Π³Π° ΠΈ обновлСния verbose_map датасСта Π² Superset. - - πŸ”— CALLS -> `self.get_postgres_comments` - - πŸ”— CALLS -> `self.load_excel_mappings` - - πŸ”— CALLS -> `superset_client.get_dataset` - - πŸ”— CALLS -> `superset_client.update_dataset` -- πŸ“¦ **test_logger** (`Module`) - - πŸ“ Unit tests for logger module - - πŸ—οΈ Layer: Infra - - Ζ’ **test_belief_scope_logs_entry_action_exit_at_debug** (`Function`) - - πŸ“ Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs at DEBUG level. - - Ζ’ **test_belief_scope_error_handling** (`Function`) - - πŸ“ Test that belief_scope logs Coherence:Failed on exception. - - Ζ’ **test_belief_scope_success_coherence** (`Function`) - - πŸ“ Test that belief_scope logs Coherence:OK on success. 
- - Ζ’ **test_belief_scope_not_visible_at_info** (`Function`) - - πŸ“ Test that belief_scope Entry/Exit/Coherence logs are NOT visible at INFO level. - - Ζ’ **test_task_log_level_default** (`Function`) - - πŸ“ Test that default task log level is INFO. - - Ζ’ **test_should_log_task_level** (`Function`) - - πŸ“ Test that should_log_task_level correctly filters log levels. - - Ζ’ **test_configure_logger_task_log_level** (`Function`) - - πŸ“ Test that configure_logger updates task_log_level. - - Ζ’ **test_enable_belief_state_flag** (`Function`) - - πŸ“ Test that enable_belief_state flag controls belief_scope logging. - - Ζ’ **test_belief_scope_missing_anchor** (`Function`) - - πŸ“ Test @PRE condition: anchor_id must be provided - - Ζ’ **test_configure_logger_post_conditions** (`Function`) - - πŸ“ Test @POST condition: Logger level, handlers, belief state flag, and task log level are updated. - - Ζ’ **reset_logger_state** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **TaskLoggerModule** (`Module`) `[CRITICAL]` - - πŸ“ Provides a dedicated logger for tasks with automatic source attribution. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Each TaskLogger instance is bound to a specific task_id and default source. - - πŸ”— DEPENDS_ON -> `TaskManager, CALLS -> TaskManager._add_log` - - β„‚ **TaskLogger** (`Class`) `[CRITICAL]` - - πŸ“ A wrapper around TaskManager._add_log that carries task_id and source context. - - πŸ”’ Invariant: All log calls include the task_id and source. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initialize the TaskLogger with task context. - - Ζ’ **with_source** (`Function`) - - πŸ“ Create a sub-logger with a different default source. - - Ζ’ **_log** (`Function`) - - πŸ“ Internal method to log a message at a given level. - - Ζ’ **debug** (`Function`) - - πŸ“ Log a DEBUG level message. - - Ζ’ **info** (`Function`) - - πŸ“ Log an INFO level message. 
- - Ζ’ **warning** (`Function`) - - πŸ“ Log a WARNING level message. - - Ζ’ **error** (`Function`) - - πŸ“ Log an ERROR level message. - - Ζ’ **progress** (`Function`) - - πŸ“ Log a progress update with percentage. -- πŸ“¦ **TaskPersistenceModule** (`Module`) `[CRITICAL]` - - πŸ“ Handles the persistence of tasks using SQLAlchemy and the tasks.db database. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Database schema must match the TaskRecord model structure. - - β„‚ **TaskPersistenceService** (`Class`) `[CRITICAL]` - - πŸ“ Provides methods to save and load tasks from the tasks.db database using SQLAlchemy. - - πŸ”’ Invariant: Persistence must handle potentially missing task fields natively. - - Ζ’ **_json_load_if_needed** (`Function`) - - πŸ“ Safely load JSON strings from DB if necessary - - Ζ’ **_parse_datetime** (`Function`) - - πŸ“ Safely parse a datetime string from the database - - Ζ’ **_resolve_environment_id** (`Function`) - - πŸ“ Resolve environment id based on provided value or fallback to default - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the persistence service. - - Ζ’ **persist_task** (`Function`) - - πŸ“ Persists or updates a single task in the database. - - Ζ’ **persist_tasks** (`Function`) - - πŸ“ Persists multiple tasks. - - Ζ’ **load_tasks** (`Function`) - - πŸ“ Loads tasks from the database. - - Ζ’ **delete_tasks** (`Function`) - - πŸ“ Deletes specific tasks from the database. - - β„‚ **TaskLogPersistenceService** (`Class`) `[CRITICAL]` - - πŸ“ Provides methods to save and query task logs from the task_logs table. - - πŸ”’ Invariant: Log entries are batch-inserted for performance. - - πŸ”— DEPENDS_ON -> `TaskLogRecord` - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the TaskLogPersistenceService - - Ζ’ **add_logs** (`Function`) - - πŸ“ Batch insert log entries for a task. - - Ζ’ **get_logs** (`Function`) - - πŸ“ Query logs for a task with filtering and pagination. 
- - Ζ’ **get_log_stats** (`Function`) - - πŸ“ Get statistics about logs for a task. - - Ζ’ **get_sources** (`Function`) - - πŸ“ Get unique sources for a task's logs. - - Ζ’ **delete_logs_for_task** (`Function`) - - πŸ“ Delete all logs for a specific task. - - Ζ’ **delete_logs_for_tasks** (`Function`) - - πŸ“ Delete all logs for multiple tasks. - - Ζ’ **json_serializable** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **TaskManagerModule** (`Module`) `[CRITICAL]` - - πŸ“ Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Task IDs are unique. - - β„‚ **TaskManager** (`Class`) `[CRITICAL]` - - πŸ“ Manages the lifecycle of tasks, including their creation, execution, and state tracking. - - πŸ”’ Invariant: Log entries are never deleted after being added to a task. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initialize the TaskManager with dependencies. - - Ζ’ **_flusher_loop** (`Function`) - - πŸ“ Background thread that periodically flushes log buffer to database. - - Ζ’ **_flush_logs** (`Function`) - - πŸ“ Flush all buffered logs to the database. - - Ζ’ **_flush_task_logs** (`Function`) - - πŸ“ Flush logs for a specific task immediately. - - Ζ’ **create_task** (`Function`) - - πŸ“ Creates and queues a new task for execution. - - Ζ’ **_run_task** (`Function`) - - πŸ“ Internal method to execute a task with TaskContext support. - - Ζ’ **resolve_task** (`Function`) - - πŸ“ Resumes a task that is awaiting mapping. - - Ζ’ **wait_for_resolution** (`Function`) - - πŸ“ Pauses execution and waits for a resolution signal. - - Ζ’ **wait_for_input** (`Function`) - - πŸ“ Pauses execution and waits for user input. - - Ζ’ **get_task** (`Function`) - - πŸ“ Retrieves a task by its ID. - - Ζ’ **get_all_tasks** (`Function`) - - πŸ“ Retrieves all registered tasks. 
- - Ζ’ **get_tasks** (`Function`) - - πŸ“ Retrieves tasks with pagination and optional status filter. - - Ζ’ **get_task_logs** (`Function`) - - πŸ“ Retrieves logs for a specific task (from memory for running, persistence for completed). - - Ζ’ **get_task_log_stats** (`Function`) - - πŸ“ Get statistics about logs for a task. - - Ζ’ **get_task_log_sources** (`Function`) - - πŸ“ Get unique sources for a task's logs. - - Ζ’ **_add_log** (`Function`) - - πŸ“ Adds a log entry to a task buffer and notifies subscribers. - - Ζ’ **subscribe_logs** (`Function`) - - πŸ“ Subscribes to real-time logs for a task. - - Ζ’ **unsubscribe_logs** (`Function`) - - πŸ“ Unsubscribes from real-time logs for a task. - - Ζ’ **load_persisted_tasks** (`Function`) - - πŸ“ Load persisted tasks using persistence service. - - Ζ’ **await_input** (`Function`) - - πŸ“ Transition a task to AWAITING_INPUT state with input request. - - Ζ’ **resume_task_with_password** (`Function`) - - πŸ“ Resume a task that is awaiting input with provided passwords. - - Ζ’ **clear_tasks** (`Function`) - - πŸ“ Clears tasks based on status filter (also deletes associated logs). - - Ζ’ **sort_key** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **TaskManagerModels** (`Module`) - - πŸ“ Defines the data models and enumerations used by the Task Manager. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Task IDs are immutable once created. - - πŸ“¦ **TaskStatus** (`Enum`) `[TRIVIAL]` - - πŸ“ Defines the possible states a task can be in during its lifecycle. - - πŸ“¦ **LogLevel** (`Enum`) - - πŸ“ Defines the possible log levels for task logging. - - β„‚ **LogEntry** (`Class`) `[CRITICAL]` - - πŸ“ A Pydantic model representing a single, structured log entry associated with a task. - - πŸ”’ Invariant: Each log entry has a unique timestamp and source. - - β„‚ **TaskLog** (`Class`) - - πŸ“ A Pydantic model representing a persisted log entry from the database. 
- - β„‚ **LogFilter** (`Class`) - - πŸ“ Filter parameters for querying task logs. - - β„‚ **LogStats** (`Class`) - - πŸ“ Statistics about log entries for a task. - - β„‚ **Task** (`Class`) - - πŸ“ A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the Task model and validates input_request for AWAITING_INPUT status. -- πŸ“¦ **TaskCleanupModule** (`Module`) - - πŸ“ Implements task cleanup and retention policies, including associated logs. - - πŸ—οΈ Layer: Core - - β„‚ **TaskCleanupService** (`Class`) - - πŸ“ Provides methods to clean up old task records and their associated logs. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the cleanup service with dependencies. - - Ζ’ **run_cleanup** (`Function`) - - πŸ“ Deletes tasks older than the configured retention period and their logs. - - Ζ’ **delete_task_with_logs** (`Function`) - - πŸ“ Delete a single task and all its associated logs. -- πŸ“¦ **TaskManagerPackage** (`Module`) `[TRIVIAL]` - - πŸ“ Exports the public API of the task manager package. - - πŸ—οΈ Layer: Core -- πŸ“¦ **TaskContextModule** (`Module`) `[CRITICAL]` - - πŸ“ Provides execution context passed to plugins during task execution. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: Each TaskContext is bound to a single task execution. - - πŸ”— DEPENDS_ON -> `TaskLogger, USED_BY -> plugins` - - β„‚ **TaskContext** (`Class`) `[CRITICAL]` - - πŸ“ A container passed to plugin.execute() providing the logger and other task-specific utilities. - - πŸ”’ Invariant: logger is always a valid TaskLogger instance. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initialize the TaskContext with task-specific resources. - - Ζ’ **task_id** (`Function`) - - πŸ“ Get the task ID. - - Ζ’ **logger** (`Function`) - - πŸ“ Get the TaskLogger instance for this context. - - Ζ’ **params** (`Function`) - - πŸ“ Get the task parameters. 
- - Ζ’ **get_param** (`Function`) - - πŸ“ Get a specific parameter value with optional default. - - Ζ’ **create_sub_context** (`Function`) - - πŸ“ Create a sub-context with a different default source. - - Ζ’ **execute** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.api.auth** (`Module`) - - πŸ“ Authentication API endpoints. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All auth endpoints must return consistent error codes. - - πŸ“¦ **router** (`Variable`) - - πŸ“ APIRouter instance for authentication routes. - - Ζ’ **login_for_access_token** (`Function`) - - πŸ“ Authenticates a user and returns a JWT access token. - - Ζ’ **read_users_me** (`Function`) - - πŸ“ Retrieves the profile of the currently authenticated user. - - Ζ’ **logout** (`Function`) - - πŸ“ Logs out the current user (placeholder for session revocation). - - Ζ’ **login_adfs** (`Function`) - - πŸ“ Initiates the ADFS OIDC login flow. - - Ζ’ **auth_callback_adfs** (`Function`) - - πŸ“ Handles the callback from ADFS after successful authentication. -- πŸ“¦ **router** (`Global`) - - πŸ“ APIRouter instance for LLM routes. -- Ζ’ **get_providers** (`Function`) - - πŸ“ Retrieve all LLM provider configurations. -- Ζ’ **create_provider** (`Function`) - - πŸ“ Create a new LLM provider configuration. -- Ζ’ **update_provider** (`Function`) - - πŸ“ Update an existing LLM provider configuration. -- Ζ’ **delete_provider** (`Function`) - - πŸ“ Delete an LLM provider configuration. -- Ζ’ **test_connection** (`Function`) - - πŸ“ Test connection to an LLM provider. -- Ζ’ **test_provider_config** (`Function`) - - πŸ“ Test connection with a provided configuration (not yet saved). 
-- πŸ“¦ **backend.src.api.routes.datasets** (`Module`) - - πŸ“ API endpoints for the Dataset Hub - listing datasets with mapping progress - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All dataset responses include last_task metadata - - πŸ”— DEPENDS_ON -> `backend.src.dependencies` - - πŸ”— DEPENDS_ON -> `backend.src.services.resource_service` - - πŸ”— DEPENDS_ON -> `backend.src.core.superset_client` - - πŸ“¦ **MappedFields** (`DataClass`) - - πŸ“¦ **LastTask** (`DataClass`) - - πŸ“¦ **DatasetItem** (`DataClass`) - - πŸ“¦ **LinkedDashboard** (`DataClass`) - - πŸ“¦ **DatasetColumn** (`DataClass`) - - πŸ“¦ **DatasetDetailResponse** (`DataClass`) - - πŸ“¦ **DatasetsResponse** (`DataClass`) - - πŸ“¦ **TaskResponse** (`DataClass`) - - Ζ’ **get_dataset_ids** (`Function`) - - πŸ“ Fetch list of all dataset IDs from a specific environment (without pagination) - - πŸ”— CALLS -> `ResourceService.get_datasets_with_status` - - Ζ’ **get_datasets** (`Function`) - - πŸ“ Fetch list of datasets from a specific environment with mapping progress - - πŸ”— CALLS -> `ResourceService.get_datasets_with_status` - - πŸ“¦ **MapColumnsRequest** (`DataClass`) - - Ζ’ **map_columns** (`Function`) - - πŸ“ Trigger bulk column mapping for datasets - - πŸ”— DISPATCHES -> `MapperPlugin` - - πŸ”— CALLS -> `task_manager.create_task` - - πŸ“¦ **GenerateDocsRequest** (`DataClass`) - - Ζ’ **generate_docs** (`Function`) - - πŸ“ Trigger bulk documentation generation for datasets - - πŸ”— DISPATCHES -> `LLMAnalysisPlugin` - - πŸ”— CALLS -> `task_manager.create_task` - - Ζ’ **get_dataset_detail** (`Function`) - - πŸ“ Get detailed dataset information including columns and linked dashboards - - πŸ”— CALLS -> `SupersetClient.get_dataset_detail` -- πŸ“¦ **backend.src.api.routes.git** (`Module`) - - πŸ“ Provides FastAPI endpoints for Git integration operations. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All Git operations must be routed through GitService. 
- - Ζ’ **get_git_configs** (`Function`) - - πŸ“ List all configured Git servers. - - Ζ’ **create_git_config** (`Function`) - - πŸ“ Register a new Git server configuration. - - Ζ’ **delete_git_config** (`Function`) - - πŸ“ Remove a Git server configuration. - - Ζ’ **test_git_config** (`Function`) - - πŸ“ Validate connection to a Git server using provided credentials. - - Ζ’ **init_repository** (`Function`) - - πŸ“ Link a dashboard to a Git repository and perform initial clone/init. - - Ζ’ **get_branches** (`Function`) - - πŸ“ List all branches for a dashboard's repository. - - Ζ’ **create_branch** (`Function`) - - πŸ“ Create a new branch in the dashboard's repository. - - Ζ’ **checkout_branch** (`Function`) - - πŸ“ Switch the dashboard's repository to a specific branch. - - Ζ’ **commit_changes** (`Function`) - - πŸ“ Stage and commit changes in the dashboard's repository. - - Ζ’ **push_changes** (`Function`) - - πŸ“ Push local commits to the remote repository. - - Ζ’ **pull_changes** (`Function`) - - πŸ“ Pull changes from the remote repository. - - Ζ’ **sync_dashboard** (`Function`) - - πŸ“ Sync dashboard state from Superset to Git using the GitPlugin. - - Ζ’ **get_environments** (`Function`) - - πŸ“ List all deployment environments. - - Ζ’ **deploy_dashboard** (`Function`) - - πŸ“ Deploy dashboard from Git to a target environment. - - Ζ’ **get_history** (`Function`) - - πŸ“ View commit history for a dashboard's repository. - - Ζ’ **get_repository_status** (`Function`) - - πŸ“ Get current Git status for a dashboard repository. - - Ζ’ **get_repository_diff** (`Function`) - - πŸ“ Get Git diff for a dashboard repository. - - Ζ’ **generate_commit_message** (`Function`) - - πŸ“ Generate a suggested commit message using LLM. -- πŸ“¦ **ConnectionsRouter** (`Module`) - - πŸ“ Defines the FastAPI router for managing external database connections. 
- - πŸ—οΈ Layer: UI (API) - - β„‚ **ConnectionSchema** (`Class`) - - πŸ“ Pydantic model for connection response. - - β„‚ **ConnectionCreate** (`Class`) - - πŸ“ Pydantic model for creating a connection. - - Ζ’ **list_connections** (`Function`) - - πŸ“ Lists all saved connections. - - Ζ’ **create_connection** (`Function`) - - πŸ“ Creates a new connection configuration. - - Ζ’ **delete_connection** (`Function`) - - πŸ“ Deletes a connection configuration. -- πŸ“¦ **backend.src.api.routes.environments** (`Module`) - - πŸ“ API endpoints for listing environments and their databases. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: Environment IDs must exist in the configuration. - - πŸ”— DEPENDS_ON -> `backend.src.dependencies` - - πŸ”— DEPENDS_ON -> `backend.src.core.superset_client` - - πŸ“¦ **ScheduleSchema** (`DataClass`) - - πŸ“¦ **EnvironmentResponse** (`DataClass`) - - πŸ“¦ **DatabaseResponse** (`DataClass`) - - Ζ’ **get_environments** (`Function`) - - πŸ“ List all configured environments. - - πŸ—οΈ Layer: API - - Ζ’ **update_environment_schedule** (`Function`) - - πŸ“ Update backup schedule for an environment. - - πŸ—οΈ Layer: API - - Ζ’ **get_environment_databases** (`Function`) - - πŸ“ Fetch the list of databases from a specific environment. - - πŸ—οΈ Layer: API -- πŸ“¦ **backend.src.api.routes.migration** (`Module`) - - πŸ“ API endpoints for migration operations. - - πŸ—οΈ Layer: API - - πŸ”— DEPENDS_ON -> `backend.src.dependencies` - - πŸ”— DEPENDS_ON -> `backend.src.models.dashboard` - - Ζ’ **get_dashboards** (`Function`) - - πŸ“ Fetch all dashboards from the specified environment for the grid. - - Ζ’ **execute_migration** (`Function`) - - πŸ“ Execute the migration of selected dashboards. - - Ζ’ **get_migration_settings** (`Function`) - - πŸ“ Get current migration Cron string explicitly. - - Ζ’ **update_migration_settings** (`Function`) - - πŸ“ Update migration Cron string. 
- - Ζ’ **get_resource_mappings** (`Function`) - - πŸ“ Fetch synchronized object mappings with search, filtering, and pagination. - - Ζ’ **trigger_sync_now** (`Function`) - - πŸ“ Triggers an immediate ID synchronization for all environments. -- πŸ“¦ **PluginsRouter** (`Module`) - - πŸ“ Defines the FastAPI router for plugin-related endpoints, allowing clients to list available plugins. - - πŸ—οΈ Layer: UI (API) - - Ζ’ **list_plugins** (`Function`) - - πŸ“ Retrieve a list of all available plugins. -- πŸ“¦ **backend.src.api.routes.mappings** (`Module`) - - πŸ“ API endpoints for managing database mappings and getting suggestions. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: Mappings are persisted in the SQLite database. - - πŸ”— DEPENDS_ON -> `backend.src.dependencies` - - πŸ”— DEPENDS_ON -> `backend.src.core.database` - - πŸ”— DEPENDS_ON -> `backend.src.services.mapping_service` - - πŸ“¦ **MappingCreate** (`DataClass`) - - πŸ“¦ **MappingResponse** (`DataClass`) - - πŸ“¦ **SuggestRequest** (`DataClass`) - - Ζ’ **get_mappings** (`Function`) - - πŸ“ List all saved database mappings. - - Ζ’ **create_mapping** (`Function`) - - πŸ“ Create or update a database mapping. - - Ζ’ **suggest_mappings_api** (`Function`) - - πŸ“ Get suggested mappings based on fuzzy matching. -- πŸ“¦ **SettingsRouter** (`Module`) - - πŸ“ Provides API endpoints for managing application settings and Superset environments. - - πŸ—οΈ Layer: UI (API) - - πŸ”’ Invariant: All settings changes must be persisted via ConfigManager. - - πŸ”— DEPENDS_ON -> `ConfigManager` - - πŸ”— DEPENDS_ON -> `ConfigModels` - - β„‚ **LoggingConfigResponse** (`Class`) - - πŸ“ Response model for logging configuration with current task log level. - - Ζ’ **get_settings** (`Function`) - - πŸ“ Retrieves all application settings. - - Ζ’ **update_global_settings** (`Function`) - - πŸ“ Updates global application settings. - - Ζ’ **get_storage_settings** (`Function`) - - πŸ“ Retrieves storage-specific settings. 
- - Ζ’ **update_storage_settings** (`Function`) - - πŸ“ Updates storage-specific settings. - - Ζ’ **get_environments** (`Function`) - - πŸ“ Lists all configured Superset environments. - - Ζ’ **add_environment** (`Function`) - - πŸ“ Adds a new Superset environment. - - Ζ’ **update_environment** (`Function`) - - πŸ“ Updates an existing Superset environment. - - Ζ’ **delete_environment** (`Function`) - - πŸ“ Deletes a Superset environment. - - Ζ’ **test_environment_connection** (`Function`) - - πŸ“ Tests the connection to a Superset environment. - - Ζ’ **get_logging_config** (`Function`) - - πŸ“ Retrieves current logging configuration. - - Ζ’ **update_logging_config** (`Function`) - - πŸ“ Updates logging configuration. - - β„‚ **ConsolidatedSettingsResponse** (`Class`) - - Ζ’ **get_consolidated_settings** (`Function`) - - πŸ“ Retrieves all settings categories in a single call - - Ζ’ **update_consolidated_settings** (`Function`) - - πŸ“ Bulk update application settings from the consolidated view. -- πŸ“¦ **backend.src.api.routes.admin** (`Module`) - - πŸ“ Admin API endpoints for user and role management. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All endpoints in this module require 'Admin' role or 'admin' scope. - - πŸ“¦ **router** (`Variable`) - - πŸ“ APIRouter instance for admin routes. - - Ζ’ **list_users** (`Function`) - - πŸ“ Lists all registered users. - - Ζ’ **create_user** (`Function`) - - πŸ“ Creates a new local user. - - Ζ’ **update_user** (`Function`) - - πŸ“ Updates an existing user. - - Ζ’ **delete_user** (`Function`) - - πŸ“ Deletes a user. - - Ζ’ **list_roles** (`Function`) - - πŸ“ Lists all available roles. - - πŸ”— CALLS -> `backend.src.models.auth.Role` - - Ζ’ **create_role** (`Function`) - - πŸ“ Creates a new system role with associated permissions. 
- - πŸ”— CALLS -> `backend.src.core.auth.repository.AuthRepository.get_permission_by_id` - - Ζ’ **update_role** (`Function`) - - πŸ“ Updates an existing role's metadata and permissions. - - πŸ”— CALLS -> `backend.src.core.auth.repository.AuthRepository.get_role_by_id` - - Ζ’ **delete_role** (`Function`) - - πŸ“ Removes a role from the system. - - πŸ”— CALLS -> `backend.src.core.auth.repository.AuthRepository.get_role_by_id` - - Ζ’ **list_permissions** (`Function`) - - πŸ“ Lists all available system permissions for assignment. - - πŸ”— CALLS -> `backend.src.core.auth.repository.AuthRepository.list_permissions` - - Ζ’ **list_ad_mappings** (`Function`) - - πŸ“ Lists all AD Group to Role mappings. - - Ζ’ **create_ad_mapping** (`Function`) - - πŸ“ Creates a new AD Group mapping. -- πŸ“¦ **backend.src.api.routes.git_schemas** (`Module`) - - πŸ“ Defines Pydantic models for the Git integration API layer. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All schemas must be compatible with the FastAPI router. - - πŸ”— DEPENDS_ON -> `backend.src.models.git` - - β„‚ **GitServerConfigBase** (`Class`) `[TRIVIAL]` - - πŸ“ Base schema for Git server configuration attributes. - - β„‚ **GitServerConfigCreate** (`Class`) - - πŸ“ Schema for creating a new Git server configuration. - - β„‚ **GitServerConfigSchema** (`Class`) - - πŸ“ Schema for representing a Git server configuration with metadata. - - β„‚ **GitRepositorySchema** (`Class`) - - πŸ“ Schema for tracking a local Git repository linked to a dashboard. - - β„‚ **BranchSchema** (`Class`) - - πŸ“ Schema for representing a Git branch metadata. - - β„‚ **CommitSchema** (`Class`) - - πŸ“ Schema for representing Git commit details. - - β„‚ **BranchCreate** (`Class`) - - πŸ“ Schema for branch creation requests. - - β„‚ **BranchCheckout** (`Class`) - - πŸ“ Schema for branch checkout requests. - - β„‚ **CommitCreate** (`Class`) - - πŸ“ Schema for staging and committing changes. 
- - β„‚ **ConflictResolution** (`Class`) - - πŸ“ Schema for resolving merge conflicts. - - β„‚ **DeploymentEnvironmentSchema** (`Class`) - - πŸ“ Schema for representing a target deployment environment. - - β„‚ **DeployRequest** (`Class`) - - πŸ“ Schema for dashboard deployment requests. - - β„‚ **RepoInitRequest** (`Class`) - - πŸ“ Schema for repository initialization requests. -- πŸ“¦ **backend.src.api.routes.assistant** (`Module`) - - πŸ“ API routes for LLM assistant command parsing and safe execution orchestration. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: Risky operations are never executed without valid confirmation token. - - πŸ”— DEPENDS_ON -> `backend.src.core.task_manager` - - πŸ”— DEPENDS_ON -> `backend.src.models.assistant` - - β„‚ **AssistantMessageRequest** (`Class`) `[TRIVIAL]` - - πŸ“ Input payload for assistant message endpoint. - - β„‚ **AssistantAction** (`Class`) `[TRIVIAL]` - - πŸ“ UI action descriptor returned with assistant responses. - - β„‚ **AssistantMessageResponse** (`Class`) - - πŸ“ Output payload contract for assistant interaction endpoints. - - β„‚ **ConfirmationRecord** (`Class`) - - πŸ“ In-memory confirmation token model for risky operation dispatch. - - Ζ’ **_append_history** (`Function`) - - πŸ“ Append conversation message to in-memory history buffer. - - Ζ’ **_persist_message** (`Function`) - - πŸ“ Persist assistant/user message record to database. - - Ζ’ **_audit** (`Function`) - - πŸ“ Append in-memory audit record for assistant decision trace. - - Ζ’ **_persist_audit** (`Function`) - - πŸ“ Persist structured assistant audit payload in database. - - Ζ’ **_persist_confirmation** (`Function`) - - πŸ“ Persist confirmation token record to database. - - Ζ’ **_update_confirmation_state** (`Function`) - - πŸ“ Update persistent confirmation token lifecycle state. - - Ζ’ **_load_confirmation_from_db** (`Function`) - - πŸ“ Load confirmation token from database into in-memory model. 
- - Ζ’ **_ensure_conversation** (`Function`) - - πŸ“ Resolve active conversation id in memory or create a new one. - - Ζ’ **_resolve_or_create_conversation** (`Function`) - - πŸ“ Resolve active conversation using explicit id, memory cache, or persisted history. - - Ζ’ **_cleanup_history_ttl** (`Function`) - - πŸ“ Enforce assistant message retention window by deleting expired rows and in-memory records. - - Ζ’ **_is_conversation_archived** (`Function`) - - πŸ“ Determine archived state for a conversation based on last update timestamp. - - Ζ’ **_coerce_query_bool** (`Function`) - - πŸ“ Normalize bool-like query values for compatibility in direct handler invocations/tests. - - Ζ’ **_extract_id** (`Function`) - - πŸ“ Extract first regex match group from text by ordered pattern list. - - Ζ’ **_resolve_env_id** (`Function`) - - πŸ“ Resolve environment identifier/name token to canonical environment id. - - Ζ’ **_is_production_env** (`Function`) - - πŸ“ Determine whether environment token resolves to production-like target. - - Ζ’ **_resolve_provider_id** (`Function`) - - πŸ“ Resolve provider token to provider id with active/default fallback. - - Ζ’ **_get_default_environment_id** (`Function`) - - πŸ“ Resolve default environment id from settings or first configured environment. - - Ζ’ **_resolve_dashboard_id_by_ref** (`Function`) - - πŸ“ Resolve dashboard id by title or slug reference in selected environment. - - Ζ’ **_resolve_dashboard_id_entity** (`Function`) - - πŸ“ Resolve dashboard id from intent entities using numeric id or dashboard_ref fallback. - - Ζ’ **_parse_command** (`Function`) - - πŸ“ Deterministically parse RU/EN command text into intent payload. - - Ζ’ **_check_any_permission** (`Function`) - - πŸ“ Validate user against alternative permission checks (logical OR). - - Ζ’ **_has_any_permission** (`Function`) - - πŸ“ Check whether user has at least one permission tuple from the provided list. 
- - Ζ’ **_build_tool_catalog** (`Function`) - - πŸ“ Build current-user tool catalog for LLM planner with operation contracts and defaults. - - Ζ’ **_coerce_intent_entities** (`Function`) - - πŸ“ Normalize intent entity value types from LLM output to route-compatible values. - - Ζ’ **_confirmation_summary** (`Function`) - - πŸ“ Build human-readable confirmation prompt for an intent before execution. - - Ζ’ **_clarification_text_for_intent** (`Function`) - - πŸ“ Convert technical missing-parameter errors into user-facing clarification prompts. - - Ζ’ **_plan_intent_with_llm** (`Function`) - - πŸ“ Use active LLM provider to select best tool/operation from dynamic catalog. - - Ζ’ **_authorize_intent** (`Function`) - - πŸ“ Validate user permissions for parsed intent before confirmation/dispatch. - - Ζ’ **_dispatch_intent** (`Function`) - - πŸ“ Execute parsed assistant intent via existing task/plugin/git services. - - Ζ’ **send_message** (`Function`) - - πŸ“ Parse assistant command, enforce safety gates, and dispatch executable intent. - - Ζ’ **confirm_operation** (`Function`) - - πŸ“ Execute previously requested risky operation after explicit user confirmation. - - Ζ’ **cancel_operation** (`Function`) - - πŸ“ Cancel pending risky operation and mark confirmation token as cancelled. - - Ζ’ **list_conversations** (`Function`) - - πŸ“ Return paginated conversation list for current user with archived flag and last message preview. - - Ζ’ **get_history** (`Function`) - - πŸ“ Retrieve paginated assistant conversation history for current user. - - Ζ’ **get_assistant_audit** (`Function`) - - πŸ“ Return assistant audit decisions for current user from persistent and in-memory stores. - - Ζ’ **_label** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **storage_routes** (`Module`) - - πŸ“ API endpoints for file storage management (backups and repositories). 
- - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All paths must be validated against path traversal. - - πŸ”— DEPENDS_ON -> `backend.src.models.storage` - - Ζ’ **list_files** (`Function`) - - πŸ“ List all files and directories in the storage system. - - πŸ”— CALLS -> `StoragePlugin.list_files` - - Ζ’ **upload_file** (`Function`) - - πŸ“ Upload a file to the storage system. - - πŸ”— CALLS -> `StoragePlugin.save_file` - - Ζ’ **delete_file** (`Function`) - - πŸ“ Delete a specific file or directory. - - πŸ”— CALLS -> `StoragePlugin.delete_file` - - Ζ’ **download_file** (`Function`) - - πŸ“ Retrieve a file for download. - - πŸ”— CALLS -> `StoragePlugin.get_file_path` -- πŸ“¦ **ReportsRouter** (`Module`) `[CRITICAL]` - - πŸ“ FastAPI router for unified task report list and detail retrieval endpoints. - - πŸ—οΈ Layer: UI (API) - - πŸ”’ Invariant: Endpoints are read-only and do not trigger long-running tasks. - - πŸ”— DEPENDS_ON -> `backend.src.services.reports.report_service.ReportsService` - - πŸ”— DEPENDS_ON -> `backend.src.dependencies` - - Ζ’ **_parse_csv_enum_list** (`Function`) - - πŸ“ Parse comma-separated query value into enum list. - - Ζ’ **list_reports** (`Function`) - - πŸ“ Return paginated unified reports list. - - Ζ’ **get_report_detail** (`Function`) - - πŸ“ Return one normalized report detail with diagnostics and next actions. -- πŸ“¦ **backend.src.api.routes.__init__** (`Module`) - - πŸ“ Provide lazy route module loading to avoid heavyweight imports during tests. - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: Only names listed in __all__ are importable via __getattr__. - - πŸ”— DEPENDS_ON -> `importlib` - - Ζ’ **__getattr__** (`Function`) `[TRIVIAL]` - - πŸ“ Lazily import route module by attribute name. -- πŸ“¦ **TasksRouter** (`Module`) - - πŸ“ Defines the FastAPI router for task-related endpoints, allowing clients to create, list, and get the status of tasks. 
- - πŸ—οΈ Layer: UI (API) - - Ζ’ **create_task** (`Function`) - - πŸ“ Create and start a new task for a given plugin. - - Ζ’ **list_tasks** (`Function`) - - πŸ“ Retrieve a list of tasks with pagination and optional status filter. - - Ζ’ **get_task** (`Function`) - - πŸ“ Retrieve the details of a specific task. - - Ζ’ **get_task_logs** (`Function`) `[CRITICAL]` - - πŸ“ Retrieve logs for a specific task with optional filtering. - - Ζ’ **get_task_log_stats** (`Function`) - - πŸ“ Get statistics about logs for a task (counts by level and source). - - Ζ’ **get_task_log_sources** (`Function`) - - πŸ“ Get unique sources for a task's logs. - - Ζ’ **resolve_task** (`Function`) - - πŸ“ Resolve a task that is awaiting mapping. - - Ζ’ **resume_task** (`Function`) - - πŸ“ Resume a task that is awaiting input (e.g., passwords). - - Ζ’ **clear_tasks** (`Function`) - - πŸ“ Clear tasks matching the status filter. -- πŸ“¦ **backend.src.api.routes.dashboards** (`Module`) - - πŸ“ API endpoints for the Dashboard Hub - listing dashboards with Git and task status - - πŸ—οΈ Layer: API - - πŸ”’ Invariant: All dashboard responses include git_status and last_task metadata - - πŸ”— DEPENDS_ON -> `backend.src.dependencies` - - πŸ”— DEPENDS_ON -> `backend.src.services.resource_service` - - πŸ”— DEPENDS_ON -> `backend.src.core.superset_client` - - πŸ“¦ **GitStatus** (`DataClass`) - - πŸ“¦ **LastTask** (`DataClass`) - - πŸ“¦ **DashboardItem** (`DataClass`) - - πŸ“¦ **DashboardsResponse** (`DataClass`) - - πŸ“¦ **DashboardChartItem** (`DataClass`) - - πŸ“¦ **DashboardDatasetItem** (`DataClass`) - - πŸ“¦ **DashboardDetailResponse** (`DataClass`) - - πŸ“¦ **DatabaseMapping** (`DataClass`) - - πŸ“¦ **DatabaseMappingsResponse** (`DataClass`) - - Ζ’ **get_dashboards** (`Function`) - - πŸ“ Fetch list of dashboards from a specific environment with Git status and last task status - - πŸ”— CALLS -> `ResourceService.get_dashboards_with_status` - - Ζ’ **get_database_mappings** (`Function`) - - 
πŸ“ Get database mapping suggestions between source and target environments - - πŸ”— CALLS -> `MappingService.get_suggestions` - - Ζ’ **get_dashboard_detail** (`Function`) - - πŸ“ Fetch detailed dashboard info with related charts and datasets - - πŸ”— CALLS -> `SupersetClient.get_dashboard_detail` - - πŸ“¦ **MigrateRequest** (`DataClass`) - - πŸ“¦ **TaskResponse** (`DataClass`) - - Ζ’ **migrate_dashboards** (`Function`) - - πŸ“ Trigger bulk migration of dashboards from source to target environment - - πŸ”— DISPATCHES -> `MigrationPlugin` - - πŸ”— CALLS -> `task_manager.create_task` - - πŸ“¦ **BackupRequest** (`DataClass`) - - Ζ’ **backup_dashboards** (`Function`) - - πŸ“ Trigger bulk backup of dashboards with optional cron schedule - - πŸ”— DISPATCHES -> `BackupPlugin` - - πŸ”— CALLS -> `task_manager.create_task` -- πŸ“¦ **backend.src.api.routes.__tests__.test_dashboards** (`Module`) - - πŸ“ Unit tests for Dashboards API endpoints - - πŸ—οΈ Layer: API - - Ζ’ **test_get_dashboards_success** (`Function`) - - Ζ’ **test_get_dashboards_with_search** (`Function`) - - Ζ’ **test_get_dashboards_env_not_found** (`Function`) - - Ζ’ **test_get_dashboards_invalid_pagination** (`Function`) - - Ζ’ **test_get_dashboard_detail_success** (`Function`) - - Ζ’ **test_get_dashboard_detail_env_not_found** (`Function`) - - Ζ’ **test_migrate_dashboards_success** (`Function`) - - Ζ’ **test_migrate_dashboards_no_ids** (`Function`) - - Ζ’ **test_backup_dashboards_success** (`Function`) - - Ζ’ **test_get_database_mappings_success** (`Function`) - - Ζ’ **mock_get_dashboards** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **mock_get_dashboards** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.tests.test_reports_openapi_conformance** (`Module`) `[CRITICAL]` - - πŸ“ Validate implemented reports payload shape against OpenAPI-required top-level contract fields. 
- - πŸ—οΈ Layer: Domain (Tests) - - πŸ”’ Invariant: List and detail payloads include required contract keys. - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_all_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_admin_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_reports_list_openapi_required_keys** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_reports_detail_openapi_required_keys** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.tests.test_reports_api** (`Module`) `[CRITICAL]` - - πŸ“ Contract tests for GET /api/reports defaults, pagination, and filtering behavior. - - πŸ—οΈ Layer: Domain (Tests) - - πŸ”’ Invariant: API response contract contains {items,total,page,page_size,has_next,applied_filters}. - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_all_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_admin_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_reports_default_pagination_contract** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_reports_filter_and_pagination** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_reports_handles_mixed_naive_and_aware_datetimes** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_reports_invalid_filter_returns_400** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.api.routes.__tests__.test_datasets** (`Module`) - - πŸ“ Unit tests for Datasets API endpoints - - πŸ—οΈ Layer: API - - πŸ”’ 
Invariant: Endpoint contracts remain stable for success and validation failure paths. - - Ζ’ **test_get_datasets_success** (`Function`) - - πŸ“ Validate successful datasets listing contract for an existing environment. - - Ζ’ **test_get_datasets_env_not_found** (`Function`) - - Ζ’ **test_get_datasets_invalid_pagination** (`Function`) - - Ζ’ **test_map_columns_success** (`Function`) - - Ζ’ **test_map_columns_invalid_source_type** (`Function`) - - Ζ’ **test_generate_docs_success** (`Function`) -- πŸ“¦ **backend.tests.test_reports_detail_api** (`Module`) `[CRITICAL]` - - πŸ“ Contract tests for GET /api/reports/{report_id} detail endpoint behavior. - - πŸ—οΈ Layer: Domain (Tests) - - πŸ”’ Invariant: Detail endpoint tests must keep deterministic assertions for success and not-found contracts. - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_all_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_admin_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_report_detail_success** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_report_detail_not_found** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.api.routes.__tests__.test_assistant_authz** (`Module`) - - πŸ“ Verify assistant confirmation ownership, expiration, and deny behavior for restricted users. - - πŸ—οΈ Layer: UI (API Tests) - - πŸ”’ Invariant: Security-sensitive flows fail closed for unauthorized actors. - - πŸ”— DEPENDS_ON -> `backend.src.api.routes.assistant` - - Ζ’ **_run_async** (`Function`) `[TRIVIAL]` - - πŸ“ Execute async endpoint handler in synchronous test context. - - β„‚ **_FakeTask** (`Class`) `[TRIVIAL]` - - πŸ“ Lightweight task model used for assistant authz tests. 
- - β„‚ **_FakeTaskManager** (`Class`) `[TRIVIAL]` - - πŸ“ Minimal task manager for deterministic operation creation and lookup. - - β„‚ **_FakeConfigManager** (`Class`) `[TRIVIAL]` - - πŸ“ Provide deterministic environment aliases required by intent parsing. - - Ζ’ **_admin_user** (`Function`) `[TRIVIAL]` - - πŸ“ Build admin principal fixture. - - Ζ’ **_other_admin_user** (`Function`) `[TRIVIAL]` - - πŸ“ Build second admin principal fixture for ownership tests. - - Ζ’ **_limited_user** (`Function`) `[TRIVIAL]` - - πŸ“ Build limited principal without required assistant execution privileges. - - β„‚ **_FakeQuery** (`Class`) `[TRIVIAL]` - - πŸ“ Minimal chainable query object for fake DB interactions. - - β„‚ **_FakeDb** (`Class`) `[TRIVIAL]` - - πŸ“ In-memory session substitute for assistant route persistence calls. - - Ζ’ **_clear_assistant_state** (`Function`) `[TRIVIAL]` - - πŸ“ Reset assistant process-local state between test cases. - - Ζ’ **test_confirmation_owner_mismatch_returns_403** (`Function`) - - πŸ“ Confirm endpoint should reject requests from user that does not own the confirmation token. - - Ζ’ **test_expired_confirmation_cannot_be_confirmed** (`Function`) - - πŸ“ Expired confirmation token should be rejected and not create task. - - Ζ’ **test_limited_user_cannot_launch_restricted_operation** (`Function`) - - πŸ“ Limited user should receive denied state for privileged operation. 
- - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **create_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_environments** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **filter** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **order_by** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **first** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **all** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **limit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **offset** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **count** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **add** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **merge** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **query** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **commit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **rollback** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.api.routes.__tests__.test_assistant_api** (`Module`) - - πŸ“ Validate assistant API endpoint logic via direct async handler invocation. - - πŸ—οΈ Layer: UI (API Tests) - - πŸ”’ Invariant: Every test clears assistant in-memory state before execution. 
- - πŸ”— DEPENDS_ON -> `backend.src.api.routes.assistant` - - Ζ’ **_run_async** (`Function`) `[TRIVIAL]` - - πŸ“ Execute async endpoint handler in synchronous test context. - - β„‚ **_FakeTask** (`Class`) `[TRIVIAL]` - - πŸ“ Lightweight task stub used by assistant API tests. - - β„‚ **_FakeTaskManager** (`Class`) `[TRIVIAL]` - - πŸ“ Minimal async-compatible TaskManager fixture for deterministic test flows. - - β„‚ **_FakeConfigManager** (`Class`) `[TRIVIAL]` - - πŸ“ Environment config fixture with dev/prod aliases for parser tests. - - Ζ’ **_admin_user** (`Function`) `[TRIVIAL]` - - πŸ“ Build admin principal fixture. - - Ζ’ **_limited_user** (`Function`) `[TRIVIAL]` - - πŸ“ Build non-admin principal fixture. - - β„‚ **_FakeQuery** (`Class`) `[TRIVIAL]` - - πŸ“ Minimal chainable query object for fake SQLAlchemy-like DB behavior in tests. - - β„‚ **_FakeDb** (`Class`) `[TRIVIAL]` - - πŸ“ In-memory fake database implementing subset of Session interface used by assistant routes. - - Ζ’ **_clear_assistant_state** (`Function`) `[TRIVIAL]` - - πŸ“ Reset in-memory assistant registries for isolation between tests. - - Ζ’ **test_unknown_command_returns_needs_clarification** (`Function`) - - πŸ“ Unknown command should return clarification state and unknown intent. - - Ζ’ **test_capabilities_question_returns_successful_help** (`Function`) - - πŸ“ Capability query should return deterministic help response, not clarification. - - Ζ’ **test_non_admin_command_returns_denied** (`Function`) - - πŸ“ Non-admin user must receive denied state for privileged command. - - Ζ’ **test_migration_to_prod_requires_confirmation_and_can_be_confirmed** (`Function`) - - πŸ“ Migration to prod must require confirmation and then start task after explicit confirm. - - Ζ’ **test_status_query_returns_task_status** (`Function`) - - πŸ“ Task status command must surface current status text for existing task id. 
- - Ζ’ **test_status_query_without_task_id_returns_latest_user_task** (`Function`) - - πŸ“ Status command without explicit task_id should resolve to latest task for current user. - - Ζ’ **test_llm_validation_with_dashboard_ref_requires_confirmation** (`Function`) - - πŸ“ LLM validation with dashboard_ref should now require confirmation before dispatch. - - Ζ’ **test_list_conversations_groups_by_conversation_and_marks_archived** (`Function`) - - πŸ“ Conversations endpoint must group messages and compute archived marker by inactivity threshold. - - Ζ’ **test_history_from_latest_returns_recent_page_first** (`Function`) - - πŸ“ History endpoint from_latest mode must return newest page while preserving chronological order in chunk. - - Ζ’ **test_list_conversations_archived_only_filters_active** (`Function`) - - πŸ“ archived_only mode must return only archived conversations. - - Ζ’ **test_guarded_operation_always_requires_confirmation** (`Function`) - - πŸ“ Non-dangerous (guarded) commands must still require confirmation before execution. - - Ζ’ **test_guarded_operation_confirm_roundtrip** (`Function`) - - πŸ“ Guarded operation must execute successfully after explicit confirmation. 
- - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **create_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_environments** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **filter** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **order_by** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **first** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **all** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **count** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **offset** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **limit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **add** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **merge** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **query** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **commit** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **rollback** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.api.routes.__tests__.test_migration_routes** (`Module`) - - πŸ“ Unit tests for migration API route handlers. 
- - πŸ—οΈ Layer: API - - Ζ’ **db_session** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_config_manager** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_migration_settings_returns_default_cron** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_migration_settings_returns_fallback_when_no_cron** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_update_migration_settings_saves_cron** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_update_migration_settings_rejects_missing_cron** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_resource_mappings_returns_formatted_list** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_resource_mappings_respects_pagination** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_resource_mappings_search_by_name** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_resource_mappings_filter_by_env** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_resource_mappings_filter_by_type** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_mock_env** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_sync_config_manager** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_trigger_sync_now_creates_env_row_and_syncs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_trigger_sync_now_rejects_empty_environments** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_trigger_sync_now_handles_partial_failure** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_trigger_sync_now_idempotent_env_upsert** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected 
function (orphan) -- πŸ“¦ **backend.src.models.config** (`Module`) - - πŸ“ Defines database schema for persisted application configuration. - - πŸ—οΈ Layer: Domain - - πŸ”— DEPENDS_ON -> `sqlalchemy` - - β„‚ **AppConfigRecord** (`Class`) - - πŸ“ Stores the single source of truth for application configuration. -- πŸ“¦ **backend.src.models.llm** (`Module`) - - πŸ“ SQLAlchemy models for LLM provider configuration and validation results. - - πŸ—οΈ Layer: Domain - - β„‚ **LLMProvider** (`Class`) - - πŸ“ SQLAlchemy model for LLM provider configuration. - - β„‚ **ValidationRecord** (`Class`) - - πŸ“ SQLAlchemy model for dashboard validation history. - - Ζ’ **generate_uuid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **GitModels** (`Module`) `[TRIVIAL]` - - πŸ“ Git-specific SQLAlchemy models for configuration and repository tracking. - - πŸ—οΈ Layer: Model - - β„‚ **GitServerConfig** (`Class`) `[TRIVIAL]` - - πŸ“ Configuration for a Git server connection. - - β„‚ **GitRepository** (`Class`) `[TRIVIAL]` - - πŸ“ Tracking for a local Git repository linked to a dashboard. - - β„‚ **DeploymentEnvironment** (`Class`) `[TRIVIAL]` - - πŸ“ Target Superset environments for dashboard deployment. -- πŸ“¦ **backend.src.models.task** (`Module`) `[TRIVIAL]` - - πŸ“ Defines the database schema for task execution records. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: All primary keys are UUID strings. - - πŸ”— DEPENDS_ON -> `sqlalchemy` - - β„‚ **TaskRecord** (`Class`) `[TRIVIAL]` - - πŸ“ Represents a persistent record of a task execution. - - β„‚ **TaskLogRecord** (`Class`) `[CRITICAL]` - - πŸ“ Represents a single persistent log entry for a task. - - πŸ”’ Invariant: Each log entry belongs to exactly one task. - - πŸ”— DEPENDS_ON -> `TaskRecord` -- πŸ“¦ **backend.src.models.connection** (`Module`) `[TRIVIAL]` - - πŸ“ Defines the database schema for external database connection configurations. 
- - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: All primary keys are UUID strings. - - πŸ”— DEPENDS_ON -> `sqlalchemy` - - β„‚ **ConnectionConfig** (`Class`) `[TRIVIAL]` - - πŸ“ Stores credentials for external databases used for column mapping. -- πŸ“¦ **backend.src.models.mapping** (`Module`) - - πŸ“ Defines the database schema for environment metadata and database mappings using SQLAlchemy. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: All primary keys are UUID strings. - - πŸ”— DEPENDS_ON -> `sqlalchemy` - - β„‚ **ResourceType** (`Class`) `[TRIVIAL]` - - πŸ“ Enumeration of possible Superset resource types for ID mapping. - - β„‚ **MigrationStatus** (`Class`) `[TRIVIAL]` - - πŸ“ Enumeration of possible migration job statuses. - - β„‚ **Environment** (`Class`) - - πŸ“ Represents a Superset instance environment. - - β„‚ **DatabaseMapping** (`Class`) - - πŸ“ Represents a mapping between source and target databases. - - β„‚ **MigrationJob** (`Class`) `[TRIVIAL]` - - πŸ“ Represents a single migration execution job. - - β„‚ **ResourceMapping** (`Class`) - - πŸ“ Maps a universal UUID for a resource to its actual ID on a specific environment. -- πŸ“¦ **backend.src.models.report** (`Module`) `[CRITICAL]` - - πŸ“ Canonical report schemas for unified task reporting across heterogeneous task types. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Canonical report fields are always present for every report item. - - πŸ”— DEPENDS_ON -> `backend.src.core.task_manager.models` - - β„‚ **TaskType** (`Class`) `[CRITICAL]` - - πŸ“ Supported normalized task report types. - - πŸ”’ Invariant: Must contain valid generic task type mappings. - - β„‚ **ReportStatus** (`Class`) `[CRITICAL]` - - πŸ“ Supported normalized report status values. - - πŸ”’ Invariant: TaskStatus enum mapping logic holds. - - β„‚ **ErrorContext** (`Class`) `[CRITICAL]` - - πŸ“ Error and recovery context for failed/partial reports. - - πŸ”’ Invariant: The properties accurately describe error state. 
- - β„‚ **TaskReport** (`Class`) `[CRITICAL]` - - πŸ“ Canonical normalized report envelope for one task execution. - - πŸ”’ Invariant: Must represent canonical task record attributes. - - β„‚ **ReportQuery** (`Class`) `[CRITICAL]` - - πŸ“ Query object for server-side report filtering, sorting, and pagination. - - πŸ”’ Invariant: Time and pagination queries are mutually consistent. - - β„‚ **ReportCollection** (`Class`) `[CRITICAL]` - - πŸ“ Paginated collection of normalized task reports. - - πŸ”’ Invariant: Represents paginated data correctly. - - β„‚ **ReportDetailView** (`Class`) `[CRITICAL]` - - πŸ“ Detailed report representation including diagnostics and recovery actions. - - πŸ”’ Invariant: Incorporates a report and logs correctly. - - Ζ’ **_non_empty_str** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_validate_sort_by** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_validate_sort_order** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_validate_time_range** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.models.assistant** (`Module`) - - πŸ“ SQLAlchemy models for assistant audit trail and confirmation tokens. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Assistant records preserve immutable ids and creation timestamps. - - πŸ”— DEPENDS_ON -> `backend.src.models.mapping` - - β„‚ **AssistantAuditRecord** (`Class`) - - πŸ“ Store audit decisions and outcomes produced by assistant command handling. - - β„‚ **AssistantMessageRecord** (`Class`) - - πŸ“ Persist chat history entries for assistant conversations. - - β„‚ **AssistantConfirmationRecord** (`Class`) - - πŸ“ Persist risky operation confirmation tokens with lifecycle state. -- πŸ“¦ **backend.src.models.storage** (`Module`) `[TRIVIAL]` - - πŸ“ Data models for the storage system. 
- - πŸ—οΈ Layer: Domain - - β„‚ **FileCategory** (`Class`) `[TRIVIAL]` - - πŸ“ Enumeration of supported file categories in the storage system. - - β„‚ **StorageConfig** (`Class`) `[TRIVIAL]` - - πŸ“ Configuration model for the storage system, defining paths and naming patterns. - - β„‚ **StoredFile** (`Class`) `[TRIVIAL]` - - πŸ“ Data model representing metadata for a file stored in the system. -- πŸ“¦ **backend.src.models.dashboard** (`Module`) - - πŸ“ Defines data models for dashboard metadata and selection. - - πŸ—οΈ Layer: Model - - β„‚ **DashboardMetadata** (`Class`) `[TRIVIAL]` - - πŸ“ Represents a dashboard available for migration. - - β„‚ **DashboardSelection** (`Class`) `[TRIVIAL]` - - πŸ“ Represents the user's selection of dashboards to migrate. -- πŸ“¦ **backend.src.models.auth** (`Module`) - - πŸ“ SQLAlchemy models for multi-user authentication and authorization. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Usernames and emails must be unique. - - Ζ’ **generate_uuid** (`Function`) - - πŸ“ Generates a unique UUID string. - - πŸ“¦ **user_roles** (`Table`) - - πŸ“ Association table for many-to-many relationship between Users and Roles. - - πŸ“¦ **role_permissions** (`Table`) - - πŸ“ Association table for many-to-many relationship between Roles and Permissions. - - β„‚ **User** (`Class`) - - πŸ“ Represents an identity that can authenticate to the system. - - β„‚ **Role** (`Class`) - - πŸ“ Represents a collection of permissions. - - β„‚ **Permission** (`Class`) - - πŸ“ Represents a specific capability within the system. - - β„‚ **ADGroupMapping** (`Class`) - - πŸ“ Maps an Active Directory group to a local System Role. - - πŸ”— DEPENDS_ON -> `Role` -- πŸ“¦ **test_models** (`Module`) `[TRIVIAL]` - - πŸ“ Unit tests for data models - - πŸ—οΈ Layer: Domain - - Ζ’ **test_environment_model** (`Function`) - - πŸ“ Tests that Environment model correctly stores values. 
-- πŸ“¦ **test_report_models** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for report Pydantic models and their validators - - πŸ—οΈ Layer: Domain - - Ζ’ **test_enum_values** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_enum_values** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_valid_creation** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_minimal_creation** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_report** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_valid_creation** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_empty_report_id_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_whitespace_report_id_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_empty_task_id_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_empty_summary_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_summary_whitespace_trimmed** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_optional_fields** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_with_error_context** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_defaults** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_invalid_sort_by_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_valid_sort_by_values** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_invalid_sort_order_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_valid_sort_order_values** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ 
**test_time_range_validation_valid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_time_range_validation_invalid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_page_ge_1** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_page_size_bounds** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_valid_creation** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_with_items** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_valid_creation** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_with_all_fields** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.services.resource_service** (`Module`) - - πŸ“ Shared service for fetching resource data with Git status and task status - - πŸ—οΈ Layer: Service - - πŸ”’ Invariant: All resources include metadata about their current state - - πŸ”— DEPENDS_ON -> `backend.src.core.superset_client` - - πŸ”— DEPENDS_ON -> `backend.src.core.task_manager` - - πŸ”— DEPENDS_ON -> `backend.src.services.git_service` - - β„‚ **ResourceService** (`Class`) - - πŸ“ Provides centralized access to resource data with enhanced metadata - - Ζ’ **__init__** (`Function`) - - πŸ“ Initialize the resource service with dependencies - - Ζ’ **get_dashboards_with_status** (`Function`) - - πŸ“ Fetch dashboards from environment with Git status and last task status - - πŸ”— CALLS -> `SupersetClient.get_dashboards_summary` - - πŸ”— CALLS -> `self._get_git_status_for_dashboard` - - πŸ”— CALLS -> `self._get_last_task_for_resource` - - Ζ’ **get_datasets_with_status** (`Function`) - - πŸ“ Fetch datasets from environment with mapping progress and last task status - - πŸ”— CALLS -> `SupersetClient.get_datasets_summary` - - πŸ”— CALLS -> `self._get_last_task_for_resource` - - Ζ’ **get_activity_summary** 
(`Function`) - - πŸ“ Get summary of active and recent tasks for the activity indicator - - Ζ’ **_get_git_status_for_dashboard** (`Function`) - - πŸ“ Get Git sync status for a dashboard - - πŸ”— CALLS -> `GitService.get_repo` - - Ζ’ **_get_last_task_for_resource** (`Function`) - - πŸ“ Get the most recent task for a specific resource - - Ζ’ **_extract_resource_name_from_task** (`Function`) - - πŸ“ Extract resource name from task params - - Ζ’ **_extract_resource_type_from_task** (`Function`) - - πŸ“ Extract resource type from task params -- πŸ“¦ **backend.src.services.llm_prompt_templates** (`Module`) - - πŸ“ Provide default LLM prompt templates and normalization helpers for runtime usage. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: All required prompt template keys are always present after normalization. - - πŸ”— DEPENDS_ON -> `backend.src.core.config_manager` - - πŸ“¦ **DEFAULT_LLM_PROMPTS** (`Constant`) - - πŸ“ Default prompt templates used by documentation, dashboard validation, and git commit generation. - - πŸ“¦ **DEFAULT_LLM_PROVIDER_BINDINGS** (`Constant`) - - πŸ“ Default provider binding per task domain. - - πŸ“¦ **DEFAULT_LLM_ASSISTANT_SETTINGS** (`Constant`) - - πŸ“ Default planner settings for assistant chat intent model/provider resolution. - - Ζ’ **normalize_llm_settings** (`Function`) - - πŸ“ Ensure llm settings contain stable schema with prompts section and default templates. - - Ζ’ **is_multimodal_model** (`Function`) - - πŸ“ Heuristically determine whether model supports image input required for dashboard validation. - - Ζ’ **resolve_bound_provider_id** (`Function`) - - πŸ“ Resolve provider id configured for a task binding with fallback to default provider. - - Ζ’ **render_prompt** (`Function`) - - πŸ“ Render prompt template using deterministic placeholder replacement with graceful fallback. -- πŸ“¦ **backend.src.services.llm_provider** (`Module`) - - πŸ“ Service for managing LLM provider configurations with encrypted API keys. 
- - πŸ—οΈ Layer: Domain - - πŸ”— DEPENDS_ON -> `backend.src.core.database` - - πŸ”— DEPENDS_ON -> `backend.src.models.llm` - - β„‚ **EncryptionManager** (`Class`) `[CRITICAL]` - - πŸ“ Handles encryption and decryption of sensitive data like API keys. - - πŸ”’ Invariant: Uses a secret key from environment or a default one (fallback only for dev). - - Ζ’ **EncryptionManager.__init__** (`Function`) - - πŸ“ Initialize the encryption manager with a Fernet key. - - Ζ’ **EncryptionManager.encrypt** (`Function`) - - πŸ“ Encrypt a plaintext string. - - Ζ’ **EncryptionManager.decrypt** (`Function`) - - πŸ“ Decrypt an encrypted string. - - β„‚ **LLMProviderService** (`Class`) - - πŸ“ Service to manage LLM provider lifecycle. - - Ζ’ **LLMProviderService.__init__** (`Function`) - - πŸ“ Initialize the service with database session. - - Ζ’ **get_all_providers** (`Function`) - - πŸ“ Returns all configured LLM providers. - - Ζ’ **get_provider** (`Function`) - - πŸ“ Returns a single LLM provider by ID. - - Ζ’ **create_provider** (`Function`) - - πŸ“ Creates a new LLM provider with encrypted API key. - - Ζ’ **update_provider** (`Function`) - - πŸ“ Updates an existing LLM provider. - - Ζ’ **delete_provider** (`Function`) - - πŸ“ Deletes an LLM provider. - - Ζ’ **get_decrypted_api_key** (`Function`) - - πŸ“ Returns the decrypted API key for a provider. 
- - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **encrypt** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **decrypt** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.services** (`Module`) - - πŸ“ Package initialization for services module - - πŸ—οΈ Layer: Core - - Ζ’ **__getattr__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.services.auth_service** (`Module`) - - πŸ“ Orchestrates authentication business logic. - - πŸ—οΈ Layer: Service - - πŸ”’ Invariant: Authentication must verify both credentials and account status. - - β„‚ **AuthService** (`Class`) - - πŸ“ Provides high-level authentication services. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the service with a database session. - - Ζ’ **authenticate_user** (`Function`) - - πŸ“ Authenticates a user with username and password. - - Ζ’ **create_session** (`Function`) - - πŸ“ Creates a JWT session for an authenticated user. - - Ζ’ **provision_adfs_user** (`Function`) - - πŸ“ Just-In-Time (JIT) provisioning for ADFS users based on group mappings. -- πŸ“¦ **backend.src.services.git_service** (`Module`) - - πŸ“ Core Git logic using GitPython to manage dashboard repositories. - - πŸ—οΈ Layer: Service - - πŸ”’ Invariant: All Git operations must be performed on a valid local directory. - - β„‚ **GitService** (`Class`) - - πŸ“ Wrapper for GitPython operations with semantic logging and error handling. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the GitService with a base path for repositories. - - Ζ’ **_get_repo_path** (`Function`) - - πŸ“ Resolves the local filesystem path for a dashboard's repository. - - Ζ’ **init_repo** (`Function`) - - πŸ“ Initialize or clone a repository for a dashboard. 
- - Ζ’ **get_repo** (`Function`) - - πŸ“ Get Repo object for a dashboard. - - Ζ’ **list_branches** (`Function`) - - πŸ“ List all branches for a dashboard's repository. - - Ζ’ **create_branch** (`Function`) - - πŸ“ Create a new branch from an existing one. - - Ζ’ **checkout_branch** (`Function`) - - πŸ“ Switch to a specific branch. - - Ζ’ **commit_changes** (`Function`) - - πŸ“ Stage and commit changes. - - Ζ’ **push_changes** (`Function`) - - πŸ“ Push local commits to remote. - - Ζ’ **pull_changes** (`Function`) - - πŸ“ Pull changes from remote. - - Ζ’ **get_status** (`Function`) - - πŸ“ Get current repository status (dirty files, untracked, etc.) - - Ζ’ **get_diff** (`Function`) - - πŸ“ Generate diff for a file or the whole repository. - - Ζ’ **get_commit_history** (`Function`) - - πŸ“ Retrieve commit history for a repository. - - Ζ’ **test_connection** (`Function`) - - πŸ“ Test connection to Git provider using PAT. -- πŸ“¦ **backend.src.services.mapping_service** (`Module`) - - πŸ“ Orchestrates database fetching and fuzzy matching suggestions. - - πŸ—οΈ Layer: Service - - πŸ”’ Invariant: Suggestions are based on database names. - - πŸ”— DEPENDS_ON -> `backend.src.core.superset_client` - - πŸ”— DEPENDS_ON -> `backend.src.core.utils.matching` - - β„‚ **MappingService** (`Class`) - - πŸ“ Service for handling database mapping logic. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the mapping service with a config manager. - - Ζ’ **_get_client** (`Function`) - - πŸ“ Helper to get an initialized SupersetClient for an environment. - - Ζ’ **get_suggestions** (`Function`) - - πŸ“ Fetches databases from both environments and returns fuzzy matching suggestions. -- πŸ“¦ **test_encryption_manager** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for EncryptionManager encrypt/decrypt functionality. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Encrypt+decrypt roundtrip always returns original plaintext. 
- - β„‚ **TestEncryptionManager** (`Class`) - - πŸ“ Validate EncryptionManager encrypt/decrypt roundtrip, uniqueness, and error handling. - - Ζ’ **test_encrypt_decrypt_roundtrip** (`Function`) - - πŸ“ Encrypt then decrypt returns original plaintext. - - Ζ’ **test_encrypt_produces_different_output** (`Function`) - - πŸ“ Same plaintext produces different ciphertext (Fernet uses random IV). - - Ζ’ **test_different_inputs_yield_different_ciphertext** (`Function`) - - πŸ“ Different inputs produce different ciphertexts. - - Ζ’ **test_decrypt_invalid_data_raises** (`Function`) - - πŸ“ Decrypting invalid data raises InvalidToken. - - Ζ’ **test_encrypt_empty_string** (`Function`) - - πŸ“ Encrypting and decrypting an empty string works. - - Ζ’ **test_custom_key_roundtrip** (`Function`) - - πŸ“ Custom Fernet key produces valid roundtrip. - - Ζ’ **_make_manager** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **encrypt** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **decrypt** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **encrypt** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **decrypt** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.src.services.__tests__.test_llm_prompt_templates** (`Module`) - - πŸ“ Validate normalization and rendering behavior for configurable LLM prompt templates. - - πŸ—οΈ Layer: Domain Tests - - πŸ”’ Invariant: All required prompt keys remain available after normalization. - - πŸ”— DEPENDS_ON -> `backend.src.services.llm_prompt_templates` - - Ζ’ **test_normalize_llm_settings_adds_default_prompts** (`Function`) - - πŸ“ Ensure legacy/partial llm settings are expanded with all prompt defaults. 
- - Ζ’ **test_normalize_llm_settings_keeps_custom_prompt_values** (`Function`) - - πŸ“ Ensure user-customized prompt values are preserved during normalization. - - Ζ’ **test_render_prompt_replaces_known_placeholders** (`Function`) - - πŸ“ Ensure template placeholders are deterministically replaced. - - Ζ’ **test_is_multimodal_model_detects_known_vision_models** (`Function`) - - πŸ“ Ensure multimodal model detection recognizes common vision-capable model names. - - Ζ’ **test_resolve_bound_provider_id_prefers_binding_then_default** (`Function`) - - πŸ“ Verify provider binding resolution priority. - - Ζ’ **test_normalize_llm_settings_keeps_assistant_planner_settings** (`Function`) - - πŸ“ Ensure assistant planner provider/model fields are preserved and normalized. -- πŸ“¦ **backend.src.services.__tests__.test_resource_service** (`Module`) - - πŸ“ Unit tests for ResourceService - - πŸ—οΈ Layer: Service - - πŸ”’ Invariant: Resource summaries preserve task linkage and status projection behavior. - - Ζ’ **test_get_dashboards_with_status** (`Function`) - - πŸ“ Validate dashboard enrichment includes git/task status projections. - - Ζ’ **test_get_datasets_with_status** (`Function`) - - Ζ’ **test_get_activity_summary** (`Function`) - - Ζ’ **test_get_git_status_for_dashboard_no_repo** (`Function`) - - Ζ’ **test_get_last_task_for_resource** (`Function`) - - Ζ’ **test_extract_resource_name_from_task** (`Function`) -- πŸ“¦ **backend.src.services.reports.normalizer** (`Module`) `[CRITICAL]` - - πŸ“ Convert task manager task objects into canonical unified TaskReport entities with deterministic fallback behavior. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Unknown task types and partial payloads remain visible via fallback mapping. 
- - πŸ”— DEPENDS_ON -> `backend.src.core.task_manager.models.Task` - - πŸ”— DEPENDS_ON -> `backend.src.models.report` - - πŸ”— DEPENDS_ON -> `backend.src.services.reports.type_profiles` - - Ζ’ **status_to_report_status** (`Function`) - - πŸ“ Normalize internal task status to canonical report status. - - Ζ’ **build_summary** (`Function`) - - πŸ“ Build deterministic user-facing summary from task payload and status. - - Ζ’ **extract_error_context** (`Function`) - - πŸ“ Extract normalized error context and next actions for failed/partial reports. - - Ζ’ **normalize_task_report** (`Function`) - - πŸ“ Convert one Task to canonical TaskReport envelope. -- πŸ“¦ **backend.src.services.reports.type_profiles** (`Module`) `[CRITICAL]` - - πŸ“ Deterministic mapping of plugin/task identifiers to canonical report task types and fallback profile metadata. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: Unknown input always resolves to TaskType.UNKNOWN with a single fallback profile. - - πŸ”— DEPENDS_ON -> `backend.src.models.report.TaskType` - - πŸ“¦ **PLUGIN_TO_TASK_TYPE** (`Data`) - - πŸ“ Maps plugin identifiers to normalized report task types. - - πŸ“¦ **TASK_TYPE_PROFILES** (`Data`) - - πŸ“ Profile metadata registry for each normalized task type. - - Ζ’ **resolve_task_type** (`Function`) - - πŸ“ Resolve canonical task type from plugin/task identifier with guaranteed fallback. - - Ζ’ **get_type_profile** (`Function`) - - πŸ“ Return deterministic profile metadata for a task type. -- πŸ“¦ **backend.src.services.reports.report_service** (`Module`) `[CRITICAL]` - - πŸ“ Aggregate, normalize, filter, and paginate task reports for unified list/detail API use cases. - - πŸ—οΈ Layer: Domain - - πŸ”’ Invariant: List responses are deterministic and include applied filter echo metadata. 
- - πŸ”— DEPENDS_ON -> `backend.src.core.task_manager.manager.TaskManager` - - πŸ”— DEPENDS_ON -> `backend.src.models.report` - - πŸ”— DEPENDS_ON -> `backend.src.services.reports.normalizer` - - β„‚ **ReportsService** (`Class`) `[CRITICAL]` - - πŸ“ Service layer for list/detail report retrieval and normalization. - - πŸ”’ Invariant: Service methods are read-only over task history source. - - Ζ’ **__init__** (`Function`) `[CRITICAL]` - - πŸ“ Initialize service with TaskManager dependency. - - πŸ”’ Invariant: Constructor performs no task mutations. - - Ζ’ **_load_normalized_reports** (`Function`) - - πŸ“ Build normalized reports from all available tasks. - - πŸ”’ Invariant: Every returned item is a TaskReport. - - Ζ’ **_to_utc_datetime** (`Function`) - - πŸ“ Normalize naive/aware datetime values to UTC-aware datetime for safe comparisons. - - πŸ”’ Invariant: Naive datetimes are interpreted as UTC to preserve deterministic ordering/filtering. - - Ζ’ **_datetime_sort_key** (`Function`) - - πŸ“ Produce stable numeric sort key for report timestamps. - - πŸ”’ Invariant: Mixed naive/aware datetimes never raise TypeError. - - Ζ’ **_matches_query** (`Function`) - - πŸ“ Apply query filtering to a report. - - πŸ”’ Invariant: Filter evaluation is side-effect free. - - Ζ’ **_sort_reports** (`Function`) - - πŸ“ Sort reports deterministically according to query settings. - - πŸ”’ Invariant: Sorting criteria are deterministic for equal input. - - Ζ’ **list_reports** (`Function`) - - πŸ“ Return filtered, sorted, paginated report collection. - - Ζ’ **get_report_detail** (`Function`) - - πŸ“ Return one normalized report with timeline/diagnostics/next actions. 
-- πŸ“¦ **test_report_service** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for ReportsService list/detail operations - - πŸ—οΈ Layer: Domain - - Ζ’ **_make_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_service** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_empty_tasks_returns_empty_collection** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_single_task_normalized** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_pagination_first_page** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_pagination_last_page** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_filter_by_status** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_filter_by_task_type** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_search_filter** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sort_by_status** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_applied_filters_echoed** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_service** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_detail_found** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_detail_not_found** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_detail_includes_timeline** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_detail_failed_task_has_next_actions** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_detail_success_task_no_error_next_actions** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.tests.test_report_normalizer** (`Module`) `[CRITICAL]` - - πŸ“ Validate unknown task type 
fallback and partial payload normalization behavior. - - πŸ—οΈ Layer: Domain (Tests) - - πŸ”’ Invariant: Unknown plugin types are mapped to canonical unknown task type. - - Ζ’ **test_unknown_type_maps_to_unknown_profile** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_partial_payload_keeps_report_visible_with_placeholders** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **BackupPlugin** (`Module`) - - πŸ“ A plugin that provides functionality to back up Superset dashboards. - - πŸ—οΈ Layer: App - - πŸ”— IMPLEMENTS -> `PluginBase` - - πŸ”— DEPENDS_ON -> `superset_tool.client` - - πŸ”— DEPENDS_ON -> `superset_tool.utils` - - β„‚ **BackupPlugin** (`Class`) - - πŸ“ Implementation of the backup plugin logic. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the backup plugin. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the backup plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a description of the backup plugin. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the backup plugin. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the backup plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for backup plugin parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes the dashboard backup logic with TaskContext support. -- πŸ“¦ **DebugPluginModule** (`Module`) - - πŸ“ Implements a plugin for system diagnostics and debugging Superset API responses. - - πŸ—οΈ Layer: Plugins - - β„‚ **DebugPlugin** (`Class`) - - πŸ“ Plugin for system diagnostics and debugging. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the debug plugin. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the debug plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a description of the debug plugin. 
- - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the debug plugin. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the debug plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for the debug plugin parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes the debug logic with TaskContext support. - - Ζ’ **_test_db_api** (`Function`) - - πŸ“ Tests database API connectivity for source and target environments. - - Ζ’ **_get_dataset_structure** (`Function`) - - πŸ“ Retrieves the structure of a dataset. -- πŸ“¦ **SearchPluginModule** (`Module`) - - πŸ“ Implements a plugin for searching text patterns across all datasets in a specific Superset environment. - - πŸ—οΈ Layer: Plugins - - β„‚ **SearchPlugin** (`Class`) - - πŸ“ Plugin for searching text patterns in Superset datasets. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the search plugin. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the search plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a description of the search plugin. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the search plugin. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the search plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for the search plugin parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes the dataset search logic with TaskContext support. - - Ζ’ **_get_context** (`Function`) - - πŸ“ Extracts a small context around the match for display. -- πŸ“¦ **MapperPluginModule** (`Module`) - - πŸ“ Implements a plugin for mapping dataset columns using external database connections or Excel files. - - πŸ—οΈ Layer: Plugins - - β„‚ **MapperPlugin** (`Class`) - - πŸ“ Plugin for mapping dataset columns verbose names. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the mapper plugin. 
- - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the mapper plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a description of the mapper plugin. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the mapper plugin. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the mapper plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for the mapper plugin parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes the dataset mapping logic with TaskContext support. -- πŸ“¦ **backend.src.plugins.git_plugin** (`Module`) - - πŸ“ ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ ΠΏΠ»Π°Π³ΠΈΠ½ для вСрсионирования ΠΈ развСртывания Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ΠΎΠ² Superset. - - πŸ—οΈ Layer: Plugin - - πŸ”’ Invariant: ВсС ΠΎΠΏΠ΅Ρ€Π°Ρ†ΠΈΠΈ с Git Π΄ΠΎΠ»ΠΆΠ½Ρ‹ Π²Ρ‹ΠΏΠΎΠ»Π½ΡΡ‚ΡŒΡΡ Ρ‡Π΅Ρ€Π΅Π· GitService. - - β„‚ **GitPlugin** (`Class`) - - πŸ“ РСализация ΠΏΠ»Π°Π³ΠΈΠ½Π° Git Integration для управлСния вСрсиями Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ΠΎΠ². - - Ζ’ **__init__** (`Function`) - - πŸ“ Π˜Π½ΠΈΡ†ΠΈΠ°Π»ΠΈΠ·ΠΈΡ€ΡƒΠ΅Ρ‚ ΠΏΠ»Π°Π³ΠΈΠ½ ΠΈ Π΅Π³ΠΎ зависимости. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the plugin identifier. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the plugin name. - - Ζ’ **description** (`Function`) - - πŸ“ Returns the plugin description. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the plugin version. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the git plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Π’ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅Ρ‚ JSON-схСму ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠ² для выполнСния Π·Π°Π΄Π°Ρ‡ ΠΏΠ»Π°Π³ΠΈΠ½Π°. - - Ζ’ **initialize** (`Function`) - - πŸ“ ВыполняСт Π½Π°Ρ‡Π°Π»ΡŒΠ½ΡƒΡŽ настройку ΠΏΠ»Π°Π³ΠΈΠ½Π°. - - Ζ’ **execute** (`Function`) - - πŸ“ Основной ΠΌΠ΅Ρ‚ΠΎΠ΄ выполнСния Π·Π°Π΄Π°Ρ‡ ΠΏΠ»Π°Π³ΠΈΠ½Π° с ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠΎΠΉ TaskContext. 
- - πŸ”— CALLS -> `self._handle_sync` - - πŸ”— CALLS -> `self._handle_deploy` - - Ζ’ **_handle_sync** (`Function`) - - πŸ“ ЭкспортируСт Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ ΠΈΠ· Superset ΠΈ распаковываСт Π² Git-Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ. - - πŸ”— CALLS -> `src.services.git_service.GitService.get_repo` - - πŸ”— CALLS -> `src.core.superset_client.SupersetClient.export_dashboard` - - Ζ’ **_handle_deploy** (`Function`) - - πŸ“ Π£ΠΏΠ°ΠΊΠΎΠ²Ρ‹Π²Π°Π΅Ρ‚ Ρ€Π΅ΠΏΠΎΠ·ΠΈΡ‚ΠΎΡ€ΠΈΠΉ Π² ZIP ΠΈ ΠΈΠΌΠΏΠΎΡ€Ρ‚ΠΈΡ€ΡƒΠ΅Ρ‚ Π² Ρ†Π΅Π»Π΅Π²ΠΎΠ΅ ΠΎΠΊΡ€ΡƒΠΆΠ΅Π½ΠΈΠ΅ Superset. - - πŸ”— CALLS -> `src.core.superset_client.SupersetClient.import_dashboard` - - Ζ’ **_get_env** (`Function`) - - πŸ“ Π’ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Ρ‹ΠΉ ΠΌΠ΅Ρ‚ΠΎΠ΄ для получСния ΠΊΠΎΠ½Ρ„ΠΈΠ³ΡƒΡ€Π°Ρ†ΠΈΠΈ окруТСния. -- πŸ“¦ **MigrationPlugin** (`Module`) - - πŸ“ A plugin that provides functionality to migrate Superset dashboards between environments. - - πŸ—οΈ Layer: App - - πŸ”— IMPLEMENTS -> `PluginBase` - - πŸ”— DEPENDS_ON -> `superset_tool.client` - - πŸ”— DEPENDS_ON -> `superset_tool.utils` - - β„‚ **MigrationPlugin** (`Class`) - - πŸ“ Implementation of the migration plugin logic. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the migration plugin. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the migration plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a description of the migration plugin. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the migration plugin. - - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the migration plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for migration plugin parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes the dashboard migration logic with TaskContext support. - - πŸ“¦ **MigrationPlugin.execute** (`Action`) - - πŸ“ Execute the migration logic with proper task logging. 
-- Ζ’ **schedule_dashboard_validation** (`Function`) - - πŸ“ Schedules a recurring dashboard validation task. -- Ζ’ **_parse_cron** (`Function`) - - πŸ“ Basic cron parser placeholder. -- πŸ“¦ **scheduler** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/src/plugins/llm_analysis/scheduler.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **job_func** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- β„‚ **LLMProviderType** (`Class`) - - πŸ“ Enum for supported LLM providers. -- β„‚ **LLMProviderConfig** (`Class`) - - πŸ“ Configuration for an LLM provider. -- β„‚ **ValidationStatus** (`Class`) - - πŸ“ Enum for dashboard validation status. -- β„‚ **DetectedIssue** (`Class`) - - πŸ“ Model for a single issue detected during validation. -- β„‚ **ValidationResult** (`Class`) - - πŸ“ Model for dashboard validation result. -- β„‚ **DashboardValidationPlugin** (`Class`) - - πŸ“ Plugin for automated dashboard health analysis using LLMs. - - πŸ”— IMPLEMENTS -> `backend.src.core.plugin_base.PluginBase` - - Ζ’ **DashboardValidationPlugin.execute** (`Function`) - - πŸ“ Executes the dashboard validation task with TaskContext support. -- β„‚ **DocumentationPlugin** (`Class`) - - πŸ“ Plugin for automated dataset documentation using LLMs. - - πŸ”— IMPLEMENTS -> `backend.src.core.plugin_base.PluginBase` - - Ζ’ **DocumentationPlugin.execute** (`Function`) - - πŸ“ Executes the dataset documentation task with TaskContext support. 
-- πŸ“¦ **plugin** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/src/plugins/llm_analysis/plugin.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **id** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **name** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **description** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **version** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_schema** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **execute** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **id** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **name** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **description** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **version** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_schema** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **execute** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- β„‚ **ScreenshotService** (`Class`) - - πŸ“ Handles capturing screenshots of Superset dashboards. - - Ζ’ **ScreenshotService.__init__** (`Function`) - - πŸ“ Initializes the ScreenshotService with environment configuration. - - Ζ’ **ScreenshotService.capture_dashboard** (`Function`) - - πŸ“ Captures a full-page screenshot of a dashboard using Playwright and CDP. -- β„‚ **LLMClient** (`Class`) - - πŸ“ Wrapper for LLM provider APIs. - - Ζ’ **LLMClient.__init__** (`Function`) - - πŸ“ Initializes the LLMClient with provider settings. - - Ζ’ **LLMClient._supports_json_response_format** (`Function`) - - πŸ“ Detect whether provider/model is likely compatible with response_format=json_object. - - Ζ’ **LLMClient.get_json_completion** (`Function`) - - πŸ“ Helper to handle LLM calls with JSON mode and fallback parsing. 
- - Ζ’ **LLMClient.analyze_dashboard** (`Function`) - - πŸ“ Sends dashboard data (screenshot + logs) to LLM for health analysis. -- πŸ“¦ **service** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/src/plugins/llm_analysis/service.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **capture_dashboard** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **switch_tabs** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_supports_json_response_format** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_should_retry** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_json_completion** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **analyze_dashboard** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **StoragePlugin** (`Module`) - - πŸ“ Provides core filesystem operations for managing backups and repositories. - - πŸ—οΈ Layer: App - - πŸ”’ Invariant: All file operations must be restricted to the configured storage root. - - πŸ”— IMPLEMENTS -> `PluginBase` - - πŸ”— DEPENDS_ON -> `backend.src.models.storage` - - β„‚ **StoragePlugin** (`Class`) - - πŸ“ Implementation of the storage management plugin. - - Ζ’ **__init__** (`Function`) - - πŸ“ Initializes the StoragePlugin and ensures required directories exist. - - Ζ’ **id** (`Function`) - - πŸ“ Returns the unique identifier for the storage plugin. - - Ζ’ **name** (`Function`) - - πŸ“ Returns the human-readable name of the storage plugin. - - Ζ’ **description** (`Function`) - - πŸ“ Returns a description of the storage plugin. - - Ζ’ **version** (`Function`) - - πŸ“ Returns the version of the storage plugin. 
- - Ζ’ **ui_route** (`Function`) - - πŸ“ Returns the frontend route for the storage plugin. - - Ζ’ **get_schema** (`Function`) - - πŸ“ Returns the JSON schema for storage plugin parameters. - - Ζ’ **execute** (`Function`) - - πŸ“ Executes storage-related tasks with TaskContext support. - - Ζ’ **get_storage_root** (`Function`) - - πŸ“ Resolves the absolute path to the storage root. - - Ζ’ **resolve_path** (`Function`) - - πŸ“ Resolves a dynamic path pattern using provided variables. - - Ζ’ **ensure_directories** (`Function`) - - πŸ“ Creates the storage root and category subdirectories if they don't exist. - - Ζ’ **validate_path** (`Function`) - - πŸ“ Prevents path traversal attacks by ensuring the path is within the storage root. - - Ζ’ **list_files** (`Function`) - - πŸ“ Lists all files and directories in a specific category and subpath. - - Ζ’ **save_file** (`Function`) - - πŸ“ Saves an uploaded file to the specified category and optional subpath. - - Ζ’ **delete_file** (`Function`) - - πŸ“ Deletes a file or directory from the specified category and path. - - Ζ’ **get_file_path** (`Function`) - - πŸ“ Returns the absolute path of a file for download. -- β„‚ **GitLLMExtension** (`Class`) - - πŸ“ Provides LLM capabilities to the Git plugin. - - Ζ’ **suggest_commit_message** (`Function`) - - πŸ“ Generates a suggested commit message based on a diff and history. -- πŸ“¦ **llm_extension** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/src/plugins/git/llm_extension.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **test_task_persistence** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for TaskPersistenceService. - - πŸ—οΈ Layer: Test - - β„‚ **TestTaskPersistenceHelpers** (`Class`) `[CRITICAL]` - - πŸ“ Test suite for TaskPersistenceService static helper methods. - - Ζ’ **test_json_load_if_needed_none** (`Function`) - - πŸ“ Test _json_load_if_needed with None input. 
- - Ζ’ **test_json_load_if_needed_dict** (`Function`) - - πŸ“ Test _json_load_if_needed with dict input. - - Ζ’ **test_json_load_if_needed_list** (`Function`) - - πŸ“ Test _json_load_if_needed with list input. - - Ζ’ **test_json_load_if_needed_json_string** (`Function`) - - πŸ“ Test _json_load_if_needed with JSON string. - - Ζ’ **test_json_load_if_needed_empty_string** (`Function`) - - πŸ“ Test _json_load_if_needed with empty/null strings. - - Ζ’ **test_json_load_if_needed_plain_string** (`Function`) - - πŸ“ Test _json_load_if_needed with non-JSON string. - - Ζ’ **test_json_load_if_needed_integer** (`Function`) - - πŸ“ Test _json_load_if_needed with integer. - - Ζ’ **test_parse_datetime_none** (`Function`) - - πŸ“ Test _parse_datetime with None. - - Ζ’ **test_parse_datetime_datetime_object** (`Function`) - - πŸ“ Test _parse_datetime with datetime object. - - Ζ’ **test_parse_datetime_iso_string** (`Function`) - - πŸ“ Test _parse_datetime with ISO string. - - Ζ’ **test_parse_datetime_invalid_string** (`Function`) - - πŸ“ Test _parse_datetime with invalid string. - - Ζ’ **test_parse_datetime_integer** (`Function`) - - πŸ“ Test _parse_datetime with non-string, non-datetime. - - β„‚ **TestTaskPersistenceService** (`Class`) `[CRITICAL]` - - πŸ“ Test suite for TaskPersistenceService CRUD operations. - - Ζ’ **setup_class** (`Function`) - - πŸ“ Setup in-memory test database. - - Ζ’ **teardown_class** (`Function`) - - πŸ“ Dispose of test database. - - Ζ’ **setup_method** (`Function`) - - πŸ“ Clean task_records table before each test. - - Ζ’ **test_persist_task_new** (`Function`) - - πŸ“ Test persisting a new task creates a record. - - Ζ’ **test_persist_task_update** (`Function`) - - πŸ“ Test updating an existing task. - - Ζ’ **test_persist_task_with_logs** (`Function`) - - πŸ“ Test persisting a task with log entries. - - Ζ’ **test_persist_task_failed_extracts_error** (`Function`) - - πŸ“ Test that FAILED task extracts last error message. 
- - Ζ’ **test_persist_tasks_batch** (`Function`) - - πŸ“ Test persisting multiple tasks. - - Ζ’ **test_load_tasks** (`Function`) - - πŸ“ Test loading tasks from database. - - Ζ’ **test_load_tasks_with_status_filter** (`Function`) - - πŸ“ Test loading tasks filtered by status. - - Ζ’ **test_load_tasks_with_limit** (`Function`) - - πŸ“ Test loading tasks with limit. - - Ζ’ **test_delete_tasks** (`Function`) - - πŸ“ Test deleting tasks by ID list. - - Ζ’ **test_delete_tasks_empty_list** (`Function`) - - πŸ“ Test deleting with empty list (no-op). - - Ζ’ **test_persist_task_with_datetime_in_params** (`Function`) - - πŸ“ Test json_serializable handles datetime in params. - - Ζ’ **_patched** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_make_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- Ζ’ **test_environment_model** (`Function`) - - πŸ“ Tests that Environment model correctly stores values. -- πŸ“¦ **test_task_manager** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for TaskManager lifecycle, CRUD, log buffering, and filtering. - - πŸ—οΈ Layer: Core - - πŸ”’ Invariant: TaskManager state changes are deterministic and testable with mocked dependencies. 
- - Ζ’ **_make_manager** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_cleanup_manager** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_init_creates_empty_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_init_loads_persisted_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_init_starts_flusher_thread** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_task_returns_none_for_missing** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_task_returns_existing** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_all_tasks** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_tasks_with_status_filter** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_tasks_with_plugin_filter** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_tasks_with_pagination** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_tasks_completed_only** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_task_success** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_task_unknown_plugin_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_task_invalid_params_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_add_log_appends_to_task_and_buffer** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_add_log_skips_nonexistent_task** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_flush_logs_writes_to_persistence** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_flush_task_logs_writes_single_task** 
(`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_flush_logs_requeues_on_failure** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_clear_all_non_active** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_clear_by_status** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_clear_preserves_awaiting_input** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_subscribe_creates_queue** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_unsubscribe_removes_queue** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_multiple_subscribers** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_await_input_sets_status** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_await_input_not_running_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_await_input_nonexistent_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_resume_with_password** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_resume_not_awaiting_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_resume_empty_passwords_raises** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.tests.test_dashboards_api** (`Module`) - - πŸ“ Contract-driven tests for Dashboard Hub API - - πŸ—οΈ Layer: Domain (Tests) - - Ζ’ **test_get_dashboards_success** (`Function`) - - Ζ’ **test_get_dashboards_env_not_found** (`Function`) -- πŸ“¦ **test_dashboards_api** (`Test`) - - πŸ“ Verify GET /api/dashboards contract compliance -- πŸ“¦ **test_datasets_api** (`Test`) - - πŸ“ Verify GET /api/datasets contract compliance -- πŸ“¦ **test_resource_hubs** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for 
backend/tests/test_resource_hubs.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **mock_deps** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_dashboards_success** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_dashboards_not_found** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_dashboards_search** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_datasets_success** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_datasets_not_found** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_datasets_search** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_datasets_service_failure** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **test_task_logger** (`Module`) - - πŸ“ Unit tests for TaskLogger and TaskContext. - - πŸ—οΈ Layer: Test - - β„‚ **TestTaskLogger** (`Class`) - - πŸ“ Test suite for TaskLogger. - - Ζ’ **setup_method** (`Function`) - - πŸ“ Setup for each test method. - - Ζ’ **test_init** (`Function`) - - πŸ“ Test TaskLogger initialization. - - Ζ’ **test_with_source** (`Function`) - - πŸ“ Test creating a sub-logger with different source. - - Ζ’ **test_debug** (`Function`) - - πŸ“ Test debug log level. - - Ζ’ **test_info** (`Function`) - - πŸ“ Test info log level. - - Ζ’ **test_warning** (`Function`) - - πŸ“ Test warning log level. - - Ζ’ **test_error** (`Function`) - - πŸ“ Test error log level. - - Ζ’ **test_error_with_metadata** (`Function`) - - πŸ“ Test error logging with metadata. - - Ζ’ **test_progress** (`Function`) - - πŸ“ Test progress logging. - - Ζ’ **test_progress_clamping** (`Function`) - - πŸ“ Test progress value clamping (0-100). - - Ζ’ **test_source_override** (`Function`) - - πŸ“ Test overriding the default source. 
- - Ζ’ **test_sub_logger_source_independence** (`Function`) - - πŸ“ Test sub-logger independence from parent. - - β„‚ **TestTaskContext** (`Class`) - - πŸ“ Test suite for TaskContext. - - Ζ’ **setup_method** (`Function`) - - πŸ“ Setup for each test method. - - Ζ’ **test_init** (`Function`) - - πŸ“ Test TaskContext initialization. - - Ζ’ **test_task_id_property** (`Function`) - - πŸ“ Test task_id property. - - Ζ’ **test_logger_property** (`Function`) - - πŸ“ Test logger property. - - Ζ’ **test_params_property** (`Function`) - - πŸ“ Test params property. - - Ζ’ **test_get_param** (`Function`) - - πŸ“ Test getting a specific parameter. - - Ζ’ **test_create_sub_context** (`Function`) - - πŸ“ Test creating a sub-context with different source. - - Ζ’ **test_context_logger_delegates_to_task_logger** (`Function`) - - πŸ“ Test context logger delegates to TaskLogger. - - Ζ’ **test_sub_context_with_source** (`Function`) - - πŸ“ Test sub-context logger uses new source. - - Ζ’ **test_multiple_sub_contexts** (`Function`) - - πŸ“ Test creating multiple sub-contexts. -- πŸ“¦ **test_smoke_plugins** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/tests/test_smoke_plugins.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **test_plugins_load_successfully** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_task_manager_initializes_with_plugins** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- Ζ’ **test_belief_scope_logs_entry_action_exit_at_debug** (`Function`) - - πŸ“ Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs at DEBUG level. -- Ζ’ **test_belief_scope_error_handling** (`Function`) - - πŸ“ Test that belief_scope logs Coherence:Failed on exception. -- Ζ’ **test_belief_scope_success_coherence** (`Function`) - - πŸ“ Test that belief_scope logs Coherence:OK on success. 
-- Ζ’ **test_belief_scope_not_visible_at_info** (`Function`) - - πŸ“ Test that belief_scope Entry/Exit/Coherence logs are NOT visible at INFO level. -- Ζ’ **test_task_log_level_default** (`Function`) - - πŸ“ Test that default task log level is INFO. -- Ζ’ **test_should_log_task_level** (`Function`) - - πŸ“ Test that should_log_task_level correctly filters log levels. -- Ζ’ **test_configure_logger_task_log_level** (`Function`) - - πŸ“ Test that configure_logger updates task_log_level. -- Ζ’ **test_enable_belief_state_flag** (`Function`) - - πŸ“ Test that enable_belief_state flag controls belief_scope logging. -- πŸ“¦ **test_auth** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/tests/test_auth.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **db_session** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **auth_service** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **auth_repo** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_authenticate_user** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_create_session** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_role_permission_association** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_user_role_association** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_ad_group_mapping** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **test_log_persistence** (`Module`) `[CRITICAL]` - - πŸ“ Unit tests for TaskLogPersistenceService. - - πŸ—οΈ Layer: Test - - β„‚ **TestLogPersistence** (`Class`) `[CRITICAL]` - - πŸ“ Test suite for TaskLogPersistenceService. - - Ζ’ **setup_class** (`Function`) - - πŸ“ Setup test database and service instance. 
- - Ζ’ **teardown_class** (`Function`) - - πŸ“ Clean up test database. - - Ζ’ **setup_method** (`Function`) - - πŸ“ Setup for each test method β€” clean task_logs table. - - Ζ’ **test_add_logs_single** (`Function`) - - πŸ“ Test adding a single log entry. - - Ζ’ **test_add_logs_batch** (`Function`) - - πŸ“ Test adding multiple log entries in batch. - - Ζ’ **test_add_logs_empty** (`Function`) - - πŸ“ Test adding empty log list (should be no-op). - - Ζ’ **test_get_logs_by_task_id** (`Function`) - - πŸ“ Test retrieving logs by task ID. - - Ζ’ **test_get_logs_with_filters** (`Function`) - - πŸ“ Test retrieving logs with level and source filters. - - Ζ’ **test_get_logs_with_pagination** (`Function`) - - πŸ“ Test retrieving logs with pagination. - - Ζ’ **test_get_logs_with_search** (`Function`) - - πŸ“ Test retrieving logs with search query. - - Ζ’ **test_get_log_stats** (`Function`) - - πŸ“ Test retrieving log statistics. - - Ζ’ **test_get_sources** (`Function`) - - πŸ“ Test retrieving unique log sources. - - Ζ’ **test_delete_logs_for_task** (`Function`) - - πŸ“ Test deleting logs by task ID. - - Ζ’ **test_delete_logs_for_tasks** (`Function`) - - πŸ“ Test deleting logs for multiple tasks. - - Ζ’ **test_delete_logs_for_tasks_empty** (`Function`) - - πŸ“ Test deleting with empty list (no-op). - - Ζ’ **_patched** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.tests.core.test_mapping_service** (`Module`) - - πŸ“ Unit tests for the IdMappingService matching UUIDs to integer IDs. 
- - πŸ—οΈ Layer: Domain - - Ζ’ **db_session** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_all_resources** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sync_environment_upserts_correctly** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_remote_id_returns_integer** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_remote_ids_batch_returns_dict** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sync_environment_updates_existing_mapping** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sync_environment_skips_resources_without_uuid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sync_environment_handles_api_error_gracefully** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_all_resources** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_remote_id_returns_none_for_missing** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_get_remote_ids_batch_returns_empty_for_empty_input** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_mapping_service_alignment_with_test_data** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sync_environment_requires_existing_env** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_sync_environment_deletes_stale_mappings** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **test_defensive_guards** (`Module`) `[TRIVIAL]` - - πŸ“ Auto-generated module for backend/tests/core/test_defensive_guards.py - - πŸ—οΈ Layer: Unknown - - Ζ’ **test_git_service_get_repo_path_guard** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected 
function (orphan) - - Ζ’ **test_superset_client_import_dashboard_guard** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) -- πŸ“¦ **backend.tests.core.test_migration_engine** (`Module`) - - πŸ“ Unit tests for MigrationEngine's cross-filter patching algorithms. - - πŸ—οΈ Layer: Domain - - Ζ’ **__init__** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **get_remote_ids_batch** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **_write_dashboard_yaml** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_patch_dashboard_metadata_replaces_chart_ids** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_patch_dashboard_metadata_replaces_dataset_ids** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_patch_dashboard_metadata_skips_when_no_metadata** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_patch_dashboard_metadata_handles_missing_targets** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_extract_chart_uuids_from_archive** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_transform_yaml_replaces_database_uuid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_transform_yaml_ignores_unmapped_uuid** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_transform_zip_end_to_end** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_transform_zip_invalid_path** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) - - Ζ’ **test_transform_yaml_nonexistent_file** (`Function`) `[TRIVIAL]` - - πŸ“ Auto-detected function (orphan) diff --git a/.kilo/agent/subagent-orchestrator.md b/.kilo/agent/subagent-orchestrator.md index 3fb4b63a..b64ce2c2 100644 --- a/.kilo/agent/subagent-orchestrator.md +++ b/.kilo/agent/subagent-orchestrator.md @@ 
-1,7 +1,7 @@ --- description: Primary user-facing fast dispatcher that routes requests only to approved project subagents. mode: all -model: github-copilot/gpt-5.1-codex-mini +model: github-copilot/gpt-5-mini temperature: 0.0 permission: edit: deny diff --git a/backend/src/api/routes/migration.py b/backend/src/api/routes/migration.py index bccbd8b0..fd58fd75 100644 --- a/backend/src/api/routes/migration.py +++ b/backend/src/api/routes/migration.py @@ -4,12 +4,12 @@ # @PURPOSE: HTTP contract layer for migration orchestration, settings, dry-run, and mapping sync endpoints. # @LAYER: Infra # @RELATION: DEPENDS_ON ->[AppDependencies] -# @RELATION: DEPENDS_ON ->[backend.src.core.database] -# @RELATION: DEPENDS_ON ->[backend.src.core.superset_client.SupersetClient] -# @RELATION: DEPENDS_ON ->[backend.src.core.migration.dry_run_orchestrator.MigrationDryRunService] -# @RELATION: DEPENDS_ON ->[backend.src.core.mapping_service.IdMappingService] -# @RELATION: DEPENDS_ON ->[backend.src.models.dashboard] -# @RELATION: DEPENDS_ON ->[backend.src.models.mapping] +# @RELATION: DEPENDS_ON ->[DatabaseModule] +# @RELATION: DEPENDS_ON ->[DashboardSelection] +# @RELATION: DEPENDS_ON ->[DashboardMetadata] +# @RELATION: DEPENDS_ON ->[MigrationDryRunService] +# @RELATION: DEPENDS_ON ->[IdMappingService] +# @RELATION: DEPENDS_ON ->[ResourceMapping] # @INVARIANT: Migration endpoints never execute with invalid environment references and always return explicit HTTP errors on guard failures. # @PRE: Backend core services initialized and Database session available. # @POST: Migration tasks are enqueued or dry-run results are computed and returned. 
@@ -24,7 +24,7 @@ # @TEST_INVARIANT: [EnvironmentValidationBeforeAction] -> VERIFIED_BY: [invalid_environment, valid_execution] from fastapi import APIRouter, Depends, HTTPException, Query -from typing import List, Dict, Any, Optional +from typing import List, Dict, Any, Optional, cast from sqlalchemy.orm import Session from ...dependencies import get_config_manager, get_task_manager, has_permission from ...core.database import get_db @@ -35,8 +35,11 @@ from ...core.migration.dry_run_orchestrator import MigrationDryRunService from ...core.mapping_service import IdMappingService from ...models.mapping import ResourceMapping +logger = cast(Any, logger) + router = APIRouter(prefix="/api", tags=["migration"]) + # [DEF:get_dashboards:Function] # @COMPLEXITY: 3 # @PURPOSE: Fetch dashboard metadata from a requested environment for migration selection UI. @@ -44,17 +47,18 @@ router = APIRouter(prefix="/api", tags=["migration"]) # @POST: Returns List[DashboardMetadata] for the resolved environment; emits HTTP_404 when environment is absent. # @SIDE_EFFECT: Reads environment configuration and performs remote Superset metadata retrieval over network. 
# @DATA_CONTRACT: Input[str env_id] -> Output[List[DashboardMetadata]] +# @RELATION: CALLS ->[SupersetClient.get_dashboards_summary] @router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata]) async def get_dashboards( env_id: str, config_manager=Depends(get_config_manager), - _ = Depends(has_permission("plugin:migration", "EXECUTE")) + _=Depends(has_permission("plugin:migration", "EXECUTE")), ): with belief_scope("get_dashboards", f"env_id={env_id}"): logger.reason(f"Fetching dashboards for environment: {env_id}") environments = config_manager.get_environments() env = next((e for e in environments if e.id == env_id), None) - + if not env: logger.explore(f"Environment {env_id} not found in configuration") raise HTTPException(status_code=404, detail="Environment not found") @@ -63,8 +67,11 @@ async def get_dashboards( dashboards = client.get_dashboards_summary() logger.reflect(f"Retrieved {len(dashboards)} dashboards from {env_id}") return dashboards + + # [/DEF:get_dashboards:Function] + # [DEF:execute_migration:Function] # @COMPLEXITY: 5 # @PURPOSE: Validate migration selection and enqueue asynchronous migration task execution. @@ -72,38 +79,60 @@ async def get_dashboards( # @POST: Returns {"task_id": str, "message": str} when task creation succeeds; emits HTTP_400/HTTP_500 on failure. # @SIDE_EFFECT: Reads configuration, writes task record through task manager, and writes operational logs. # @DATA_CONTRACT: Input[DashboardSelection] -> Output[Dict[str, str]] +# @RELATION: CALLS ->[create_task] +# @RELATION: DEPENDS_ON ->[DashboardSelection] +# @INVARIANT: Migration task dispatch never occurs before source and target environment ids pass guard validation. 
@router.post("/migration/execute") async def execute_migration( selection: DashboardSelection, config_manager=Depends(get_config_manager), task_manager=Depends(get_task_manager), - _ = Depends(has_permission("plugin:migration", "EXECUTE")) + _=Depends(has_permission("plugin:migration", "EXECUTE")), ): with belief_scope("execute_migration"): - logger.reason(f"Initiating migration from {selection.source_env_id} to {selection.target_env_id}") - + logger.reason( + f"Initiating migration from {selection.source_env_id} to {selection.target_env_id}" + ) + # Validate environments exist environments = config_manager.get_environments() env_ids = {e.id for e in environments} - - if selection.source_env_id not in env_ids or selection.target_env_id not in env_ids: - logger.explore("Invalid environment selection", extra={"source": selection.source_env_id, "target": selection.target_env_id}) - raise HTTPException(status_code=400, detail="Invalid source or target environment") + + if ( + selection.source_env_id not in env_ids + or selection.target_env_id not in env_ids + ): + logger.explore( + "Invalid environment selection", + extra={ + "source": selection.source_env_id, + "target": selection.target_env_id, + }, + ) + raise HTTPException( + status_code=400, detail="Invalid source or target environment" + ) # Include replace_db_config and fix_cross_filters in the task parameters task_params = selection.dict() - task_params['replace_db_config'] = selection.replace_db_config - task_params['fix_cross_filters'] = selection.fix_cross_filters - - logger.reason(f"Creating migration task with {len(selection.selected_ids)} dashboards") - + task_params["replace_db_config"] = selection.replace_db_config + task_params["fix_cross_filters"] = selection.fix_cross_filters + + logger.reason( + f"Creating migration task with {len(selection.selected_ids)} dashboards" + ) + try: task = await task_manager.create_task("superset-migration", task_params) logger.reflect(f"Migration task created: 
{task.id}") return {"task_id": task.id, "message": "Migration initiated"} except Exception as e: logger.explore(f"Task creation failed: {e}") - raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Failed to create migration task: {str(e)}" + ) + + # [/DEF:execute_migration:Function] @@ -114,37 +143,49 @@ async def execute_migration( # @POST: Returns deterministic dry-run payload; emits HTTP_400 for guard violations and HTTP_500 for orchestrator value errors. # @SIDE_EFFECT: Reads local mappings from DB and fetches source/target metadata via Superset API. # @DATA_CONTRACT: Input[DashboardSelection] -> Output[Dict[str, Any]] +# @RELATION: DEPENDS_ON ->[DashboardSelection] +# @RELATION: DEPENDS_ON ->[MigrationDryRunService] +# @INVARIANT: Dry-run flow remains read-only and rejects identical source/target environments before service execution. @router.post("/migration/dry-run", response_model=Dict[str, Any]) async def dry_run_migration( selection: DashboardSelection, config_manager=Depends(get_config_manager), db: Session = Depends(get_db), - _ = Depends(has_permission("plugin:migration", "EXECUTE")) + _=Depends(has_permission("plugin:migration", "EXECUTE")), ): with belief_scope("dry_run_migration"): - logger.reason(f"Starting dry run: {selection.source_env_id} -> {selection.target_env_id}") - + logger.reason( + f"Starting dry run: {selection.source_env_id} -> {selection.target_env_id}" + ) + environments = config_manager.get_environments() env_map = {env.id: env for env in environments} source_env = env_map.get(selection.source_env_id) target_env = env_map.get(selection.target_env_id) - + if not source_env or not target_env: logger.explore("Invalid environment selection for dry run") - raise HTTPException(status_code=400, detail="Invalid source or target environment") - + raise HTTPException( + status_code=400, detail="Invalid source or target environment" + ) + if 
selection.source_env_id == selection.target_env_id: logger.explore("Source and target environments are identical") - raise HTTPException(status_code=400, detail="Source and target environments must be different") - + raise HTTPException( + status_code=400, + detail="Source and target environments must be different", + ) + if not selection.selected_ids: logger.explore("No dashboards selected for dry run") - raise HTTPException(status_code=400, detail="No dashboards selected for dry run") + raise HTTPException( + status_code=400, detail="No dashboards selected for dry run" + ) service = MigrationDryRunService() source_client = SupersetClient(source_env) target_client = SupersetClient(target_env) - + try: result = service.run( selection=selection, @@ -157,8 +198,11 @@ async def dry_run_migration( except ValueError as exc: logger.explore(f"Dry run orchestrator failed: {exc}") raise HTTPException(status_code=500, detail=str(exc)) from exc + + # [/DEF:dry_run_migration:Function] + # [DEF:get_migration_settings:Function] # @COMPLEXITY: 3 # @PURPOSE: Read and return configured migration synchronization cron expression. @@ -166,17 +210,21 @@ async def dry_run_migration( # @POST: Returns {"cron": str} reflecting current persisted settings value. # @SIDE_EFFECT: Reads configuration from config manager. 
# @DATA_CONTRACT: Input[None] -> Output[Dict[str, str]] +# @RELATION: DEPENDS_ON ->[AppDependencies] @router.get("/migration/settings", response_model=Dict[str, str]) async def get_migration_settings( config_manager=Depends(get_config_manager), - _ = Depends(has_permission("plugin:migration", "READ")) + _=Depends(has_permission("plugin:migration", "READ")), ): with belief_scope("get_migration_settings"): config = config_manager.get_config() cron = config.settings.migration_sync_cron return {"cron": cron} + + # [/DEF:get_migration_settings:Function] + # [DEF:update_migration_settings:Function] # @COMPLEXITY: 3 # @PURPOSE: Validate and persist migration synchronization cron expression update. @@ -184,25 +232,31 @@ async def get_migration_settings( # @POST: Returns {"cron": str, "status": "updated"} and persists updated cron value. # @SIDE_EFFECT: Mutates configuration and writes persisted config through config manager. # @DATA_CONTRACT: Input[Dict[str, str]] -> Output[Dict[str, str]] +# @RELATION: DEPENDS_ON ->[AppDependencies] @router.put("/migration/settings", response_model=Dict[str, str]) async def update_migration_settings( payload: Dict[str, str], config_manager=Depends(get_config_manager), - _ = Depends(has_permission("plugin:migration", "WRITE")) + _=Depends(has_permission("plugin:migration", "WRITE")), ): with belief_scope("update_migration_settings"): if "cron" not in payload: - raise HTTPException(status_code=400, detail="Missing 'cron' field in payload") - + raise HTTPException( + status_code=400, detail="Missing 'cron' field in payload" + ) + cron_expr = payload["cron"] - + config = config_manager.get_config() config.settings.migration_sync_cron = cron_expr config_manager.save_config(config) - + return {"cron": cron_expr, "status": "updated"} + + # [/DEF:update_migration_settings:Function] + # [DEF:get_resource_mappings:Function] # @COMPLEXITY: 3 # @PURPOSE: Fetch synchronized resource mappings with optional filters and pagination for migration mappings 
view. @@ -210,6 +264,7 @@ async def update_migration_settings( # @POST: Returns {"items": [...], "total": int} where items reflect applied filters and pagination. # @SIDE_EFFECT: Executes database read queries against ResourceMapping table. # @DATA_CONTRACT: Input[QueryParams] -> Output[Dict[str, Any]] +# @RELATION: DEPENDS_ON ->[ResourceMapping] @router.get("/migration/mappings-data", response_model=Dict[str, Any]) async def get_resource_mappings( skip: int = Query(0, ge=0), @@ -218,42 +273,63 @@ async def get_resource_mappings( env_id: Optional[str] = Query(None, description="Filter by environment ID"), resource_type: Optional[str] = Query(None, description="Filter by resource type"), db: Session = Depends(get_db), - _ = Depends(has_permission("plugin:migration", "READ")) + _=Depends(has_permission("plugin:migration", "READ")), ): with belief_scope("get_resource_mappings"): query = db.query(ResourceMapping) - + if env_id: query = query.filter(ResourceMapping.environment_id == env_id) - + if resource_type: query = query.filter(ResourceMapping.resource_type == resource_type.upper()) - + if search: search_term = f"%{search}%" query = query.filter( - (ResourceMapping.resource_name.ilike(search_term)) | - (ResourceMapping.uuid.ilike(search_term)) + (ResourceMapping.resource_name.ilike(search_term)) + | (ResourceMapping.uuid.ilike(search_term)) ) - + total = query.count() - mappings = query.order_by(ResourceMapping.resource_type, ResourceMapping.resource_name).offset(skip).limit(limit).all() - + mappings = ( + query.order_by(ResourceMapping.resource_type, ResourceMapping.resource_name) + .offset(skip) + .limit(limit) + .all() + ) + items = [] for m in mappings: - items.append({ - "id": m.id, - "environment_id": m.environment_id, - "resource_type": m.resource_type.value if m.resource_type else None, - "uuid": m.uuid, - "remote_id": m.remote_integer_id, - "resource_name": m.resource_name, - "last_synced_at": m.last_synced_at.isoformat() if m.last_synced_at else None - }) 
- + mapping = cast(Any, m) + resource_type_value = ( + mapping.resource_type.value + if mapping.resource_type is not None + else None + ) + last_synced_at = ( + mapping.last_synced_at.isoformat() + if mapping.last_synced_at is not None + else None + ) + items.append( + { + "id": mapping.id, + "environment_id": mapping.environment_id, + "resource_type": resource_type_value, + "uuid": mapping.uuid, + "remote_id": mapping.remote_integer_id, + "resource_name": mapping.resource_name, + "last_synced_at": last_synced_at, + } + ) + return {"items": items, "total": total} + + # [/DEF:get_resource_mappings:Function] + # [DEF:trigger_sync_now:Function] # @COMPLEXITY: 3 # @PURPOSE: Trigger immediate ID synchronization for every configured environment. @@ -261,22 +337,24 @@ async def get_resource_mappings( # @POST: Returns sync summary with synced/failed counts after attempting all environments. # @SIDE_EFFECT: Upserts Environment rows, commits DB transaction, performs network sync calls, and writes logs. 
# @DATA_CONTRACT: Input[None] -> Output[Dict[str, Any]] +# @RELATION: DEPENDS_ON ->[IdMappingService] +# @RELATION: CALLS ->[sync_environment] @router.post("/migration/sync-now", response_model=Dict[str, Any]) async def trigger_sync_now( config_manager=Depends(get_config_manager), db: Session = Depends(get_db), - _ = Depends(has_permission("plugin:migration", "EXECUTE")) + _=Depends(has_permission("plugin:migration", "EXECUTE")), ): with belief_scope("trigger_sync_now"): from ...core.logger import logger from ...models.mapping import Environment as EnvironmentModel - + config = config_manager.get_config() environments = config.environments - + if not environments: raise HTTPException(status_code=400, detail="No environments configured") - + # Ensure each environment exists in DB (upsert) to satisfy FK constraints for env in environments: existing = db.query(EnvironmentModel).filter_by(id=env.id).first() @@ -288,15 +366,17 @@ async def trigger_sync_now( credentials_id=env.id, # Use env.id as credentials reference ) db.add(db_env) - logger.info(f"[trigger_sync_now][Action] Created environment row for {env.id}") + logger.info( + f"[trigger_sync_now][Action] Created environment row for {env.id}" + ) else: existing.name = env.name existing.url = env.url db.commit() - + service = IdMappingService(db) results = {"synced": [], "failed": []} - + for env in environments: try: client = SupersetClient(env) @@ -306,13 +386,15 @@ async def trigger_sync_now( except Exception as e: results["failed"].append({"env_id": env.id, "error": str(e)}) logger.error(f"[trigger_sync_now][Error] Failed to sync {env.id}: {e}") - + return { "status": "completed", "synced_count": len(results["synced"]), "failed_count": len(results["failed"]), - "details": results + "details": results, } + + # [/DEF:trigger_sync_now:Function] # [/DEF:MigrationApi:Module] diff --git a/backend/src/core/__tests__/test_superset_preview_pipeline.py b/backend/src/core/__tests__/test_superset_preview_pipeline.py index 
1ccbfd75..31bc916c 100644 --- a/backend/src/core/__tests__/test_superset_preview_pipeline.py +++ b/backend/src/core/__tests__/test_superset_preview_pipeline.py @@ -3,9 +3,7 @@ # @SEMANTICS: tests, superset, preview, chart_data, network, 404-mapping # @PURPOSE: Verify explicit chart-data preview compilation and ensure non-dashboard 404 errors remain generic across sync and async clients. # @LAYER: Domain -# @RELATION: [BINDS_TO] ->[SupersetClient] -# @RELATION: [BINDS_TO] ->[APIClient] -# @RELATION: [BINDS_TO] ->[AsyncAPIClient] +# @RELATION: [BINDS_TO] ->[AsyncNetworkModule] import json from unittest.mock import MagicMock @@ -29,11 +27,15 @@ def _make_environment() -> Environment: username="demo", password="secret", ) + + # [/DEF:_make_environment:Function] # [DEF:_make_requests_http_error:Function] -def _make_requests_http_error(status_code: int, url: str) -> requests.exceptions.HTTPError: +def _make_requests_http_error( + status_code: int, url: str +) -> requests.exceptions.HTTPError: response = requests.Response() response.status_code = status_code response.url = url @@ -41,14 +43,20 @@ def _make_requests_http_error(status_code: int, url: str) -> requests.exceptions request = requests.Request("GET", url).prepare() response.request = request return requests.exceptions.HTTPError(response=response, request=request) + + # [/DEF:_make_requests_http_error:Function] # [DEF:_make_httpx_status_error:Function] def _make_httpx_status_error(status_code: int, url: str) -> httpx.HTTPStatusError: request = httpx.Request("GET", url) - response = httpx.Response(status_code=status_code, request=request, text='{"message":"not found"}') + response = httpx.Response( + status_code=status_code, request=request, text='{"message":"not found"}' + ) return httpx.HTTPStatusError("upstream error", request=request, response=response) + + # [/DEF:_make_httpx_status_error:Function] @@ -80,7 +88,10 @@ def test_compile_dataset_preview_prefers_legacy_explore_form_data_strategy(): 
effective_filters=[{"filter_name": "country", "effective_value": ["DE"]}], ) - assert result["compiled_sql"] == "SELECT count(*) FROM public.sales WHERE country IN ('DE')" + assert ( + result["compiled_sql"] + == "SELECT count(*) FROM public.sales WHERE country IN ('DE')" + ) client.network.request.assert_called_once() request_call = client.network.request.call_args assert request_call.kwargs["method"] == "POST" @@ -129,8 +140,11 @@ def test_compile_dataset_preview_prefers_legacy_explore_form_data_strategy(): "success": True, } ] + + # [/DEF:test_compile_dataset_preview_prefers_legacy_explore_form_data_strategy:Function] + # [DEF:test_compile_dataset_preview_falls_back_to_chart_data_after_legacy_failures:Function] # @PURPOSE: Superset preview compilation should fall back to chart-data when legacy form_data strategies are rejected. def test_compile_dataset_preview_falls_back_to_chart_data_after_legacy_failures(): @@ -180,7 +194,9 @@ def test_compile_dataset_preview_falls_back_to_chart_data_after_legacy_failures( assert len(result["strategy_attempts"]) == 3 assert result["strategy_attempts"][0]["endpoint"] == "/explore_json/form_data" assert result["strategy_attempts"][0]["endpoint_kind"] == "legacy_explore_form_data" - assert result["strategy_attempts"][0]["request_transport"] == "query_param_form_data" + assert ( + result["strategy_attempts"][0]["request_transport"] == "query_param_form_data" + ) assert result["strategy_attempts"][0]["contains_root_datasource"] is False assert result["strategy_attempts"][0]["contains_form_datasource"] is False assert result["strategy_attempts"][0]["contains_query_object_datasource"] is False @@ -191,7 +207,9 @@ def test_compile_dataset_preview_falls_back_to_chart_data_after_legacy_failures( assert result["strategy_attempts"][1]["endpoint"] == "/data" assert result["strategy_attempts"][1]["endpoint_kind"] == "legacy_data_form_data" - assert result["strategy_attempts"][1]["request_transport"] == "query_param_form_data" + assert ( + 
result["strategy_attempts"][1]["request_transport"] == "query_param_form_data" + ) assert result["strategy_attempts"][1]["contains_root_datasource"] is False assert result["strategy_attempts"][1]["contains_form_datasource"] is False assert result["strategy_attempts"][1]["contains_query_object_datasource"] is False @@ -208,9 +226,18 @@ def test_compile_dataset_preview_falls_back_to_chart_data_after_legacy_failures( "contains_form_datasource": False, "contains_query_object_datasource": False, "request_param_keys": [], - "request_payload_keys": ["datasource", "force", "form_data", "queries", "result_format", "result_type"], + "request_payload_keys": [ + "datasource", + "force", + "form_data", + "queries", + "result_format", + "result_type", + ], "success": True, } + + # [/DEF:test_compile_dataset_preview_falls_back_to_chart_data_after_legacy_failures:Function] @@ -234,8 +261,12 @@ def test_build_dataset_preview_query_context_places_recovered_filters_in_chart_s "display_name": "Country", "effective_value": ["DE"], "normalized_filter_payload": { - "filter_clauses": [{"col": "country_code", "op": "IN", "val": ["DE"]}], - "extra_form_data": {"filters": [{"col": "country_code", "op": "IN", "val": ["DE"]}]}, + "filter_clauses": [ + {"col": "country_code", "op": "IN", "val": ["DE"]} + ], + "extra_form_data": { + "filters": [{"col": "country_code", "op": "IN", "val": ["DE"]}] + }, "value_origin": "extra_form_data.filters", }, }, @@ -267,6 +298,8 @@ def test_build_dataset_preview_query_context_places_recovered_filters_in_chart_s "time_range": "Last year", } assert query_context["form_data"]["url_params"] == {"country": "DE"} + + # [/DEF:test_build_dataset_preview_query_context_places_recovered_filters_in_chart_style_form_data:Function] @@ -287,8 +320,16 @@ def test_build_dataset_preview_query_context_merges_dataset_template_params_and_ effective_filters=[], ) - assert query_context["queries"][0]["url_params"] == {"region": "EMEA", "country": "DE"} - assert 
query_context["form_data"]["url_params"] == {"region": "EMEA", "country": "DE"} + assert query_context["queries"][0]["url_params"] == { + "region": "EMEA", + "country": "DE", + } + assert query_context["form_data"]["url_params"] == { + "region": "EMEA", + "country": "DE", + } + + # [/DEF:test_build_dataset_preview_query_context_merges_dataset_template_params_and_preserves_user_values:Function] @@ -325,6 +366,8 @@ def test_build_dataset_preview_query_context_preserves_time_range_from_native_fi "time_range": "2020-01-01 : 2020-12-31" } assert query_context["queries"][0]["filters"] == [] + + # [/DEF:test_build_dataset_preview_query_context_preserves_time_range_from_native_filter_payload:Function] @@ -348,9 +391,13 @@ def test_build_dataset_preview_legacy_form_data_preserves_native_filter_clauses( "display_name": "Country", "effective_value": ["DE", "FR"], "normalized_filter_payload": { - "filter_clauses": [{"col": "country_code", "op": "IN", "val": ["DE", "FR"]}], + "filter_clauses": [ + {"col": "country_code", "op": "IN", "val": ["DE", "FR"]} + ], "extra_form_data": { - "filters": [{"col": "country_code", "op": "IN", "val": ["DE", "FR"]}], + "filters": [ + {"col": "country_code", "op": "IN", "val": ["DE", "FR"]} + ], "time_range": "Last quarter", }, "value_origin": "extra_form_data.filters", @@ -372,6 +419,8 @@ def test_build_dataset_preview_legacy_form_data_preserves_native_filter_clauses( assert legacy_form_data["time_range"] == "Last quarter" assert legacy_form_data["url_params"] == {"country": "DE"} assert legacy_form_data["result_type"] == "query" + + # [/DEF:test_build_dataset_preview_legacy_form_data_preserves_native_filter_clauses:Function] @@ -393,6 +442,8 @@ def test_sync_network_404_mapping_keeps_non_dashboard_endpoints_generic(): assert not isinstance(exc_info.value, DashboardNotFoundError) assert "API resource not found at endpoint '/chart/data'" in str(exc_info.value) + + # 
[/DEF:test_sync_network_404_mapping_keeps_non_dashboard_endpoints_generic:Function] @@ -413,6 +464,8 @@ def test_sync_network_404_mapping_translates_dashboard_endpoints(): ) assert "Dashboard '/dashboard/10' Dashboard not found" in str(exc_info.value) + + # [/DEF:test_sync_network_404_mapping_translates_dashboard_endpoints:Function] @@ -430,7 +483,9 @@ async def test_async_network_404_mapping_keeps_non_dashboard_endpoints_generic() try: with pytest.raises(SupersetAPIError) as exc_info: client._handle_http_error( - _make_httpx_status_error(404, "http://superset.local/api/v1/chart/data"), + _make_httpx_status_error( + 404, "http://superset.local/api/v1/chart/data" + ), "/chart/data", ) @@ -438,6 +493,8 @@ async def test_async_network_404_mapping_keeps_non_dashboard_endpoints_generic() assert "API resource not found at endpoint '/chart/data'" in str(exc_info.value) finally: await client.aclose() + + # [/DEF:test_async_network_404_mapping_keeps_non_dashboard_endpoints_generic:Function] @@ -455,14 +512,18 @@ async def test_async_network_404_mapping_translates_dashboard_endpoints(): try: with pytest.raises(DashboardNotFoundError) as exc_info: client._handle_http_error( - _make_httpx_status_error(404, "http://superset.local/api/v1/dashboard/10"), + _make_httpx_status_error( + 404, "http://superset.local/api/v1/dashboard/10" + ), "/dashboard/10", ) assert "Dashboard '/dashboard/10' Dashboard not found" in str(exc_info.value) finally: await client.aclose() + + # [/DEF:test_async_network_404_mapping_translates_dashboard_endpoints:Function] -# [/DEF:SupersetPreviewPipelineTests:Module] \ No newline at end of file +# [/DEF:SupersetPreviewPipelineTests:Module] diff --git a/backend/src/core/async_superset_client.py b/backend/src/core/async_superset_client.py index 4347dc9d..f77d0c66 100644 --- a/backend/src/core/async_superset_client.py +++ b/backend/src/core/async_superset_client.py @@ -32,12 +32,13 @@ from .utils.async_network import AsyncAPIClient # @RELATION: [DEPENDS_ON] 
->[backend.src.core.utils.async_network.AsyncAPIClient] # @RELATION: [CALLS] ->[backend.src.core.utils.async_network.AsyncAPIClient.request] class AsyncSupersetClient(SupersetClient): - # [DEF:backend.src.core.async_superset_client.AsyncSupersetClient.__init__:Function] + # [DEF:AsyncSupersetClientInit:Function] # @COMPLEXITY: 3 # @PURPOSE: Initialize async Superset client with AsyncAPIClient transport. # @PRE: env is valid Environment instance. # @POST: Client uses async network transport and inherited projection helpers. # @DATA_CONTRACT: Input[Environment] -> self.network[AsyncAPIClient] + # @RELATION: [DEPENDS_ON] ->[AsyncAPIClient] def __init__(self, env: Environment): self.env = env auth_payload = { @@ -52,23 +53,28 @@ class AsyncSupersetClient(SupersetClient): timeout=env.timeout, ) self.delete_before_reimport = False - # [/DEF:backend.src.core.async_superset_client.AsyncSupersetClient.__init__:Function] - # [DEF:backend.src.core.async_superset_client.AsyncSupersetClient.aclose:Function] + # [/DEF:AsyncSupersetClientInit:Function] + + # [DEF:AsyncSupersetClientClose:Function] # @COMPLEXITY: 3 # @PURPOSE: Close async transport resources. # @POST: Underlying AsyncAPIClient is closed. # @SIDE_EFFECT: Closes network sockets. + # @RELATION: [CALLS] ->[AsyncAPIClient.aclose] async def aclose(self) -> None: await self.network.aclose() - # [/DEF:backend.src.core.async_superset_client.AsyncSupersetClient.aclose:Function] + + # [/DEF:AsyncSupersetClientClose:Function] # [DEF:backend.src.core.async_superset_client.AsyncSupersetClient.get_dashboards_page_async:Function] # @COMPLEXITY: 3 # @PURPOSE: Fetch one dashboards page asynchronously. # @POST: Returns total count and page result list. 
# @DATA_CONTRACT: Input[query: Optional[Dict]] -> Output[Tuple[int, List[Dict]]] - async def get_dashboards_page_async(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]: + async def get_dashboards_page_async( + self, query: Optional[Dict] = None + ) -> Tuple[int, List[Dict]]: with belief_scope("AsyncSupersetClient.get_dashboards_page_async"): validated_query = self._validate_query_params(query or {}) if "columns" not in validated_query: @@ -96,6 +102,7 @@ class AsyncSupersetClient(SupersetClient): result = response_json.get("result", []) total_count = response_json.get("count", len(result)) return total_count, result + # [/DEF:get_dashboards_page_async:Function] # [DEF:get_dashboard_async:Function] @@ -103,10 +110,16 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Fetch one dashboard payload asynchronously. # @POST: Returns raw dashboard payload from Superset API. # @DATA_CONTRACT: Input[dashboard_id: int] -> Output[Dict] + # @RELATION: [CALLS] ->[AsyncAPIClient.request] async def get_dashboard_async(self, dashboard_id: int) -> Dict: - with belief_scope("AsyncSupersetClient.get_dashboard_async", f"id={dashboard_id}"): - response = await self.network.request(method="GET", endpoint=f"/dashboard/{dashboard_id}") + with belief_scope( + "AsyncSupersetClient.get_dashboard_async", f"id={dashboard_id}" + ): + response = await self.network.request( + method="GET", endpoint=f"/dashboard/{dashboard_id}" + ) return cast(Dict, response) + # [/DEF:get_dashboard_async:Function] # [DEF:get_chart_async:Function] @@ -114,10 +127,14 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Fetch one chart payload asynchronously. # @POST: Returns raw chart payload from Superset API. 
# @DATA_CONTRACT: Input[chart_id: int] -> Output[Dict] + # @RELATION: [CALLS] ->[AsyncAPIClient.request] async def get_chart_async(self, chart_id: int) -> Dict: with belief_scope("AsyncSupersetClient.get_chart_async", f"id={chart_id}"): - response = await self.network.request(method="GET", endpoint=f"/chart/{chart_id}") + response = await self.network.request( + method="GET", endpoint=f"/chart/{chart_id}" + ) return cast(Dict, response) + # [/DEF:get_chart_async:Function] # [DEF:get_dashboard_detail_async:Function] @@ -125,17 +142,21 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Fetch dashboard detail asynchronously with concurrent charts/datasets requests. # @POST: Returns dashboard detail payload for overview page. # @DATA_CONTRACT: Input[dashboard_id: int] -> Output[Dict] - # @RELATION: [CALLS] ->[self.get_dashboard_async] - # @RELATION: [CALLS] ->[self.get_chart_async] + # @RELATION: [CALLS] ->[get_dashboard_async] + # @RELATION: [CALLS] ->[get_chart_async] async def get_dashboard_detail_async(self, dashboard_id: int) -> Dict: - with belief_scope("AsyncSupersetClient.get_dashboard_detail_async", f"id={dashboard_id}"): + with belief_scope( + "AsyncSupersetClient.get_dashboard_detail_async", f"id={dashboard_id}" + ): dashboard_response = await self.get_dashboard_async(dashboard_id) dashboard_data = dashboard_response.get("result", dashboard_response) charts: List[Dict] = [] datasets: List[Dict] = [] - def extract_dataset_id_from_form_data(form_data: Optional[Dict]) -> Optional[int]: + def extract_dataset_id_from_form_data( + form_data: Optional[Dict], + ) -> Optional[int]: if not isinstance(form_data, dict): return None datasource = form_data.get("datasource") @@ -173,7 +194,11 @@ class AsyncSupersetClient(SupersetClient): ) if not isinstance(charts_response, Exception): - charts_payload = charts_response.get("result", []) if isinstance(charts_response, dict) else [] + charts_payload = ( + charts_response.get("result", []) + if 
isinstance(charts_response, dict) + else [] + ) for chart_obj in charts_payload: if not isinstance(chart_obj, dict): continue @@ -186,20 +211,45 @@ class AsyncSupersetClient(SupersetClient): form_data = json.loads(form_data) except Exception: form_data = {} - dataset_id = extract_dataset_id_from_form_data(form_data) or chart_obj.get("datasource_id") - charts.append({ - "id": int(chart_id), - "title": chart_obj.get("slice_name") or chart_obj.get("name") or f"Chart {chart_id}", - "viz_type": (form_data.get("viz_type") if isinstance(form_data, dict) else None), - "dataset_id": int(dataset_id) if dataset_id is not None else None, - "last_modified": chart_obj.get("changed_on"), - "overview": chart_obj.get("description") or (form_data.get("viz_type") if isinstance(form_data, dict) else None) or "Chart", - }) + dataset_id = extract_dataset_id_from_form_data( + form_data + ) or chart_obj.get("datasource_id") + charts.append( + { + "id": int(chart_id), + "title": chart_obj.get("slice_name") + or chart_obj.get("name") + or f"Chart {chart_id}", + "viz_type": ( + form_data.get("viz_type") + if isinstance(form_data, dict) + else None + ), + "dataset_id": int(dataset_id) + if dataset_id is not None + else None, + "last_modified": chart_obj.get("changed_on"), + "overview": chart_obj.get("description") + or ( + form_data.get("viz_type") + if isinstance(form_data, dict) + else None + ) + or "Chart", + } + ) else: - app_logger.warning("[get_dashboard_detail_async][Warning] Failed to fetch dashboard charts: %s", charts_response) + app_logger.warning( + "[get_dashboard_detail_async][Warning] Failed to fetch dashboard charts: %s", + charts_response, + ) if not isinstance(datasets_response, Exception): - datasets_payload = datasets_response.get("result", []) if isinstance(datasets_response, dict) else [] + datasets_payload = ( + datasets_response.get("result", []) + if isinstance(datasets_response, dict) + else [] + ) for dataset_obj in datasets_payload: if not isinstance(dataset_obj, 
dict): continue @@ -207,20 +257,36 @@ class AsyncSupersetClient(SupersetClient): if dataset_id is None: continue db_payload = dataset_obj.get("database") - db_name = db_payload.get("database_name") if isinstance(db_payload, dict) else None - table_name = dataset_obj.get("table_name") or dataset_obj.get("datasource_name") or dataset_obj.get("name") or f"Dataset {dataset_id}" + db_name = ( + db_payload.get("database_name") + if isinstance(db_payload, dict) + else None + ) + table_name = ( + dataset_obj.get("table_name") + or dataset_obj.get("datasource_name") + or dataset_obj.get("name") + or f"Dataset {dataset_id}" + ) schema = dataset_obj.get("schema") fq_name = f"{schema}.{table_name}" if schema else table_name - datasets.append({ - "id": int(dataset_id), - "table_name": table_name, - "schema": schema, - "database": db_name or dataset_obj.get("database_name") or "Unknown", - "last_modified": dataset_obj.get("changed_on"), - "overview": fq_name, - }) + datasets.append( + { + "id": int(dataset_id), + "table_name": table_name, + "schema": schema, + "database": db_name + or dataset_obj.get("database_name") + or "Unknown", + "last_modified": dataset_obj.get("changed_on"), + "overview": fq_name, + } + ) else: - app_logger.warning("[get_dashboard_detail_async][Warning] Failed to fetch dashboard datasets: %s", datasets_response) + app_logger.warning( + "[get_dashboard_detail_async][Warning] Failed to fetch dashboard datasets: %s", + datasets_response, + ) if not charts: raw_position_json = dashboard_data.get("position_json") @@ -228,21 +294,29 @@ class AsyncSupersetClient(SupersetClient): if isinstance(raw_position_json, str) and raw_position_json: try: parsed_position = json.loads(raw_position_json) - chart_ids_from_position.update(self._extract_chart_ids_from_layout(parsed_position)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(parsed_position) + ) except Exception: pass elif isinstance(raw_position_json, dict): - 
chart_ids_from_position.update(self._extract_chart_ids_from_layout(raw_position_json)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(raw_position_json) + ) raw_json_metadata = dashboard_data.get("json_metadata") if isinstance(raw_json_metadata, str) and raw_json_metadata: try: parsed_metadata = json.loads(raw_json_metadata) - chart_ids_from_position.update(self._extract_chart_ids_from_layout(parsed_metadata)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(parsed_metadata) + ) except Exception: pass elif isinstance(raw_json_metadata, dict): - chart_ids_from_position.update(self._extract_chart_ids_from_layout(raw_json_metadata)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(raw_json_metadata) + ) fallback_chart_tasks = [ self.get_chart_async(int(chart_id)) @@ -252,68 +326,113 @@ class AsyncSupersetClient(SupersetClient): *fallback_chart_tasks, return_exceptions=True, ) - for chart_id, chart_response in zip(sorted(chart_ids_from_position), fallback_chart_responses): + for chart_id, chart_response in zip( + sorted(chart_ids_from_position), fallback_chart_responses + ): if isinstance(chart_response, Exception): - app_logger.warning("[get_dashboard_detail_async][Warning] Failed to resolve fallback chart %s: %s", chart_id, chart_response) + app_logger.warning( + "[get_dashboard_detail_async][Warning] Failed to resolve fallback chart %s: %s", + chart_id, + chart_response, + ) continue chart_data = chart_response.get("result", chart_response) - charts.append({ - "id": int(chart_id), - "title": chart_data.get("slice_name") or chart_data.get("name") or f"Chart {chart_id}", - "viz_type": chart_data.get("viz_type"), - "dataset_id": chart_data.get("datasource_id"), - "last_modified": chart_data.get("changed_on"), - "overview": chart_data.get("description") or chart_data.get("viz_type") or "Chart", - }) + charts.append( + { + "id": int(chart_id), + "title": chart_data.get("slice_name") + or 
chart_data.get("name") + or f"Chart {chart_id}", + "viz_type": chart_data.get("viz_type"), + "dataset_id": chart_data.get("datasource_id"), + "last_modified": chart_data.get("changed_on"), + "overview": chart_data.get("description") + or chart_data.get("viz_type") + or "Chart", + } + ) dataset_ids_from_charts = { - c.get("dataset_id") - for c in charts - if c.get("dataset_id") is not None + c.get("dataset_id") for c in charts if c.get("dataset_id") is not None } - known_dataset_ids = {d.get("id") for d in datasets if d.get("id") is not None} - missing_dataset_ids = sorted(int(item) for item in dataset_ids_from_charts if item not in known_dataset_ids) + known_dataset_ids = { + d.get("id") for d in datasets if d.get("id") is not None + } + missing_dataset_ids = sorted( + int(item) + for item in dataset_ids_from_charts + if item not in known_dataset_ids + ) if missing_dataset_ids: dataset_fetch_tasks = [ - self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}") + self.network.request( + method="GET", endpoint=f"/dataset/{dataset_id}" + ) for dataset_id in missing_dataset_ids ] dataset_fetch_responses = await asyncio.gather( *dataset_fetch_tasks, return_exceptions=True, ) - for dataset_id, dataset_response in zip(missing_dataset_ids, dataset_fetch_responses): + for dataset_id, dataset_response in zip( + missing_dataset_ids, dataset_fetch_responses + ): if isinstance(dataset_response, Exception): - app_logger.warning("[get_dashboard_detail_async][Warning] Failed to backfill dataset %s: %s", dataset_id, dataset_response) + app_logger.warning( + "[get_dashboard_detail_async][Warning] Failed to backfill dataset %s: %s", + dataset_id, + dataset_response, + ) continue - dataset_data = dataset_response.get("result", dataset_response) if isinstance(dataset_response, dict) else {} + dataset_data = ( + dataset_response.get("result", dataset_response) + if isinstance(dataset_response, dict) + else {} + ) db_payload = dataset_data.get("database") - db_name = 
db_payload.get("database_name") if isinstance(db_payload, dict) else None - table_name = dataset_data.get("table_name") or dataset_data.get("datasource_name") or dataset_data.get("name") or f"Dataset {dataset_id}" + db_name = ( + db_payload.get("database_name") + if isinstance(db_payload, dict) + else None + ) + table_name = ( + dataset_data.get("table_name") + or dataset_data.get("datasource_name") + or dataset_data.get("name") + or f"Dataset {dataset_id}" + ) schema = dataset_data.get("schema") fq_name = f" {schema}.{table_name}" if schema else table_name - datasets.append({ - "id": int(dataset_id), - "table_name": table_name, - "schema": schema, - "database": db_name or dataset_data.get("database_name") or "Unknown", - "last_modified": dataset_data.get("changed_on"), - "overview": fq_name, - }) + datasets.append( + { + "id": int(dataset_id), + "table_name": table_name, + "schema": schema, + "database": db_name + or dataset_data.get("database_name") + or "Unknown", + "last_modified": dataset_data.get("changed_on"), + "overview": fq_name, + } + ) return { "id": int(dashboard_data.get("id") or dashboard_id), - "title": dashboard_data.get("dashboard_title") or dashboard_data.get("title") or f"Dashboard {dashboard_id}", + "title": dashboard_data.get("dashboard_title") + or dashboard_data.get("title") + or f"Dashboard {dashboard_id}", "slug": dashboard_data.get("slug"), "url": dashboard_data.get("url"), "description": dashboard_data.get("description"), - "last_modified": dashboard_data.get("changed_on_utc") or dashboard_data.get("changed_on"), + "last_modified": dashboard_data.get("changed_on_utc") + or dashboard_data.get("changed_on"), "published": dashboard_data.get("published"), "charts": charts, "datasets": datasets, "chart_count": len(charts), "dataset_count": len(datasets), } + # [/DEF:get_dashboard_detail_async:Function] # [DEF:get_dashboard_permalink_state_async:Function] @@ -322,12 +441,15 @@ class AsyncSupersetClient(SupersetClient): # @POST: Returns 
dashboard permalink state payload from Superset API. # @DATA_CONTRACT: Input[permalink_key: str] -> Output[Dict] async def get_dashboard_permalink_state_async(self, permalink_key: str) -> Dict: - with belief_scope("AsyncSupersetClient.get_dashboard_permalink_state_async", f"key={permalink_key}"): + with belief_scope( + "AsyncSupersetClient.get_dashboard_permalink_state_async", + f"key={permalink_key}", + ): response = await self.network.request( - method="GET", - endpoint=f"/dashboard/permalink/{permalink_key}" + method="GET", endpoint=f"/dashboard/permalink/{permalink_key}" ) return cast(Dict, response) + # [/DEF:get_dashboard_permalink_state_async:Function] # [DEF:get_native_filter_state_async:Function] @@ -335,13 +457,19 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Fetch stored native filter state asynchronously. # @POST: Returns native filter state payload from Superset API. # @DATA_CONTRACT: Input[dashboard_id: Union[int, str], filter_state_key: str] -> Output[Dict] - async def get_native_filter_state_async(self, dashboard_id: int, filter_state_key: str) -> Dict: - with belief_scope("AsyncSupersetClient.get_native_filter_state_async", f"dashboard={dashboard_id}, key={filter_state_key}"): + async def get_native_filter_state_async( + self, dashboard_id: int, filter_state_key: str + ) -> Dict: + with belief_scope( + "AsyncSupersetClient.get_native_filter_state_async", + f"dashboard={dashboard_id}, key={filter_state_key}", + ): response = await self.network.request( method="GET", - endpoint=f"/dashboard/{dashboard_id}/filter_state/{filter_state_key}" + endpoint=f"/dashboard/{dashboard_id}/filter_state/{filter_state_key}", ) return cast(Dict, response) + # [/DEF:get_native_filter_state_async:Function] # [DEF:extract_native_filters_from_permalink_async:Function] @@ -349,15 +477,22 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Extract native filters dataMask from a permalink key asynchronously. 
# @POST: Returns extracted dataMask with filter states. # @DATA_CONTRACT: Input[permalink_key: str] -> Output[Dict] - # @RELATION: [CALLS] ->[self.get_dashboard_permalink_state_async] - async def extract_native_filters_from_permalink_async(self, permalink_key: str) -> Dict: - with belief_scope("AsyncSupersetClient.extract_native_filters_from_permalink_async", f"key={permalink_key}"): - permalink_response = await self.get_dashboard_permalink_state_async(permalink_key) - + # @RELATION: [CALLS] ->[get_dashboard_permalink_state_async] + async def extract_native_filters_from_permalink_async( + self, permalink_key: str + ) -> Dict: + with belief_scope( + "AsyncSupersetClient.extract_native_filters_from_permalink_async", + f"key={permalink_key}", + ): + permalink_response = await self.get_dashboard_permalink_state_async( + permalink_key + ) + result = permalink_response.get("result", permalink_response) state = result.get("state", result) data_mask = state.get("dataMask", {}) - + extracted_filters = {} for filter_id, filter_data in data_mask.items(): if not isinstance(filter_data, dict): @@ -367,7 +502,7 @@ class AsyncSupersetClient(SupersetClient): "filterState": filter_data.get("filterState", {}), "ownState": filter_data.get("ownState", {}), } - + return { "dataMask": extracted_filters, "activeTabs": state.get("activeTabs", []), @@ -375,6 +510,7 @@ class AsyncSupersetClient(SupersetClient): "chartStates": state.get("chartStates", {}), "permalink_key": permalink_key, } + # [/DEF:extract_native_filters_from_permalink_async:Function] # [DEF:extract_native_filters_from_key_async:Function] @@ -382,27 +518,37 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Extract native filters from a native_filters_key URL parameter asynchronously. # @POST: Returns extracted filter state with extraFormData. 
# @DATA_CONTRACT: Input[dashboard_id: Union[int, str], filter_state_key: str] -> Output[Dict] - # @RELATION: [CALLS] ->[self.get_native_filter_state_async] - async def extract_native_filters_from_key_async(self, dashboard_id: int, filter_state_key: str) -> Dict: - with belief_scope("AsyncSupersetClient.extract_native_filters_from_key_async", f"dashboard={dashboard_id}, key={filter_state_key}"): - filter_response = await self.get_native_filter_state_async(dashboard_id, filter_state_key) - + # @RELATION: [CALLS] ->[get_native_filter_state_async] + async def extract_native_filters_from_key_async( + self, dashboard_id: int, filter_state_key: str + ) -> Dict: + with belief_scope( + "AsyncSupersetClient.extract_native_filters_from_key_async", + f"dashboard={dashboard_id}, key={filter_state_key}", + ): + filter_response = await self.get_native_filter_state_async( + dashboard_id, filter_state_key + ) + result = filter_response.get("result", filter_response) value = result.get("value") - + if isinstance(value, str): try: parsed_value = json.loads(value) except json.JSONDecodeError as e: - app_logger.warning("[extract_native_filters_from_key_async][Warning] Failed to parse filter state JSON: %s", e) + app_logger.warning( + "[extract_native_filters_from_key_async][Warning] Failed to parse filter state JSON: %s", + e, + ) parsed_value = {} elif isinstance(value, dict): parsed_value = value else: parsed_value = {} - + extracted_filters = {} - + if "id" in parsed_value and "extraFormData" in parsed_value: filter_id = parsed_value.get("id", filter_state_key) extracted_filters[filter_id] = { @@ -419,12 +565,13 @@ class AsyncSupersetClient(SupersetClient): "filterState": filter_data.get("filterState", {}), "ownState": filter_data.get("ownState", {}), } - + return { "dataMask": extracted_filters, "dashboard_id": dashboard_id, "filter_state_key": filter_state_key, } + # [/DEF:extract_native_filters_from_key_async:Function] # [DEF:parse_dashboard_url_for_filters_async:Function] @@ 
-432,36 +579,42 @@ class AsyncSupersetClient(SupersetClient): # @PURPOSE: Parse a Superset dashboard URL and extract native filter state asynchronously. # @POST: Returns extracted filter state or empty dict if no filters found. # @DATA_CONTRACT: Input[url: str] -> Output[Dict] - # @RELATION: [CALLS] ->[self.extract_native_filters_from_permalink_async] - # @RELATION: [CALLS] ->[self.extract_native_filters_from_key_async] + # @RELATION: [CALLS] ->[extract_native_filters_from_permalink_async] + # @RELATION: [CALLS] ->[extract_native_filters_from_key_async] async def parse_dashboard_url_for_filters_async(self, url: str) -> Dict: - with belief_scope("AsyncSupersetClient.parse_dashboard_url_for_filters_async", f"url={url}"): + with belief_scope( + "AsyncSupersetClient.parse_dashboard_url_for_filters_async", f"url={url}" + ): import urllib.parse - + parsed_url = urllib.parse.urlparse(url) query_params = urllib.parse.parse_qs(parsed_url.query) path_parts = parsed_url.path.rstrip("/").split("/") - + result = { "url": url, "dashboard_id": None, "filter_type": None, "filters": {}, } - + # Check for permalink URL: /dashboard/p/{key}/ if "p" in path_parts: try: p_index = path_parts.index("p") if p_index + 1 < len(path_parts): permalink_key = path_parts[p_index + 1] - filter_data = await self.extract_native_filters_from_permalink_async(permalink_key) + filter_data = ( + await self.extract_native_filters_from_permalink_async( + permalink_key + ) + ) result["filter_type"] = "permalink" result["filters"] = filter_data return result except ValueError: pass - + # Check for native_filters_key in query params native_filters_key = query_params.get("native_filters_key", [None])[0] if native_filters_key: @@ -475,7 +628,7 @@ class AsyncSupersetClient(SupersetClient): dashboard_ref = potential_id except ValueError: pass - + if dashboard_ref: # Resolve slug to numeric ID β€” the filter_state API requires a numeric ID resolved_id = None @@ -484,23 +637,35 @@ class 
AsyncSupersetClient(SupersetClient): except (ValueError, TypeError): try: dash_resp = await self.get_dashboard_async(dashboard_ref) - dash_data = dash_resp.get("result", dash_resp) if isinstance(dash_resp, dict) else {} + dash_data = ( + dash_resp.get("result", dash_resp) + if isinstance(dash_resp, dict) + else {} + ) raw_id = dash_data.get("id") if raw_id is not None: resolved_id = int(raw_id) except Exception as e: - app_logger.warning("[parse_dashboard_url_for_filters_async][Warning] Failed to resolve dashboard slug '%s' to ID: %s", dashboard_ref, e) - + app_logger.warning( + "[parse_dashboard_url_for_filters_async][Warning] Failed to resolve dashboard slug '%s' to ID: %s", + dashboard_ref, + e, + ) + if resolved_id is not None: - filter_data = await self.extract_native_filters_from_key_async(resolved_id, native_filters_key) + filter_data = await self.extract_native_filters_from_key_async( + resolved_id, native_filters_key + ) result["filter_type"] = "native_filters_key" result["dashboard_id"] = resolved_id result["filters"] = filter_data return result else: - app_logger.warning("[parse_dashboard_url_for_filters_async][Warning] Could not resolve dashboard_id from URL for native_filters_key") + app_logger.warning( + "[parse_dashboard_url_for_filters_async][Warning] Could not resolve dashboard_id from URL for native_filters_key" + ) return result - + # Check for native_filters in query params (direct filter values) native_filters = query_params.get("native_filters", [None])[0] if native_filters: @@ -510,10 +675,16 @@ class AsyncSupersetClient(SupersetClient): result["filters"] = {"dataMask": parsed_filters} return result except json.JSONDecodeError as e: - app_logger.warning("[parse_dashboard_url_for_filters_async][Warning] Failed to parse native_filters JSON: %s", e) - + app_logger.warning( + "[parse_dashboard_url_for_filters_async][Warning] Failed to parse native_filters JSON: %s", + e, + ) + return result + # 
[/DEF:parse_dashboard_url_for_filters_async:Function] + + # [/DEF:AsyncSupersetClient:Class] # [/DEF:backend.src.core.async_superset_client:Module] diff --git a/backend/src/core/database.py b/backend/src/core/database.py index 917f35db..4a0b1754 100644 --- a/backend/src/core/database.py +++ b/backend/src/core/database.py @@ -1,12 +1,12 @@ -# [DEF:backend.src.core.database:Module] +# [DEF:DatabaseModule:Module] # # @COMPLEXITY: 3 # @SEMANTICS: database, postgresql, sqlalchemy, session, persistence # @PURPOSE: Configures database connection and session management (PostgreSQL-first). # @LAYER: Core -# @RELATION: DEPENDS_ON ->[sqlalchemy] -# @RELATION: DEPENDS_ON ->[backend.src.models.mapping] -# @RELATION: DEPENDS_ON ->[backend.src.core.auth.config] +# @RELATION: [DEPENDS_ON] ->[MappingModels] +# @RELATION: [DEPENDS_ON] ->[auth_config] +# @RELATION: [DEPENDS_ON] ->[ConnectionConfig] # # @INVARIANT: A single engine instance is used for the entire application. @@ -15,6 +15,7 @@ from sqlalchemy import create_engine, inspect, text from sqlalchemy.orm import sessionmaker from ..models.mapping import Base from ..models.connection import ConnectionConfig + # Import models to ensure they're registered with Base from ..models import task as _task_models # noqa: F401 from ..models import auth as _auth_models # noqa: F401 @@ -60,6 +61,7 @@ TASKS_DATABASE_URL = os.getenv("TASKS_DATABASE_URL", DATABASE_URL) AUTH_DATABASE_URL = os.getenv("AUTH_DATABASE_URL", auth_config.AUTH_DATABASE_URL) # [/DEF:AUTH_DATABASE_URL:Constant] + # [DEF:engine:Variable] # @COMPLEXITY: 1 # @PURPOSE: SQLAlchemy engine for mappings database. 
@@ -70,6 +72,7 @@ def _build_engine(db_url: str): return create_engine(db_url, connect_args={"check_same_thread": False}) return create_engine(db_url, pool_pre_ping=True) + engine = _build_engine(DATABASE_URL) # [/DEF:engine:Variable] @@ -106,11 +109,13 @@ TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_e AuthSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=auth_engine) # [/DEF:AuthSessionLocal:Class] + # [DEF:_ensure_user_dashboard_preferences_columns:Function] # @COMPLEXITY: 3 # @PURPOSE: Applies additive schema upgrades for user_dashboard_preferences table. # @PRE: bind_engine points to application database where profile table is stored. # @POST: Missing columns are added without data loss. +# @RELATION: [DEPENDS_ON] ->[engine] def _ensure_user_dashboard_preferences_columns(bind_engine): with belief_scope("_ensure_user_dashboard_preferences_columns"): table_name = "user_dashboard_preferences" @@ -170,12 +175,15 @@ def _ensure_user_dashboard_preferences_columns(bind_engine): "[database][EXPLORE] Profile preference additive migration failed: %s", migration_error, ) + + # [/DEF:_ensure_user_dashboard_preferences_columns:Function] # [DEF:_ensure_user_dashboard_preferences_health_columns:Function] # @COMPLEXITY: 3 # @PURPOSE: Applies additive schema upgrades for user_dashboard_preferences table (health fields). 
+# @RELATION: [DEPENDS_ON] ->[engine] def _ensure_user_dashboard_preferences_health_columns(bind_engine): with belief_scope("_ensure_user_dashboard_preferences_health_columns"): table_name = "user_dashboard_preferences" @@ -214,12 +222,15 @@ def _ensure_user_dashboard_preferences_health_columns(bind_engine): "[database][EXPLORE] Profile health preference additive migration failed: %s", migration_error, ) + + # [/DEF:_ensure_user_dashboard_preferences_health_columns:Function] # [DEF:_ensure_llm_validation_results_columns:Function] # @COMPLEXITY: 3 # @PURPOSE: Applies additive schema upgrades for llm_validation_results table. +# @RELATION: [DEPENDS_ON] ->[engine] def _ensure_llm_validation_results_columns(bind_engine): with belief_scope("_ensure_llm_validation_results_columns"): table_name = "llm_validation_results" @@ -254,6 +265,8 @@ def _ensure_llm_validation_results_columns(bind_engine): "[database][EXPLORE] ValidationRecord additive migration failed: %s", migration_error, ) + + # [/DEF:_ensure_llm_validation_results_columns:Function] @@ -262,6 +275,7 @@ def _ensure_llm_validation_results_columns(bind_engine): # @PURPOSE: Applies additive schema upgrades for git_server_configs table. # @PRE: bind_engine points to application database. # @POST: Missing columns are added without data loss. +# @RELATION: [DEPENDS_ON] ->[engine] def _ensure_git_server_configs_columns(bind_engine): with belief_scope("_ensure_git_server_configs_columns"): table_name = "git_server_configs" @@ -292,6 +306,8 @@ def _ensure_git_server_configs_columns(bind_engine): "[database][EXPLORE] GitServerConfig preference additive migration failed: %s", migration_error, ) + + # [/DEF:_ensure_git_server_configs_columns:Function] @@ -300,6 +316,7 @@ def _ensure_git_server_configs_columns(bind_engine): # @PURPOSE: Applies additive schema upgrades for auth users table. # @PRE: bind_engine points to authentication database. # @POST: Missing columns are added without data loss. 
+# @RELATION: [DEPENDS_ON] ->[auth_engine] def _ensure_auth_users_columns(bind_engine): with belief_scope("_ensure_auth_users_columns"): table_name = "users" @@ -314,9 +331,7 @@ def _ensure_auth_users_columns(bind_engine): alter_statements = [] if "full_name" not in existing_columns: - alter_statements.append( - "ALTER TABLE users ADD COLUMN full_name VARCHAR" - ) + alter_statements.append("ALTER TABLE users ADD COLUMN full_name VARCHAR") if "is_ad_user" not in existing_columns: alter_statements.append( "ALTER TABLE users ADD COLUMN is_ad_user BOOLEAN NOT NULL DEFAULT FALSE" @@ -340,7 +355,13 @@ def _ensure_auth_users_columns(bind_engine): connection.execute(text(statement)) logger.reason( "Auth users schema migration completed", - extra={"table": table_name, "added_columns": [stmt.split(" ADD COLUMN ", 1)[1].split()[0] for stmt in alter_statements]}, + extra={ + "table": table_name, + "added_columns": [ + stmt.split(" ADD COLUMN ", 1)[1].split()[0] + for stmt in alter_statements + ], + }, ) except Exception as migration_error: logger.warning( @@ -348,6 +369,8 @@ def _ensure_auth_users_columns(bind_engine): migration_error, ) raise + + # [/DEF:_ensure_auth_users_columns:Function] @@ -356,6 +379,7 @@ def _ensure_auth_users_columns(bind_engine): # @PURPOSE: Ensures the external connection registry table exists in the main database. # @PRE: bind_engine points to the application database. # @POST: connection_configs table exists without dropping existing data. +# @RELATION: [DEPENDS_ON] ->[ConnectionConfig] def ensure_connection_configs_table(bind_engine): with belief_scope("ensure_connection_configs_table"): try: @@ -366,6 +390,8 @@ def ensure_connection_configs_table(bind_engine): migration_error, ) raise + + # [/DEF:ensure_connection_configs_table:Function] @@ -374,6 +400,7 @@ def ensure_connection_configs_table(bind_engine): # @PURPOSE: Adds missing FilterSource enum values to the PostgreSQL native filtersource type. 
# @PRE: bind_engine points to application database with imported_filters table. # @POST: New enum values are available without data loss. +# @RELATION: [DEPENDS_ON] ->[engine] def _ensure_filter_source_enum_values(bind_engine): with belief_scope("_ensure_filter_source_enum_values"): try: @@ -387,7 +414,9 @@ def _ensure_filter_source_enum_values(bind_engine): ) ) if result.fetchone() is None: - logger.reason("filtersource enum type does not exist yet; skipping migration") + logger.reason( + "filtersource enum type does not exist yet; skipping migration" + ) return # Get existing enum values @@ -402,7 +431,9 @@ def _ensure_filter_source_enum_values(bind_engine): existing_values = {row[0] for row in result.fetchall()} required_values = ["SUPERSET_PERMALINK", "SUPERSET_NATIVE_FILTERS_KEY"] - missing_values = [v for v in required_values if v not in existing_values] + missing_values = [ + v for v in required_values if v not in existing_values + ] if not missing_values: logger.reason( @@ -417,7 +448,9 @@ def _ensure_filter_source_enum_values(bind_engine): ) for value in missing_values: connection.execute( - text(f"ALTER TYPE filtersource ADD VALUE IF NOT EXISTS '{value}'") + text( + f"ALTER TYPE filtersource ADD VALUE IF NOT EXISTS '{value}'" + ) ) connection.commit() logger.reason( @@ -429,6 +462,8 @@ def _ensure_filter_source_enum_values(bind_engine): "[database][EXPLORE] FilterSource enum additive migration failed: %s", migration_error, ) + + # [/DEF:_ensure_filter_source_enum_values:Function] @@ -438,6 +473,8 @@ def _ensure_filter_source_enum_values(bind_engine): # @PRE: engine, tasks_engine and auth_engine are initialized. # @POST: Database tables created in all databases. # @SIDE_EFFECT: Creates physical database files if they don't exist. 
+# @RELATION: [CALLS] ->[ensure_connection_configs_table] +# @RELATION: [CALLS] ->[_ensure_filter_source_enum_values] def init_db(): with belief_scope("init_db"): Base.metadata.create_all(bind=engine) @@ -450,14 +487,18 @@ def init_db(): _ensure_auth_users_columns(auth_engine) ensure_connection_configs_table(engine) _ensure_filter_source_enum_values(engine) + + # [/DEF:init_db:Function] + # [DEF:get_db:Function] # @COMPLEXITY: 3 # @PURPOSE: Dependency for getting a database session. # @PRE: SessionLocal is initialized. # @POST: Session is closed after use. # @RETURN: Generator[Session, None, None] +# @RELATION: [DEPENDS_ON] ->[SessionLocal] def get_db(): with belief_scope("get_db"): db = SessionLocal() @@ -465,14 +506,18 @@ def get_db(): yield db finally: db.close() + + # [/DEF:get_db:Function] + # [DEF:get_tasks_db:Function] # @COMPLEXITY: 3 # @PURPOSE: Dependency for getting a tasks database session. # @PRE: TasksSessionLocal is initialized. # @POST: Session is closed after use. # @RETURN: Generator[Session, None, None] +# @RELATION: [DEPENDS_ON] ->[TasksSessionLocal] def get_tasks_db(): with belief_scope("get_tasks_db"): db = TasksSessionLocal() @@ -480,8 +525,11 @@ def get_tasks_db(): yield db finally: db.close() + + # [/DEF:get_tasks_db:Function] + # [DEF:get_auth_db:Function] # @COMPLEXITY: 3 # @PURPOSE: Dependency for getting an authentication database session. @@ -489,6 +537,7 @@ def get_tasks_db(): # @POST: Session is closed after use. 
# @DATA_CONTRACT: None -> Output[sqlalchemy.orm.Session] # @RETURN: Generator[Session, None, None] +# @RELATION: [DEPENDS_ON] ->[AuthSessionLocal] def get_auth_db(): with belief_scope("get_auth_db"): db = AuthSessionLocal() @@ -496,6 +545,8 @@ def get_auth_db(): yield db finally: db.close() + + # [/DEF:get_auth_db:Function] -# [/DEF:backend.src.core.database:Module] +# [/DEF:DatabaseModule:Module] diff --git a/backend/src/core/superset_client.py b/backend/src/core/superset_client.py index c48f83ae..236934a4 100644 --- a/backend/src/core/superset_client.py +++ b/backend/src/core/superset_client.py @@ -4,7 +4,7 @@ # @SEMANTICS: superset, api, client, rest, http, dashboard, dataset, import, export # @PURPOSE: ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ высокоуровнСвый ΠΊΠ»ΠΈΠ΅Π½Ρ‚ для взаимодСйствия с Superset REST API, инкапсулируя Π»ΠΎΠ³ΠΈΠΊΡƒ запросов, ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΡƒ ошибок ΠΈ ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΡŽ. # @LAYER: Core -# @RELATION: [DEPENDS_ON] ->[APIClient] +# @RELATION: [DEPENDS_ON] ->[APIClient.__init__] # # @INVARIANT: All network operations must use the internal APIClient instance. # @PUBLIC_API: SupersetClient @@ -22,8 +22,11 @@ from .logger import logger as app_logger, belief_scope from .utils.network import APIClient, SupersetAPIError from .utils.fileio import get_filename_from_headers from .config_models import Environment + +app_logger = cast(Any, app_logger) # [/SECTION] + # [DEF:SupersetClient:Class] # @COMPLEXITY: 3 # @PURPOSE: Класс-ΠΎΠ±Ρ‘Ρ€Ρ‚ΠΊΠ° Π½Π°Π΄ Superset REST API, ΠΏΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΡŽΡ‰ΠΈΠΉ ΠΌΠ΅Ρ‚ΠΎΠ΄Ρ‹ для Ρ€Π°Π±ΠΎΡ‚Ρ‹ с Π΄Π°ΡˆΠ±ΠΎΡ€Π΄Π°ΠΌΠΈ ΠΈ датасСтами. @@ -36,28 +39,29 @@ class SupersetClient: # @POST: Атрибуты `env` ΠΈ `network` созданы ΠΈ Π³ΠΎΡ‚ΠΎΠ²Ρ‹ ΠΊ Ρ€Π°Π±ΠΎΡ‚Π΅. 
# @DATA_CONTRACT: Input[Environment] -> self.network[APIClient] # @RELATION: [DEPENDS_ON] ->[Environment] - # @RELATION: [DEPENDS_ON] ->[APIClient] + # @RELATION: [DEPENDS_ON] ->[APIClient.__init__] def __init__(self, env: Environment): with belief_scope("__init__"): - app_logger.info("[SupersetClient.__init__][Enter] Initializing SupersetClient for env %s.", env.name) + app_logger.info( + "[SupersetClient.__init__][Enter] Initializing SupersetClient for env %s.", + env.name, + ) self.env = env # Construct auth payload expected by Superset API auth_payload = { "username": env.username, "password": env.password, "provider": "db", - "refresh": "true" + "refresh": "true", } self.network = APIClient( - config={ - "base_url": env.url, - "auth": auth_payload - }, + config={"base_url": env.url, "auth": auth_payload}, verify_ssl=env.verify_ssl, - timeout=env.timeout + timeout=env.timeout, ) self.delete_before_reimport: bool = False app_logger.info("[SupersetClient.__init__][Exit] SupersetClient initialized.") + # [/DEF:SupersetClient.__init__:Function] # [DEF:SupersetClient.authenticate:Function] @@ -70,6 +74,7 @@ class SupersetClient: def authenticate(self) -> Dict[str, str]: with belief_scope("SupersetClient.authenticate"): return self.network.authenticate() + # [/DEF:SupersetClient.authenticate:Function] @property @@ -81,6 +86,7 @@ class SupersetClient: def headers(self) -> dict: with belief_scope("headers"): return self.network.headers + # [/DEF:SupersetClient.headers:Function] # [SECTION: DASHBOARD OPERATIONS] @@ -91,13 +97,13 @@ class SupersetClient: # @PRE: Client is authenticated. # @POST: Returns a tuple with total count and list of dashboards. 
# @DATA_CONTRACT: Input[query: Optional[Dict]] -> Output[Tuple[int, List[Dict]]] - # @RELATION: [CALLS] ->[SupersetClient._fetch_all_pages] + # @RELATION: [CALLS] ->[_fetch_all_pages] def get_dashboards(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]: with belief_scope("get_dashboards"): app_logger.info("[get_dashboards][Enter] Fetching dashboards.") validated_query = self._validate_query_params(query or {}) - if 'columns' not in validated_query: - validated_query['columns'] = [ + if "columns" not in validated_query: + validated_query["columns"] = [ "slug", "id", "url", @@ -109,14 +115,18 @@ class SupersetClient: "changed_by_name", "owners", ] - + paginated_data = self._fetch_all_pages( endpoint="/dashboard/", - pagination_options={"base_query": validated_query, "results_field": "result"}, + pagination_options={ + "base_query": validated_query, + "results_field": "result", + }, ) total_count = len(paginated_data) app_logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count) return total_count, paginated_data + # [/DEF:SupersetClient.get_dashboards:Function] # [DEF:SupersetClient.get_dashboards_page:Function] @@ -125,8 +135,10 @@ class SupersetClient: # @PRE: Client is authenticated. # @POST: Returns total count and one page of dashboards. 
# @DATA_CONTRACT: Input[query: Optional[Dict]] -> Output[Tuple[int, List[Dict]]] - # @RELATION: [CALLS] ->[APIClient.request] - def get_dashboards_page(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]: + # @RELATION: [CALLS] ->[request] + def get_dashboards_page( + self, query: Optional[Dict] = None + ) -> Tuple[int, List[Dict]]: with belief_scope("get_dashboards_page"): validated_query = self._validate_query_params(query or {}) if "columns" not in validated_query: @@ -154,6 +166,7 @@ class SupersetClient: result = response_json.get("result", []) total_count = response_json.get("count", len(result)) return total_count, result + # [/DEF:SupersetClient.get_dashboards_page:Function] # [DEF:SupersetClient.get_dashboards_summary:Function] @@ -207,21 +220,25 @@ class SupersetClient: if isinstance(raw_owners, list): for owner_payload in raw_owners: if isinstance(owner_payload, dict): - owner_username = self._sanitize_user_text(owner_payload.get("username")) + owner_username = self._sanitize_user_text( + owner_payload.get("username") + ) if owner_username: raw_owner_usernames.append(owner_username) - result.append({ - "id": dash.get("id"), - "slug": dash.get("slug"), - "title": dash.get("dashboard_title"), - "url": dash.get("url"), - "last_modified": dash.get("changed_on_utc"), - "status": "published" if dash.get("published") else "draft", - "created_by": projected_created_by, - "modified_by": projected_modified_by, - "owners": owners, - }) + result.append( + { + "id": dash.get("id"), + "slug": dash.get("slug"), + "title": dash.get("dashboard_title"), + "url": dash.get("url"), + "last_modified": dash.get("changed_on_utc"), + "status": "published" if dash.get("published") else "draft", + "created_by": projected_created_by, + "modified_by": projected_modified_by, + "owners": owners, + } + ) if index < max_debug_samples: app_logger.reflect( @@ -239,6 +256,7 @@ class SupersetClient: f"sampled={min(len(result), max_debug_samples)})" ) return result + # 
[/DEF:SupersetClient.get_dashboards_summary:Function] # [DEF:SupersetClient.get_dashboards_summary_page:Function] @@ -293,25 +311,28 @@ class SupersetClient: [dash.get("created_by"), dash.get("changed_by")], ) - result.append({ - "id": dash.get("id"), - "slug": dash.get("slug"), - "title": dash.get("dashboard_title"), - "url": dash.get("url"), - "last_modified": dash.get("changed_on_utc"), - "status": "published" if dash.get("published") else "draft", - "created_by": self._extract_user_display( - None, - dash.get("created_by"), - ), - "modified_by": self._extract_user_display( - dash.get("changed_by_name"), - dash.get("changed_by"), - ), - "owners": owners, - }) + result.append( + { + "id": dash.get("id"), + "slug": dash.get("slug"), + "title": dash.get("dashboard_title"), + "url": dash.get("url"), + "last_modified": dash.get("changed_on_utc"), + "status": "published" if dash.get("published") else "draft", + "created_by": self._extract_user_display( + None, + dash.get("created_by"), + ), + "modified_by": self._extract_user_display( + dash.get("changed_by_name"), + dash.get("changed_by"), + ), + "owners": owners, + } + ) return total_count, result + # [/DEF:SupersetClient.get_dashboards_summary_page:Function] # [DEF:SupersetClient._extract_owner_labels:Function] @@ -340,6 +361,7 @@ class SupersetClient: if label and label not in normalized: normalized.append(label) return normalized + # [/DEF:SupersetClient._extract_owner_labels:Function] # [DEF:SupersetClient._extract_user_display:Function] @@ -348,7 +370,9 @@ class SupersetClient: # @PRE: user payload can be string, dict or None. # @POST: Returns compact non-empty display value or None. 
# @DATA_CONTRACT: Input[Optional[str], Optional[Dict]] -> Output[Optional[str]] - def _extract_user_display(self, preferred_value: Optional[str], user_payload: Optional[Dict]) -> Optional[str]: + def _extract_user_display( + self, preferred_value: Optional[str], user_payload: Optional[Dict] + ) -> Optional[str]: preferred = self._sanitize_user_text(preferred_value) if preferred: return preferred @@ -359,7 +383,9 @@ class SupersetClient: return full_name first_name = self._sanitize_user_text(user_payload.get("first_name")) or "" last_name = self._sanitize_user_text(user_payload.get("last_name")) or "" - combined = " ".join(part for part in [first_name, last_name] if part).strip() + combined = " ".join( + part for part in [first_name, last_name] if part + ).strip() if combined: return combined username = self._sanitize_user_text(user_payload.get("username")) @@ -369,6 +395,7 @@ class SupersetClient: if email: return email return None + # [/DEF:SupersetClient._extract_user_display:Function] # [DEF:SupersetClient._sanitize_user_text:Function] @@ -383,6 +410,7 @@ class SupersetClient: if not normalized: return None return normalized + # [/DEF:SupersetClient._sanitize_user_text:Function] # [DEF:SupersetClient.get_dashboard:Function] @@ -391,11 +419,14 @@ class SupersetClient: # @PRE: Client is authenticated and dashboard_ref exists. # @POST: Returns dashboard payload from Superset API. 
# @DATA_CONTRACT: Input[dashboard_ref: Union[int, str]] -> Output[Dict] - # @RELATION: [CALLS] ->[APIClient.request] + # @RELATION: [CALLS] ->[request] def get_dashboard(self, dashboard_ref: Union[int, str]) -> Dict: with belief_scope("SupersetClient.get_dashboard", f"ref={dashboard_ref}"): - response = self.network.request(method="GET", endpoint=f"/dashboard/{dashboard_ref}") + response = self.network.request( + method="GET", endpoint=f"/dashboard/{dashboard_ref}" + ) return cast(Dict, response) + # [/DEF:SupersetClient.get_dashboard:Function] # [DEF:SupersetClient.get_dashboard_permalink_state:Function] @@ -404,14 +435,16 @@ class SupersetClient: # @PRE: Client is authenticated and permalink key exists. # @POST: Returns dashboard permalink state payload from Superset API. # @DATA_CONTRACT: Input[permalink_key: str] -> Output[Dict] - # @RELATION: [CALLS] ->[APIClient.request] + # @RELATION: [CALLS] ->[request] def get_dashboard_permalink_state(self, permalink_key: str) -> Dict: - with belief_scope("SupersetClient.get_dashboard_permalink_state", f"key={permalink_key}"): + with belief_scope( + "SupersetClient.get_dashboard_permalink_state", f"key={permalink_key}" + ): response = self.network.request( - method="GET", - endpoint=f"/dashboard/permalink/{permalink_key}" + method="GET", endpoint=f"/dashboard/permalink/{permalink_key}" ) return cast(Dict, response) + # [/DEF:SupersetClient.get_dashboard_permalink_state:Function] # [DEF:SupersetClient.get_native_filter_state:Function] @@ -420,14 +453,20 @@ class SupersetClient: # @PRE: Client is authenticated and filter_state_key exists. # @POST: Returns native filter state payload from Superset API. 
# @DATA_CONTRACT: Input[dashboard_id: Union[int, str], filter_state_key: str] -> Output[Dict] - # @RELATION: [CALLS] ->[APIClient.request] - def get_native_filter_state(self, dashboard_id: Union[int, str], filter_state_key: str) -> Dict: - with belief_scope("SupersetClient.get_native_filter_state", f"dashboard={dashboard_id}, key={filter_state_key}"): + # @RELATION: [CALLS] ->[request] + def get_native_filter_state( + self, dashboard_id: Union[int, str], filter_state_key: str + ) -> Dict: + with belief_scope( + "SupersetClient.get_native_filter_state", + f"dashboard={dashboard_id}, key={filter_state_key}", + ): response = self.network.request( method="GET", - endpoint=f"/dashboard/{dashboard_id}/filter_state/{filter_state_key}" + endpoint=f"/dashboard/{dashboard_id}/filter_state/{filter_state_key}", ) return cast(Dict, response) + # [/DEF:SupersetClient.get_native_filter_state:Function] # [DEF:SupersetClient.extract_native_filters_from_permalink:Function] @@ -438,15 +477,18 @@ class SupersetClient: # @DATA_CONTRACT: Input[permalink_key: str] -> Output[Dict] # @RELATION: [CALLS] ->[SupersetClient.get_dashboard_permalink_state] def extract_native_filters_from_permalink(self, permalink_key: str) -> Dict: - with belief_scope("SupersetClient.extract_native_filters_from_permalink", f"key={permalink_key}"): + with belief_scope( + "SupersetClient.extract_native_filters_from_permalink", + f"key={permalink_key}", + ): permalink_response = self.get_dashboard_permalink_state(permalink_key) - + # Permalink response structure: { "result": { "state": { "dataMask": {...}, ... } } } # or directly: { "state": { "dataMask": {...}, ... 
} } result = permalink_response.get("result", permalink_response) state = result.get("state", result) data_mask = state.get("dataMask", {}) - + extracted_filters = {} for filter_id, filter_data in data_mask.items(): if not isinstance(filter_data, dict): @@ -456,7 +498,7 @@ class SupersetClient: "filterState": filter_data.get("filterState", {}), "ownState": filter_data.get("ownState", {}), } - + return { "dataMask": extracted_filters, "activeTabs": state.get("activeTabs", []), @@ -464,6 +506,7 @@ class SupersetClient: "chartStates": state.get("chartStates", {}), "permalink_key": permalink_key, } + # [/DEF:SupersetClient.extract_native_filters_from_permalink:Function] # [DEF:SupersetClient.extract_native_filters_from_key:Function] @@ -473,31 +516,41 @@ class SupersetClient: # @POST: Returns extracted filter state with extraFormData. # @DATA_CONTRACT: Input[dashboard_id: Union[int, str], filter_state_key: str] -> Output[Dict] # @RELATION: [CALLS] ->[SupersetClient.get_native_filter_state] - def extract_native_filters_from_key(self, dashboard_id: Union[int, str], filter_state_key: str) -> Dict: - with belief_scope("SupersetClient.extract_native_filters_from_key", f"dashboard={dashboard_id}, key={filter_state_key}"): - filter_response = self.get_native_filter_state(dashboard_id, filter_state_key) - + def extract_native_filters_from_key( + self, dashboard_id: Union[int, str], filter_state_key: str + ) -> Dict: + with belief_scope( + "SupersetClient.extract_native_filters_from_key", + f"dashboard={dashboard_id}, key={filter_state_key}", + ): + filter_response = self.get_native_filter_state( + dashboard_id, filter_state_key + ) + # Filter state response structure: { "result": { "value": "{...json...}" } } # or: { "value": "{...json...}" } result = filter_response.get("result", filter_response) value = result.get("value") - + if isinstance(value, str): try: parsed_value = json.loads(value) except json.JSONDecodeError as e: - 
app_logger.warning("[extract_native_filters_from_key][Warning] Failed to parse filter state JSON: %s", e) + app_logger.warning( + "[extract_native_filters_from_key][Warning] Failed to parse filter state JSON: %s", + e, + ) parsed_value = {} elif isinstance(value, dict): parsed_value = value else: parsed_value = {} - + # The parsed value contains filter state with structure: # { "filter_id": { "id": "...", "extraFormData": {...}, "filterState": {...} } } # or a single filter: { "id": "...", "extraFormData": {...}, "filterState": {...} } extracted_filters = {} - + if "id" in parsed_value and "extraFormData" in parsed_value: # Single filter format filter_id = parsed_value.get("id", filter_state_key) @@ -516,12 +569,13 @@ class SupersetClient: "filterState": filter_data.get("filterState", {}), "ownState": filter_data.get("ownState", {}), } - + return { "dataMask": extracted_filters, "dashboard_id": dashboard_id, "filter_state_key": filter_state_key, } + # [/DEF:SupersetClient.extract_native_filters_from_key:Function] # [DEF:SupersetClient.parse_dashboard_url_for_filters:Function] @@ -533,33 +587,37 @@ class SupersetClient: # @RELATION: [CALLS] ->[SupersetClient.extract_native_filters_from_permalink] # @RELATION: [CALLS] ->[SupersetClient.extract_native_filters_from_key] def parse_dashboard_url_for_filters(self, url: str) -> Dict: - with belief_scope("SupersetClient.parse_dashboard_url_for_filters", f"url={url}"): + with belief_scope( + "SupersetClient.parse_dashboard_url_for_filters", f"url={url}" + ): import urllib.parse - + parsed_url = urllib.parse.urlparse(url) query_params = urllib.parse.parse_qs(parsed_url.query) path_parts = parsed_url.path.rstrip("/").split("/") - + result = { "url": url, "dashboard_id": None, "filter_type": None, "filters": {}, } - + # Check for permalink URL: /dashboard/p/{key}/ or /superset/dashboard/p/{key}/ if "p" in path_parts: try: p_index = path_parts.index("p") if p_index + 1 < len(path_parts): permalink_key = path_parts[p_index + 1] - 
filter_data = self.extract_native_filters_from_permalink(permalink_key) + filter_data = self.extract_native_filters_from_permalink( + permalink_key + ) result["filter_type"] = "permalink" result["filters"] = filter_data return result except ValueError: pass - + # Check for native_filters_key in query params native_filters_key = query_params.get("native_filters_key", [None])[0] if native_filters_key: @@ -575,7 +633,7 @@ class SupersetClient: dashboard_ref = potential_id except ValueError: pass - + if dashboard_ref: # Resolve slug to numeric ID β€” the filter_state API requires a numeric ID resolved_id = None @@ -584,22 +642,34 @@ class SupersetClient: except (ValueError, TypeError): try: dash_resp = self.get_dashboard(dashboard_ref) - dash_data = dash_resp.get("result", dash_resp) if isinstance(dash_resp, dict) else {} + dash_data = ( + dash_resp.get("result", dash_resp) + if isinstance(dash_resp, dict) + else {} + ) raw_id = dash_data.get("id") if raw_id is not None: resolved_id = int(raw_id) except Exception as e: - app_logger.warning("[parse_dashboard_url_for_filters][Warning] Failed to resolve dashboard slug '%s' to ID: %s", dashboard_ref, e) - + app_logger.warning( + "[parse_dashboard_url_for_filters][Warning] Failed to resolve dashboard slug '%s' to ID: %s", + dashboard_ref, + e, + ) + if resolved_id is not None: - filter_data = self.extract_native_filters_from_key(resolved_id, native_filters_key) + filter_data = self.extract_native_filters_from_key( + resolved_id, native_filters_key + ) result["filter_type"] = "native_filters_key" result["dashboard_id"] = resolved_id result["filters"] = filter_data return result else: - app_logger.warning("[parse_dashboard_url_for_filters][Warning] Could not resolve dashboard_id from URL for native_filters_key") - + app_logger.warning( + "[parse_dashboard_url_for_filters][Warning] Could not resolve dashboard_id from URL for native_filters_key" + ) + # Check for native_filters in query params (direct filter values) 
native_filters = query_params.get("native_filters", [None])[0] if native_filters: @@ -609,9 +679,13 @@ class SupersetClient: result["filters"] = {"dataMask": parsed_filters} return result except json.JSONDecodeError as e: - app_logger.warning("[parse_dashboard_url_for_filters][Warning] Failed to parse native_filters JSON: %s", e) - + app_logger.warning( + "[parse_dashboard_url_for_filters][Warning] Failed to parse native_filters JSON: %s", + e, + ) + return result + # [/DEF:SupersetClient.parse_dashboard_url_for_filters:Function] # [DEF:SupersetClient.get_chart:Function] @@ -625,6 +699,7 @@ class SupersetClient: with belief_scope("SupersetClient.get_chart", f"id={chart_id}"): response = self.network.request(method="GET", endpoint=f"/chart/{chart_id}") return cast(Dict, response) + # [/DEF:SupersetClient.get_chart:Function] # [DEF:SupersetClient.get_dashboard_detail:Function] @@ -633,18 +708,22 @@ class SupersetClient: # @PRE: Client is authenticated and dashboard reference exists. # @POST: Returns dashboard metadata with charts and datasets lists. 
# @DATA_CONTRACT: Input[dashboard_ref: Union[int, str]] -> Output[Dict] - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient.get_dashboard] - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient.get_chart] + # @RELATION: [CALLS] ->[SupersetClient.get_dashboard] + # @RELATION: [CALLS] ->[SupersetClient.get_chart] def get_dashboard_detail(self, dashboard_ref: Union[int, str]) -> Dict: - with belief_scope("SupersetClient.get_dashboard_detail", f"ref={dashboard_ref}"): + with belief_scope( + "SupersetClient.get_dashboard_detail", f"ref={dashboard_ref}" + ): dashboard_response = self.get_dashboard(dashboard_ref) dashboard_data = dashboard_response.get("result", dashboard_response) charts: List[Dict] = [] datasets: List[Dict] = [] - # [DEF:backend.src.core.superset_client.SupersetClient.get_dashboard_detail.extract_dataset_id_from_form_data:Function] - def extract_dataset_id_from_form_data(form_data: Optional[Dict]) -> Optional[int]: + # [DEF:extract_dataset_id_from_form_data:Function] + def extract_dataset_id_from_form_data( + form_data: Optional[Dict], + ) -> Optional[int]: if not isinstance(form_data, dict): return None datasource = form_data.get("datasource") @@ -666,16 +745,20 @@ class SupersetClient: return int(ds_id) if ds_id is not None else None except (TypeError, ValueError): return None - # [/DEF:backend.src.core.superset_client.SupersetClient.get_dashboard_detail.extract_dataset_id_from_form_data:Function] + + # [/DEF:extract_dataset_id_from_form_data:Function] # Canonical endpoints from Superset OpenAPI: # /dashboard/{id_or_slug}/charts and /dashboard/{id_or_slug}/datasets. 
try: charts_response = self.network.request( - method="GET", - endpoint=f"/dashboard/{dashboard_ref}/charts" + method="GET", endpoint=f"/dashboard/{dashboard_ref}/charts" + ) + charts_payload = ( + charts_response.get("result", []) + if isinstance(charts_response, dict) + else [] ) - charts_payload = charts_response.get("result", []) if isinstance(charts_response, dict) else [] for chart_obj in charts_payload: if not isinstance(chart_obj, dict): continue @@ -688,24 +771,48 @@ class SupersetClient: form_data = json.loads(form_data) except Exception: form_data = {} - dataset_id = extract_dataset_id_from_form_data(form_data) or chart_obj.get("datasource_id") - charts.append({ - "id": int(chart_id), - "title": chart_obj.get("slice_name") or chart_obj.get("name") or f"Chart {chart_id}", - "viz_type": (form_data.get("viz_type") if isinstance(form_data, dict) else None), - "dataset_id": int(dataset_id) if dataset_id is not None else None, - "last_modified": chart_obj.get("changed_on"), - "overview": chart_obj.get("description") or (form_data.get("viz_type") if isinstance(form_data, dict) else None) or "Chart", - }) + dataset_id = extract_dataset_id_from_form_data( + form_data + ) or chart_obj.get("datasource_id") + charts.append( + { + "id": int(chart_id), + "title": chart_obj.get("slice_name") + or chart_obj.get("name") + or f"Chart {chart_id}", + "viz_type": ( + form_data.get("viz_type") + if isinstance(form_data, dict) + else None + ), + "dataset_id": int(dataset_id) + if dataset_id is not None + else None, + "last_modified": chart_obj.get("changed_on"), + "overview": chart_obj.get("description") + or ( + form_data.get("viz_type") + if isinstance(form_data, dict) + else None + ) + or "Chart", + } + ) except Exception as e: - app_logger.warning("[get_dashboard_detail][Warning] Failed to fetch dashboard charts: %s", e) + app_logger.warning( + "[get_dashboard_detail][Warning] Failed to fetch dashboard charts: %s", + e, + ) try: datasets_response = self.network.request( - 
method="GET", - endpoint=f"/dashboard/{dashboard_ref}/datasets" + method="GET", endpoint=f"/dashboard/{dashboard_ref}/datasets" + ) + datasets_payload = ( + datasets_response.get("result", []) + if isinstance(datasets_response, dict) + else [] ) - datasets_payload = datasets_response.get("result", []) if isinstance(datasets_response, dict) else [] for dataset_obj in datasets_payload: if not isinstance(dataset_obj, dict): continue @@ -713,20 +820,36 @@ class SupersetClient: if dataset_id is None: continue db_payload = dataset_obj.get("database") - db_name = db_payload.get("database_name") if isinstance(db_payload, dict) else None - table_name = dataset_obj.get("table_name") or dataset_obj.get("datasource_name") or dataset_obj.get("name") or f"Dataset {dataset_id}" + db_name = ( + db_payload.get("database_name") + if isinstance(db_payload, dict) + else None + ) + table_name = ( + dataset_obj.get("table_name") + or dataset_obj.get("datasource_name") + or dataset_obj.get("name") + or f"Dataset {dataset_id}" + ) schema = dataset_obj.get("schema") fq_name = f"{schema}.{table_name}" if schema else table_name - datasets.append({ - "id": int(dataset_id), - "table_name": table_name, - "schema": schema, - "database": db_name or dataset_obj.get("database_name") or "Unknown", - "last_modified": dataset_obj.get("changed_on"), - "overview": fq_name, - }) + datasets.append( + { + "id": int(dataset_id), + "table_name": table_name, + "schema": schema, + "database": db_name + or dataset_obj.get("database_name") + or "Unknown", + "last_modified": dataset_obj.get("changed_on"), + "overview": fq_name, + } + ) except Exception as e: - app_logger.warning("[get_dashboard_detail][Warning] Failed to fetch dashboard datasets: %s", e) + app_logger.warning( + "[get_dashboard_detail][Warning] Failed to fetch dashboard datasets: %s", + e, + ) # Fallback: derive chart IDs from layout metadata if dashboard charts endpoint fails. 
if not charts: @@ -735,71 +858,109 @@ class SupersetClient: if isinstance(raw_position_json, str) and raw_position_json: try: parsed_position = json.loads(raw_position_json) - chart_ids_from_position.update(self._extract_chart_ids_from_layout(parsed_position)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(parsed_position) + ) except Exception: pass elif isinstance(raw_position_json, dict): - chart_ids_from_position.update(self._extract_chart_ids_from_layout(raw_position_json)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(raw_position_json) + ) raw_json_metadata = dashboard_data.get("json_metadata") if isinstance(raw_json_metadata, str) and raw_json_metadata: try: parsed_metadata = json.loads(raw_json_metadata) - chart_ids_from_position.update(self._extract_chart_ids_from_layout(parsed_metadata)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(parsed_metadata) + ) except Exception: pass elif isinstance(raw_json_metadata, dict): - chart_ids_from_position.update(self._extract_chart_ids_from_layout(raw_json_metadata)) + chart_ids_from_position.update( + self._extract_chart_ids_from_layout(raw_json_metadata) + ) app_logger.info( "[get_dashboard_detail][State] Extracted %s fallback chart IDs from layout (dashboard_id=%s)", len(chart_ids_from_position), - dashboard_id, + dashboard_ref, ) for chart_id in sorted(chart_ids_from_position): try: chart_response = self.get_chart(int(chart_id)) chart_data = chart_response.get("result", chart_response) - charts.append({ - "id": int(chart_id), - "title": chart_data.get("slice_name") or chart_data.get("name") or f"Chart {chart_id}", - "viz_type": chart_data.get("viz_type"), - "dataset_id": chart_data.get("datasource_id"), - "last_modified": chart_data.get("changed_on"), - "overview": chart_data.get("description") or chart_data.get("viz_type") or "Chart", - }) + charts.append( + { + "id": int(chart_id), + "title": chart_data.get("slice_name") + or 
chart_data.get("name") + or f"Chart {chart_id}", + "viz_type": chart_data.get("viz_type"), + "dataset_id": chart_data.get("datasource_id"), + "last_modified": chart_data.get("changed_on"), + "overview": chart_data.get("description") + or chart_data.get("viz_type") + or "Chart", + } + ) except Exception as e: - app_logger.warning("[get_dashboard_detail][Warning] Failed to resolve fallback chart %s: %s", chart_id, e) + app_logger.warning( + "[get_dashboard_detail][Warning] Failed to resolve fallback chart %s: %s", + chart_id, + e, + ) # Backfill datasets from chart datasource IDs. dataset_ids_from_charts = { - c.get("dataset_id") - for c in charts - if c.get("dataset_id") is not None + c.get("dataset_id") for c in charts if c.get("dataset_id") is not None } - known_dataset_ids = {d.get("id") for d in datasets} - missing_dataset_ids = [ds_id for ds_id in dataset_ids_from_charts if ds_id not in known_dataset_ids] + known_dataset_ids = { + d.get("id") for d in datasets if d.get("id") is not None + } + missing_dataset_ids: List[int] = [] + for raw_dataset_id in dataset_ids_from_charts: + if raw_dataset_id is None or raw_dataset_id in known_dataset_ids: + continue + try: + missing_dataset_ids.append(int(raw_dataset_id)) + except (TypeError, ValueError): + continue for dataset_id in missing_dataset_ids: try: dataset_response = self.get_dataset(int(dataset_id)) dataset_data = dataset_response.get("result", dataset_response) db_payload = dataset_data.get("database") - db_name = db_payload.get("database_name") if isinstance(db_payload, dict) else None - table_name = dataset_data.get("table_name") or f"Dataset {dataset_id}" + db_name = ( + db_payload.get("database_name") + if isinstance(db_payload, dict) + else None + ) + table_name = ( + dataset_data.get("table_name") or f"Dataset {dataset_id}" + ) schema = dataset_data.get("schema") fq_name = f"{schema}.{table_name}" if schema else table_name - datasets.append({ - "id": int(dataset_id), - "table_name": table_name, - 
"schema": schema, - "database": db_name or "Unknown", - "last_modified": dataset_data.get("changed_on_utc") or dataset_data.get("changed_on"), - "overview": fq_name, - }) + datasets.append( + { + "id": int(dataset_id), + "table_name": table_name, + "schema": schema, + "database": db_name or "Unknown", + "last_modified": dataset_data.get("changed_on_utc") + or dataset_data.get("changed_on"), + "overview": fq_name, + } + ) except Exception as e: - app_logger.warning("[get_dashboard_detail][Warning] Failed to resolve dataset %s: %s", dataset_id, e) + app_logger.warning( + "[get_dashboard_detail][Warning] Failed to resolve dataset %s: %s", + dataset_id, + e, + ) unique_charts = {} for chart in charts: @@ -812,18 +973,22 @@ class SupersetClient: resolved_dashboard_id = dashboard_data.get("id", dashboard_ref) return { "id": resolved_dashboard_id, - "title": dashboard_data.get("dashboard_title") or dashboard_data.get("title") or f"Dashboard {resolved_dashboard_id}", + "title": dashboard_data.get("dashboard_title") + or dashboard_data.get("title") + or f"Dashboard {resolved_dashboard_id}", "slug": dashboard_data.get("slug"), "url": dashboard_data.get("url"), "description": dashboard_data.get("description") or "", - "last_modified": dashboard_data.get("changed_on_utc") or dashboard_data.get("changed_on"), + "last_modified": dashboard_data.get("changed_on_utc") + or dashboard_data.get("changed_on"), "published": dashboard_data.get("published"), "charts": list(unique_charts.values()), "datasets": list(unique_datasets.values()), "chart_count": len(unique_charts), "dataset_count": len(unique_datasets), } - # [/DEF:backend.src.core.superset_client.SupersetClient.get_dashboard_detail:Function] + + # [/DEF:SupersetClient.get_dashboard_detail:Function] # [DEF:SupersetClient.get_charts:Function] # @COMPLEXITY: 3 @@ -840,9 +1005,13 @@ class SupersetClient: paginated_data = self._fetch_all_pages( endpoint="/chart/", - pagination_options={"base_query": validated_query, "results_field": 
"result"}, + pagination_options={ + "base_query": validated_query, + "results_field": "result", + }, ) return len(paginated_data), paginated_data + # [/DEF:SupersetClient.get_charts:Function] # [DEF:SupersetClient._extract_chart_ids_from_layout:Function] @@ -850,7 +1019,9 @@ class SupersetClient: # @PURPOSE: Traverses dashboard layout metadata and extracts chart IDs from common keys. # @PRE: payload can be dict/list/scalar. # @POST: Returns a set of chart IDs found in nested structures. - def _extract_chart_ids_from_layout(self, payload: Union[Dict, List, str, int, None]) -> set: + def _extract_chart_ids_from_layout( + self, payload: Union[Dict, List, str, int, None] + ) -> set: with belief_scope("_extract_chart_ids_from_layout"): found = set() @@ -876,19 +1047,22 @@ class SupersetClient: walk(payload) return found - # [/DEF:backend.src.core.superset_client.SupersetClient._extract_chart_ids_from_layout:Function] - # [DEF:backend.src.core.superset_client.SupersetClient.export_dashboard:Function] + # [/DEF:SupersetClient._extract_chart_ids_from_layout:Function] + + # [DEF:export_dashboard:Function] # @COMPLEXITY: 3 # @PURPOSE: ЭкспортируСт Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ Π² Π²ΠΈΠ΄Π΅ ZIP-Π°Ρ€Ρ…ΠΈΠ²Π°. # @PRE: dashboard_id must exist in Superset. # @POST: Returns ZIP content and filename. # @DATA_CONTRACT: Input[dashboard_id: int] -> Output[Tuple[bytes, str]] # @SIDE_EFFECT: Performs network I/O to download archive. 
- # @RELATION: [CALLS] ->[backend.src.core.utils.network.APIClient.request] + # @RELATION: [CALLS] ->[request] def export_dashboard(self, dashboard_id: int) -> Tuple[bytes, str]: with belief_scope("export_dashboard"): - app_logger.info("[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id) + app_logger.info( + "[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id + ) response = self.network.request( method="GET", endpoint="/dashboard/export/", @@ -899,20 +1073,30 @@ class SupersetClient: response = cast(Response, response) self._validate_export_response(response, dashboard_id) filename = self._resolve_export_filename(response, dashboard_id) - app_logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename) + app_logger.info( + "[export_dashboard][Exit] Exported dashboard %s to %s.", + dashboard_id, + filename, + ) return response.content, filename - # [/DEF:backend.src.core.superset_client.SupersetClient.export_dashboard:Function] - # [DEF:backend.src.core.superset_client.SupersetClient.import_dashboard:Function] + # [/DEF:export_dashboard:Function] + + # [DEF:import_dashboard:Function] # @COMPLEXITY: 3 # @PURPOSE: Π˜ΠΌΠΏΠΎΡ€Ρ‚ΠΈΡ€ΡƒΠ΅Ρ‚ Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ ΠΈΠ· ZIP-Ρ„Π°ΠΉΠ»Π°. # @PRE: file_name must be a valid ZIP dashboard export. # @POST: Dashboard is imported or re-imported after deletion. # @DATA_CONTRACT: Input[file_name: Union[str, Path]] -> Output[Dict] # @SIDE_EFFECT: Performs network I/O to upload archive. 
- # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient._do_import] - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient.delete_dashboard] - def import_dashboard(self, file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict: + # @RELATION: [CALLS] ->[SupersetClient._do_import] + # @RELATION: [CALLS] ->[delete_dashboard] + def import_dashboard( + self, + file_name: Union[str, Path], + dash_id: Optional[int] = None, + dash_slug: Optional[str] = None, + ) -> Dict: with belief_scope("import_dashboard"): if file_name is None: raise ValueError("file_name cannot be None") @@ -921,37 +1105,58 @@ class SupersetClient: try: return self._do_import(file_path) except Exception as exc: - app_logger.error("[import_dashboard][Failure] First import attempt failed: %s", exc, exc_info=True) + app_logger.error( + "[import_dashboard][Failure] First import attempt failed: %s", + exc, + exc_info=True, + ) if not self.delete_before_reimport: raise target_id = self._resolve_target_id_for_delete(dash_id, dash_slug) if target_id is None: - app_logger.error("[import_dashboard][Failure] No ID available for delete-retry.") + app_logger.error( + "[import_dashboard][Failure] No ID available for delete-retry." + ) raise self.delete_dashboard(target_id) - app_logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id) + app_logger.info( + "[import_dashboard][State] Deleted dashboard ID %s, retrying import.", + target_id, + ) return self._do_import(file_path) - # [/DEF:backend.src.core.superset_client.SupersetClient.import_dashboard:Function] - # [DEF:backend.src.core.superset_client.SupersetClient.delete_dashboard:Function] + # [/DEF:import_dashboard:Function] + + # [DEF:delete_dashboard:Function] # @COMPLEXITY: 3 # @PURPOSE: УдаляСт Π΄Π°ΡˆΠ±ΠΎΡ€Π΄ ΠΏΠΎ Π΅Π³ΠΎ ID ΠΈΠ»ΠΈ slug. # @PRE: dashboard_id must exist. # @POST: Dashboard is removed from Superset. 
# @SIDE_EFFECT: Deletes resource from upstream Superset environment. - # @RELATION: [CALLS] ->[APIClient.request] + # @RELATION: [CALLS] ->[request] def delete_dashboard(self, dashboard_id: Union[int, str]) -> None: with belief_scope("delete_dashboard"): - app_logger.info("[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id) - response = self.network.request(method="DELETE", endpoint=f"/dashboard/{dashboard_id}") + app_logger.info( + "[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id + ) + response = self.network.request( + method="DELETE", endpoint=f"/dashboard/{dashboard_id}" + ) response = cast(Dict, response) if response.get("result", True) is not False: - app_logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id) + app_logger.info( + "[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id + ) else: - app_logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response) - # [/DEF:backend.src.core.superset_client.SupersetClient.delete_dashboard:Function] + app_logger.warning( + "[delete_dashboard][Warning] Unexpected response while deleting %s: %s", + dashboard_id, + response, + ) + + # [/DEF:delete_dashboard:Function] # [DEF:SupersetClient.get_datasets:Function] # @COMPLEXITY: 3 @@ -959,7 +1164,7 @@ class SupersetClient: # @PRE: Client is authenticated. # @POST: Returns total count and list of datasets. 
# @DATA_CONTRACT: Input[query: Optional[Dict]] -> Output[Tuple[int, List[Dict]]] - # @RELATION: [CALLS] ->[SupersetClient._fetch_all_pages] + # @RELATION: [CALLS] ->[_fetch_all_pages] def get_datasets(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]: with belief_scope("get_datasets"): app_logger.info("[get_datasets][Enter] Fetching datasets.") @@ -967,11 +1172,15 @@ class SupersetClient: paginated_data = self._fetch_all_pages( endpoint="/dataset/", - pagination_options={"base_query": validated_query, "results_field": "result"}, + pagination_options={ + "base_query": validated_query, + "results_field": "result", + }, ) total_count = len(paginated_data) app_logger.info("[get_datasets][Exit] Found %d datasets.", total_count) return total_count, paginated_data + # [/DEF:SupersetClient.get_datasets:Function] # [DEF:SupersetClient.get_datasets_summary:Function] @@ -980,36 +1189,41 @@ class SupersetClient: # @PRE: Client is authenticated. # @POST: Returns a list of dataset metadata summaries. 
# @RETURN: List[Dict] + # @RELATION: [CALLS] ->[SupersetClient.get_datasets] def get_datasets_summary(self) -> List[Dict]: with belief_scope("SupersetClient.get_datasets_summary"): - query = { - "columns": ["id", "table_name", "schema", "database"] - } + query = {"columns": ["id", "table_name", "schema", "database"]} _, datasets = self.get_datasets(query=query) # Map fields to match the contracts result = [] for ds in datasets: - result.append({ - "id": ds.get("id"), - "table_name": ds.get("table_name"), - "schema": ds.get("schema"), - "database": ds.get("database", {}).get("database_name", "Unknown") - }) + result.append( + { + "id": ds.get("id"), + "table_name": ds.get("table_name"), + "schema": ds.get("schema"), + "database": ds.get("database", {}).get( + "database_name", "Unknown" + ), + } + ) return result - # [/DEF:backend.src.core.superset_client.SupersetClient.get_datasets_summary:Function] - # [DEF:backend.src.core.superset_client.SupersetClient.get_dataset_detail:Function] + # [/DEF:SupersetClient.get_datasets_summary:Function] + + # [DEF:get_dataset_detail:Function] # @COMPLEXITY: 3 # @PURPOSE: Fetches detailed dataset information including columns and linked dashboards # @PRE: Client is authenticated and dataset_id exists. # @POST: Returns detailed dataset info with columns and linked dashboards. # @PARAM: dataset_id (int) - The dataset ID to fetch details for. # @RETURN: Dict - Dataset details with columns and linked_dashboards. 
- # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient.get_dataset] - # @RELATION: [CALLS] ->[backend.src.core.utils.network.APIClient.request] + # @RELATION: [CALLS] ->[SupersetClient.get_dataset] + # @RELATION: [CALLS] ->[request] def get_dataset_detail(self, dataset_id: int) -> Dict: with belief_scope("SupersetClient.get_dataset_detail", f"id={dataset_id}"): + def as_bool(value, default=False): if value is None: return default @@ -1021,10 +1235,10 @@ class SupersetClient: # Get base dataset info response = self.get_dataset(dataset_id) - + # If the response is a dict and has a 'result' key, use that (standard Superset API) - if isinstance(response, dict) and 'result' in response: - dataset = response['result'] + if isinstance(response, dict) and "result" in response: + dataset = response["result"] else: dataset = response @@ -1035,59 +1249,71 @@ class SupersetClient: col_id = col.get("id") if col_id is None: continue - column_info.append({ - "id": int(col_id), - "name": col.get("column_name"), - "type": col.get("type"), - "is_dttm": as_bool(col.get("is_dttm"), default=False), - "is_active": as_bool(col.get("is_active"), default=True), - "description": col.get("description", "") - }) - + column_info.append( + { + "id": int(col_id), + "name": col.get("column_name"), + "type": col.get("type"), + "is_dttm": as_bool(col.get("is_dttm"), default=False), + "is_active": as_bool(col.get("is_active"), default=True), + "description": col.get("description", ""), + } + ) + # Get linked dashboards using related_objects endpoint linked_dashboards = [] try: related_objects = self.network.request( - method="GET", - endpoint=f"/dataset/{dataset_id}/related_objects" + method="GET", endpoint=f"/dataset/{dataset_id}/related_objects" ) - + # Handle different response formats if isinstance(related_objects, dict): if "dashboards" in related_objects: dashboards_data = related_objects["dashboards"] - elif "result" in related_objects and 
isinstance(related_objects["result"], dict): - dashboards_data = related_objects["result"].get("dashboards", []) + elif "result" in related_objects and isinstance( + related_objects["result"], dict + ): + dashboards_data = related_objects["result"].get( + "dashboards", [] + ) else: dashboards_data = [] - + for dash in dashboards_data: if isinstance(dash, dict): dash_id = dash.get("id") if dash_id is None: continue - linked_dashboards.append({ - "id": int(dash_id), - "title": dash.get("dashboard_title") or dash.get("title", f"Dashboard {dash_id}"), - "slug": dash.get("slug") - }) + linked_dashboards.append( + { + "id": int(dash_id), + "title": dash.get("dashboard_title") + or dash.get("title", f"Dashboard {dash_id}"), + "slug": dash.get("slug"), + } + ) else: try: dash_id = int(dash) except (TypeError, ValueError): continue - linked_dashboards.append({ - "id": dash_id, - "title": f"Dashboard {dash_id}", - "slug": None - }) + linked_dashboards.append( + { + "id": dash_id, + "title": f"Dashboard {dash_id}", + "slug": None, + } + ) except Exception as e: - app_logger.warning(f"[get_dataset_detail][Warning] Failed to fetch related dashboards: {e}") + app_logger.warning( + f"[get_dataset_detail][Warning] Failed to fetch related dashboards: {e}" + ) linked_dashboards = [] - + # Extract SQL table information sql = dataset.get("sql", "") - + result = { "id": dataset.get("id"), "table_name": dataset.get("table_name"), @@ -1105,12 +1331,15 @@ class SupersetClient: "linked_dashboard_count": len(linked_dashboards), "is_sqllab_view": as_bool(dataset.get("is_sqllab_view"), default=False), "created_on": dataset.get("created_on"), - "changed_on": dataset.get("changed_on") + "changed_on": dataset.get("changed_on"), } - - app_logger.info(f"[get_dataset_detail][Exit] Got dataset {dataset_id} with {len(column_info)} columns and {len(linked_dashboards)} linked dashboards") + + app_logger.info( + f"[get_dataset_detail][Exit] Got dataset {dataset_id} with {len(column_info)} columns and 
{len(linked_dashboards)} linked dashboards" + ) return result - # [/DEF:backend.src.core.superset_client.SupersetClient.get_dataset_detail:Function] + + # [/DEF:get_dataset_detail:Function] # [DEF:SupersetClient.get_dataset:Function] # @COMPLEXITY: 3 @@ -1118,14 +1347,17 @@ class SupersetClient: # @PRE: dataset_id must exist. # @POST: Returns dataset details. # @DATA_CONTRACT: Input[dataset_id: int] -> Output[Dict] - # @RELATION: [CALLS] ->[APIClient.request] + # @RELATION: [CALLS] ->[request] def get_dataset(self, dataset_id: int) -> Dict: with belief_scope("SupersetClient.get_dataset", f"id={dataset_id}"): app_logger.info("[get_dataset][Enter] Fetching dataset %s.", dataset_id) - response = self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}") + response = self.network.request( + method="GET", endpoint=f"/dataset/{dataset_id}" + ) response = cast(Dict, response) app_logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id) return response + # [/DEF:SupersetClient.get_dataset:Function] # [DEF:SupersetClient.compile_dataset_preview:Function] @@ -1137,8 +1369,8 @@ class SupersetClient: # @RELATION: [CALLS] ->[SupersetClient.get_dataset] # @RELATION: [CALLS] ->[SupersetClient.build_dataset_preview_query_context] # @RELATION: [CALLS] ->[SupersetClient.build_dataset_preview_legacy_form_data] - # @RELATION: [CALLS] ->[APIClient.request] - # @RELATION: [CALLS] ->[SupersetClient._extract_compiled_sql_from_preview_response] + # @RELATION: [CALLS] ->[request] + # @RELATION: [CALLS] ->[_extract_compiled_sql_from_preview_response] # @SIDE_EFFECT: Performs upstream dataset lookup and preview network I/O against Superset. 
def compile_dataset_preview( self, @@ -1148,7 +1380,11 @@ class SupersetClient: ) -> Dict[str, Any]: with belief_scope("SupersetClient.compile_dataset_preview", f"id={dataset_id}"): dataset_response = self.get_dataset(dataset_id) - dataset_record = dataset_response.get("result", dataset_response) if isinstance(dataset_response, dict) else {} + dataset_record = ( + dataset_response.get("result", dataset_response) + if isinstance(dataset_response, dict) + else {} + ) query_context = self.build_dataset_preview_query_context( dataset_id=dataset_id, dataset_record=dataset_record, @@ -1161,7 +1397,9 @@ class SupersetClient: template_params=template_params or {}, effective_filters=effective_filters or [], ) - legacy_form_data_payload = json.dumps(legacy_form_data, sort_keys=True, default=str) + legacy_form_data_payload = json.dumps( + legacy_form_data, sort_keys=True, default=str + ) request_payload = json.dumps(query_context) strategy_attempts: List[Dict[str, Any]] = [] strategy_candidates: List[Dict[str, Any]] = [ @@ -1209,9 +1447,15 @@ class SupersetClient: "endpoint": endpoint_path, "endpoint_kind": endpoint_kind, "request_transport": request_transport, - "contains_root_datasource": endpoint_kind == "v1_chart_data" and "datasource" in query_context, - "contains_form_datasource": endpoint_kind.startswith("legacy_") and "datasource" in legacy_form_data, - "contains_query_object_datasource": bool(query_context.get("queries")) and isinstance(query_context["queries"][0], dict) and "datasource" in query_context["queries"][0], + "contains_root_datasource": endpoint_kind == "v1_chart_data" + and "datasource" in query_context, + "contains_form_datasource": endpoint_kind.startswith("legacy_") + and "datasource" in legacy_form_data, + "contains_query_object_datasource": bool( + query_context.get("queries") + ) + and isinstance(query_context["queries"][0], dict) + and "datasource" in query_context["queries"][0], "request_param_keys": request_param_keys, "request_payload_keys": 
request_payload_keys, } @@ -1222,8 +1466,12 @@ class SupersetClient: **strategy_diagnostics, "request_params": request_params, "request_payload": request_body, - "legacy_form_data": legacy_form_data if endpoint_kind.startswith("legacy_") else None, - "query_context": query_context if endpoint_kind == "v1_chart_data" else None, + "legacy_form_data": legacy_form_data + if endpoint_kind.startswith("legacy_") + else None, + "query_context": query_context + if endpoint_kind == "v1_chart_data" + else None, "template_param_count": len(template_params or {}), "filter_count": len(effective_filters or []), }, @@ -1236,7 +1484,9 @@ class SupersetClient: data=request_body, headers=request_headers or None, ) - normalized = self._extract_compiled_sql_from_preview_response(response) + normalized = self._extract_compiled_sql_from_preview_response( + response + ) normalized["query_context"] = query_context normalized["legacy_form_data"] = legacy_form_data normalized["endpoint"] = endpoint_path @@ -1254,8 +1504,12 @@ class SupersetClient: "dataset_id": dataset_id, **strategy_diagnostics, "success": True, - "compiled_sql_length": len(str(normalized.get("compiled_sql") or "")), - "response_diagnostics": normalized.get("response_diagnostics"), + "compiled_sql_length": len( + str(normalized.get("compiled_sql") or "") + ), + "response_diagnostics": normalized.get( + "response_diagnostics" + ), }, ) return normalized @@ -1280,6 +1534,7 @@ class SupersetClient: "Superset preview compilation failed for all known strategies " f"(attempts={strategy_attempts!r})" ) + # [/DEF:SupersetClient.compile_dataset_preview:Function] # [DEF:SupersetClient.build_dataset_preview_legacy_form_data:Function] @@ -1297,25 +1552,41 @@ class SupersetClient: template_params: Dict[str, Any], effective_filters: List[Dict[str, Any]], ) -> Dict[str, Any]: - with belief_scope("SupersetClient.build_dataset_preview_legacy_form_data", f"id={dataset_id}"): + with belief_scope( + 
"SupersetClient.build_dataset_preview_legacy_form_data", f"id={dataset_id}" + ): query_context = self.build_dataset_preview_query_context( dataset_id=dataset_id, dataset_record=dataset_record, template_params=template_params, effective_filters=effective_filters, ) - query_object = deepcopy(query_context.get("queries", [{}])[0] if query_context.get("queries") else {}) + query_object = deepcopy( + query_context.get("queries", [{}])[0] + if query_context.get("queries") + else {} + ) legacy_form_data = deepcopy(query_context.get("form_data", {})) legacy_form_data.pop("datasource", None) - legacy_form_data["metrics"] = deepcopy(query_object.get("metrics", ["count"])) + legacy_form_data["metrics"] = deepcopy( + query_object.get("metrics", ["count"]) + ) legacy_form_data["columns"] = deepcopy(query_object.get("columns", [])) legacy_form_data["orderby"] = deepcopy(query_object.get("orderby", [])) - legacy_form_data["annotation_layers"] = deepcopy(query_object.get("annotation_layers", [])) + legacy_form_data["annotation_layers"] = deepcopy( + query_object.get("annotation_layers", []) + ) legacy_form_data["row_limit"] = query_object.get("row_limit", 1000) legacy_form_data["series_limit"] = query_object.get("series_limit", 0) - legacy_form_data["url_params"] = deepcopy(query_object.get("url_params", template_params)) - legacy_form_data["applied_time_extras"] = deepcopy(query_object.get("applied_time_extras", {})) - legacy_form_data["result_format"] = query_context.get("result_format", "json") + legacy_form_data["url_params"] = deepcopy( + query_object.get("url_params", template_params) + ) + legacy_form_data["applied_time_extras"] = deepcopy( + query_object.get("applied_time_extras", {}) + ) + legacy_form_data["result_format"] = query_context.get( + "result_format", "json" + ) legacy_form_data["result_type"] = query_context.get("result_type", "query") legacy_form_data["force"] = bool(query_context.get("force", True)) extras = query_object.get("extras") @@ -1333,10 +1604,13 @@ 
class SupersetClient: "contains_form_datasource": "datasource" in legacy_form_data, "legacy_form_data_keys": sorted(legacy_form_data.keys()), "legacy_extra_filters": legacy_form_data.get("extra_filters", []), - "legacy_extra_form_data": legacy_form_data.get("extra_form_data", {}), + "legacy_extra_form_data": legacy_form_data.get( + "extra_form_data", {} + ), }, ) return legacy_form_data + # [/DEF:SupersetClient.build_dataset_preview_legacy_form_data:Function] # [DEF:SupersetClient.build_dataset_preview_query_context:Function] @@ -1345,7 +1619,7 @@ class SupersetClient: # @PRE: dataset_record should come from Superset dataset detail when possible. # @POST: Returns an explicit chart-data payload based on current session inputs and dataset metadata. # @DATA_CONTRACT: Input[dataset_id:int,dataset_record:Dict,template_params:Dict,effective_filters:List[Dict]] -> Output[Dict[str, Any]] - # @RELATION: [CALLS] ->[SupersetClient._normalize_effective_filters_for_query_context] + # @RELATION: [CALLS] ->[_normalize_effective_filters_for_query_context] # @SIDE_EFFECT: Emits reasoning and reflection logs for deterministic preview payload construction. 
def build_dataset_preview_query_context( self, @@ -1354,9 +1628,15 @@ class SupersetClient: template_params: Dict[str, Any], effective_filters: List[Dict[str, Any]], ) -> Dict[str, Any]: - with belief_scope("SupersetClient.build_dataset_preview_query_context", f"id={dataset_id}"): + with belief_scope( + "SupersetClient.build_dataset_preview_query_context", f"id={dataset_id}" + ): normalized_template_params = deepcopy(template_params or {}) - normalized_filter_payload = self._normalize_effective_filters_for_query_context(effective_filters or []) + normalized_filter_payload = ( + self._normalize_effective_filters_for_query_context( + effective_filters or [] + ) + ) normalized_filters = normalized_filter_payload["filters"] normalized_extra_form_data = normalized_filter_payload["extra_form_data"] @@ -1374,9 +1654,14 @@ class SupersetClient: datasource_payload["type"] = datasource_type serialized_dataset_template_params = dataset_record.get("template_params") - if isinstance(serialized_dataset_template_params, str) and serialized_dataset_template_params.strip(): + if ( + isinstance(serialized_dataset_template_params, str) + and serialized_dataset_template_params.strip() + ): try: - parsed_dataset_template_params = json.loads(serialized_dataset_template_params) + parsed_dataset_template_params = json.loads( + serialized_dataset_template_params + ) if isinstance(parsed_dataset_template_params, dict): for key, value in parsed_dataset_template_params.items(): normalized_template_params.setdefault(str(key), value) @@ -1408,7 +1693,9 @@ class SupersetClient: if schema: query_object["schema"] = schema - time_range = extra_form_data.get("time_range") or dataset_record.get("default_time_range") + time_range = extra_form_data.get("time_range") or dataset_record.get( + "default_time_range" + ) if time_range: query_object["time_range"] = time_range extra_form_data["time_range"] = time_range @@ -1444,24 +1731,30 @@ class SupersetClient: "dataset_id": dataset_id, "datasource": 
datasource_payload, "normalized_effective_filters": normalized_filters, - "normalized_filter_diagnostics": normalized_filter_payload["diagnostics"], + "normalized_filter_diagnostics": normalized_filter_payload[ + "diagnostics" + ], "result_type": result_type, "result_format": result_format, }, ) return payload - # [/DEF:backend.src.core.superset_client.SupersetClient.build_dataset_preview_query_context:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._normalize_effective_filters_for_query_context:Function] + # [/DEF:SupersetClient.build_dataset_preview_query_context:Function] + + # [DEF:_normalize_effective_filters_for_query_context:Function] # @COMPLEXITY: 3 # @PURPOSE: Convert execution mappings into Superset chart-data filter objects. # @PRE: effective_filters may contain mapping metadata and arbitrary scalar/list values. # @POST: Returns only valid filter dictionaries suitable for the chart-data query payload. + # @RELATION: [DEPENDS_ON] ->[FilterStateModels] def _normalize_effective_filters_for_query_context( self, effective_filters: List[Dict[str, Any]], ) -> Dict[str, Any]: - with belief_scope("SupersetClient._normalize_effective_filters_for_query_context"): + with belief_scope( + "SupersetClient._normalize_effective_filters_for_query_context" + ): normalized_filters: List[Dict[str, Any]] = [] merged_extra_form_data: Dict[str, Any] = {} diagnostics: List[Dict[str, Any]] = [] @@ -1511,7 +1804,9 @@ class SupersetClient: elif preserved_extra_form_data: outgoing_clauses = [] else: - column = str(item.get("variable_name") or item.get("filter_name") or "").strip() + column = str( + item.get("variable_name") or item.get("filter_name") or "" + ).strip() if column and value is not None: operator = "IN" if isinstance(value, list) else "==" outgoing_clauses.append( @@ -1554,17 +1849,23 @@ class SupersetClient: "extra_form_data": merged_extra_form_data, "diagnostics": diagnostics, } - # 
[/DEF:backend.src.core.superset_client.SupersetClient._normalize_effective_filters_for_query_context:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._extract_compiled_sql_from_preview_response:Function] + # [/DEF:_normalize_effective_filters_for_query_context:Function] + + # [DEF:_extract_compiled_sql_from_preview_response:Function] # @COMPLEXITY: 3 # @PURPOSE: Normalize compiled SQL from either chart-data or legacy form_data preview responses. # @PRE: response must be the decoded preview response body from a supported Superset endpoint. # @POST: Returns compiled SQL and raw response or raises SupersetAPIError when the endpoint does not expose query text. - def _extract_compiled_sql_from_preview_response(self, response: Any) -> Dict[str, Any]: + # @RELATION: [DEPENDS_ON] ->[SupersetAPIError] + def _extract_compiled_sql_from_preview_response( + self, response: Any + ) -> Dict[str, Any]: with belief_scope("SupersetClient._extract_compiled_sql_from_preview_response"): if not isinstance(response, dict): - raise SupersetAPIError("Superset preview response was not a JSON object") + raise SupersetAPIError( + "Superset preview response was not a JSON object" + ) response_diagnostics: List[Dict[str, Any]] = [] result_payload = response.get("result") @@ -1572,7 +1873,12 @@ class SupersetClient: for index, item in enumerate(result_payload): if not isinstance(item, dict): continue - compiled_sql = str(item.get("query") or item.get("sql") or item.get("compiled_sql") or "").strip() + compiled_sql = str( + item.get("query") + or item.get("sql") + or item.get("compiled_sql") + or "" + ).strip() response_diagnostics.append( { "index": index, @@ -1623,7 +1929,8 @@ class SupersetClient: "Superset preview response did not expose compiled SQL " f"(diagnostics={response_diagnostics!r})" ) - # [/DEF:backend.src.core.superset_client.SupersetClient._extract_compiled_sql_from_preview_response:Function] + + # [/DEF:_extract_compiled_sql_from_preview_response:Function] # 
[DEF:SupersetClient.update_dataset:Function] # @COMPLEXITY: 3 @@ -1632,7 +1939,7 @@ class SupersetClient: # @POST: Dataset is updated in Superset. # @DATA_CONTRACT: Input[dataset_id: int, data: Dict] -> Output[Dict] # @SIDE_EFFECT: Modifies resource in upstream Superset environment. - # @RELATION: [CALLS] ->[APIClient.request] + # @RELATION: [CALLS] ->[request] def update_dataset(self, dataset_id: int, data: Dict) -> Dict: with belief_scope("SupersetClient.update_dataset", f"id={dataset_id}"): app_logger.info("[update_dataset][Enter] Updating dataset %s.", dataset_id) @@ -1640,11 +1947,12 @@ class SupersetClient: method="PUT", endpoint=f"/dataset/{dataset_id}", data=json.dumps(data), - headers={'Content-Type': 'application/json'} + headers={"Content-Type": "application/json"}, ) response = cast(Dict, response) app_logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id) return response + # [/DEF:SupersetClient.update_dataset:Function] # [DEF:SupersetClient.get_databases:Function] @@ -1653,75 +1961,80 @@ class SupersetClient: # @PRE: Client is authenticated. # @POST: Returns total count and list of databases. 
# @DATA_CONTRACT: Input[query: Optional[Dict]] -> Output[Tuple[int, List[Dict]]] - # @RELATION: [CALLS] ->[SupersetClient._fetch_all_pages] + # @RELATION: [CALLS] ->[_fetch_all_pages] def get_databases(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]: with belief_scope("get_databases"): app_logger.info("[get_databases][Enter] Fetching databases.") validated_query = self._validate_query_params(query or {}) - if 'columns' not in validated_query: - validated_query['columns'] = [] - + if "columns" not in validated_query: + validated_query["columns"] = [] + paginated_data = self._fetch_all_pages( endpoint="/database/", - pagination_options={"base_query": validated_query, "results_field": "result"}, + pagination_options={ + "base_query": validated_query, + "results_field": "result", + }, ) total_count = len(paginated_data) app_logger.info("[get_databases][Exit] Found %d databases.", total_count) return total_count, paginated_data + # [/DEF:SupersetClient.get_databases:Function] - # [DEF:backend.src.core.superset_client.SupersetClient.get_database:Function] + # [DEF:get_database:Function] # @COMPLEXITY: 3 # @PURPOSE: ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡŽ ΠΎ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Π±Π°Π·Π΅ Π΄Π°Π½Π½Ρ‹Ρ… ΠΏΠΎ Π΅Ρ‘ ID. # @PRE: database_id must exist. # @POST: Returns database details. 
# @DATA_CONTRACT: Input[database_id: int] -> Output[Dict] - # @RELATION: [CALLS] ->[backend.src.core.utils.network.APIClient.request] + # @RELATION: [CALLS] ->[request] def get_database(self, database_id: int) -> Dict: with belief_scope("get_database"): app_logger.info("[get_database][Enter] Fetching database %s.", database_id) - response = self.network.request(method="GET", endpoint=f"/database/{database_id}") + response = self.network.request( + method="GET", endpoint=f"/database/{database_id}" + ) response = cast(Dict, response) app_logger.info("[get_database][Exit] Got database %s.", database_id) return response - # [/DEF:backend.src.core.superset_client.SupersetClient.get_database:Function] - # [DEF:backend.src.core.superset_client.SupersetClient.get_databases_summary:Function] + # [/DEF:get_database:Function] + + # [DEF:get_databases_summary:Function] # @COMPLEXITY: 3 # @PURPOSE: Fetch a summary of databases including uuid, name, and engine. # @PRE: Client is authenticated. # @POST: Returns list of database summaries. 
# @DATA_CONTRACT: None -> Output[List[Dict]] - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient.get_databases] + # @RELATION: [CALLS] ->[SupersetClient.get_databases] def get_databases_summary(self) -> List[Dict]: with belief_scope("SupersetClient.get_databases_summary"): - query = { - "columns": ["uuid", "database_name", "backend"] - } + query = {"columns": ["uuid", "database_name", "backend"]} _, databases = self.get_databases(query=query) - + # Map 'backend' to 'engine' for consistency with contracts for db in databases: - db['engine'] = db.pop('backend', None) - - return databases - # [/DEF:backend.src.core.superset_client.SupersetClient.get_databases_summary:Function] + db["engine"] = db.pop("backend", None) - # [DEF:backend.src.core.superset_client.SupersetClient.get_database_by_uuid:Function] + return databases + + # [/DEF:get_databases_summary:Function] + + # [DEF:get_database_by_uuid:Function] # @COMPLEXITY: 3 # @PURPOSE: Find a database by its UUID. # @PRE: db_uuid must be a valid UUID string. # @POST: Returns database info or None. # @DATA_CONTRACT: Input[db_uuid: str] -> Output[Optional[Dict]] - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient.get_databases] + # @RELATION: [CALLS] ->[SupersetClient.get_databases] def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]: with belief_scope("SupersetClient.get_database_by_uuid", f"uuid={db_uuid}"): - query = { - "filters": [{"col": "uuid", "op": "eq", "value": db_uuid}] - } + query = {"filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]} _, databases = self.get_databases(query=query) return databases[0] if databases else None - # [/DEF:backend.src.core.superset_client.SupersetClient.get_database_by_uuid:Function] + + # [/DEF:get_database_by_uuid:Function] # [DEF:SupersetClient._resolve_target_id_for_delete:Function] # @COMPLEXITY: 1 @@ -1729,21 +2042,38 @@ class SupersetClient: # @PRE: Either dash_id or dash_slug should be provided. 
# @POST: Returns the resolved ID or None. # @RELATION: [CALLS] ->[SupersetClient.get_dashboards] - def _resolve_target_id_for_delete(self, dash_id: Optional[int], dash_slug: Optional[str]) -> Optional[int]: + def _resolve_target_id_for_delete( + self, dash_id: Optional[int], dash_slug: Optional[str] + ) -> Optional[int]: with belief_scope("_resolve_target_id_for_delete"): if dash_id is not None: return dash_id if dash_slug is not None: - app_logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug) + app_logger.debug( + "[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", + dash_slug, + ) try: - _, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]}) + _, candidates = self.get_dashboards( + query={ + "filters": [{"col": "slug", "op": "eq", "value": dash_slug}] + } + ) if candidates: target_id = candidates[0]["id"] - app_logger.debug("[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", target_id) + app_logger.debug( + "[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", + target_id, + ) return target_id except Exception as e: - app_logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e) + app_logger.warning( + "[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", + dash_slug, + e, + ) return None + # [/DEF:SupersetClient._resolve_target_id_for_delete:Function] # [DEF:SupersetClient._do_import:Function] @@ -1757,18 +2087,25 @@ class SupersetClient: app_logger.debug(f"[_do_import][State] Uploading file: {file_name}") file_path = Path(file_name) if not file_path.exists(): - app_logger.error(f"[_do_import][Failure] File does not exist: {file_name}") + app_logger.error( + f"[_do_import][Failure] File does not exist: {file_name}" + ) raise FileNotFoundError(f"File does not exist: {file_name}") - + return self.network.upload_file( endpoint="/dashboard/import/", - 
file_info={"file_obj": file_path, "file_name": file_path.name, "form_field": "formData"}, + file_info={ + "file_obj": file_path, + "file_name": file_path.name, + "form_field": "formData", + }, extra_data={"overwrite": "true"}, timeout=self.env.timeout * 2, ) + # [/DEF:SupersetClient._do_import:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._validate_export_response:Function] + # [DEF:_validate_export_response:Function] # @COMPLEXITY: 1 # @PURPOSE: Validates that the export response is a non-empty ZIP archive. # @PRE: response must be a valid requests.Response object. @@ -1777,12 +2114,15 @@ class SupersetClient: with belief_scope("_validate_export_response"): content_type = response.headers.get("Content-Type", "") if "application/zip" not in content_type: - raise SupersetAPIError(f"ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ Π½Π΅ ZIP-Π°Ρ€Ρ…ΠΈΠ² (Content-Type: {content_type})") + raise SupersetAPIError( + f"ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ Π½Π΅ ZIP-Π°Ρ€Ρ…ΠΈΠ² (Content-Type: {content_type})" + ) if not response.content: raise SupersetAPIError("ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½Ρ‹ пустыС Π΄Π°Π½Π½Ρ‹Π΅ ΠΏΡ€ΠΈ экспортС") - # [/DEF:backend.src.core.superset_client.SupersetClient._validate_export_response:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._resolve_export_filename:Function] + # [/DEF:_validate_export_response:Function] + + # [DEF:_resolve_export_filename:Function] # @COMPLEXITY: 1 # @PURPOSE: Determines the filename for an exported dashboard. # @PRE: response must contain Content-Disposition header or dashboard_id must be provided. 
@@ -1792,13 +2132,18 @@ class SupersetClient: filename = get_filename_from_headers(dict(response.headers)) if not filename: from datetime import datetime + timestamp = datetime.now().strftime("%Y%m%dT%H%M%S") filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip" - app_logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename) + app_logger.warning( + "[_resolve_export_filename][Warning] Generated filename: %s", + filename, + ) return filename - # [/DEF:backend.src.core.superset_client.SupersetClient._resolve_export_filename:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._validate_query_params:Function] + # [/DEF:_resolve_export_filename:Function] + + # [DEF:_validate_query_params:Function] # @COMPLEXITY: 1 # @PURPOSE: Ensures query parameters have default page and page_size. # @PRE: query can be None or a dictionary. @@ -1809,14 +2154,15 @@ class SupersetClient: # Using 100 avoids partial fetches when larger values are silently truncated. base_query = {"page": 0, "page_size": 100} return {**base_query, **(query or {})} - # [/DEF:backend.src.core.superset_client.SupersetClient._validate_query_params:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._fetch_total_object_count:Function] + # [/DEF:_validate_query_params:Function] + + # [DEF:_fetch_total_object_count:Function] # @COMPLEXITY: 1 # @PURPOSE: Fetches the total number of items for a given endpoint. # @PRE: endpoint must be a valid Superset API path. # @POST: Returns the total count as an integer. 
- # @RELATION: [CALLS] ->[backend.src.core.utils.network.APIClient.fetch_paginated_count] + # @RELATION: [CALLS] ->[fetch_paginated_count] def _fetch_total_object_count(self, endpoint: str) -> int: with belief_scope("_fetch_total_object_count"): return self.network.fetch_paginated_count( @@ -1824,19 +2170,24 @@ class SupersetClient: query_params={"page": 0, "page_size": 1}, count_field="count", ) - # [/DEF:backend.src.core.superset_client.SupersetClient._fetch_total_object_count:Function] - # [DEF:backend.src.core.superset_client.SupersetClient._fetch_all_pages:Function] + # [/DEF:_fetch_total_object_count:Function] + + # [DEF:_fetch_all_pages:Function] # @COMPLEXITY: 1 # @PURPOSE: Iterates through all pages to collect all data items. # @PRE: pagination_options must contain base_query, total_count, and results_field. # @POST: Returns a combined list of all items. + # @RELATION: [CALLS] ->[fetch_paginated_data] def _fetch_all_pages(self, endpoint: str, pagination_options: Dict) -> List[Dict]: with belief_scope("_fetch_all_pages"): - return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options) - # [/DEF:backend.src.core.superset_client.SupersetClient._fetch_all_pages:Function] + return self.network.fetch_paginated_data( + endpoint=endpoint, pagination_options=pagination_options + ) - # [DEF:backend.src.core.superset_client.SupersetClient._validate_import_file:Function] + # [/DEF:_fetch_all_pages:Function] + + # [DEF:_validate_import_file:Function] # @COMPLEXITY: 1 # @PURPOSE: Validates that the file to be imported is a valid ZIP with metadata.yaml. # @PRE: zip_path must be a path to a file. 
@@ -1850,52 +2201,76 @@ class SupersetClient: raise SupersetAPIError(f"Π€Π°ΠΉΠ» {zip_path} Π½Π΅ являСтся ZIP-Π°Ρ€Ρ…ΠΈΠ²ΠΎΠΌ") with zipfile.ZipFile(path, "r") as zf: if not any(n.endswith("metadata.yaml") for n in zf.namelist()): - raise SupersetAPIError(f"Архив {zip_path} Π½Π΅ содСрТит 'metadata.yaml'") - # [/DEF:backend.src.core.superset_client.SupersetClient._validate_import_file:Function] + raise SupersetAPIError( + f"Архив {zip_path} Π½Π΅ содСрТит 'metadata.yaml'" + ) - # [DEF:backend.src.core.superset_client.SupersetClient.get_all_resources:Function] + # [/DEF:_validate_import_file:Function] + + # [DEF:get_all_resources:Function] # @COMPLEXITY: 3 # @PURPOSE: Fetches all resources of a given type with id, uuid, and name columns. # @PARAM: resource_type (str) - One of "chart", "dataset", "dashboard". # @PRE: Client is authenticated. resource_type is valid. # @POST: Returns a list of resource dicts with at minimum id, uuid, and name fields. # @RETURN: List[Dict] - def get_all_resources(self, resource_type: str, since_dttm: Optional[datetime] = None) -> List[Dict]: - with belief_scope("SupersetClient.get_all_resources", f"type={resource_type}, since={since_dttm}"): + # @RELATION: [CALLS] ->[_fetch_all_pages] + def get_all_resources( + self, resource_type: str, since_dttm: Optional[datetime] = None + ) -> List[Dict]: + with belief_scope( + "SupersetClient.get_all_resources", + f"type={resource_type}, since={since_dttm}", + ): column_map = { - "chart": {"endpoint": "/chart/", "columns": ["id", "uuid", "slice_name"]}, - "dataset": {"endpoint": "/dataset/", "columns": ["id", "uuid", "table_name"]}, - "dashboard": {"endpoint": "/dashboard/", "columns": ["id", "uuid", "slug", "dashboard_title"]}, + "chart": { + "endpoint": "/chart/", + "columns": ["id", "uuid", "slice_name"], + }, + "dataset": { + "endpoint": "/dataset/", + "columns": ["id", "uuid", "table_name"], + }, + "dashboard": { + "endpoint": "/dashboard/", + "columns": ["id", "uuid", "slug", "dashboard_title"], 
+ }, } config = column_map.get(resource_type) if not config: - app_logger.warning("[get_all_resources][Warning] Unknown resource type: %s", resource_type) + app_logger.warning( + "[get_all_resources][Warning] Unknown resource type: %s", + resource_type, + ) return [] - + query = {"columns": config["columns"]} - + if since_dttm: import math + # Use int milliseconds to be safe timestamp_ms = math.floor(since_dttm.timestamp() * 1000) - + query["filters"] = [ - { - "col": "changed_on_dttm", - "opr": "gt", - "value": timestamp_ms - } + {"col": "changed_on_dttm", "opr": "gt", "value": timestamp_ms} ] - + validated = self._validate_query_params(query) data = self._fetch_all_pages( endpoint=config["endpoint"], pagination_options={"base_query": validated, "results_field": "result"}, ) - app_logger.info("[get_all_resources][Exit] Fetched %d %s resources.", len(data), resource_type) + app_logger.info( + "[get_all_resources][Exit] Fetched %d %s resources.", + len(data), + resource_type, + ) return data - # [/DEF:backend.src.core.superset_client.SupersetClient.get_all_resources:Function] -# [/DEF:backend.src.core.superset_client.SupersetClient:Class] + # [/DEF:get_all_resources:Function] -# [/DEF:backend.src.core.superset_client:Module] + +# [/DEF:SupersetClient:Class] + +# [/DEF:SupersetClientModule:Module] diff --git a/backend/src/core/utils/superset_context_extractor.py b/backend/src/core/utils/superset_context_extractor.py index 721aa699..aa87cca0 100644 --- a/backend/src/core/utils/superset_context_extractor.py +++ b/backend/src/core/utils/superset_context_extractor.py @@ -3,7 +3,7 @@ # @SEMANTICS: dataset_review, superset, link_parsing, context_recovery, partial_recovery # @PURPOSE: Recover dataset and dashboard context from Superset links while preserving explicit partial-recovery markers. 
# @LAYER: Infra -# @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient:Class] +# @RELATION: [DEPENDS_ON] ->[ImportedFilter] # @RELATION: [DEPENDS_ON] ->[ImportedFilter] # @RELATION: [DEPENDS_ON] ->[TemplateVariable] # @PRE: Superset link or dataset reference must be parseable enough to resolve an environment-scoped target resource. @@ -18,7 +18,7 @@ import json import re from copy import deepcopy from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional, Set +from typing import Any, Dict, List, Optional, Set, cast from urllib.parse import parse_qs, unquote, urlparse from src.core.config_models import Environment @@ -26,6 +26,8 @@ from src.core.logger import belief_scope, logger from src.core.superset_client import SupersetClient # [/DEF:SupersetContextExtractor.imports:Block] +logger = cast(Any, logger) + # [DEF:SupersetParsedContext:Class] # @COMPLEXITY: 2 @@ -42,13 +44,15 @@ class SupersetParsedContext: imported_filters: List[Dict[str, Any]] = field(default_factory=list) unresolved_references: List[str] = field(default_factory=list) partial_recovery: bool = False + + # [/DEF:SupersetParsedContext:Class] # [DEF:SupersetContextExtractor:Class] # @COMPLEXITY: 4 # @PURPOSE: Parse supported Superset URLs and recover canonical dataset/dashboard references for review-session intake. -# @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient] +# @RELATION: [DEPENDS_ON] ->[Environment] # @PRE: constructor receives a configured environment with a usable Superset base URL. # @POST: extractor instance is ready to parse links against one Superset environment. # @SIDE_EFFECT: downstream parse operations may call Superset APIs through SupersetClient. @@ -56,15 +60,18 @@ class SupersetContextExtractor: # [DEF:SupersetContextExtractor.__init__:Function] # @COMPLEXITY: 2 # @PURPOSE: Bind extractor to one Superset environment and client instance. 
- def __init__(self, environment: Environment, client: Optional[SupersetClient] = None) -> None: + def __init__( + self, environment: Environment, client: Optional[SupersetClient] = None + ) -> None: self.environment = environment self.client = client or SupersetClient(environment) + # [/DEF:SupersetContextExtractor.__init__:Function] # [DEF:SupersetContextExtractor.parse_superset_link:Function] # @COMPLEXITY: 4 # @PURPOSE: Extract candidate identifiers and query state from supported Superset URLs. - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient] + # @RELATION: [CALLS] ->[SupersetClient.get_dashboard_detail] # @PRE: link is a non-empty Superset URL compatible with the configured environment. # @POST: returns resolved dataset/dashboard context, preserving explicit partial-recovery state if some identifiers cannot be confirmed. # @SIDE_EFFECT: may issue Superset API reads to resolve dataset references from dashboard or chart URLs. @@ -115,12 +122,16 @@ class SupersetContextExtractor: resource_type = "dashboard" partial_recovery = True dataset_ref = f"dashboard_permalink:{dashboard_permalink_key}" - unresolved_references.append("dashboard_permalink_dataset_binding_unresolved") + unresolved_references.append( + "dashboard_permalink_dataset_binding_unresolved" + ) logger.reason( "Resolving dashboard permalink state from Superset", extra={"permalink_key": dashboard_permalink_key}, ) - permalink_payload = self.client.get_dashboard_permalink_state(dashboard_permalink_key) + permalink_payload = self.client.get_dashboard_permalink_state( + dashboard_permalink_key + ) permalink_state = ( permalink_payload.get("state", permalink_payload) if isinstance(permalink_payload, dict) @@ -137,8 +148,12 @@ class SupersetContextExtractor: "Extracted native filters from permalink dataMask", extra={"filter_count": len(data_mask)}, ) - resolved_dashboard_id = self._extract_dashboard_id_from_state(permalink_state) - resolved_chart_id = 
self._extract_chart_id_from_state(permalink_state) + resolved_dashboard_id = self._extract_dashboard_id_from_state( + permalink_state + ) + resolved_chart_id = self._extract_chart_id_from_state( + permalink_state + ) if resolved_dashboard_id is not None: dashboard_id = resolved_dashboard_id unresolved_references = [ @@ -146,10 +161,12 @@ class SupersetContextExtractor: for item in unresolved_references if item != "dashboard_permalink_dataset_binding_unresolved" ] - dataset_id, unresolved_references = self._recover_dataset_binding_from_dashboard( - dashboard_id=dashboard_id, - dataset_ref=dataset_ref, - unresolved_references=unresolved_references, + dataset_id, unresolved_references = ( + self._recover_dataset_binding_from_dashboard( + dashboard_id=dashboard_id, + dataset_ref=dataset_ref, + unresolved_references=unresolved_references, + ) ) if dataset_id is not None: dataset_ref = f"dataset:{dataset_id}" @@ -162,19 +179,30 @@ class SupersetContextExtractor: ] try: chart_payload = self.client.get_chart(chart_id) - chart_data = chart_payload.get("result", chart_payload) if isinstance(chart_payload, dict) else {} + chart_data = ( + chart_payload.get("result", chart_payload) + if isinstance(chart_payload, dict) + else {} + ) datasource_id = chart_data.get("datasource_id") if datasource_id is not None: dataset_id = int(datasource_id) dataset_ref = f"dataset:{dataset_id}" logger.reason( "Recovered dataset reference from permalink chart context", - extra={"chart_id": chart_id, "dataset_id": dataset_id}, + extra={ + "chart_id": chart_id, + "dataset_id": dataset_id, + }, ) else: - unresolved_references.append("chart_dataset_binding_unresolved") + unresolved_references.append( + "chart_dataset_binding_unresolved" + ) except Exception as exc: - unresolved_references.append("chart_dataset_binding_unresolved") + unresolved_references.append( + "chart_dataset_binding_unresolved" + ) logger.explore( "Chart lookup failed during permalink recovery", extra={"chart_id": chart_id, 
"error": str(exc)}, @@ -186,19 +214,25 @@ class SupersetContextExtractor: ) elif dashboard_id is not None or dashboard_ref is not None: resource_type = "dashboard" - resolved_dashboard_ref = dashboard_id if dashboard_id is not None else dashboard_ref + resolved_dashboard_ref = ( + dashboard_id if dashboard_id is not None else dashboard_ref + ) + if resolved_dashboard_ref is None: + raise ValueError("Dashboard reference could not be resolved") logger.reason( "Resolving dashboard-bound dataset from Superset", extra={"dashboard_ref": resolved_dashboard_ref}, ) - + # Resolve dashboard detail first β€” handles both numeric ID and slug, # ensuring dashboard_id is available for the native_filters_key fetch below. - dashboard_detail = self.client.get_dashboard_detail(resolved_dashboard_ref) + dashboard_detail = self.client.get_dashboard_detail( + resolved_dashboard_ref + ) resolved_dashboard_id = dashboard_detail.get("id") if resolved_dashboard_id is not None: dashboard_id = int(resolved_dashboard_id) - + # Check for native_filters_key in query params and fetch filter state. # This must run AFTER dashboard_id is resolved from slug above. 
native_filters_key = query_params.get("native_filters_key", [None])[0] @@ -206,7 +240,10 @@ class SupersetContextExtractor: try: logger.reason( "Fetching native filter state from Superset", - extra={"dashboard_id": dashboard_id, "filter_key": native_filters_key}, + extra={ + "dashboard_id": dashboard_id, + "filter_key": native_filters_key, + }, ) extracted = self.client.extract_native_filters_from_key( dashboard_id, native_filters_key @@ -221,14 +258,21 @@ class SupersetContextExtractor: else: logger.explore( "Native filter state returned empty dataMask", - extra={"dashboard_id": dashboard_id, "filter_key": native_filters_key}, + extra={ + "dashboard_id": dashboard_id, + "filter_key": native_filters_key, + }, ) except Exception as exc: logger.explore( "Failed to fetch native filter state from Superset", - extra={"dashboard_id": dashboard_id, "filter_key": native_filters_key, "error": str(exc)}, + extra={ + "dashboard_id": dashboard_id, + "filter_key": native_filters_key, + "error": str(exc), + }, ) - + datasets = dashboard_detail.get("datasets") or [] if datasets: first_dataset = datasets[0] @@ -280,7 +324,10 @@ class SupersetContextExtractor: ) logger.reason( "Canonicalized dataset reference from dataset detail", - extra={"dataset_ref": dataset_ref, "dataset_id": dataset_id}, + extra={ + "dataset_ref": dataset_ref, + "dataset_id": dataset_id, + }, ) except Exception as exc: partial_recovery = True @@ -316,17 +363,20 @@ class SupersetContextExtractor: }, ) return result + # [/DEF:SupersetContextExtractor.parse_superset_link:Function] # [DEF:SupersetContextExtractor.recover_imported_filters:Function] # @COMPLEXITY: 4 # @PURPOSE: Build imported filter entries from URL state and Superset-side saved context. - # @RELATION: [CALLS] ->[backend.src.core.superset_client.SupersetClient] + # @RELATION: [CALLS] ->[SupersetClient.get_dashboard] # @PRE: parsed_context comes from a successful Superset link parse for one environment. 
# @POST: returns explicit recovered and partial filter entries with preserved provenance and confirmation requirements. # @SIDE_EFFECT: may issue Superset reads for dashboard metadata enrichment. # @DATA_CONTRACT: Input[SupersetParsedContext] -> Output[List[Dict[str,Any]]] - def recover_imported_filters(self, parsed_context: SupersetParsedContext) -> List[Dict[str, Any]]: + def recover_imported_filters( + self, parsed_context: SupersetParsedContext + ) -> List[Dict[str, Any]]: with belief_scope("SupersetContextExtractor.recover_imported_filters"): recovered_filters: List[Dict[str, Any]] = [] seen_filter_keys: Set[str] = set() @@ -349,22 +399,46 @@ class SupersetContextExtractor: return existing = recovered_filters[existing_index] - if existing.get("display_name") in {None, "", existing.get("filter_name")} and candidate.get("display_name"): + if existing.get("display_name") in { + None, + "", + existing.get("filter_name"), + } and candidate.get("display_name"): existing["display_name"] = candidate["display_name"] - if existing.get("raw_value") is None and candidate.get("raw_value") is not None: + if ( + existing.get("raw_value") is None + and candidate.get("raw_value") is not None + ): existing["raw_value"] = candidate["raw_value"] - existing["confidence_state"] = candidate.get("confidence_state", "imported") - existing["requires_confirmation"] = candidate.get("requires_confirmation", False) - existing["recovery_status"] = candidate.get("recovery_status", "recovered") + existing["confidence_state"] = candidate.get( + "confidence_state", "imported" + ) + existing["requires_confirmation"] = candidate.get( + "requires_confirmation", False + ) + existing["recovery_status"] = candidate.get( + "recovery_status", "recovered" + ) existing["source"] = candidate.get("source", existing.get("source")) - if existing.get("normalized_value") is None and candidate.get("normalized_value") is not None: - existing["normalized_value"] = deepcopy(candidate["normalized_value"]) - if 
existing.get("notes") and candidate.get("notes") and candidate["notes"] not in existing["notes"]: - existing["notes"] = f'{existing["notes"]}; {candidate["notes"]}' + if ( + existing.get("normalized_value") is None + and candidate.get("normalized_value") is not None + ): + existing["normalized_value"] = deepcopy( + candidate["normalized_value"] + ) + if ( + existing.get("notes") + and candidate.get("notes") + and candidate["notes"] not in existing["notes"] + ): + existing["notes"] = f"{existing['notes']}; {candidate['notes']}" if parsed_context.dashboard_id is not None: try: - dashboard_payload = self.client.get_dashboard(parsed_context.dashboard_id) + dashboard_payload = self.client.get_dashboard( + parsed_context.dashboard_id + ) dashboard_record = ( dashboard_payload.get("result", dashboard_payload) if isinstance(dashboard_payload, dict) @@ -376,7 +450,9 @@ class SupersetContextExtractor: if not isinstance(json_metadata, dict): json_metadata = {} - native_filter_configuration = json_metadata.get("native_filter_configuration") or [] + native_filter_configuration = ( + json_metadata.get("native_filter_configuration") or [] + ) default_filters = json_metadata.get("default_filters") or {} if isinstance(default_filters, str) and default_filters.strip(): try: @@ -400,7 +476,9 @@ class SupersetContextExtractor: if not filter_name: continue - display_name = item.get("label") or item.get("name") or filter_name + display_name = ( + item.get("label") or item.get("name") or filter_name + ) filter_id = str(item.get("id") or "").strip() default_value = None @@ -413,7 +491,9 @@ class SupersetContextExtractor: "display_name": display_name, "raw_value": default_value, "source": "superset_native", - "recovery_status": "recovered" if default_value is not None else "partial", + "recovery_status": "recovered" + if default_value is not None + else "partial", "requires_confirmation": default_value is None, "notes": "Recovered from Superset dashboard native filter configuration", }, @@ 
-445,7 +525,9 @@ class SupersetContextExtractor: default_source="superset_url", default_note="Recovered from Superset URL state", ) - metadata_match = metadata_filters_by_id.get(normalized["filter_name"].strip().lower()) + metadata_match = metadata_filters_by_id.get( + normalized["filter_name"].strip().lower() + ) if metadata_match is not None: normalized["filter_name"] = metadata_match["filter_name"] normalized["display_name"] = metadata_match["display_name"] @@ -517,6 +599,7 @@ class SupersetContextExtractor: }, ) return recovered_filters + # [/DEF:SupersetContextExtractor.recover_imported_filters:Function] # [DEF:SupersetContextExtractor.discover_template_variables:Function] @@ -527,12 +610,16 @@ class SupersetContextExtractor: # @POST: returns deduplicated explicit variable records without executing Jinja or fabricating runtime values. # @SIDE_EFFECT: none. # @DATA_CONTRACT: Input[dataset_payload:Dict[str,Any]] -> Output[List[Dict[str,Any]]] - def discover_template_variables(self, dataset_payload: Dict[str, Any]) -> List[Dict[str, Any]]: + def discover_template_variables( + self, dataset_payload: Dict[str, Any] + ) -> List[Dict[str, Any]]: with belief_scope("SupersetContextExtractor.discover_template_variables"): discovered: List[Dict[str, Any]] = [] seen_variable_names: Set[str] = set() - for expression_source in self._collect_query_bearing_expressions(dataset_payload): + for expression_source in self._collect_query_bearing_expressions( + dataset_payload + ): for filter_match in re.finditer( r"filter_values\(\s*['\"]([^'\"]+)['\"]\s*\)", expression_source, @@ -570,11 +657,16 @@ class SupersetContextExtractor: default_value=self._normalize_default_literal(default_literal), ) - for jinja_match in re.finditer(r"\{\{\s*(.*?)\s*\}\}", expression_source, flags=re.DOTALL): + for jinja_match in re.finditer( + r"\{\{\s*(.*?)\s*\}\}", expression_source, flags=re.DOTALL + ): expression = str(jinja_match.group(1) or "").strip() if not expression: continue - if any(token 
in expression for token in ("filter_values(", "url_param(", "get_filters(")): + if any( + token in expression + for token in ("filter_values(", "url_param(", "get_filters(") + ): continue variable_name = self._extract_primary_jinja_identifier(expression) if not variable_name: @@ -584,7 +676,9 @@ class SupersetContextExtractor: seen_variable_names=seen_variable_names, variable_name=variable_name, expression_source=expression_source, - variable_kind="derived" if "." in expression or "|" in expression else "parameter", + variable_kind="derived" + if "." in expression or "|" in expression + else "parameter", is_required=True, default_value=None, ) @@ -598,12 +692,15 @@ class SupersetContextExtractor: }, ) return discovered + # [/DEF:SupersetContextExtractor.discover_template_variables:Function] # [DEF:SupersetContextExtractor.build_recovery_summary:Function] # @COMPLEXITY: 2 # @PURPOSE: Summarize recovered, partial, and unresolved context for session state and UX. - def build_recovery_summary(self, parsed_context: SupersetParsedContext) -> Dict[str, Any]: + def build_recovery_summary( + self, parsed_context: SupersetParsedContext + ) -> Dict[str, Any]: return { "dataset_ref": parsed_context.dataset_ref, "dataset_id": parsed_context.dataset_id, @@ -613,12 +710,15 @@ class SupersetContextExtractor: "unresolved_references": list(parsed_context.unresolved_references), "imported_filter_count": len(parsed_context.imported_filters), } + # [/DEF:SupersetContextExtractor.build_recovery_summary:Function] # [DEF:SupersetContextExtractor._extract_numeric_identifier:Function] # @COMPLEXITY: 2 # @PURPOSE: Extract a numeric identifier from a REST-like Superset URL path. 
- def _extract_numeric_identifier(self, path_parts: List[str], resource_name: str) -> Optional[int]: + def _extract_numeric_identifier( + self, path_parts: List[str], resource_name: str + ) -> Optional[int]: if resource_name not in path_parts: return None try: @@ -633,6 +733,7 @@ class SupersetContextExtractor: if not candidate.isdigit(): return None return int(candidate) + # [/DEF:SupersetContextExtractor._extract_numeric_identifier:Function] # [DEF:SupersetContextExtractor._extract_dashboard_reference:Function] @@ -653,6 +754,7 @@ class SupersetContextExtractor: if not candidate or candidate == "p": return None return candidate + # [/DEF:SupersetContextExtractor._extract_dashboard_reference:Function] # [DEF:SupersetContextExtractor._extract_dashboard_permalink_key:Function] @@ -674,6 +776,7 @@ class SupersetContextExtractor: if permalink_marker != "p" or not permalink_key: return None return permalink_key + # [/DEF:SupersetContextExtractor._extract_dashboard_permalink_key:Function] # [DEF:SupersetContextExtractor._extract_dashboard_id_from_state:Function] @@ -684,6 +787,7 @@ class SupersetContextExtractor: payload=state, candidate_keys={"dashboardId", "dashboard_id", "dashboard_id_value"}, ) + # [/DEF:SupersetContextExtractor._extract_dashboard_id_from_state:Function] # [DEF:SupersetContextExtractor._extract_chart_id_from_state:Function] @@ -694,12 +798,16 @@ class SupersetContextExtractor: payload=state, candidate_keys={"slice_id", "sliceId", "chartId", "chart_id"}, ) + # [/DEF:SupersetContextExtractor._extract_chart_id_from_state:Function] # [DEF:SupersetContextExtractor._search_nested_numeric_key:Function] # @COMPLEXITY: 3 # @PURPOSE: Recursively search nested dict/list payloads for the first numeric value under a candidate key set. 
- def _search_nested_numeric_key(self, payload: Any, candidate_keys: Set[str]) -> Optional[int]: + # @RELATION: [DEPENDS_ON] ->[SupersetContextExtractor.parse_superset_link] + def _search_nested_numeric_key( + self, payload: Any, candidate_keys: Set[str] + ) -> Optional[int]: if isinstance(payload, dict): for key, value in payload.items(): if key in candidate_keys: @@ -717,11 +825,13 @@ class SupersetContextExtractor: if found is not None: return found return None + # [/DEF:SupersetContextExtractor._search_nested_numeric_key:Function] # [DEF:SupersetContextExtractor._recover_dataset_binding_from_dashboard:Function] # @COMPLEXITY: 3 # @PURPOSE: Recover a dataset binding from resolved dashboard context while preserving explicit unresolved markers. + # @RELATION: [CALLS] ->[SupersetClient.get_dashboard_detail] def _recover_dataset_binding_from_dashboard( self, dashboard_id: int, @@ -744,7 +854,10 @@ class SupersetContextExtractor: "dataset_ref": dataset_ref, }, ) - if len(datasets) > 1 and "multiple_dashboard_datasets" not in unresolved_references: + if ( + len(datasets) > 1 + and "multiple_dashboard_datasets" not in unresolved_references + ): unresolved_references.append("multiple_dashboard_datasets") return resolved_dataset, unresolved_references if "dashboard_dataset_id_missing" not in unresolved_references: @@ -754,6 +867,7 @@ class SupersetContextExtractor: if "dashboard_dataset_binding_missing" not in unresolved_references: unresolved_references.append("dashboard_dataset_binding_missing") return None, unresolved_references + # [/DEF:SupersetContextExtractor._recover_dataset_binding_from_dashboard:Function] # [DEF:SupersetContextExtractor._decode_query_state:Function] @@ -777,12 +891,15 @@ class SupersetContextExtractor: ) query_state[key] = decoded_value return query_state + # [/DEF:SupersetContextExtractor._decode_query_state:Function] # [DEF:SupersetContextExtractor._extract_imported_filters:Function] # @COMPLEXITY: 2 # @PURPOSE: Normalize imported filters 
from decoded query state without fabricating missing values. - def _extract_imported_filters(self, query_state: Dict[str, Any]) -> List[Dict[str, Any]]: + def _extract_imported_filters( + self, query_state: Dict[str, Any] + ) -> List[Dict[str, Any]]: imported_filters: List[Dict[str, Any]] = [] native_filters_payload = query_state.get("native_filters") @@ -800,7 +917,8 @@ class SupersetContextExtractor: if item.get("column") and ("value" in item or "val" in item): direct_clause = { "col": item.get("column"), - "op": item.get("op") or ("IN" if isinstance(item.get("value"), list) else "=="), + "op": item.get("op") + or ("IN" if isinstance(item.get("value"), list) else "=="), "val": item.get("val", item.get("value")), } imported_filters.append( @@ -809,7 +927,9 @@ class SupersetContextExtractor: "raw_value": item.get("value"), "display_name": item.get("label") or item.get("name"), "normalized_value": { - "filter_clauses": [direct_clause] if isinstance(direct_clause, dict) else [], + "filter_clauses": [direct_clause] + if isinstance(direct_clause, dict) + else [], "extra_form_data": {}, "value_origin": "native_filters", }, @@ -834,7 +954,9 @@ class SupersetContextExtractor: raw_value = None normalized_value = { "filter_clauses": [], - "extra_form_data": deepcopy(extra_form_data) if isinstance(extra_form_data, dict) else {}, + "extra_form_data": deepcopy(extra_form_data) + if isinstance(extra_form_data, dict) + else {}, "value_origin": "unresolved", } @@ -868,10 +990,17 @@ class SupersetContextExtractor: # If still no value, try extraFormData directly for time_range, time_grain, etc. 
if raw_value is None and isinstance(extra_form_data, dict): # Common Superset filter fields - for field in ["time_range", "time_grain_sqla", "time_column", "granularity"]: + for field in [ + "time_range", + "time_grain_sqla", + "time_column", + "granularity", + ]: if field in extra_form_data: raw_value = extra_form_data[field] - normalized_value["value_origin"] = f"extra_form_data.{field}" + normalized_value["value_origin"] = ( + f"extra_form_data.{field}" + ) break imported_filters.append( @@ -881,7 +1010,9 @@ class SupersetContextExtractor: "display_name": display_name, "normalized_value": normalized_value, "source": "superset_permalink", - "recovery_status": "recovered" if raw_value is not None else "partial", + "recovery_status": "recovered" + if raw_value is not None + else "partial", "requires_confirmation": raw_value is None, "notes": "Recovered from Superset dashboard permalink state", } @@ -901,7 +1032,9 @@ class SupersetContextExtractor: raw_value = None normalized_value = { "filter_clauses": [], - "extra_form_data": deepcopy(extra_form_data) if isinstance(extra_form_data, dict) else {}, + "extra_form_data": deepcopy(extra_form_data) + if isinstance(extra_form_data, dict) + else {}, "value_origin": "unresolved", } @@ -935,10 +1068,17 @@ class SupersetContextExtractor: # If still no value, try extraFormData directly for time_range, time_grain, etc. 
if raw_value is None and isinstance(extra_form_data, dict): # Common Superset filter fields - for field in ["time_range", "time_grain_sqla", "time_column", "granularity"]: + for field in [ + "time_range", + "time_grain_sqla", + "time_column", + "granularity", + ]: if field in extra_form_data: raw_value = extra_form_data[field] - normalized_value["value_origin"] = f"extra_form_data.{field}" + normalized_value["value_origin"] = ( + f"extra_form_data.{field}" + ) break imported_filters.append( @@ -948,7 +1088,9 @@ class SupersetContextExtractor: "display_name": display_name, "normalized_value": normalized_value, "source": "superset_native_filters_key", - "recovery_status": "recovered" if raw_value is not None else "partial", + "recovery_status": "recovered" + if raw_value is not None + else "partial", "requires_confirmation": raw_value is None, "notes": "Recovered from Superset native_filters_key state", } @@ -960,7 +1102,9 @@ class SupersetContextExtractor: for index, item in enumerate(extra_filters): if not isinstance(item, dict): continue - filter_name = item.get("col") or item.get("column") or f"extra_filter_{index}" + filter_name = ( + item.get("col") or item.get("column") or f"extra_filter_{index}" + ) imported_filters.append( { "filter_name": str(filter_name), @@ -981,6 +1125,7 @@ class SupersetContextExtractor: ) return imported_filters + # [/DEF:SupersetContextExtractor._extract_imported_filters:Function] # [DEF:SupersetContextExtractor._normalize_imported_filter_payload:Function] @@ -996,15 +1141,24 @@ class SupersetContextExtractor: if "raw_value" not in payload and "value" in payload: raw_value = payload.get("value") - recovery_status = str( - payload.get("recovery_status") - or ("recovered" if raw_value is not None else "partial") - ).strip().lower() + recovery_status = ( + str( + payload.get("recovery_status") + or ("recovered" if raw_value is not None else "partial") + ) + .strip() + .lower() + ) requires_confirmation = bool( - 
payload.get("requires_confirmation", raw_value is None or recovery_status != "recovered") + payload.get( + "requires_confirmation", + raw_value is None or recovery_status != "recovered", + ) ) return { - "filter_name": str(payload.get("filter_name") or "unresolved_filter").strip(), + "filter_name": str( + payload.get("filter_name") or "unresolved_filter" + ).strip(), "display_name": payload.get("display_name"), "raw_value": raw_value, "normalized_value": payload.get("normalized_value"), @@ -1014,13 +1168,16 @@ class SupersetContextExtractor: "recovery_status": recovery_status, "notes": str(payload.get("notes") or default_note), } + # [/DEF:SupersetContextExtractor._normalize_imported_filter_payload:Function] # [DEF:SupersetContextExtractor._collect_query_bearing_expressions:Function] # @COMPLEXITY: 3 # @PURPOSE: Collect SQL and expression-bearing dataset fields for deterministic template-variable discovery. # @RELATION: [DEPENDS_ON] ->[SupersetContextExtractor.discover_template_variables] - def _collect_query_bearing_expressions(self, dataset_payload: Dict[str, Any]) -> List[str]: + def _collect_query_bearing_expressions( + self, dataset_payload: Dict[str, Any] + ) -> List[str]: expressions: List[str] = [] def append_expression(candidate: Any) -> None: @@ -1055,6 +1212,7 @@ class SupersetContextExtractor: append_expression(column.get("expression")) return expressions + # [/DEF:SupersetContextExtractor._collect_query_bearing_expressions:Function] # [DEF:SupersetContextExtractor._append_template_variable:Function] @@ -1087,6 +1245,7 @@ class SupersetContextExtractor: "mapping_status": "unmapped", } ) + # [/DEF:SupersetContextExtractor._append_template_variable:Function] # [DEF:SupersetContextExtractor._extract_primary_jinja_identifier:Function] @@ -1100,6 +1259,7 @@ class SupersetContextExtractor: if candidate in {"if", "else", "for", "set", "True", "False", "none", "None"}: return None return candidate + # 
[/DEF:SupersetContextExtractor._extract_primary_jinja_identifier:Function] # [DEF:SupersetContextExtractor._normalize_default_literal:Function] @@ -1110,9 +1270,8 @@ class SupersetContextExtractor: if not normalized_literal: return None if ( - (normalized_literal.startswith("'") and normalized_literal.endswith("'")) - or (normalized_literal.startswith('"') and normalized_literal.endswith('"')) - ): + normalized_literal.startswith("'") and normalized_literal.endswith("'") + ) or (normalized_literal.startswith('"') and normalized_literal.endswith('"')): return normalized_literal[1:-1] lowered = normalized_literal.lower() if lowered in {"true", "false"}: @@ -1126,7 +1285,10 @@ class SupersetContextExtractor: return float(normalized_literal) except ValueError: return normalized_literal + # [/DEF:SupersetContextExtractor._normalize_default_literal:Function] + + # [/DEF:SupersetContextExtractor:Class] -# [/DEF:SupersetContextExtractor:Module] \ No newline at end of file +# [/DEF:SupersetContextExtractor:Module] diff --git a/backend/src/models/filter_state.py b/backend/src/models/filter_state.py index fdd3af62..707b0292 100644 --- a/backend/src/models/filter_state.py +++ b/backend/src/models/filter_state.py @@ -1,4 +1,4 @@ -# [DEF:backend.src.models.filter_state:Module] +# [DEF:FilterStateModels:Module] # # @COMPLEXITY: 2 # @SEMANTICS: superset, native, filters, pydantic, models, dataclasses @@ -148,4 +148,4 @@ class ExtraFormDataMerge(BaseModel): # [/DEF:ExtraFormDataMerge:Model] -# [/DEF:backend.src.models.filter_state:Module] \ No newline at end of file +# [/DEF:FilterStateModels:Module] \ No newline at end of file diff --git a/backend/src/services/dataset_review/orchestrator.py b/backend/src/services/dataset_review/orchestrator.py index ac7d5253..14b23953 100644 --- a/backend/src/services/dataset_review/orchestrator.py +++ b/backend/src/services/dataset_review/orchestrator.py @@ -22,7 +22,7 @@ from dataclasses import dataclass, field from datetime import datetime 
import hashlib import json -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, cast from src.core.config_manager import ConfigManager from src.core.logger import belief_scope, logger @@ -72,6 +72,8 @@ from src.services.dataset_review.semantic_resolver import SemanticSourceResolver from src.services.dataset_review.event_logger import SessionEventPayload # [/DEF:DatasetReviewOrchestrator.imports:Block] +logger = cast(Any, logger) + # [DEF:StartSessionCommand:Class] # @COMPLEXITY: 2 @@ -82,6 +84,8 @@ class StartSessionCommand: environment_id: str source_kind: str source_input: str + + # [/DEF:StartSessionCommand:Class] @@ -93,6 +97,8 @@ class StartSessionResult: session: DatasetReviewSession parsed_context: Optional[SupersetParsedContext] = None findings: List[ValidationFinding] = field(default_factory=list) + + # [/DEF:StartSessionResult:Class] @@ -103,6 +109,8 @@ class StartSessionResult: class PreparePreviewCommand: user: User session_id: str + + # [/DEF:PreparePreviewCommand:Class] @@ -114,6 +122,8 @@ class PreparePreviewResult: session: DatasetReviewSession preview: CompiledPreview blocked_reasons: List[str] = field(default_factory=list) + + # [/DEF:PreparePreviewResult:Class] @@ -124,6 +134,8 @@ class PreparePreviewResult: class LaunchDatasetCommand: user: User session_id: str + + # [/DEF:LaunchDatasetCommand:Class] @@ -135,6 +147,8 @@ class LaunchDatasetResult: session: DatasetReviewSession run_context: DatasetRunContext blocked_reasons: List[str] = field(default_factory=list) + + # [/DEF:LaunchDatasetResult:Class] @@ -168,6 +182,7 @@ class DatasetReviewOrchestrator: self.config_manager = config_manager self.task_manager = task_manager self.semantic_resolver = semantic_resolver or SemanticSourceResolver() + # [/DEF:DatasetReviewOrchestrator.__init__:Function] # [DEF:DatasetReviewOrchestrator.start_session:Function] @@ -188,7 +203,9 @@ class DatasetReviewOrchestrator: normalized_environment_id = str(command.environment_id 
or "").strip() if not normalized_source_input: - logger.explore("Blocked dataset review session start due to empty source input") + logger.explore( + "Blocked dataset review session start due to empty source input" + ) raise ValueError("source_input must be non-empty") if normalized_source_kind not in {"superset_link", "dataset_selection"}: @@ -196,7 +213,9 @@ class DatasetReviewOrchestrator: "Blocked dataset review session start due to unsupported source kind", extra={"source_kind": normalized_source_kind}, ) - raise ValueError("source_kind must be 'superset_link' or 'dataset_selection'") + raise ValueError( + "source_kind must be 'superset_link' or 'dataset_selection'" + ) environment = self.config_manager.get_environment(normalized_environment_id) if environment is None: @@ -234,11 +253,15 @@ class DatasetReviewOrchestrator: if parsed_context.partial_recovery: readiness_state = ReadinessState.RECOVERY_REQUIRED recommended_action = RecommendedAction.REVIEW_DOCUMENTATION - findings.extend(self._build_partial_recovery_findings(parsed_context)) + findings.extend( + self._build_partial_recovery_findings(parsed_context) + ) else: readiness_state = ReadinessState.REVIEW_READY else: - dataset_ref, dataset_id = self._parse_dataset_selection(normalized_source_input) + dataset_ref, dataset_id = self._parse_dataset_selection( + normalized_source_input + ) readiness_state = ReadinessState.REVIEW_READY current_phase = SessionPhase.REVIEW @@ -255,17 +278,19 @@ class DatasetReviewOrchestrator: status=SessionStatus.ACTIVE, current_phase=current_phase, ) - persisted_session = self.repository.create_session(session) + persisted_session = cast(Any, self.repository.create_session(session)) recovered_filters: List[ImportedFilter] = [] template_variables: List[TemplateVariable] = [] execution_mappings: List[ExecutionMapping] = [] if normalized_source_kind == "superset_link" and parsed_context is not None: - recovered_filters, template_variables, execution_mappings, findings = 
self._build_recovery_bootstrap( - environment=environment, - session=persisted_session, - parsed_context=parsed_context, - findings=findings, + recovered_filters, template_variables, execution_mappings, findings = ( + self._build_recovery_bootstrap( + environment=environment, + session=persisted_session, + parsed_context=parsed_context, + findings=findings, + ) ) profile = self._build_initial_profile( @@ -286,7 +311,9 @@ class DatasetReviewOrchestrator: "dataset_ref": persisted_session.dataset_ref, "dataset_id": persisted_session.dataset_id, "dashboard_id": persisted_session.dashboard_id, - "partial_recovery": bool(parsed_context and parsed_context.partial_recovery), + "partial_recovery": bool( + parsed_context and parsed_context.partial_recovery + ), }, ) ) @@ -327,7 +354,10 @@ class DatasetReviewOrchestrator: ) logger.reason( "Linked recovery task to started dataset review session", - extra={"session_id": persisted_session.session_id, "task_id": active_task_id}, + extra={ + "session_id": persisted_session.session_id, + "task_id": active_task_id, + }, ) logger.reflect( @@ -347,6 +377,7 @@ class DatasetReviewOrchestrator: parsed_context=parsed_context, findings=findings, ) + # [/DEF:DatasetReviewOrchestrator.start_session:Function] # [DEF:DatasetReviewOrchestrator.prepare_launch_preview:Function] @@ -357,13 +388,20 @@ class DatasetReviewOrchestrator: # @POST: returns preview artifact in pending, ready, failed, or stale state. # @SIDE_EFFECT: persists preview attempt and upstream compilation diagnostics. 
# @DATA_CONTRACT: Input[PreparePreviewCommand] -> Output[PreparePreviewResult] - def prepare_launch_preview(self, command: PreparePreviewCommand) -> PreparePreviewResult: + def prepare_launch_preview( + self, command: PreparePreviewCommand + ) -> PreparePreviewResult: with belief_scope("DatasetReviewOrchestrator.prepare_launch_preview"): - session = self.repository.load_session_detail(command.session_id, command.user.id) + session = self.repository.load_session_detail( + command.session_id, command.user.id + ) if session is None or session.user_id != command.user.id: logger.explore( "Preview preparation rejected because owned session was not found", - extra={"session_id": command.session_id, "user_id": command.user.id}, + extra={ + "session_id": command.session_id, + "user_id": command.user.id, + }, ) raise ValueError("Session not found") @@ -451,6 +489,7 @@ class DatasetReviewOrchestrator: preview=persisted_preview, blocked_reasons=[], ) + # [/DEF:DatasetReviewOrchestrator.prepare_launch_preview:Function] # [DEF:DatasetReviewOrchestrator.launch_dataset:Function] @@ -464,11 +503,16 @@ class DatasetReviewOrchestrator: # @INVARIANT: launch remains blocked unless blocking findings are closed, approvals are satisfied, and the latest Superset preview fingerprint matches current execution inputs. 
def launch_dataset(self, command: LaunchDatasetCommand) -> LaunchDatasetResult: with belief_scope("DatasetReviewOrchestrator.launch_dataset"): - session = self.repository.load_session_detail(command.session_id, command.user.id) + session = self.repository.load_session_detail( + command.session_id, command.user.id + ) if session is None or session.user_id != command.user.id: logger.explore( "Launch rejected because owned session was not found", - extra={"session_id": command.session_id, "user_id": command.user.id}, + extra={ + "session_id": command.session_id, + "user_id": command.user.id, + }, ) raise ValueError("Session not found") @@ -579,6 +623,7 @@ class DatasetReviewOrchestrator: run_context=persisted_run_context, blocked_reasons=[], ) + # [/DEF:DatasetReviewOrchestrator.launch_dataset:Function] # [DEF:DatasetReviewOrchestrator._parse_dataset_selection:Function] @@ -601,6 +646,7 @@ class DatasetReviewOrchestrator: return normalized, None return normalized, None + # [/DEF:DatasetReviewOrchestrator._parse_dataset_selection:Function] # [DEF:DatasetReviewOrchestrator._build_initial_profile:Function] @@ -613,7 +659,9 @@ class DatasetReviewOrchestrator: parsed_context: Optional[SupersetParsedContext], dataset_ref: str, ) -> DatasetProfile: - dataset_name = dataset_ref.split(".")[-1] if dataset_ref else "Unresolved dataset" + dataset_name = ( + dataset_ref.split(".")[-1] if dataset_ref else "Unresolved dataset" + ) business_summary = ( f"Review session initialized for {dataset_ref}." 
if dataset_ref @@ -636,9 +684,12 @@ class DatasetReviewOrchestrator: completeness_score=0.25, confidence_state=confidence_state, has_blocking_findings=False, - has_warning_findings=bool(parsed_context and parsed_context.partial_recovery), + has_warning_findings=bool( + parsed_context and parsed_context.partial_recovery + ), manual_summary_locked=False, ) + # [/DEF:DatasetReviewOrchestrator._build_initial_profile:Function] # [DEF:DatasetReviewOrchestrator._build_partial_recovery_findings:Function] @@ -670,36 +721,57 @@ class DatasetReviewOrchestrator: ) ) return findings + # [/DEF:DatasetReviewOrchestrator._build_partial_recovery_findings:Function] # [DEF:DatasetReviewOrchestrator._build_recovery_bootstrap:Function] # @COMPLEXITY: 4 # @PURPOSE: Recover and materialize initial imported filters, template variables, and draft execution mappings after session creation. + # @RELATION: [CALLS] ->[SupersetContextExtractor.recover_imported_filters] + # @RELATION: [CALLS] ->[SupersetContextExtractor.discover_template_variables] + # @PRE: session belongs to the just-created review aggregate and parsed_context was produced for the same environment scope. + # @POST: Returns bootstrap imported filters, template variables, execution mappings, and updated findings without persisting them directly. + # @SIDE_EFFECT: Performs Superset reads through the extractor and may append warning findings for incomplete recovery. 
+ # @DATA_CONTRACT: Input[Environment, DatasetReviewSession, SupersetParsedContext, List[ValidationFinding]] -> Output[Tuple[List[ImportedFilter], List[TemplateVariable], List[ExecutionMapping], List[ValidationFinding]]] def _build_recovery_bootstrap( self, environment, session: DatasetReviewSession, parsed_context: SupersetParsedContext, findings: List[ValidationFinding], - ) -> tuple[List[ImportedFilter], List[TemplateVariable], List[ExecutionMapping], List[ValidationFinding]]: + ) -> tuple[ + List[ImportedFilter], + List[TemplateVariable], + List[ExecutionMapping], + List[ValidationFinding], + ]: + session_record = cast(Any, session) extractor = SupersetContextExtractor(environment) imported_filters_payload = extractor.recover_imported_filters(parsed_context) if imported_filters_payload is None: imported_filters_payload = [] imported_filters = [ ImportedFilter( - session_id=session.session_id, + session_id=session_record.session_id, filter_name=str(item.get("filter_name") or f"imported_filter_{index}"), display_name=item.get("display_name"), raw_value=item.get("raw_value"), normalized_value=item.get("normalized_value"), - source=FilterSource(str(item.get("source") or FilterSource.SUPERSET_URL.value)), + source=FilterSource( + str(item.get("source") or FilterSource.SUPERSET_URL.value) + ), confidence_state=FilterConfidenceState( - str(item.get("confidence_state") or FilterConfidenceState.UNRESOLVED.value) + str( + item.get("confidence_state") + or FilterConfidenceState.UNRESOLVED.value + ) ), requires_confirmation=bool(item.get("requires_confirmation", False)), recovery_status=FilterRecoveryStatus( - str(item.get("recovery_status") or FilterRecoveryStatus.PARTIAL.value) + str( + item.get("recovery_status") + or FilterRecoveryStatus.PARTIAL.value + ) ), notes=item.get("notes"), ) @@ -711,25 +783,44 @@ class DatasetReviewOrchestrator: if session.dataset_id is not None: try: - dataset_payload = extractor.client.get_dataset_detail(session.dataset_id) - 
discovered_variables = extractor.discover_template_variables(dataset_payload) + dataset_payload = extractor.client.get_dataset_detail( + session_record.dataset_id + ) + discovered_variables = extractor.discover_template_variables( + dataset_payload + ) template_variables = [ TemplateVariable( - session_id=session.session_id, - variable_name=str(item.get("variable_name") or f"variable_{index}"), + session_id=session_record.session_id, + variable_name=str( + item.get("variable_name") or f"variable_{index}" + ), expression_source=str(item.get("expression_source") or ""), - variable_kind=VariableKind(str(item.get("variable_kind") or VariableKind.UNKNOWN.value)), + variable_kind=VariableKind( + str(item.get("variable_kind") or VariableKind.UNKNOWN.value) + ), is_required=bool(item.get("is_required", True)), default_value=item.get("default_value"), - mapping_status=MappingStatus(str(item.get("mapping_status") or MappingStatus.UNMAPPED.value)), + mapping_status=MappingStatus( + str( + item.get("mapping_status") + or MappingStatus.UNMAPPED.value + ) + ), ) for index, item in enumerate(discovered_variables) ] except Exception as exc: - if "dataset_template_variable_discovery_failed" not in parsed_context.unresolved_references: - parsed_context.unresolved_references.append("dataset_template_variable_discovery_failed") + if ( + "dataset_template_variable_discovery_failed" + not in parsed_context.unresolved_references + ): + parsed_context.unresolved_references.append( + "dataset_template_variable_discovery_failed" + ) if not any( - finding.caused_by_ref == "dataset_template_variable_discovery_failed" + finding.caused_by_ref + == "dataset_template_variable_discovery_failed" for finding in findings ): findings.append( @@ -745,7 +836,11 @@ class DatasetReviewOrchestrator: ) logger.explore( "Template variable discovery failed during session bootstrap", - extra={"session_id": session.session_id, "dataset_id": session.dataset_id, "error": str(exc)}, + extra={ + "session_id": 
session_record.session_id, + "dataset_id": session_record.dataset_id, + "error": str(exc), + }, ) filter_lookup = { @@ -754,7 +849,9 @@ class DatasetReviewOrchestrator: if str(imported_filter.filter_name or "").strip() } for template_variable in template_variables: - matched_filter = filter_lookup.get(str(template_variable.variable_name or "").strip().lower()) + matched_filter = filter_lookup.get( + str(template_variable.variable_name or "").strip().lower() + ) if matched_filter is None: continue requires_explicit_approval = bool( @@ -763,22 +860,27 @@ class DatasetReviewOrchestrator: ) execution_mappings.append( ExecutionMapping( - session_id=session.session_id, + session_id=session_record.session_id, filter_id=matched_filter.filter_id, variable_id=template_variable.variable_id, mapping_method=MappingMethod.DIRECT_MATCH, raw_input_value=matched_filter.raw_value, - effective_value=matched_filter.normalized_value if matched_filter.normalized_value is not None else matched_filter.raw_value, + effective_value=matched_filter.normalized_value + if matched_filter.normalized_value is not None + else matched_filter.raw_value, transformation_note="Bootstrapped from Superset recovery context", warning_level=None if not requires_explicit_approval else None, requires_explicit_approval=requires_explicit_approval, - approval_state=ApprovalState.PENDING if requires_explicit_approval else ApprovalState.NOT_REQUIRED, + approval_state=ApprovalState.PENDING + if requires_explicit_approval + else ApprovalState.NOT_REQUIRED, approved_by_user_id=None, approved_at=None, ) ) return imported_filters, template_variables, execution_mappings, findings + # [/DEF:DatasetReviewOrchestrator._build_recovery_bootstrap:Function] # [DEF:DatasetReviewOrchestrator._build_execution_snapshot:Function] @@ -789,9 +891,16 @@ class DatasetReviewOrchestrator: # @POST: returns deterministic execution snapshot for current session state without mutating persistence. # @SIDE_EFFECT: none. 
# @DATA_CONTRACT: Input[DatasetReviewSession] -> Output[Dict[str,Any]] - def _build_execution_snapshot(self, session: DatasetReviewSession) -> Dict[str, Any]: - filter_lookup = {item.filter_id: item for item in session.imported_filters} - variable_lookup = {item.variable_id: item for item in session.template_variables} + def _build_execution_snapshot( + self, session: DatasetReviewSession + ) -> Dict[str, Any]: + session_record = cast(Any, session) + filter_lookup = { + item.filter_id: item for item in session_record.imported_filters + } + variable_lookup = { + item.variable_id: item for item in session_record.template_variables + } effective_filters: List[Dict[str, Any]] = [] template_params: Dict[str, Any] = {} @@ -800,14 +909,16 @@ class DatasetReviewOrchestrator: preview_blockers: List[str] = [] mapped_filter_ids: set[str] = set() - for mapping in session.execution_mappings: + for mapping in session_record.execution_mappings: imported_filter = filter_lookup.get(mapping.filter_id) template_variable = variable_lookup.get(mapping.variable_id) if imported_filter is None: preview_blockers.append(f"mapping:{mapping.mapping_id}:missing_filter") continue if template_variable is None: - preview_blockers.append(f"mapping:{mapping.mapping_id}:missing_variable") + preview_blockers.append( + f"mapping:{mapping.mapping_id}:missing_variable" + ) continue effective_value = mapping.effective_value @@ -819,7 +930,9 @@ class DatasetReviewOrchestrator: effective_value = template_variable.default_value if effective_value is None and template_variable.is_required: - preview_blockers.append(f"variable:{template_variable.variable_name}:missing_required_value") + preview_blockers.append( + f"variable:{template_variable.variable_name}:missing_required_value" + ) continue mapped_filter_ids.add(imported_filter.filter_id) @@ -840,10 +953,13 @@ class DatasetReviewOrchestrator: template_params[template_variable.variable_name] = effective_value if mapping.approval_state == 
ApprovalState.APPROVED: approved_mapping_ids.append(mapping.mapping_id) - if mapping.requires_explicit_approval and mapping.approval_state != ApprovalState.APPROVED: + if ( + mapping.requires_explicit_approval + and mapping.approval_state != ApprovalState.APPROVED + ): open_warning_refs.append(mapping.mapping_id) - for imported_filter in session.imported_filters: + for imported_filter in session_record.imported_filters: if imported_filter.filter_id in mapped_filter_ids: continue effective_value = imported_filter.normalized_value @@ -862,8 +978,10 @@ class DatasetReviewOrchestrator: } ) - mapped_variable_ids = {mapping.variable_id for mapping in session.execution_mappings} - for variable in session.template_variables: + mapped_variable_ids = { + mapping.variable_id for mapping in session_record.execution_mappings + } + for variable in session_record.template_variables: if variable.variable_id in mapped_variable_ids: continue if variable.default_value is not None: @@ -875,11 +993,13 @@ class DatasetReviewOrchestrator: semantic_decision_refs = [ field.field_id for field in session.semantic_fields - if field.is_locked or not field.needs_review or field.provenance.value != "unresolved" + if field.is_locked + or not field.needs_review + or field.provenance.value != "unresolved" ] preview_fingerprint = self._compute_preview_fingerprint( { - "dataset_id": session.dataset_id, + "dataset_id": session_record.dataset_id, "template_params": template_params, "effective_filters": effective_filters, } @@ -893,6 +1013,7 @@ class DatasetReviewOrchestrator: "preview_blockers": sorted(set(preview_blockers)), "preview_fingerprint": preview_fingerprint, } + # [/DEF:DatasetReviewOrchestrator._build_execution_snapshot:Function] # [DEF:DatasetReviewOrchestrator._build_launch_blockers:Function] @@ -909,16 +1030,21 @@ class DatasetReviewOrchestrator: execution_snapshot: Dict[str, Any], preview: Optional[CompiledPreview], ) -> List[str]: + session_record = cast(Any, session) blockers = 
list(execution_snapshot["preview_blockers"]) - for finding in session.findings: + for finding in session_record.findings: if ( finding.severity == FindingSeverity.BLOCKING - and finding.resolution_state not in {ResolutionState.RESOLVED, ResolutionState.APPROVED} + and finding.resolution_state + not in {ResolutionState.RESOLVED, ResolutionState.APPROVED} ): blockers.append(f"finding:{finding.code}:blocking") - for mapping in session.execution_mappings: - if mapping.requires_explicit_approval and mapping.approval_state != ApprovalState.APPROVED: + for mapping in session_record.execution_mappings: + if ( + mapping.requires_explicit_approval + and mapping.approval_state != ApprovalState.APPROVED + ): blockers.append(f"mapping:{mapping.mapping_id}:approval_required") if preview is None: @@ -930,23 +1056,28 @@ class DatasetReviewOrchestrator: blockers.append("preview:fingerprint_mismatch") return sorted(set(blockers)) + # [/DEF:DatasetReviewOrchestrator._build_launch_blockers:Function] # [DEF:DatasetReviewOrchestrator._get_latest_preview:Function] # @COMPLEXITY: 2 # @PURPOSE: Resolve the current latest preview snapshot for one session aggregate. 
- def _get_latest_preview(self, session: DatasetReviewSession) -> Optional[CompiledPreview]: - if not session.previews: + def _get_latest_preview( + self, session: DatasetReviewSession + ) -> Optional[CompiledPreview]: + session_record = cast(Any, session) + if not session_record.previews: return None - if session.last_preview_id: - for preview in session.previews: - if preview.preview_id == session.last_preview_id: + if session_record.last_preview_id: + for preview in session_record.previews: + if preview.preview_id == session_record.last_preview_id: return preview return sorted( - session.previews, + session_record.previews, key=lambda item: (item.created_at or datetime.min, item.preview_id), reverse=True, )[0] + # [/DEF:DatasetReviewOrchestrator._get_latest_preview:Function] # [DEF:DatasetReviewOrchestrator._compute_preview_fingerprint:Function] @@ -955,6 +1086,7 @@ class DatasetReviewOrchestrator: def _compute_preview_fingerprint(self, payload: Dict[str, Any]) -> str: serialized = json.dumps(payload, sort_keys=True, default=str) return hashlib.sha256(serialized.encode("utf-8")).hexdigest() + # [/DEF:DatasetReviewOrchestrator._compute_preview_fingerprint:Function] # [DEF:DatasetReviewOrchestrator._enqueue_recovery_task:Function] @@ -971,28 +1103,33 @@ class DatasetReviewOrchestrator: session: DatasetReviewSession, parsed_context: Optional[SupersetParsedContext], ) -> Optional[str]: + session_record = cast(Any, session) if self.task_manager is None: logger.reason( "Dataset review session started without task manager; continuing synchronously", - extra={"session_id": session.session_id}, + extra={"session_id": session_record.session_id}, ) return None task_params: Dict[str, Any] = { - "session_id": session.session_id, + "session_id": session_record.session_id, "user_id": command.user.id, - "environment_id": session.environment_id, - "source_kind": session.source_kind, - "source_input": session.source_input, - "dataset_ref": session.dataset_ref, - "dataset_id": 
session.dataset_id, - "dashboard_id": session.dashboard_id, - "partial_recovery": bool(parsed_context and parsed_context.partial_recovery), + "environment_id": session_record.environment_id, + "source_kind": session_record.source_kind, + "source_input": session_record.source_input, + "dataset_ref": session_record.dataset_ref, + "dataset_id": session_record.dataset_id, + "dashboard_id": session_record.dashboard_id, + "partial_recovery": bool( + parsed_context and parsed_context.partial_recovery + ), } create_task = getattr(self.task_manager, "create_task", None) if create_task is None: - logger.explore("Task manager has no create_task method; skipping recovery enqueue") + logger.explore( + "Task manager has no create_task method; skipping recovery enqueue" + ) return None try: @@ -1003,13 +1140,16 @@ class DatasetReviewOrchestrator: except TypeError: logger.explore( "Recovery task enqueue skipped because task manager create_task contract is incompatible", - extra={"session_id": session.session_id}, + extra={"session_id": session_record.session_id}, ) return None task_id = getattr(task_object, "id", None) return str(task_id) if task_id else None + # [/DEF:DatasetReviewOrchestrator._enqueue_recovery_task:Function] + + # [/DEF:DatasetReviewOrchestrator:Class] -# [/DEF:DatasetReviewOrchestrator:Module] \ No newline at end of file +# [/DEF:DatasetReviewOrchestrator:Module] diff --git a/frontend/src/lib/api.js b/frontend/src/lib/api.js index cb633aea..c60f477f 100755 --- a/frontend/src/lib/api.js +++ b/frontend/src/lib/api.js @@ -3,6 +3,7 @@ // @SEMANTICS: api, client, fetch, rest // @PURPOSE: Handles all communication with the backend API. // @LAYER: Infra-API +// @RELATION: [DEPENDS_ON] ->[toasts_module] import { addToast } from './toasts.js'; import { PUBLIC_WS_URL } from '$env/static/public';