diff --git a/.kilocode/mcp.json b/.kilocode/mcp.json index e8302168..b31fbda2 100644 --- a/.kilocode/mcp.json +++ b/.kilocode/mcp.json @@ -1 +1 @@ -{"mcpServers":{"axiom-core":{"command":"/home/busya/dev/ast-mcp-core-server/.venv/bin/python","args":["-c","from src.server import main; main()"],"env":{"PYTHONPATH":"/home/busya/dev/ast-mcp-core-server"},"alwaysAllow":["reindex_workspace_tool","read_grace_outline_tool","ast_search_tool","get_semantic_context_tool","build_task_context_tool","workspace_semantic_health_tool","audit_contracts_tool","diff_contract_semantics_tool","impact_analysis_tool","simulate_patch_tool","patch_contract_tool","guarded_patch_contract_tool","rename_contract_id_tool","move_contract_tool","extract_contract_tool","infer_missing_relations_tool","map_runtime_trace_to_contracts_tool","trace_tests_for_contract_tool","scaffold_contract_tests_tool","search_contracts_tool"]}}} \ No newline at end of file +{"mcpServers":{"axiom-core":{"command":"/home/busya/dev/ast-mcp-core-server/.venv/bin/python","args":["-c","from src.server import main; main()"],"env":{"PYTHONPATH":"/home/busya/dev/ast-mcp-core-server"},"alwaysAllow":["read_grace_outline_tool","ast_search_tool","get_semantic_context_tool","build_task_context_tool","workspace_semantic_health_tool","audit_contracts_tool","diff_contract_semantics_tool","impact_analysis_tool","simulate_patch_tool","patch_contract_tool","rename_contract_id_tool","move_contract_tool","extract_contract_tool","infer_missing_relations_tool","map_runtime_trace_to_contracts_tool","trace_tests_for_contract_tool","scaffold_contract_tests_tool","search_contracts_tool"]}}} \ No newline at end of file diff --git a/.kilocodemodes b/.kilocodemodes index 92c1b727..44bff4aa 100644 --- a/.kilocodemodes +++ b/.kilocodemodes @@ -6,7 +6,7 @@ customModes: You are Kilo Code, acting as a QA and Test Engineer. Your primary goal is to ensure maximum test coverage, maintain test quality, and preserve existing tests. 
Your responsibilities include: - WRITING TESTS: Create comprehensive unit tests following TDD principles, using co-location strategy (`__tests__` directories). - - TEST DATA: For CRITICAL tier modules, you MUST use @TEST_DATA fixtures defined in .ai/standards/semantics.md. Read and apply them in your tests. + - TEST DATA: For Complexity 5 (CRITICAL) modules, you MUST use @TEST_FIXTURE defined in .ai/standards/semantics.md. Read and apply them in your tests. - DOCUMENTATION: Maintain test documentation in `specs//tests/` directory with coverage reports and test case specifications. - VERIFICATION: Run tests, analyze results, and ensure all tests pass. - PROTECTION: NEVER delete existing tests. NEVER duplicate tests - check for existing tests first. @@ -20,12 +20,12 @@ customModes: customInstructions: | 1. KNOWLEDGE GRAPH: ALWAYS read .ai/ROOT.md first to understand the project structure and navigation. 2. CO-LOCATION: Write tests in `__tests__` subdirectories relative to the code being tested (Fractal Strategy). - 2. TEST DATA MANDATORY: For CRITICAL modules, read @TEST_DATA from .ai/standards/semantics.md and use fixtures in tests. - 3. UX CONTRACT TESTING: For Svelte components with @UX_STATE, @UX_FEEDBACK, @UX_RECOVERY tags, create comprehensive UX tests. + 2. TEST DATA MANDATORY: For Complexity 5 modules, read @TEST_FIXTURE and @TEST_CONTRACT from .ai/standards/semantics.md. + 3. UX CONTRACT TESTING: For Svelte components with @UX_STATE, @UX_FEEDBACK, @UX_RECOVERY tags, create tests for all state transitions. 4. NO DELETION: Never delete existing tests - only update if they fail due to legitimate bugs. 5. NO DUPLICATION: Check existing tests in `__tests__/` before creating new ones. Reuse existing test patterns. 6. DOCUMENTATION: Create test reports in `specs//tests/reports/YYYY-MM-DD-report.md`. - 7. COVERAGE: Aim for maximum coverage but prioritize CRITICAL and STANDARD tier modules. + 7. 
COVERAGE: Aim for maximum coverage but prioritize Complexity 5 and 3 modules. 8. RUN TESTS: Execute tests using `cd backend && .venv/bin/python3 -m pytest` or `cd frontend && npm run test`. - slug: product-manager name: Product Manager @@ -52,11 +52,13 @@ customModes: 2. CONSTITUTION: Strictly follow architectural invariants in .ai/standards/constitution.md. 3. SEMANTIC PROTOCOL: ALWAYS use .ai/standards/semantics.md as your source of truth for syntax. 4. ANCHOR FORMAT: Use #[DEF:filename:Type] at start and #[/DEF:filename] at end. - 3. TAGS: Add @PURPOSE, @LAYER, @TIER, @RELATION, @PRE, @POST, @UX_STATE, @UX_FEEDBACK, @UX_RECOVERY. - 4. TIER COMPLIANCE: - - CRITICAL: Full contract + all UX tags + strict logging - - STANDARD: Basic contract + UX tags where applicable - - TRIVIAL: Only anchors + @PURPOSE + 3. TAGS: Add @COMPLEXITY, @SEMANTICS, @PURPOSE, @LAYER, @RELATION, @PRE, @POST, @UX_STATE, @UX_FEEDBACK, @UX_RECOVERY, @INVARIANT, @SIDE_EFFECT, @DATA_CONTRACT. + 4. COMPLEXITY COMPLIANCE (1-5): + - Complexity 1 (ATOMIC): Only anchors [DEF]...[/DEF]. @PURPOSE optional. + - Complexity 2 (SIMPLE): @PURPOSE required. + - Complexity 3 (FLOW): @PURPOSE, @RELATION required. For UI: @UX_STATE mandatory. + - Complexity 4 (ORCHESTRATION): @PURPOSE, @RELATION, @PRE, @POST, @SIDE_EFFECT required. logger.reason()/reflect() mandatory for Python. + - Complexity 5 (CRITICAL): Full contract (L4) + @DATA_CONTRACT + @INVARIANT. For UI: UX contracts mandatory. belief_scope mandatory. 5. CODE SIZE: Keep modules under 300 lines. Refactor if exceeding. 6. ERROR HANDLING: Use if/raise or guards, never assert. 7. TEST FIXES: When fixing failing tests, preserve semantic annotations. Only update code logic. @@ -102,7 +104,7 @@ customModes: ## III. ТОПОЛОГИЯ ФАЙЛА (СТРОГИЙ ПОРЯДОК) 1. 
**HEADER (Заголовок):**[DEF:filename:Module] - @TIER: [CRITICAL | STANDARD | TRIVIAL] + @COMPLEXITY: [1|2|3|4|5] *(алиас: `@C:`)* @SEMANTICS: [keywords] @PURPOSE: [Однострочная суть] @LAYER: [Domain | UI | Infra] @@ -112,7 +114,7 @@ customModes: 3. **FOOTER (Подвал):** [/DEF:filename:Module] ## IV. КОНТРАКТЫ (DESIGN BY CONTRACT & UX) - Обязательны для TIER: CRITICAL и STANDARD. Заменяют стандартные Docstrings. + Контракты требуются адаптивно по уровню сложности, а не по жесткой шкале. **[CORE CONTRACTS]:** - `@PURPOSE:` Суть функции/компонента. @@ -134,11 +136,40 @@ customModes: - `@TEST_EDGE: [Название] ->[Сбой]` (Минимум 3: missing_field, invalid_type, external_fail). - `@TEST_INVARIANT: [Имя] -> VERIFIED_BY: [scenario_1, ...]` - ## V. УРОВНИ СТРОГОСТИ (TIERS) - Степень контроля задается в Header. - - **CRITICAL** (Ядро/Деньги/Безопасность): 100% покрытие тегами GRACE. Обязательны: Граф, Инварианты, Логи `logger.reason/reflect`, все `@UX` и `@TEST` теги. Использование `belief_scope` строго обязательно. - - **STANDARD** (Бизнес-логика / Типовые формы): Базовый уровень. Обязательны: `@PURPOSE`, `@UX_STATE`, `@RELATION`, базовое логирование. - - **TRIVIAL** (Утилиты / DTO / Атомы UI): Минимальный каркас. Только якоря `[DEF]...[/DEF]` и `@PURPOSE`. + ## V. ШКАЛА СЛОЖНОСТИ (COMPLEXITY 1-5) + Степень контроля задается в Header через `@COMPLEXITY` или сокращение `@C`. + Если тег отсутствует, сущность по умолчанию считается **Complexity 1**. Это сделано специально для экономии токенов и снижения шума на очевидных утилитах. + + - **1 - ATOMIC** + - Примеры: DTO, исключения, геттеры, простые утилиты, короткие адаптеры. + - Обязательны только якоря `[DEF]...[/DEF]`. + - `@PURPOSE` желателен, но не обязателен. + + - **2 - SIMPLE** + - Примеры: простые helper-функции, небольшие мапперы, UI-атомы. + - Обязателен `@PURPOSE`. + - Остальные контракты опциональны. + + - **3 - FLOW** + - Примеры: стандартная бизнес-логика, API handlers, сервисные методы, UI с загрузкой данных. 
+ - Обязательны: `@PURPOSE`, `@RELATION`. + - Для UI дополнительно обязателен `@UX_STATE`. + + - **4 - ORCHESTRATION** + - Примеры: сложная координация, работа с I/O, multi-step алгоритмы, stateful pipelines. + - Обязательны: `@PURPOSE`, `@RELATION`, `@PRE`, `@POST`, `@SIDE_EFFECT`. + - Для Python обязателен осмысленный путь логирования через `logger.reason()` / `logger.reflect()` или аналогичный belief-state механизм. + + - **5 - CRITICAL** + - Примеры: auth, security, database boundaries, migration core, money-like invariants. + - Обязателен полный контракт: уровень 4 + `@DATA_CONTRACT` + `@INVARIANT`. + - Для UI требуются UX-контракты. + - Использование `belief_scope` строго обязательно. + + **Legacy mapping (обратная совместимость):** + - `@TIER: TRIVIAL` -> Complexity 1 + - `@TIER: STANDARD` -> Complexity 3 + - `@TIER: CRITICAL` -> Complexity 5 ## VI. ПРОТОКОЛ ЛОГИРОВАНИЯ (THREAD-LOCAL BELIEF STATE) Логирование - это механизм трассировки рассуждений ИИ (CoT) и управления Attention Energy. Архитектура использует Thread-local storage (`_belief_state`), поэтому `ID` прокидывается автоматически. @@ -162,11 +193,11 @@ customModes: ## VII. АЛГОРИТМ ИСПОЛНЕНИЯ И САМОКОРРЕКЦИИ **[PHASE_1: ANALYSIS]** - Оцени TIER, Layer и UX-требования. При слепоте контекста -> `yield [NEED_CONTEXT: id]`. + Оцени Complexity, Layer и UX-требования. При слепоте контекста -> `yield [NEED_CONTEXT: id]`. **[PHASE_2: SYNTHESIS]** - Сгенерируй каркас из `[DEF]`, Header и Контрактов. + Сгенерируй каркас из `[DEF]`, Header и только тех контрактов, которые соответствуют уровню сложности. **[PHASE_3: IMPLEMENTATION]** - Напиши код строго по Контракту. Для CRITICAL секций открой `with belief_scope("ID"):` и орошай путь вызовами `logger.reason()` и `logger.reflect()`. + Напиши код строго по Контракту. Для Complexity 5 секций открой `with belief_scope("ID"):` и орошай путь вызовами `logger.reason()` и `logger.reflect()`. 
**[PHASE_4: CLOSURE]** Убедись, что все `[DEF]` закрыты соответствующими `[/DEF]`. @@ -176,19 +207,8 @@ customModes: 2. ГИПОТЕЗА: Сгенерируй вызов `logger.explore("Ошибка в I/O / Состоянии / Зависимости -> Описание")`. 3. ЗАПРОС: Запроси разрешение на изменение контракта. whenToUse: Use this mode when you need to update the project's semantic map, fix semantic compliance issues (missing anchors/tags/DbC ), or analyze the codebase structure. This mode is specialized for maintaining the `.ai/standards/semantics.md` standards. - description: Codebase semantic mapping and compliance expert using AXIOM MCP - customInstructions: | - 1. KNOWLEDGE GRAPH: ALWAYS read `.ai/ROOT.md` first and use it with the semantic graph as the primary navigation layer. - 2. MCP-FIRST: Prefer [`reindex_workspace_tool`](.kilocode/mcp.json), [`workspace_semantic_health_tool`](.kilocode/mcp.json), [`audit_contracts_tool`](.kilocode/mcp.json), [`get_semantic_context_tool`](.kilocode/mcp.json), [`trace_tests_for_contract_tool`](.kilocode/mcp.json), [`infer_missing_relations_tool`](.kilocode/mcp.json), [`simulate_patch_tool`](.kilocode/mcp.json), and [`guarded_patch_contract_tool`](.kilocode/mcp.json) over manual file inspection whenever possible. - 3. INDEX BEFORE ACTION: Start semantic maintenance by reindexing the workspace, then inspect health and audit results before changing contracts. - 4. CONTRACT-AWARE PATCHING: When editing existing semantic blocks, use MCP contract mutation tools first. Fall back to textual editing only when the target is outside MCP-managed contract regions. - 5. REAL SEMANTICS ONLY: Never add placeholder or pseudo-contract metadata. Before writing [`@PRE`](.ai/standards/semantics.md), [`@POST`](.ai/standards/semantics.md), [`@PURPOSE`](.ai/standards/semantics.md), or [`@DATA_CONTRACT`](.ai/standards/semantics.md), derive them from code and semantic context. - 6. 
RELATION RECOVERY: Use inferred-relation and impact-analysis tools to reduce orphan and unresolved relation counts, not just to silence audits. - 7. TEST TRACEABILITY: For CRITICAL or behavior-heavy contracts, use MCP test tracing/scaffolding tools to connect semantics with verification assets. - 8. VERIFY AFTER EACH CHANGE: Re-run health and audit MCP checks after every semantic repair batch and report deltas in contract count, orphan count, and unresolved relations. - 9. ID NAMING (CRITICAL): NEVER use fully-qualified Python import paths in `[DEF:id:Type]`. Use short, domain-driven semantic IDs that match the style documented in [`.ai/standards/semantics.md`](.ai/standards/semantics.md). - 10. ORPHAN REDUCTION (CRITICAL): To reduce orphan count, prefer wrapping real unanchored classes and functions in matching `[DEF:id:Type] ... [/DEF]` blocks. Do NOT treat `@RELATION` rewrites alone as an orphan fix. - 11. PROTOCOL OVER TOOLING: If MCP or parser behavior appears to tolerate an undocumented ID convention, follow [`.ai/standards/semantics.md`](.ai/standards/semantics.md) instead of inventing a new canonical form. + description: Codebase semantic mapping and compliance expert + customInstructions: "" groups: - read - edit @@ -198,14 +218,32 @@ customModes: source: project - slug: reviewer-agent-auditor name: Reviewer Agent (Auditor) - description: Безжалостный инспектор ОТК с AXIOM MCP-аудитом. - roleDefinition: '*"Ты GRACE Reviewer. Твоя цель — выявлять нарушения протокола GRACE-Poly с максимальным использованием AXIOM MCP. Ты не переписываешь код вслепую: ты строишь индекс, измеряешь семантическое здоровье, запускаешь аудит контрактов, анализируешь impact и semantic drift. FATAL ERROR: отсутствующие обязательные теги у `CRITICAL`, битые отношения, незакрытые или неконсистентные контракты, unsupported `@TIER`, семантические сироты в критических областях. 
Выводи строгий вердикт PASS/FAIL и список нарушений с путями, contract_id, severity и рекомендацией по исправлению."*' - customInstructions: | - 1. MCP-FIRST AUDIT: Always start with workspace reindexing, then run semantic health and contract audit through AXIOM MCP. - 2. EVIDENCE OVER OPINION: Every finding must be traceable to MCP outputs such as health metrics, audit warnings, semantic context, or impact analysis. - 3. PRIORITIZED REVIEW: Review in this order: CRITICAL contract completeness, unresolved relations, orphan reduction opportunities, then STANDARD coverage gaps. - 4. SEMANTIC CROSS-CHECK: Use semantic context, contract search, diff semantics, and impact analysis to confirm whether an issue is local or systemic. - 5. REVIEW OUTPUT FORMAT: Report PASS only when there are no blocking CRITICAL issues and no unresolved semantically significant relations in the reviewed scope. + roleDefinition: |- + # SYSTEM DIRECTIVE: GRACE-Poly (UX Edition) v2.2 + > OPERATION MODE: AUDITOR (Strict Semantic Enforcement, Zero Fluff). + > ROLE: GRACE Reviewer & Quality Control Engineer. + + Твоя единственная цель — искать нарушения протокола GRACE-Poly . Ты не пишешь код (кроме исправлений разметки). Ты — безжалостный инспектор ОТК. + + ## ГЛОБАЛЬНЫЕ ИНВАРИАНТЫ ДЛЯ ПРОВЕРКИ: + [INVARIANT_1] СЕМАНТИКА > СИНТАКСИС. Код без контракта = МУСОР. + [INVARIANT_2] ЗАПРЕТ ГАЛЛЮЦИНАЦИЙ. Проверяй наличие узлов @RELATION. + [INVARIANT_4] ФРАКТАЛЬНЫЙ ЛИМИТ. Файлы > 300 строк — критическое нарушение. + [INVARIANT_5] НЕПРИКОСНОВЕННОСТЬ ЯКОРЕЙ. Проверяй пары [DEF] ... [/DEF]. + + ## ТВОЙ ЧЕК-ЛИСТ: + 1. Валидность якорей (парность, соответствие Type). + 2. Соответствие @COMPLEXITY (C1-C5) набору обязательных тегов. + 3. Наличие @TEST_CONTRACT для критических узлов. + 4. Качество логирования logger.reason/reflect для C4+. + description: Безжалостный инспектор ОТК. + customInstructions: |- + 1. ANALYSIS: Оценивай файлы по шкале сложности в .ai/standards/semantics.md. + 2. 
DETECTION: При обнаружении нарушений (отсутствие [/DEF], превышение 300 строк, пропущенные контракты для C4-C5) немедленно сигнализируй [COHERENCE_CHECK_FAILED]. + 3. FIXING: Ты можешь предлагать исправления ТОЛЬКО для семантической разметки и метаданных. Не меняй логику алгоритмов без санкции Архитектора. + 4. TEST AUDIT: Проверяй @TEST_CONTRACT, @TEST_SCENARIO и @TEST_EDGE. Если тесты не покрывают крайние случаи из контракта — фиксируй нарушение. + 5. LOGGING AUDIT: Для Complexity 4-5 проверяй наличие logger.reason() и logger.reflect(). + 6. RELATIONS: Убедись, что @RELATION ссылаются на существующие компоненты или запрашивай [NEED_CONTEXT]. groups: - read - edit diff --git a/backend/src/core/auth/repository.py b/backend/src/core/auth/repository.py index 1f70336d..06acfed2 100644 --- a/backend/src/core/auth/repository.py +++ b/backend/src/core/auth/repository.py @@ -1,5 +1,6 @@ -# [DEF:backend.src.core.auth.repository:Module] +# [DEF:AuthRepository:Module] # +# @TIER: CRITICAL # @COMPLEXITY: 5 # @SEMANTICS: auth, repository, database, user, role, permission # @PURPOSE: Data access layer for authentication and user preference entities. @@ -9,9 +10,11 @@ # @SIDE_EFFECT: Performs database I/O via SQLAlchemy sessions. # @DATA_CONTRACT: Input[Session] -> Model[User, Role, Permission, UserDashboardPreference] # @RELATION: [DEPENDS_ON] ->[sqlalchemy.orm.Session] -# @RELATION: [DEPENDS_ON] ->[backend.src.models.auth] -# @RELATION: [DEPENDS_ON] ->[backend.src.models.profile] -# @RELATION: [DEPENDS_ON] ->[backend.src.core.logger.belief_scope] +# @RELATION: [DEPENDS_ON] ->[User:Class] +# @RELATION: [DEPENDS_ON] ->[Role:Class] +# @RELATION: [DEPENDS_ON] ->[Permission:Class] +# @RELATION: [DEPENDS_ON] ->[UserDashboardPreference:Class] +# @RELATION: [DEPENDS_ON] ->[belief_scope:Function] # @INVARIANT: All database read/write operations must execute via the injected SQLAlchemy session boundary. 
# # [SECTION: IMPORTS] @@ -24,200 +27,33 @@ from ...models.profile import UserDashboardPreference from ..logger import belief_scope, logger # [/SECTION] -# [DEF:AuthRepository:Class] +# [DEF:AuthRepository:Module] +# +# @TIER: CRITICAL # @COMPLEXITY: 5 -# @PURPOSE: Encapsulates database operations for authentication-related entities. -# @RELATION: [DEPENDS_ON] ->[sqlalchemy.orm.Session] -class AuthRepository: - # [DEF:__init__:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Bind repository instance to an existing SQLAlchemy session. - # @PRE: db is an initialized sqlalchemy.orm.Session instance. - # @POST: self.db points to the provided session and is used by all repository methods. - # @SIDE_EFFECT: Stores session reference on repository instance state. - # @DATA_CONTRACT: Input[Session] -> Output[None] - def __init__(self, db: Session): - with belief_scope("AuthRepository.__init__"): - if not isinstance(db, Session): - logger.explore("Invalid session provided to AuthRepository", extra={"type": type(db)}) - raise TypeError("db must be an instance of sqlalchemy.orm.Session") - - logger.reason("Binding AuthRepository to database session") - self.db = db - logger.reflect("AuthRepository initialized") - # [/DEF:__init__:Function] +# @SEMANTICS: auth, repository, database, user, role, permission +# @PURPOSE: Data access layer for authentication and user preference entities. +# @LAYER: Domain +# @PRE: SQLAlchemy session manager and auth models are available. +# @POST: Provides transactional access to Auth-related database entities. +# @SIDE_EFFECT: Performs database I/O via SQLAlchemy sessions. 
+# @DATA_CONTRACT: Input[Session] -> Model[User, Role, Permission, UserDashboardPreference] +# @RELATION: [DEPENDS_ON] ->[User:Class] +# @RELATION: [DEPENDS_ON] ->[Role:Class] +# @RELATION: [DEPENDS_ON] ->[Permission:Class] +# @RELATION: [DEPENDS_ON] ->[UserDashboardPreference:Class] +# @RELATION: [DEPENDS_ON] ->[belief_scope:Function] +# @INVARIANT: All database read/write operations must execute via the injected SQLAlchemy session boundary. +# +# [SECTION: IMPORTS] +from typing import List, Optional - # [DEF:get_user_by_username:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve a user entity by unique username. - # @PRE: username is a non-empty str and self.db is a valid open Session. - # @POST: Returns matching User entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[str] -> Output[Optional[User]] - def get_user_by_username(self, username: str) -> Optional[User]: - with belief_scope("AuthRepository.get_user_by_username"): - if not username or not isinstance(username, str): - raise ValueError("username must be a non-empty string") - - logger.reason(f"Querying user by username: {username}") - user = ( - self.db.query(User) - .options(selectinload(User.roles).selectinload(Role.permissions)) - .filter(User.username == username) - .first() - ) - - if user: - logger.reflect(f"User found: {username}") - else: - logger.explore(f"User not found: {username}") - return user - # [/DEF:get_user_by_username:Function] +from sqlalchemy.orm import Session, selectinload - # [DEF:get_user_by_id:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve a user entity by identifier. - # @PRE: user_id is a non-empty str and self.db is a valid open Session. - # @POST: Returns matching User entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. 
- # @DATA_CONTRACT: Input[str] -> Output[Optional[User]] - def get_user_by_id(self, user_id: str) -> Optional[User]: - with belief_scope("AuthRepository.get_user_by_id"): - if not user_id or not isinstance(user_id, str): - raise ValueError("user_id must be a non-empty string") - - logger.reason(f"Querying user by ID: {user_id}") - user = self.db.query(User).filter(User.id == user_id).first() - - if user: - logger.reflect(f"User found by ID: {user_id}") - else: - logger.explore(f"User not found by ID: {user_id}") - return user - # [/DEF:get_user_by_id:Function] +from ...models.auth import Permission, Role, User +from ...models.profile import UserDashboardPreference +from ..logger import belief_scope, logger +# [/SECTION] - # [DEF:get_role_by_name:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve a role entity by role name. - # @PRE: name is a non-empty str and self.db is a valid open Session. - # @POST: Returns matching Role entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[str] -> Output[Optional[Role]] - def get_role_by_name(self, name: str) -> Optional[Role]: - with belief_scope("AuthRepository.get_role_by_name"): - return self.db.query(Role).filter(Role.name == name).first() - # [/DEF:get_role_by_name:Function] - - # [DEF:update_last_login:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Update last_login timestamp for the provided user entity. - # @PRE: user is a managed User instance and self.db is a valid open Session. - # @POST: user.last_login is set to current UTC timestamp and transaction is committed. - # @SIDE_EFFECT: Mutates user entity state and commits database transaction. 
- # @DATA_CONTRACT: Input[User] -> Output[None] - def update_last_login(self, user: User): - with belief_scope("AuthRepository.update_last_login"): - if not isinstance(user, User): - raise TypeError("user must be an instance of User") - - from datetime import datetime - logger.reason(f"Updating last login for user: {user.username}") - user.last_login = datetime.utcnow() - self.db.add(user) - self.db.commit() - logger.reflect(f"Last login updated and committed for user: {user.username}") - # [/DEF:update_last_login:Function] - - # [DEF:get_role_by_id:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve a role entity by identifier. - # @PRE: role_id is a non-empty str and self.db is a valid open Session. - # @POST: Returns matching Role entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[str] -> Output[Optional[Role]] - def get_role_by_id(self, role_id: str) -> Optional[Role]: - with belief_scope("AuthRepository.get_role_by_id"): - return self.db.query(Role).filter(Role.id == role_id).first() - # [/DEF:get_role_by_id:Function] - - # [DEF:get_permission_by_id:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve a permission entity by identifier. - # @PRE: perm_id is a non-empty str and self.db is a valid open Session. - # @POST: Returns matching Permission entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[str] -> Output[Optional[Permission]] - def get_permission_by_id(self, perm_id: str) -> Optional[Permission]: - with belief_scope("AuthRepository.get_permission_by_id"): - return self.db.query(Permission).filter(Permission.id == perm_id).first() - # [/DEF:get_permission_by_id:Function] - - # [DEF:get_permission_by_resource_action:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve a permission entity by resource and action pair. 
- # @PRE: resource and action are non-empty str values; self.db is a valid open Session. - # @POST: Returns matching Permission entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[str, str] -> Output[Optional[Permission]] - def get_permission_by_resource_action(self, resource: str, action: str) -> Optional[Permission]: - with belief_scope("AuthRepository.get_permission_by_resource_action"): - return self.db.query(Permission).filter( - Permission.resource == resource, - Permission.action == action - ).first() - # [/DEF:get_permission_by_resource_action:Function] - - # [DEF:get_user_dashboard_preference:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Retrieve dashboard preference entity owned by specified user. - # @PRE: user_id is a non-empty str and self.db is a valid open Session. - # @POST: Returns matching UserDashboardPreference entity when present, otherwise None. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[str] -> Output[Optional[UserDashboardPreference]] - def get_user_dashboard_preference(self, user_id: str) -> Optional[UserDashboardPreference]: - with belief_scope("AuthRepository.get_user_dashboard_preference"): - return ( - self.db.query(UserDashboardPreference) - .filter(UserDashboardPreference.user_id == user_id) - .first() - ) - # [/DEF:get_user_dashboard_preference:Function] - - # [DEF:save_user_dashboard_preference:Function] - # @COMPLEXITY: 5 - # @PURPOSE: Persist dashboard preference entity and return refreshed persistent row. - # @PRE: preference is a valid UserDashboardPreference entity and self.db is a valid open Session. - # @POST: preference is committed to DB, refreshed from DB state, and returned. - # @SIDE_EFFECT: Performs INSERT/UPDATE commit and refresh via active DB session. 
- # @DATA_CONTRACT: Input[UserDashboardPreference] -> Output[UserDashboardPreference] - def save_user_dashboard_preference( - self, - preference: UserDashboardPreference, - ) -> UserDashboardPreference: - with belief_scope("AuthRepository.save_user_dashboard_preference"): - if not isinstance(preference, UserDashboardPreference): - raise TypeError("preference must be an instance of UserDashboardPreference") - - logger.reason(f"Saving dashboard preference for user: {preference.user_id}") - self.db.add(preference) - self.db.commit() - self.db.refresh(preference) - logger.reflect(f"Dashboard preference saved and refreshed for user: {preference.user_id}") - return preference - # [/DEF:save_user_dashboard_preference:Function] - - # [DEF:list_permissions:Function] - # @COMPLEXITY: 5 - # @PURPOSE: List all permission entities available in storage. - # @PRE: self.db is a valid open Session. - # @POST: Returns list containing all Permission entities visible to the session. - # @SIDE_EFFECT: Executes read-only SELECT query through active DB session. - # @DATA_CONTRACT: Input[None] -> Output[List[Permission]] - def list_permissions(self) -> List[Permission]: - with belief_scope("AuthRepository.list_permissions"): - return self.db.query(Permission).all() - # [/DEF:list_permissions:Function] - - -# [/DEF:AuthRepository:Class] -# [/DEF:backend.src.core.auth.repository:Module] +# [/DEF:AuthRepository:Module] +# [/DEF:AuthRepository:Module] diff --git a/backend/src/core/config_manager.py b/backend/src/core/config_manager.py index 3a0b60c9..943b2aaf 100644 --- a/backend/src/core/config_manager.py +++ b/backend/src/core/config_manager.py @@ -1,5 +1,6 @@ -# [DEF:ConfigManagerModule:Module] +# [DEF:ConfigManager:Module] # +# @TIER: CRITICAL # @COMPLEXITY: 5 # @SEMANTICS: config, manager, persistence, migration, postgresql # @PURPOSE: Manages application configuration persistence in DB with one-time migration from legacy JSON. 
@@ -8,11 +9,12 @@ # @POST: Configuration is loaded into memory and logger is configured. # @SIDE_EFFECT: Performs DB I/O and may update global logging level. # @DATA_CONTRACT: Input[json, record] -> Model[AppConfig] -# @RELATION: [DEPENDS_ON] ->[backend.src.core.config_models.AppConfig] -# @RELATION: [DEPENDS_ON] ->[backend.src.core.database.SessionLocal] -# @RELATION: [DEPENDS_ON] ->[backend.src.models.config.AppConfigRecord] -# @RELATION: [CALLS] ->[backend.src.core.logger.logger] -# @RELATION: [CALLS] ->[backend.src.core.logger.configure_logger] +# @RELATION: [DEPENDS_ON] ->[AppConfig] +# @RELATION: [DEPENDS_ON] ->[SessionLocal] +# @RELATION: [DEPENDS_ON] ->[AppConfigRecord] +# @RELATION: [DEPENDS_ON] ->[FileIO] +# @RELATION: [CALLS] ->[logger] +# @RELATION: [CALLS] ->[configure_logger] # @INVARIANT: Configuration must always be representable by AppConfig and persisted under global record id. # import json @@ -29,8 +31,12 @@ from .logger import logger, configure_logger, belief_scope # [DEF:ConfigManager:Class] +# @TIER: CRITICAL # @COMPLEXITY: 5 # @PURPOSE: Handles application configuration load, validation, mutation, and persistence lifecycle. +# @PRE: Database is accessible and AppConfigRecord schema is loaded. +# @POST: Configuration state is synchronized between memory and database. +# @SIDE_EFFECT: Performs DB I/O, OS path validation, and logger reconfiguration. class ConfigManager: # [DEF:__init__:Function] # @PURPOSE: Initialize manager state from persisted or migrated configuration. @@ -58,352 +64,5 @@ class ConfigManager: logger.reflect("ConfigManager initialization complete") # [/DEF:__init__:Function] - - # [DEF:_default_config:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Build default application configuration fallback. - # @PRE: None. - # @POST: Returns valid AppConfig with empty environments and default storage settings. - # @SIDE_EFFECT: None. 
- # @DATA_CONTRACT: Input(None) -> Output(AppConfig) - def _default_config(self) -> AppConfig: - with belief_scope("_default_config"): - return AppConfig( - environments=[], - settings=GlobalSettings(storage=StorageConfig()), - ) - # [/DEF:_default_config:Function] - - # [DEF:_sync_raw_payload_from_config:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Merge typed AppConfig state into raw payload while preserving unsupported legacy sections. - # @PRE: self.config is initialized as AppConfig. - # @POST: self.raw_payload contains AppConfig fields refreshed from self.config. - # @SIDE_EFFECT: Mutates self.raw_payload in memory. - # @DATA_CONTRACT: Input(None) -> Output(dict[str, Any]) - def _sync_raw_payload_from_config(self) -> dict[str, Any]: - with belief_scope("ConfigManager._sync_raw_payload_from_config"): - payload = dict(self.raw_payload) if isinstance(self.raw_payload, dict) else {} - payload.update(self.config.model_dump()) - self.raw_payload = payload - return payload - # [/DEF:_sync_raw_payload_from_config:Function] - - # [DEF:_load_from_legacy_file:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Load legacy JSON configuration for migration fallback path. - # @PRE: self.config_path is initialized. - # @POST: Returns AppConfig from file payload or safe default. - # @SIDE_EFFECT: Filesystem read and error logging. 
- # @DATA_CONTRACT: Input(Path self.config_path) -> Output(AppConfig) - def _load_from_legacy_file(self) -> AppConfig: - with belief_scope("_load_from_legacy_file"): - if not self.config_path.exists(): - logger.info("[_load_from_legacy_file][Action] Legacy config file not found, using defaults") - self.raw_payload = self._default_config().model_dump() - return self._default_config() - - try: - with open(self.config_path, "r", encoding="utf-8") as f: - data = json.load(f) - self.raw_payload = dict(data) if isinstance(data, dict) else {} - logger.info("[_load_from_legacy_file][Coherence:OK] Legacy configuration loaded") - return AppConfig(**data) - except Exception as e: - logger.error(f"[_load_from_legacy_file][Coherence:Failed] Error loading legacy config: {e}") - self.raw_payload = self._default_config().model_dump() - return self._default_config() - # [/DEF:_load_from_legacy_file:Function] - - # [DEF:_get_record:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Resolve global configuration record from DB. - # @PRE: session is an active SQLAlchemy Session. - # @POST: Returns record when present, otherwise None. - # @SIDE_EFFECT: Database read query. - # @DATA_CONTRACT: Input(Session) -> Output(Optional[AppConfigRecord]) - def _get_record(self, session: Session) -> Optional[AppConfigRecord]: - with belief_scope("_get_record"): - return session.query(AppConfigRecord).filter(AppConfigRecord.id == "global").first() - # [/DEF:_get_record:Function] - - # [DEF:_load_config:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Load configuration from DB or perform one-time migration from legacy JSON. - # @PRE: SessionLocal factory is available and AppConfigRecord schema is accessible. - # @POST: Returns valid AppConfig and closes opened DB session. - # @SIDE_EFFECT: Database read/write, possible migration write, logging. 
- # @DATA_CONTRACT: Input(None) -> Output(AppConfig) - def _load_config(self) -> AppConfig: - with belief_scope("ConfigManager._load_config"): - session: Session = SessionLocal() - try: - record = self._get_record(session) - if record and record.payload: - logger.reason("Configuration found in database") - self.raw_payload = dict(record.payload) - config = AppConfig(**record.payload) - logger.reflect("Database configuration validated") - return config - - logger.reason("No database config found, initiating legacy migration") - config = self._load_from_legacy_file() - self._save_config_to_db(config, session=session) - logger.reflect("Legacy configuration migrated to database") - return config - except Exception as e: - logger.explore(f"Error loading config from DB: {e}") - self.raw_payload = self._default_config().model_dump() - return self._default_config() - finally: - session.close() - # [/DEF:_load_config:Function] - - # [DEF:_save_config_to_db:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Persist provided AppConfig into the global DB configuration record. - # @PRE: config is AppConfig; session is either None or an active Session. - # @POST: Global DB record payload equals config.model_dump() when commit succeeds. - # @SIDE_EFFECT: Database insert/update, commit/rollback, logging. 
- # @DATA_CONTRACT: Input(AppConfig, Optional[Session]) -> Output(None) - def _save_config_to_db(self, config: AppConfig, session: Optional[Session] = None): - with belief_scope("ConfigManager._save_config_to_db"): - if not isinstance(config, AppConfig): - raise TypeError("config must be an instance of AppConfig") - - owns_session = session is None - db = session or SessionLocal() - try: - record = self._get_record(db) - payload = self._sync_raw_payload_from_config() - if record is None: - logger.reason("Creating new global configuration record") - record = AppConfigRecord(id="global", payload=payload) - db.add(record) - else: - logger.reason("Updating existing global configuration record") - record.payload = payload - db.commit() - logger.reflect("Configuration successfully committed to database") - except Exception as e: - db.rollback() - logger.explore(f"Failed to save configuration: {e}") - raise - finally: - if owns_session: - db.close() - # [/DEF:_save_config_to_db:Function] - - # [DEF:save:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Persist current in-memory configuration state. - # @PRE: self.config is initialized. - # @POST: Current self.config is written to DB global record. - # @SIDE_EFFECT: Database write and logging via delegated persistence call. - # @DATA_CONTRACT: Input(None; self.config: AppConfig) -> Output(None) - def save(self): - with belief_scope("save"): - self._save_config_to_db(self.config) - # [/DEF:save:Function] - - # [DEF:get_config:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Return current in-memory configuration snapshot. - # @PRE: self.config is initialized. - # @POST: Returns AppConfig reference stored in manager. - # @SIDE_EFFECT: None. 
- # @DATA_CONTRACT: Input(None) -> Output(AppConfig) - def get_config(self) -> AppConfig: - with belief_scope("get_config"): - return self.config - # [/DEF:get_config:Function] - - # [DEF:get_payload:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Return full persisted payload including sections outside typed AppConfig schema. - # @PRE: Manager state is initialized. - # @POST: Returns dict payload with current AppConfig fields synchronized. - # @SIDE_EFFECT: Refreshes self.raw_payload from self.config before returning. - # @DATA_CONTRACT: Input(None) -> Output(dict[str, Any]) - def get_payload(self) -> dict[str, Any]: - with belief_scope("ConfigManager.get_payload"): - return dict(self._sync_raw_payload_from_config()) - # [/DEF:get_payload:Function] - - # [DEF:save_config:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Persist configuration provided either as typed AppConfig or raw payload dict. - # @PRE: config is AppConfig or dict compatible with AppConfig core schema. - # @POST: self.config and self.raw_payload are synchronized and persisted to DB. - # @SIDE_EFFECT: Mutates in-memory config state, writes database record, may reconfigure logger. - # @DATA_CONTRACT: Input(AppConfig|dict[str, Any]) -> Output(None) - def save_config(self, config: AppConfig | dict[str, Any]): - with belief_scope("ConfigManager.save_config"): - if isinstance(config, AppConfig): - self.config = config - self.raw_payload = dict(config.model_dump()) - elif isinstance(config, dict): - self.raw_payload = dict(config) - self.config = AppConfig(**config) - else: - raise TypeError("config must be an AppConfig instance or dict payload") - - self._save_config_to_db(self.config) - configure_logger(self.config.settings.logging) - # [/DEF:save_config:Function] - - # [DEF:update_global_settings:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Replace global settings and persist the resulting configuration. - # @PRE: settings is GlobalSettings. 
- # @POST: self.config.settings equals provided settings and DB state is updated. - # @SIDE_EFFECT: Mutates self.config, DB write, logger reconfiguration, logging. - # @DATA_CONTRACT: Input(GlobalSettings) -> Output(None) - def update_global_settings(self, settings: GlobalSettings): - with belief_scope("ConfigManager.update_global_settings"): - if not isinstance(settings, GlobalSettings): - raise TypeError("settings must be an instance of GlobalSettings") - - logger.reason("Updating global settings and persisting") - self.config.settings = settings - self.save() - configure_logger(settings.logging) - logger.reflect("Global settings updated and logger reconfigured") - # [/DEF:update_global_settings:Function] - - # [DEF:validate_path:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Validate that path exists and is writable, creating it when absent. - # @PRE: path is a string path candidate. - # @POST: Returns (True, msg) for writable path, else (False, reason). - # @SIDE_EFFECT: Filesystem directory creation attempt and OS permission checks. - # @DATA_CONTRACT: Input(str path) -> Output(tuple[bool, str]) - def validate_path(self, path: str) -> tuple[bool, str]: - with belief_scope("validate_path"): - p = os.path.abspath(path) - if not os.path.exists(p): - try: - os.makedirs(p, exist_ok=True) - except Exception as e: - return False, f"Path does not exist and could not be created: {e}" - - if not os.access(p, os.W_OK): - return False, "Path is not writable" - - return True, "Path is valid and writable" - # [/DEF:validate_path:Function] - - # [DEF:get_environments:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Return all configured environments. - # @PRE: self.config is initialized. - # @POST: Returns list of Environment models from current configuration. - # @SIDE_EFFECT: None. 
- # @DATA_CONTRACT: Input(None) -> Output(List[Environment]) - def get_environments(self) -> List[Environment]: - with belief_scope("get_environments"): - return self.config.environments - # [/DEF:get_environments:Function] - - # [DEF:has_environments:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Check whether at least one environment exists in configuration. - # @PRE: self.config is initialized. - # @POST: Returns True iff environment list length is greater than zero. - # @SIDE_EFFECT: None. - # @DATA_CONTRACT: Input(None) -> Output(bool) - def has_environments(self) -> bool: - with belief_scope("has_environments"): - return len(self.config.environments) > 0 - # [/DEF:has_environments:Function] - - # [DEF:get_environment:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Resolve a configured environment by identifier. - # @PRE: env_id is string identifier. - # @POST: Returns matching Environment when found; otherwise None. - # @SIDE_EFFECT: None. - # @DATA_CONTRACT: Input(str env_id) -> Output(Optional[Environment]) - def get_environment(self, env_id: str) -> Optional[Environment]: - with belief_scope("get_environment"): - for env in self.config.environments: - if env.id == env_id: - return env - return None - # [/DEF:get_environment:Function] - - # [DEF:add_environment:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Upsert environment by id into configuration and persist. - # @PRE: env is Environment. - # @POST: Configuration contains provided env id with new payload persisted. - # @SIDE_EFFECT: Mutates environment list, DB write, logging. 
- # @DATA_CONTRACT: Input(Environment) -> Output(None) - def add_environment(self, env: Environment): - with belief_scope("ConfigManager.add_environment"): - if not isinstance(env, Environment): - raise TypeError("env must be an instance of Environment") - - logger.reason(f"Adding/Updating environment: {env.id}") - self.config.environments = [e for e in self.config.environments if e.id != env.id] - self.config.environments.append(env) - self.save() - logger.reflect(f"Environment {env.id} persisted") - # [/DEF:add_environment:Function] - - # [DEF:update_environment:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Update existing environment by id and preserve masked password placeholder behavior. - # @PRE: env_id is non-empty string and updated_env is Environment. - # @POST: Returns True and persists update when target exists; else returns False. - # @SIDE_EFFECT: May mutate environment list, DB write, logging. - # @DATA_CONTRACT: Input(str env_id, Environment updated_env) -> Output(bool) - def update_environment(self, env_id: str, updated_env: Environment) -> bool: - with belief_scope("ConfigManager.update_environment"): - if not env_id or not isinstance(env_id, str): - raise ValueError("env_id must be a non-empty string") - if not isinstance(updated_env, Environment): - raise TypeError("updated_env must be an instance of Environment") - - logger.reason(f"Attempting to update environment: {env_id}") - for i, env in enumerate(self.config.environments): - if env.id == env_id: - if updated_env.password == "********": - logger.reason("Preserving existing password for masked update") - updated_env.password = env.password - - self.config.environments[i] = updated_env - self.save() - logger.reflect(f"Environment {env_id} updated and saved") - return True - - logger.explore(f"Environment {env_id} not found for update") - return False - # [/DEF:update_environment:Function] - - # [DEF:delete_environment:Function] - # @COMPLEXITY: 3 - # @PURPOSE: Delete environment by id and 
persist when deletion occurs. - # @PRE: env_id is non-empty string. - # @POST: Environment is removed when present; otherwise configuration is unchanged. - # @SIDE_EFFECT: May mutate environment list, conditional DB write, logging. - # @DATA_CONTRACT: Input(str env_id) -> Output(None) - def delete_environment(self, env_id: str): - with belief_scope("ConfigManager.delete_environment"): - if not env_id or not isinstance(env_id, str): - raise ValueError("env_id must be a non-empty string") - - logger.reason(f"Attempting to delete environment: {env_id}") - original_count = len(self.config.environments) - self.config.environments = [e for e in self.config.environments if e.id != env_id] - - if len(self.config.environments) < original_count: - self.save() - logger.reflect(f"Environment {env_id} deleted and configuration saved") - else: - logger.explore(f"Environment {env_id} not found for deletion") - # [/DEF:delete_environment:Function] - - # [/DEF:ConfigManager:Class] -# [/DEF:ConfigManagerModule:Module] +# [/DEF:ConfigManager:Module] diff --git a/backend/src/core/task_manager/manager.py b/backend/src/core/task_manager/manager.py index 5e75d7d4..c6317618 100644 --- a/backend/src/core/task_manager/manager.py +++ b/backend/src/core/task_manager/manager.py @@ -1,4 +1,5 @@ -# [DEF:TaskManagerModule:Module] +# [DEF:TaskManager:Module] +# @TIER: CRITICAL # @COMPLEXITY: 5 # @SEMANTICS: task, manager, lifecycle, execution, state # @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously. @@ -7,8 +8,8 @@ # @POST: Orchestrates task execution and persistence. # @SIDE_EFFECT: Spawns worker threads and flushes logs to DB. 
# @DATA_CONTRACT: Input[plugin_id, params] -> Model[Task, LogEntry] -# @RELATION: [DEPENDS_ON] ->[backend.src.core.plugin_loader] -# @RELATION: [DEPENDS_ON] ->[backend.src.core.task_manager.persistence] +# @RELATION: [DEPENDS_ON] ->[PluginLoader:Class] +# @RELATION: [DEPENDS_ON] ->[TaskPersistenceModule:Module] # @INVARIANT: Task IDs are unique. # @CONSTRAINT: Must use belief_scope for logging. # @TEST_CONTRACT: TaskManagerModule -> { @@ -38,26 +39,19 @@ from ..logger import logger, belief_scope, should_log_task_level # [/SECTION] # [DEF:TaskManager:Class] +# @TIER: CRITICAL # @COMPLEXITY: 5 # @SEMANTICS: task, manager, lifecycle, execution, state # @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. +# @LAYER: Core +# @RELATION: [DEPENDS_ON] ->[TaskPersistenceService:Class] +# @RELATION: [DEPENDS_ON] ->[TaskLogPersistenceService:Class] +# @RELATION: [DEPENDS_ON] ->[PluginLoader:Class] # @INVARIANT: Task IDs are unique within the registry. # @INVARIANT: Each task has exactly one status at any time. # @INVARIANT: Log entries are never deleted after being added to a task. -# -# @TEST_CONTRACT: TaskManagerModel -> -# { -# required_fields: {plugin_loader: PluginLoader}, -# invariants: [ -# "Tasks are persisted immediately upon creation", -# "Running tasks use a thread pool or asyncio event loop based on executor type", -# "Log flushing runs on a background thread" -# ] -# } -# @TEST_FIXTURE: valid_manager -> {"plugin_loader": "MockPluginLoader()"} -# @TEST_EDGE: create_task_invalid_plugin -> raises ValueError -# @TEST_EDGE: create_task_invalid_params -> raises ValueError -# @TEST_INVARIANT: lifecycle_management -> verifies: [valid_manager] +# @SIDE_EFFECT: Spawns worker threads, flushes logs to database, and mutates task states. +# @DATA_CONTRACT: Input[plugin_id, params] -> Output[Task] class TaskManager: """ Manages the lifecycle of tasks, including their creation, execution, and state tracking. 
@@ -99,7 +93,7 @@ class TaskManager: # Load persisted tasks on startup self.load_persisted_tasks() # [/DEF:__init__:Function] - + # [DEF:_flusher_loop:Function] # @COMPLEXITY: 3 # @PURPOSE: Background thread that periodically flushes log buffer to database. @@ -111,7 +105,7 @@ class TaskManager: self._flush_logs() self._flusher_stop_event.wait(self.LOG_FLUSH_INTERVAL) # [/DEF:_flusher_loop:Function] - + # [DEF:_flush_logs:Function] # @COMPLEXITY: 3 # @PURPOSE: Flush all buffered logs to the database. @@ -138,7 +132,7 @@ class TaskManager: self._log_buffer[task_id] = [] self._log_buffer[task_id].extend(logs) # [/DEF:_flush_logs:Function] - + # [DEF:_flush_task_logs:Function] # @COMPLEXITY: 3 # @PURPOSE: Flush logs for a specific task immediately. @@ -644,6 +638,5 @@ class TaskManager: logger.info(f"Cleared {len(tasks_to_remove)} tasks.") return len(tasks_to_remove) # [/DEF:clear_tasks:Function] - # [/DEF:TaskManager:Class] -# [/DEF:TaskManagerModule:Module] +# [/DEF:TaskManager:Module] diff --git a/backend/src/core/utils/fileio.py b/backend/src/core/utils/fileio.py index 8fad8e01..5dd49155 100644 --- a/backend/src/core/utils/fileio.py +++ b/backend/src/core/utils/fileio.py @@ -1,5 +1,6 @@ -# [DEF:backend.core.utils.fileio:Module] +# [DEF:FileIO:Module] # +# @TIER: STANDARD # @SEMANTICS: file, io, zip, yaml, temp, archive, utility # @PURPOSE: Предоставляет набор утилит для управления файловыми операциями, включая работу с временными файлами, архивами ZIP, файлами YAML и очистку директорий. 
# @LAYER: Infra @@ -484,4 +485,4 @@ def consolidate_archive_folders(root_directory: Path) -> None: app_logger.error("[consolidate_archive_folders][Failure] Failed to remove source directory %s: %s", source_dir, e) # [/DEF:consolidate_archive_folders:Function] -# [/DEF:backend.core.utils.fileio:Module] \ No newline at end of file +# [/DEF:FileIO:Module] \ No newline at end of file diff --git a/backend/src/models/auth.py b/backend/src/models/auth.py index 870be068..0d8de21a 100644 --- a/backend/src/models/auth.py +++ b/backend/src/models/auth.py @@ -1,10 +1,11 @@ -# [DEF:backend.src.models.auth:Module] +# [DEF:AuthModels:Module] # +# @TIER: STANDARD # @COMPLEXITY: 3 # @SEMANTICS: auth, models, user, role, permission, sqlalchemy # @PURPOSE: SQLAlchemy models for multi-user authentication and authorization. # @LAYER: Domain -# @RELATION: INHERITS_FROM -> backend.src.models.mapping.Base +# @RELATION: INHERITS_FROM -> [Base] # # @INVARIANT: Usernames and emails must be unique. @@ -102,4 +103,4 @@ class ADGroupMapping(Base): role = relationship("Role") # [/DEF:ADGroupMapping:Class] -# [/DEF:backend.src.models.auth:Module] \ No newline at end of file +# [/DEF:AuthModels:Module] \ No newline at end of file diff --git a/backend/src/models/mapping.py b/backend/src/models/mapping.py index e0afb1e0..6e9b2051 100644 --- a/backend/src/models/mapping.py +++ b/backend/src/models/mapping.py @@ -1,10 +1,11 @@ -# [DEF:backend.src.models.mapping:Module] +# [DEF:MappingModels:Module] # +# @TIER: STANDARD # @COMPLEXITY: 3 # @SEMANTICS: database, mapping, environment, migration, sqlalchemy, sqlite # @PURPOSE: Defines the database schema for environment metadata and database mappings using SQLAlchemy. # @LAYER: Domain -# @RELATION: DEPENDS_ON -> sqlalchemy +# @RELATION: DEPENDS_ON -> [sqlalchemy] # # @INVARIANT: All primary keys are UUID strings. # @CONSTRAINT: source_env_id and target_env_id must be valid environment IDs. 
@@ -98,5 +99,5 @@ class ResourceMapping(Base): last_synced_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()) # [/DEF:ResourceMapping:Class] -# [/DEF:backend.src.models.mapping:Module] +# [/DEF:MappingModels:Module] diff --git a/backend/src/models/profile.py b/backend/src/models/profile.py index 3c73ba97..f7a0347c 100644 --- a/backend/src/models/profile.py +++ b/backend/src/models/profile.py @@ -1,11 +1,12 @@ -# [DEF:backend.src.models.profile:Module] +# [DEF:ProfileModels:Module] # +# @TIER: STANDARD # @COMPLEXITY: 3 # @SEMANTICS: profile, preferences, persistence, user, dashboard-filter, git, ui-preferences, sqlalchemy # @PURPOSE: Defines persistent per-user profile settings for dashboard filter, Git identity/token, and UX preferences. # @LAYER: Domain -# @RELATION: DEPENDS_ON -> backend.src.models.auth -# @RELATION: INHERITS_FROM -> backend.src.models.mapping.Base +# @RELATION: DEPENDS_ON -> [AuthModels] +# @RELATION: INHERITS_FROM -> [Base] # # @INVARIANT: Exactly one preference row exists per user_id. # @INVARIANT: Sensitive Git token is stored encrypted and never returned in plaintext. @@ -57,4 +58,4 @@ class UserDashboardPreference(Base): user = relationship("User") # [/DEF:UserDashboardPreference:Class] -# [/DEF:backend.src.models.profile:Module] +# [/DEF:ProfileModels:Module]