diff --git a/backend/src/services/dataset_review/event_logger.py b/backend/src/services/dataset_review/event_logger.py index 53063261..d221bbda 100644 --- a/backend/src/services/dataset_review/event_logger.py +++ b/backend/src/services/dataset_review/event_logger.py @@ -1,4 +1,4 @@ -# [DEF:SessionEventLogger:Module] +# [DEF:SessionEventLoggerModule:Module] # @COMPLEXITY: 4 # @SEMANTICS: dataset_review, audit, session_events, persistence, observability # @PURPOSE: Persist explicit session mutation events for dataset-review audit trails without weakening ownership or approval invariants. @@ -8,10 +8,11 @@ # @PRE: Caller provides an owned session scope and an authenticated actor identifier for each persisted mutation event. # @POST: Every logged event is committed as an explicit, queryable audit record with deterministic event metadata. # @SIDE_EFFECT: Inserts persisted session event rows and emits runtime belief-state logs for audit-sensitive mutations. +# @DATA_CONTRACT: Input[SessionEventPayload] -> Output[SessionEvent] from __future__ import annotations -# [DEF:SessionEventLogger.imports:Block] +# [DEF:SessionEventLoggerImports:Block] from dataclasses import dataclass, field from typing import Any, Dict, Optional @@ -19,7 +20,7 @@ from sqlalchemy.orm import Session from src.core.logger import belief_scope, logger from src.models.dataset_review import DatasetReviewSession, SessionEvent -# [/DEF:SessionEventLogger.imports:Block] +# [/DEF:SessionEventLoggerImports:Block] # [DEF:SessionEventPayload:Class] @@ -41,10 +42,11 @@ class SessionEventPayload: # @COMPLEXITY: 4 # @PURPOSE: Persist explicit dataset-review session audit events with meaningful runtime reasoning logs. # @RELATION: [DEPENDS_ON] ->[SessionEvent] -# @RELATION: [DEPENDS_ON] ->[sqlalchemy.orm.Session] +# @RELATION: [DEPENDS_ON] ->[SessionEventPayload] # @PRE: The database session is live and payload identifiers are non-empty. 
# @POST: Returns the committed session event row with a stable identifier and stored detail payload. # @SIDE_EFFECT: Writes one audit row to persistence and emits logger.reason/logger.reflect traces. +# @DATA_CONTRACT: Input[SessionEventPayload] -> Output[SessionEvent] class SessionEventLogger: # [DEF:SessionEventLogger.__init__:Function] # @COMPLEXITY: 2 @@ -153,4 +155,4 @@ class SessionEventLogger: # [/DEF:SessionEventLogger.log_for_session:Function] # [/DEF:SessionEventLogger:Class] -# [/DEF:SessionEventLogger:Module] \ No newline at end of file +# [/DEF:SessionEventLoggerModule:Module] \ No newline at end of file diff --git a/backend/src/services/git_service.py b/backend/src/services/git_service.py index 669937de..c16b13d6 100644 --- a/backend/src/services/git_service.py +++ b/backend/src/services/git_service.py @@ -65,7 +65,13 @@ class GitService: base = Path(self.base_path) if base.exists() and not base.is_dir(): raise ValueError(f"Git repositories base path is not a directory: {self.base_path}") - base.mkdir(parents=True, exist_ok=True) + try: + base.mkdir(parents=True, exist_ok=True) + except (PermissionError, OSError) as e: + logger.warning( + f"[_ensure_base_path_exists][Coherence:Failed] Cannot create Git repositories base path: {self.base_path}. Error: {e}" + ) + raise ValueError(f"Cannot create Git repositories base path: {self.base_path}. {e}")
# [/DEF:backend.src.services.git_service.GitService._ensure_base_path_exists:Function] # [DEF:backend.src.services.git_service.GitService._resolve_base_path:Function] diff --git a/backend/tests/services/dataset_review/test_superset_matrix.py b/backend/tests/services/dataset_review/test_superset_matrix.py index a948a1e1..29ed3975 100644 --- a/backend/tests/services/dataset_review/test_superset_matrix.py +++ b/backend/tests/services/dataset_review/test_superset_matrix.py @@ -3,8 +3,8 @@ # @SEMANTICS: dataset_review, superset, compatibility_matrix, preview, sql_lab, tests # @PURPOSE: Verifies Superset preview and SQL Lab endpoint fallback strategy used by dataset-review orchestration. # @LAYER: Tests +# @RELATION: [DEPENDS_ON] ->[backend.src.core.superset_client.SupersetClient] # @RELATION: [DEPENDS_ON] ->[SupersetCompilationAdapter] -# @RELATION: [DEPENDS_ON] ->[SupersetClient] from types import SimpleNamespace from unittest.mock import MagicMock @@ -23,6 +23,7 @@ from src.core.utils.superset_compilation_adapter import ( # [DEF:make_adapter:Function] # @COMPLEXITY: 2 # @PURPOSE: Build an adapter with a mock Superset client and deterministic environment for compatibility tests. +# @RELATION: [DEPENDS_ON] ->[SupersetCompilationAdapter] def make_adapter(): environment = SimpleNamespace( id="env-1", @@ -42,6 +43,7 @@ def make_adapter(): # [DEF:test_preview_prefers_supported_client_method_before_network_fallback:Function] # @COMPLEXITY: 2 # @PURPOSE: Confirms preview compilation uses a supported client method first when the capability exists. 
+# @RELATION: [DEPENDS_ON] ->[SupersetCompilationAdapter] def test_preview_prefers_supported_client_method_before_network_fallback(): adapter, client = make_adapter() client.compile_preview = MagicMock(return_value={"compiled_sql": "SELECT 1"}) @@ -65,6 +67,7 @@ def test_preview_prefers_supported_client_method_before_network_fallback(): # [DEF:test_preview_falls_back_across_matrix_until_supported_endpoint_returns_sql:Function] # @COMPLEXITY: 3 # @PURPOSE: Confirms preview fallback walks the compatibility matrix from preferred to legacy endpoints until one returns compiled SQL. +# @RELATION: [DEPENDS_ON] ->[SupersetCompilationAdapter] def test_preview_falls_back_across_matrix_until_supported_endpoint_returns_sql(): adapter, client = make_adapter() payload = PreviewCompilationPayload( @@ -95,6 +98,7 @@ def test_preview_falls_back_across_matrix_until_supported_endpoint_returns_sql() # [DEF:test_sql_lab_launch_falls_back_to_legacy_execute_endpoint:Function] # @COMPLEXITY: 3 # @PURPOSE: Confirms SQL Lab launch falls back from modern to legacy execute endpoint and preserves canonical session reference extraction. +# @RELATION: [DEPENDS_ON] ->[SupersetCompilationAdapter] def test_sql_lab_launch_falls_back_to_legacy_execute_endpoint(): adapter, client = make_adapter() client.get_dataset.return_value = {