This commit is contained in:
2026-04-01 14:51:45 +03:00
parent 784d52d1af
commit 05f5cf5393
13 changed files with 384 additions and 1212 deletions

View File

@@ -5,7 +5,7 @@ os.environ["ENCRYPTION_KEY"] = "OnrCzomBWbIjTf7Y-fnhL2adlU55bHZQjp8zX5zBC5w="
# @COMPLEXITY: 3
# @SEMANTICS: tests, assistant, api
# @PURPOSE: Validate assistant API endpoint logic via direct async handler invocation.
# @RELATION: DEPENDS_ON -> backend.src.api.routes.assistant
# @RELATION: DEPENDS_ON -> [AssistantApi]
# @INVARIANT: Every test clears assistant in-memory state before execution.
import asyncio

View File

@@ -2,7 +2,7 @@
# @COMPLEXITY: 3
# @PURPOSE: API contract tests for redesigned clean release endpoints.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> backend.src.api.routes.clean_release_v2
# @RELATION: DEPENDS_ON -> [CleanReleaseV2Api]
from datetime import datetime, timezone
from types import SimpleNamespace

View File

@@ -2,7 +2,7 @@
# @COMPLEXITY: 3
# @PURPOSE: API contract test scaffolding for clean release approval and publication endpoints.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> backend.src.api.routes.clean_release_v2
# @RELATION: DEPENDS_ON -> [CleanReleaseV2Api]
"""Contract tests for redesigned approval/publication API endpoints."""

View File

@@ -2,7 +2,7 @@
# @COMPLEXITY: 3
# @PURPOSE: Unit tests for dashboards API endpoints.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.api.routes.dashboards
# @RELATION: DEPENDS_ON -> [DashboardsApi]
import pytest
from unittest.mock import MagicMock, patch, AsyncMock

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,13 @@
# [DEF:backend.src.api.routes.clean_release:Module]
# [DEF:CleanReleaseApi:Module]
# @COMPLEXITY: 4
# @SEMANTICS: api, clean-release, candidate-preparation, compliance
# @PURPOSE: Expose clean release endpoints for candidate preparation and subsequent compliance flow.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies.get_clean_release_repository
# @RELATION: DEPENDS_ON -> backend.src.services.clean_release.preparation_service
# @RELATION: DEPENDS_ON -> [get_clean_release_repository]
# @RELATION: DEPENDS_ON -> [PreparationService]
# @PRE: Clean release repository and preparation service dependencies are configured for the current request scope.
# @POST: Candidate preparation, manifest, and compliance routes expose deterministic API payloads without reporting prepared state on failed preparation.
# @SIDE_EFFECT: Persists candidate/compliance lifecycle state and triggers clean-release orchestration services.
# @INVARIANT: API never reports prepared status if preparation errors are present.
from __future__ import annotations
@@ -633,4 +636,4 @@ async def get_report(
# [/DEF:get_report:Function]
# [/DEF:backend.src.api.routes.clean_release:Module]
# [/DEF:CleanReleaseApi:Module]

View File

@@ -1295,6 +1295,7 @@ async def get_dashboard_tasks_history(
# [DEF:get_dashboard_thumbnail:Function]
# @COMPLEXITY: 3
# @PURPOSE: Proxies Superset dashboard thumbnail with cache support.
# @RELATION: CALLS ->[AsyncSupersetClient]
# @PRE: env_id must exist.
# @POST: Returns image bytes or 202 when thumbnail is being prepared by Superset.
@router.get("/{dashboard_ref}/thumbnail")

View File

@@ -448,48 +448,34 @@ def _require_session_version_header(
# [/DEF:_require_session_version_header:Function]
from src.logger import belief_scope, logger
# [DEF:_enforce_session_version:Function]
# @COMPLEXITY: 4
# @PURPOSE: Convert repository optimistic-lock conflicts into deterministic HTTP 409 responses.
# @RELATION: [DEPENDS_ON] ->[DatasetReviewSessionRepository]
def _enforce_session_version(
    repository: DatasetReviewSessionRepository,
    session: DatasetReviewSession,
    expected_version: int,
) -> DatasetReviewSession:
    """Verify the session's optimistic-lock version, mapping conflicts to HTTP 409.

    Delegates the actual check to the repository; on a version conflict the
    repository exception is translated into a deterministic 409 payload so API
    clients can distinguish stale writes from other failures.
    """
    try:
        repository.require_session_version(session, expected_version)
    except DatasetReviewSessionVersionConflictError as exc:
        # Surface every conflict detail so the client can retry with the
        # correct version instead of guessing.
        conflict_detail = {
            "error_code": "session_version_conflict",
            "message": str(exc),
            "session_id": exc.session_id,
            "expected_version": exc.expected_version,
            "actual_version": exc.actual_version,
        }
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=conflict_detail,
        ) from exc
    return session
def _enforce_session_version(
    repository: DatasetReviewSessionRepository,
    session: DatasetReviewSession,
    expected_version: int,
) -> DatasetReviewSession:
    """Check the session's optimistic-lock version inside a belief scope.

    Raises HTTP 409 with a structured conflict payload when the repository
    reports a version mismatch; otherwise returns the session unchanged.
    """
    with belief_scope('_enforce_session_version'):
        logger.reason('Belief protocol reasoning checkpoint for _enforce_session_version')
        try:
            repository.require_session_version(session, expected_version)
        except DatasetReviewSessionVersionConflictError as exc:
            # Preserve all conflict metadata so the caller can reconcile versions.
            detail = {
                'error_code': 'session_version_conflict',
                'message': str(exc),
                'session_id': exc.session_id,
                'expected_version': exc.expected_version,
                'actual_version': exc.actual_version,
            }
            raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=detail) from exc
        logger.reflect('Belief protocol postcondition checkpoint for _enforce_session_version')
        return session
# [/DEF:_enforce_session_version:Function]
# [DEF:_prepare_owned_session_mutation:Function]
# @COMPLEXITY: 4
# @PURPOSE: Resolve owner-scoped mutation session and enforce optimistic-lock version before changing dataset review state.
def _prepare_owned_session_mutation(
    repository: DatasetReviewSessionRepository,
    session_id: str,
    current_user: User,
    expected_version: int,
) -> DatasetReviewSession:
    """Resolve an owner-scoped session and enforce its optimistic-lock version.

    Looks up the session (404 if absent or not owned), checks the caller's
    mutation scope, then validates the expected version before any mutation.
    """
    owned_session = _get_owned_session_or_404(repository, session_id, current_user)
    _require_owner_mutation_scope(owned_session, current_user)
    return _enforce_session_version(repository, owned_session, expected_version)
def _prepare_owned_session_mutation(
    repository: DatasetReviewSessionRepository,
    session_id: str,
    current_user: User,
    expected_version: int,
) -> DatasetReviewSession:
    """Resolve an owner-scoped session and enforce its version, within a belief scope.

    Fetches the caller-owned session (404 if missing), verifies mutation scope,
    then delegates optimistic-lock enforcement to `_enforce_session_version`.
    """
    with belief_scope('_prepare_owned_session_mutation'):
        logger.reason('Belief protocol reasoning checkpoint for _prepare_owned_session_mutation')
        owned_session = _get_owned_session_or_404(repository, session_id, current_user)
        _require_owner_mutation_scope(owned_session, current_user)
        logger.reflect('Belief protocol postcondition checkpoint for _prepare_owned_session_mutation')
        return _enforce_session_version(repository, owned_session, expected_version)
# [/DEF:_prepare_owned_session_mutation:Function]
@@ -497,20 +483,16 @@ def _prepare_owned_session_mutation(
# @COMPLEXITY: 4
# @PURPOSE: Centralize dataset-review session version bumping and commit semantics for owner-scoped mutation endpoints.
# @RELATION: [DEPENDS_ON] ->[DatasetReviewSessionRepository]
def _commit_owned_session_mutation(
    repository: DatasetReviewSessionRepository,
    session: DatasetReviewSession,
    *,
    refresh_targets: Optional[List[Any]] = None,
) -> DatasetReviewSession:
    """Bump the session version, commit, and refresh the session plus extras.

    Centralizes commit semantics for owner-scoped mutation endpoints: the
    session's optimistic-lock version is bumped before the commit, and the
    session is refreshed first so the caller observes the persisted state.
    """
    repository.bump_session_version(session)
    db = repository.db
    db.commit()
    # Refresh the session before any extra targets so its new version is visible.
    for refreshed in [session, *(refresh_targets or [])]:
        db.refresh(refreshed)
    return session
def _commit_owned_session_mutation(
    repository: DatasetReviewSessionRepository,
    session: DatasetReviewSession,
    *,
    refresh_targets: Optional[List[Any]] = None,
) -> DatasetReviewSession:
    """Persist an owner-scoped mutation inside a belief scope.

    Bumps the session's optimistic-lock version, commits the transaction,
    then refreshes the session and any additional ORM objects supplied via
    `refresh_targets` so callers see the committed state.
    """
    with belief_scope('_commit_owned_session_mutation'):
        logger.reason('Belief protocol reasoning checkpoint for _commit_owned_session_mutation')
        repository.bump_session_version(session)
        db = repository.db
        db.commit()
        db.refresh(session)
        for pending in refresh_targets or []:
            db.refresh(pending)
        logger.reflect('Belief protocol postcondition checkpoint for _commit_owned_session_mutation')
        return session
# [/DEF:_commit_owned_session_mutation:Function]

View File

@@ -5,8 +5,8 @@
# @PURPOSE: API endpoints for the Dataset Hub - listing datasets with mapping progress
# @LAYER: API
# @RELATION: DEPENDS_ON ->[AppDependencies]
# @RELATION: DEPENDS_ON ->[backend.src.services.resource_service.ResourceService]
# @RELATION: DEPENDS_ON ->[backend.src.core.superset_client.SupersetClient]
# @RELATION: DEPENDS_ON ->[ResourceService]
# @RELATION: DEPENDS_ON ->[SupersetClient]
#
# @INVARIANT: All dataset responses include last_task metadata
@@ -178,7 +178,7 @@ async def get_dataset_ids(
# @PARAM: page (Optional[int]) - Page number (default: 1)
# @PARAM: page_size (Optional[int]) - Items per page (default: 10, max: 100)
# @RETURN: DatasetsResponse - List of datasets with status metadata
# @RELATION: CALLS ->[backend.src.services.resource_service.ResourceService.get_datasets_with_status]
# @RELATION: CALLS ->[get_datasets_with_status]
@router.get("", response_model=DatasetsResponse)
async def get_datasets(
env_id: str,
@@ -266,8 +266,8 @@ class MapColumnsRequest(BaseModel):
# @POST: Task is created and queued for execution
# @PARAM: request (MapColumnsRequest) - Mapping request with environment and dataset IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES ->[backend.src.plugins.mapper.MapperPlugin]
# @RELATION: CALLS ->[backend.src.core.task_manager.manager.TaskManager:create_task]
# @RELATION: DISPATCHES ->[MapperPlugin]
# @RELATION: CALLS ->[create_task]
@router.post("/map-columns", response_model=TaskResponse)
async def map_columns(
request: MapColumnsRequest,
@@ -338,8 +338,8 @@ class GenerateDocsRequest(BaseModel):
# @POST: Task is created and queued for execution
# @PARAM: request (GenerateDocsRequest) - Documentation generation request
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES ->[backend.src.plugins.llm_analysis.plugin.DocumentationPlugin]
# @RELATION: CALLS ->[backend.src.core.task_manager.manager.TaskManager:create_task]
# @RELATION: DISPATCHES ->[DocumentationPlugin]
# @RELATION: CALLS ->[create_task]
@router.post("/generate-docs", response_model=TaskResponse)
async def generate_docs(
request: GenerateDocsRequest,
@@ -393,7 +393,7 @@ async def generate_docs(
# @PARAM: env_id (str) - The environment ID
# @PARAM: dataset_id (int) - The dataset ID
# @RETURN: DatasetDetailResponse - Detailed dataset information
# @RELATION: CALLS ->[backend.src.core.superset_client.SupersetClient:get_dataset_detail]
# @RELATION: CALLS ->[SupersetClientGetDatasetDetail]
@router.get("/{dataset_id}", response_model=DatasetDetailResponse)
async def get_dataset_detail(
env_id: str,