Compare commits: v1.0.0-rc2...09e59ba88b

153 Commits
| SHA1 |
|---|
| 09e59ba88b |
| 638597f182 |
| bb921ce5dd |
| fa380ff9a5 |
| ce3955ed2e |
| 19898b1570 |
| da24fb9253 |
| 80b28ac371 |
| f24200d52a |
| 5d45b4adb0 |
| daa9f7be3a |
| 7e43830144 |
| 066747de59 |
| 442d0e0ac2 |
| 8fa951fc93 |
| 149d230426 |
| 4c601fbe06 |
| 36173c0880 |
| 81d62c1345 |
| a8f7147500 |
| ce684bc5d1 |
| 484019e750 |
| 4ff6d307f8 |
| f4612c0737 |
| 5ec1254336 |
| b7d1ee2b71 |
| 87285d8f0a |
| 04b01eadb5 |
| 4d5b9e88dd |
| 4bad4ab4e2 |
| 3801ca13d9 |
| 999c0c54df |
| f9ac282596 |
| 5d42a6b930 |
| 99f19ac305 |
| 590ba49ddb |
| 2a5b225800 |
| 33433c3173 |
| 21e969a769 |
| 783644c6ad |
| d32d85556f |
| bc0367ab72 |
| 1c362f4092 |
| 95ae9c6af1 |
| 7a12ed0931 |
| e0c0dd3221 |
| 5f6e9c0cc0 |
| 4fd9d6b6d5 |
| 7e6bd56488 |
| 5e3c213b92 |
| 37b75b5a5c |
| 3d42a487f7 |
| 2e93f5ca63 |
| 286167b1d5 |
| 7df7b4f98c |
| ab1c87ffba |
| 40e6d8cd4c |
| 18e96a58bc |
| 83e4875097 |
| e635bd7e5f |
| 43dd97ecbf |
| 0685f50ae7 |
| d0ffc2f1df |
| 26880d2e09 |
| 008b6d72c9 |
| f0c85e4c03 |
| 6ffdf5f8a4 |
| 0cf0ef25f1 |
| af74841765 |
| d7e4919d54 |
| fdcbe32dfa |
| 4de5b22d57 |
| c8029ed309 |
| c2a4c8062a |
| 2c820e103a |
| c8b84b7bd7 |
| fdb944f123 |
| d29bc511a2 |
| a3a9f0788d |
| 77147dc95b |
| 026239e3bf |
| 4a0273a604 |
| edb2dd5263 |
| 76b98fcf8f |
| 794cc55fe7 |
| 235b0e3c9f |
| e6087bd3c1 |
| 0f16bab2b8 |
| 7de96c17c4 |
| f018b97ed2 |
| 72846aa835 |
| 994c0c3e5d |
| 252a8601a9 |
| 8044f85ea4 |
| d4109e5a03 |
| b2bbd73439 |
| 0e0e26e2f7 |
| 18b42f8dd0 |
| e7b31accd6 |
| d3c3a80ed2 |
| cc244c2d86 |
| d10c23e658 |
| 1042b35d1b |
| 16ffeb1ed6 |
| da34deac02 |
| 51e9ee3fcc |
| edf9286071 |
| a542e7d2df |
| a863807cf2 |
| e2bc68683f |
| 43cb82697b |
| 4ba28cf93e |
| 343f2e29f5 |
| c9a53578fd |
| 07ec2d9797 |
| e9d3f3c827 |
| 26ba015b75 |
| 49129d3e86 |
| d99a13d91f |
| 203ce446f4 |
| c96d50a3f4 |
| 3bbe320949 |
| 2d2435642d |
| ec8d67c956 |
| 76baeb1038 |
| 11c59fb420 |
| b2529973eb |
| ae1d630ad6 |
| 9a9c5879e6 |
| 696aac32e7 |
| 7a9b1a190a |
| a3dc1fb2b9 |
| 297b29986d |
| 4c6fc8256d |
| a747a163c8 |
| fce0941e98 |
| 45c077b928 |
| 9ed3a5992d |
| a032fe8457 |
| 4c9d554432 |
| 6962a78112 |
| 3d75a21127 |
| 07914c8728 |
| cddc259b76 |
| dcbf0a7d7f |
| 65f61c1f80 |
| cb7386f274 |
| 83e34e1799 |
| d197303b9f |
| a43f8fb021 |
| 4aa01b6470 |
| 35b423979d |
| 2ffc3cc68f |
.ai/MODULE_MAP.md (1062 lines changed)
File diff suppressed because it is too large

.ai/PROJECT_MAP.md (3038 lines changed)
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
-#[DEF:BackendRouteShot:Module]
+# [DEF:BackendRouteShot:Module]
 # @TIER: STANDARD
 # @SEMANTICS: Route, Task, API, Async
 # @PURPOSE: Reference implementation of a task-based route using GRACE-Poly.
@@ -9,66 +9,53 @@
 from typing import Dict, Any
 from fastapi import APIRouter, Depends, HTTPException, status
 from pydantic import BaseModel
-# GRACE: Correct import of the global logger and scope
-from ...core.logger import logger, belief_scope
+from ...core.logger import belief_scope
 from ...core.task_manager import TaskManager, Task
 from ...core.config_manager import ConfigManager
 from ...dependencies import get_task_manager, get_config_manager, get_current_user

 router = APIRouter()

-# [DEF:CreateTaskRequest:Class]
-# @PURPOSE: DTO for task creation payload.
 class CreateTaskRequest(BaseModel):
     plugin_id: str
     params: Dict[str, Any]
-# [/DEF:CreateTaskRequest:Class]

+@router.post("/tasks", response_model=Task, status_code=status.HTTP_201_CREATED)
 # [DEF:create_task:Function]
 # @PURPOSE: Create and start a new task using TaskManager. Non-blocking.
-# @DATA_CONTRACT: Input -> CreateTaskRequest, Output -> Task
+# @PARAM: request (CreateTaskRequest) - Plugin and params.
+# @PARAM: task_manager (TaskManager) - Async task executor.
 # @PRE: plugin_id must match a registered plugin.
-# @POST: A new task is spawned; Task object returned immediately.
-# @SIDE_EFFECT: Writes to DB, Triggers background worker.
-#
-# @UX_STATE: Success -> 201 Created
-# @UX_STATE: Error(Validation) -> 400 Bad Request
-# @UX_STATE: Error(System) -> 500 Internal Server Error
-@router.post("/tasks", response_model=Task, status_code=status.HTTP_201_CREATED)
+# @POST: A new task is spawned; Task ID returned immediately.
+# @SIDE_EFFECT: Writes to DB, Trigger background worker.
 async def create_task(
     request: CreateTaskRequest,
     task_manager: TaskManager = Depends(get_task_manager),
     config: ConfigManager = Depends(get_config_manager),
     current_user = Depends(get_current_user)
 ):
-    # GRACE: Open a semantic transaction
+    # Context Logging
     with belief_scope("create_task"):
         try:
-            # GRACE: [REASON] - Mark the start of the deductive chain
-            logger.reason("Resolving configuration and spawning task", extra={"plugin_id": request.plugin_id})
+            # 1. Action: Configuration Resolution

             timeout = config.get("TASKS_DEFAULT_TIMEOUT", 3600)

+            # 2. Action: Spawn async task
             # @RELATION: CALLS -> task_manager.create_task
             task = await task_manager.create_task(
                 plugin_id=request.plugin_id,
                 params={**request.params, "timeout": timeout}
             )

-            # GRACE:[REFLECT] - Confirm @POST is satisfied before exit
-            logger.reflect("Task spawned successfully", extra={"task_id": task.id})
             return task

         except ValueError as e:
-            # GRACE: [EXPLORE] - Handle an expected rejection
-            logger.explore("Domain validation error during task creation", exc_info=e)
+            # 3. Recovery: Domain logic error mapping
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=str(e)
             )
         except Exception as e:
-            # GRACE: [EXPLORE] - Handle a critical failure
-            logger.explore("Internal Task Spawning Error", exc_info=e)
+            # @UX_STATE: Error feedback -> 500 Internal Error
             raise HTTPException(
                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                 detail="Internal Task Spawning Error"
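The hunk above keeps the route's HTTP contract (201 on success, 400 for domain validation, 500 otherwise) while swapping the GRACE logger calls for plain numbered comments. A hedged client-side sketch of that contract follows; the base URL and mount point are assumptions for illustration, not taken from the diff:

```python
# Sketch only: exercises the route's status-code mapping described by the
# (removed) @UX_STATE tags. Assumes a server is running at the URL below.
import httpx

def spawn_task(plugin_id: str, params: dict) -> dict:
    resp = httpx.post(
        "http://localhost:8000/api/tasks",  # assumed host and mount point
        json={"plugin_id": plugin_id, "params": params},
        timeout=10.0,
    )
    if resp.status_code == 201:
        return resp.json()  # the spawned Task, returned immediately (@POST)
    if resp.status_code == 400:
        # Server-side ValueError (unknown plugin, bad params) maps to 400.
        raise ValueError(resp.json().get("detail", "validation failed"))
    resp.raise_for_status()  # 500 and other error statuses raise here
    raise RuntimeError(f"unexpected status: {resp.status_code}")
```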
@@ -3,28 +3,34 @@
 # @SEMANTICS: Finance, ACID, Transfer, Ledger
 # @PURPOSE: Core banking transaction processor with ACID guarantees.
 # @LAYER: Domain (Core)
-# @RELATION: DEPENDS_ON -> [DEF:Infra:PostgresDB]
+# @RELATION: DEPENDS_ON ->[DEF:Infra:PostgresDB]
 #
 # @INVARIANT: Total system balance must remain constant (Double-Entry Bookkeeping).
 # @INVARIANT: Negative transfers are strictly forbidden.

-# --- Test Specifications ---
-# @TEST_CONTRACT: TransferRequestDTO -> TransferResultDTO
+# --- Test Specifications (The "What" and "Why", not the "Data") ---
+# @TEST_CONTRACT: Input -> TransferInputDTO, Output -> TransferResultDTO

+# Happy Path
 # @TEST_SCENARIO: sufficient_funds -> Returns COMPLETED, balances updated.
 # @TEST_FIXTURE: sufficient_funds -> file:./__tests__/fixtures/transfers.json#happy_path
-# @TEST_EDGE: insufficient_funds -> Throws BusinessRuleViolation("INSUFFICIENT_FUNDS").
-# @TEST_EDGE: negative_amount -> Throws BusinessRuleViolation("Transfer amount must be positive.").
-# @TEST_EDGE: concurrency_conflict -> Throws DBTransactionError.
-#
+
+# Edge Cases (CRITICAL)
+# @TEST_SCENARIO: insufficient_funds -> Throws BusinessRuleViolation("INSUFFICIENT_FUNDS").
+# @TEST_SCENARIO: negative_amount -> Throws BusinessRuleViolation("Transfer amount must be positive.").
+# @TEST_SCENARIO: self_transfer -> Throws BusinessRuleViolation("Cannot transfer to self.").
+# @TEST_SCENARIO: audit_failure -> Throws RuntimeError("TRANSACTION_ABORTED").
+# @TEST_SCENARIO: concurrency_conflict -> Throws DBTransactionError.

+# Linking Tests to Invariants
 # @TEST_INVARIANT: total_balance_constant -> VERIFIED_BY: [sufficient_funds, concurrency_conflict]
 # @TEST_INVARIANT: negative_transfer_forbidden -> VERIFIED_BY: [negative_amount]


 from decimal import Decimal
 from typing import NamedTuple
-# GRACE: Import the global logger with semantic methods
-from ...core.logger import logger, belief_scope
+from ...core.logger import belief_scope
 from ...core.db import atomic_transaction, get_balance, update_balance
-from ...core.audit import log_audit_trail
 from ...core.exceptions import BusinessRuleViolation

 class TransferResult(NamedTuple):
@@ -34,54 +40,55 @@ class TransferResult(NamedTuple):

 # [DEF:execute_transfer:Function]
 # @PURPOSE: Atomically move funds between accounts with audit trails.
-# @DATA_CONTRACT: Input -> (sender_id: str, receiver_id: str, amount: Decimal), Output -> TransferResult
+# @PARAM: sender_id (str) - Source account.
+# @PARAM: receiver_id (str) - Destination account.
+# @PARAM: amount (Decimal) - Positive amount to transfer.
 # @PRE: amount > 0; sender != receiver; sender_balance >= amount.
 # @POST: sender_balance -= amount; receiver_balance += amount; Audit Record Created.
 # @SIDE_EFFECT: Database mutation (Rows locked), Audit IO.
 #
 # @UX_STATE: Success -> Returns 200 OK + Transaction Receipt.
 # @UX_STATE: Error(LowBalance) -> 422 Unprocessable -> UI shows "Top-up needed" modal.
+# @UX_STATE: Error(System) -> 500 Internal -> UI shows "Retry later" toast.
 def execute_transfer(sender_id: str, receiver_id: str, amount: Decimal) -> TransferResult:
-    # Guard: Input Validation (outside belief_scope, since this is a trivial check)
+    # Guard: Input Validation
     if amount <= Decimal("0.00"):
         raise BusinessRuleViolation("Transfer amount must be positive.")
     if sender_id == receiver_id:
         raise BusinessRuleViolation("Cannot transfer to self.")

-    # GRACE: Use the strict context manager without 'as context'
-    with belief_scope("execute_transfer"):
-        # GRACE: [REASON] - Hard deduction, start of the algorithm
-        logger.reason("Initiating transfer", extra={"from": sender_id, "to": receiver_id, "amount": amount})
+    with belief_scope("execute_transfer") as context:
+        context.logger.info("Initiating transfer", data={"from": sender_id, "to": receiver_id})

         try:
+            # 1. Action: Atomic DB Transaction
             # @RELATION: CALLS -> atomic_transaction
             with atomic_transaction():
+                # Guard: State Validation (Strict)
                 current_balance = get_balance(sender_id, for_update=True)

                 if current_balance < amount:
-                    # GRACE: [EXPLORE] - Deviation from the happy path (fallback/error)
-                    logger.explore("Insufficient funds validation hit", extra={"balance": current_balance})
+                    # @UX_FEEDBACK: Triggers specific UI flow for insufficient funds
+                    context.logger.warn("Insufficient funds", data={"balance": current_balance})
                     raise BusinessRuleViolation("INSUFFICIENT_FUNDS")

-                # Mutation
+                # 2. Action: Mutation
                 new_src_bal = update_balance(sender_id, -amount)
                 new_dst_bal = update_balance(receiver_id, +amount)

-                # Audit
-                tx_id = log_audit_trail("TRANSFER", sender_id, receiver_id, amount)
+                # 3. Action: Audit
+                tx_id = context.audit.log_transfer(sender_id, receiver_id, amount)

-            # GRACE:[REFLECT] - Check against @POST before returning
-            logger.reflect("Transfer committed successfully", extra={"tx_id": tx_id, "new_balance": new_src_bal})
-
+            context.logger.info("Transfer committed", data={"tx_id": tx_id})
             return TransferResult(tx_id, "COMPLETED", new_src_bal)

         except BusinessRuleViolation as e:
-            # Explicit re-raise for UI mapping
+            # Logic: Explicit re-raise for UI mapping
             raise e
         except Exception as e:
-            # GRACE: [EXPLORE] - Unexpected failure
-            logger.explore("Critical Transfer Failure", exc_info=e)
+            # Logic: Catch-all safety net
            context.logger.error("Critical Transfer Failure", error=e)
             raise RuntimeError("TRANSACTION_ABORTED") from e
-#[/DEF:execute_transfer:Function]
+# [/DEF:execute_transfer:Function]

 # [/DEF:TransactionCore:Module]
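The new `@TEST_SCENARIO` tags above are written to be machine-readable test seeds. As a sketch, the two pure-guard scenarios could be realized with pytest as below; the import name `transaction_core` is hypothetical, and these two cases fail before any DB call, so no patching is needed:

```python
from decimal import Decimal

import pytest

import transaction_core as tc  # hypothetical module name for the file above

def test_negative_amount():
    # @TEST_SCENARIO: negative_amount -> Throws BusinessRuleViolation(...)
    with pytest.raises(tc.BusinessRuleViolation, match="must be positive"):
        tc.execute_transfer("acc_a", "acc_b", Decimal("-5.00"))

def test_self_transfer():
    # @TEST_SCENARIO: self_transfer -> Throws BusinessRuleViolation(...)
    with pytest.raises(tc.BusinessRuleViolation, match="Cannot transfer to self"):
        tc.execute_transfer("acc_a", "acc_a", Decimal("5.00"))
```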
@@ -11,27 +11,45 @@
  * @INVARIANT: Loading state must always terminate (no infinite spinner).
  * @INVARIANT: User must receive feedback on both success and failure.
  *
- * @UX_REACTIVITY: Props -> $props(), LocalState -> $state(isLoading).
- *
  * @TEST_CONTRACT: ComponentState ->
  * {
- *   required_fields: { isLoading: bool },
- *   invariants:[
+ *   required_fields: {
+ *     isLoading: bool
+ *   },
+ *   invariants: [
  *     "isLoading=true implies button.disabled=true",
- *     "isLoading=true implies aria-busy=true"
+ *     "isLoading=true implies aria-busy=true",
+ *     "isLoading=true implies spinner visible"
  *   ]
  * }
 *
- * @TEST_FIXTURE: idle_state -> { isLoading: false }
- * @TEST_FIXTURE: successful_response -> { task_id: "task_123" }
- *
+ * @TEST_CONTRACT: ApiResponse ->
+ * {
+ *   required_fields: {},
+ *   optional_fields: {
+ *     task_id: str
+ *   }
+ * }

+ * @TEST_FIXTURE: idle_state ->
+ * {
+ *   isLoading: false
+ * }
+ *
+ * @TEST_FIXTURE: successful_response ->
+ * {
+ *   task_id: "task_123"
+ * }

  * @TEST_EDGE: api_failure -> raises Error("Network")
  * @TEST_EDGE: empty_response -> {}
  * @TEST_EDGE: rapid_double_click -> special: concurrent_click
- *
- * @TEST_INVARIANT: prevent_double_submission -> VERIFIED_BY:[rapid_double_click]
- * @TEST_INVARIANT: feedback_always_emitted -> VERIFIED_BY:[successful_response, api_failure]
- *
+ * @TEST_EDGE: unresolved_promise -> special: pending_state
+ * @TEST_INVARIANT: prevent_double_submission -> verifies: [rapid_double_click]
+ * @TEST_INVARIANT: loading_state_consistency -> verifies: [idle_state, pending_state]
+ * @TEST_INVARIANT: feedback_always_emitted -> verifies: [successful_response, api_failure]

  * @UX_STATE: Idle -> Button enabled, primary color, no spinner.
  * @UX_STATE: Loading -> Button disabled, spinner visible, aria-busy=true.
  * @UX_STATE: Success -> Toast success displayed.
@@ -41,39 +59,44 @@
 *
  * @UX_TEST: Idle -> {click: spawnTask, expected: isLoading=true}
  * @UX_TEST: Loading -> {double_click: ignored, expected: single_api_call}
- */
+ * @UX_TEST: Success -> {api_resolve: task_id, expected: toast.success called}
+ * @UX_TEST: Error -> {api_reject: error, expected: toast.error called}
 -->
 <script>
   import { postApi } from "$lib/api.js";
   import { t } from "$lib/i18n";
   import { toast } from "$lib/stores/toast";

-  // GRACE Svelte 5 Runes
-  let { plugin_id = "", params = {} } = $props();
-  let isLoading = $state(false);
+  export let plugin_id = "";
+  export let params = {};
+  let isLoading = false;

   // [DEF:spawnTask:Function]
   /**
-   * @PURPOSE: Execute task creation request and emit user feedback.
-   * @PRE: plugin_id is resolved and request params are serializable.
-   * @POST: isLoading is reset and user receives success/error feedback.
+   * @purpose Execute task creation request and emit user feedback.
+   * @pre plugin_id is resolved and request params are serializable.
+   * @post isLoading is reset and user receives success/error feedback.
    */
   async function spawnTask() {
     isLoading = true;
-    console.info("[spawnTask][REASON] Spawning task...", { plugin_id });
+    console.log("[FrontendComponentShot][Loading] Spawning task...");

     try {
       // 1. Action: API Call
-      const response = await postApi("/api/tasks", { plugin_id, params });
+      const response = await postApi("/api/tasks", {
+        plugin_id,
+        params
+      });

-      // 2. Feedback: Success validation
+      // 2. Feedback: Success
       if (response.task_id) {
-        console.info("[spawnTask][REFLECT] Task created.", { task_id: response.task_id });
+        console.log("[FrontendComponentShot][Success] Task created.");
         toast.success($t.tasks.spawned_success);
       }
     } catch (error) {
-      // 3. Recovery: Error handling & fallback logic
-      console.error("[spawnTask][EXPLORE] Failed to spawn task. Notifying user.", { error });
+      // 3. Recovery: User notification
+      console.log("[FrontendComponentShot][Error] Failed:", error);
       toast.error(`${$t.errors.task_failed}: ${error.message}`);
     } finally {
       isLoading = false;
@@ -83,7 +106,7 @@
 </script>

 <button
-  onclick={spawnTask}
+  on:click={spawnTask}
   disabled={isLoading}
   class="btn-primary flex items-center gap-2"
   aria-busy={isLoading}
@@ -9,19 +9,15 @@
 from typing import Dict, Any, Optional
 from ..core.plugin_base import PluginBase
 from ..core.task_manager.context import TaskContext
-# GRACE: Mandatory import of the semantic logger
-from ..core.logger import logger, belief_scope

-# [DEF:ExamplePlugin:Class]
-# @PURPOSE: A sample plugin to demonstrate execution context and logging.
 class ExamplePlugin(PluginBase):
     @property
     def id(self) -> str:
         return "example-plugin"

-    #[DEF:get_schema:Function]
+    # [DEF:get_schema:Function]
     # @PURPOSE: Defines input validation schema.
-    # @DATA_CONTRACT: Input -> None, Output -> Dict (JSON Schema draft 7)
+    # @POST: Returns dict compliant with JSON Schema draft 7.
     def get_schema(self) -> Dict[str, Any]:
         return {
             "type": "object",
@@ -33,43 +29,36 @@ class ExamplePlugin(PluginBase):
             },
             "required": ["message"],
         }
-    #[/DEF:get_schema:Function]
+    # [/DEF:get_schema:Function]

     # [DEF:execute:Function]
     # @PURPOSE: Core plugin logic with structured logging and scope isolation.
-    # @DATA_CONTRACT: Input -> (params: Dict, context: Optional[TaskContext]), Output -> None
-    # @PRE: params must be validated against get_schema() before calling.
-    # @POST: Plugin payload is processed; progress is reported if context exists.
-    # @SIDE_EFFECT: Emits logs to centralized system and TaskContext.
-    async def execute(self, params: Dict, context: Optional[TaskContext] = None):
-        message = params.get("message", "Fallback")
+    # @PARAM: params (Dict) - Validated input parameters.
+    # @PARAM: context (TaskContext) - Execution tools (log, progress).
+    # @SIDE_EFFECT: Emits logs to centralized system.
+    async def execute(self, params: Dict, context: Optional = None):
+        message = params

-        # GRACE: Isolate AI thoughts in a thread-local scope
-        with belief_scope("example_plugin_exec"):
+        # 1. Action: System-level tracing (Rule VI)
+        with belief_scope("example_plugin_exec") as b_scope:
             if context:
+                # Task Logs: Write to the user-facing task execution context
                 # @RELATION: BINDS_TO -> context.logger
                 log = context.logger.with_source("example_plugin")

-                # GRACE: [REASON] - System log (internal thought)
-                logger.reason("TaskContext provided. Binding task logger.", extra={"msg": message})
-
-                # Task Logs: Business logs (these go to the DB/WebSocket for the user)
-                log.info("Starting execution", extra={"msg": message})
+                b_scope.logger.info("Using provided TaskContext")  # System log
+                log.info("Starting execution", data={"msg": message})  # Task log
+
+                # 2. Action: Progress Reporting
                 log.progress("Processing...", percent=50)

+                # 3. Action: Finalize
                 log.info("Execution completed.")

-                # GRACE: [REFLECT] - Verify the successful exit
-                logger.reflect("Context execution finalized successfully")
             else:
-                # GRACE:[EXPLORE] - Fallback branch (deviation from the norm)
-                logger.explore("No TaskContext provided. Running standalone.")
-
-                # Standalone Fallback
-                print(f"Standalone execution: {message}")
-
-                # GRACE: [REFLECT] - Verify the fallback exit
-                logger.reflect("Standalone execution finalized")
+                # Standalone Fallback: Fall back to the system scope
+                b_scope.logger.warning("No TaskContext provided. Running standalone.")
+                b_scope.logger.info("Standalone execution", data={"msg": message})
+                print(f"Standalone: {message}")
     # [/DEF:execute:Function]

-#[/DEF:ExamplePlugin:Class]
 # [/DEF:PluginExampleShot:Module]
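For orientation, a hedged sketch of a standalone smoke run of the reworked plugin; the module name is an assumption, and the project's `core.logger` must be importable for `belief_scope` to resolve. With no `TaskContext`, execution falls into the fallback branch:

```python
import asyncio

from example_plugin import ExamplePlugin  # assumed module name

async def main() -> None:
    plugin = ExamplePlugin()
    # get_schema() requires "message"; note the new code binds the whole
    # params dict to `message`, so the printed fallback shows the full dict.
    await plugin.execute({"message": "hello"})  # no context -> standalone branch

asyncio.run(main())
```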
@@ -8,7 +8,7 @@
 ## 1. CORE PRINCIPLES

 ### I. Semantic Protocol Compliance
-* **Ref:** `[DEF:Std:Semantics]` (`ai/standards/semantic.md`)
+* **Ref:** `[DEF:Std:Semantics]` (formerly `semantic_protocol.md`)
 * **Law:** All code must adhere to the Axioms (Meaning First, Contract First, etc.).
 * **Compliance:** Strict matching of Anchors (`[DEF]`), Tags (`@KEY`), and structures is mandatory.

@@ -1,105 +1,132 @@
-# SYSTEM DIRECTIVE: GRACE-Poly (UX Edition) v2.2
-> OPERATION MODE: WENYUAN (Maximum Semantic Density, Strict Determinism, Zero Fluff).
-> ROLE: AI Software Architect & Implementation Engine (Python/Svelte).
+### **SYSTEM STANDARD: GRACE-Poly (UX Edition)**

-## 0.[ZERO-STATE RATIONALE: LLM PHYSICS (WHY THIS PROTOCOL IS NECESSARY)]
-You are an autoregressive model (a Transformer). You think in tokens and cannot "change your mind" after generating them. In large codebases your KV-Cache is prone to attention degradation (Attention Sink), which leads to an "illusion of competence" and to hallucinations.
-This protocol is **your cognitive exoskeleton**.
-The `[DEF]` anchors work as attention-accumulator vectors. Contracts (`@PRE`, `@POST`) force you to form the correct probability space (Belief State) BEFORE writing the algorithm. The `logger.reason` logs are your chain of reasoning (Chain-of-Thought), lifted into the runtime. We do not write text, we compile semantics into syntax.
+TASK: Code generation (Python/Svelte).
+MODE: Strict. Deterministic. No chatter.

-## I. GLOBAL INVARIANTS (AXIOMS)
-[INVARIANT_1] SEMANTICS > SYNTAX. Bare code without a contract is classified as garbage.
-[INVARIANT_2] HALLUCINATIONS FORBIDDEN. Under context blindness (an unknown `@RELATION` node or data schema), generation is blocked. Emit `[NEED_CONTEXT: target]`.
-[INVARIANT_3] UX IS A FINITE STATE MACHINE. Interface states are a strict contract, not visual decoration.
-[INVARIANT_4] FRACTAL LIMIT. Module length strictly < 300 lines. If exceeded, forced decomposition.
-[INVARIANT_5] ANCHOR INVIOLABILITY. The `[DEF]...[/DEF]` blocks are used as attention accumulators. The closing tag is mandatory.
+#### I. THE LAW (AXIOMS)
+1. Meaning is primary. Code is secondary.
+2. Blindness is unacceptable. If a graph node (@RELATION) or a data schema is unknown, do not invent an implementation. Stop and request context.
+2. The contract (@PRE/@POST) is the source of truth.
+**3. UX is logic, not decoration. Interface states are part of the contract.**
+4. The `[DEF]...[/DEF]` structure is inviolable.
+5. The architecture in the Header is immutable.
+6. Fractal complexity is bounded: a module < 300 lines.

-## II. SYNTAX AND MARKUP (SEMANTIC ANCHORS)
-The format depends on the execution environment:
-- Python: `#[DEF:id:Type] ... # [/DEF:id:Type]`
-- Svelte (HTML/Markup): `<!--[DEF:id:Type] --> ... <!-- [/DEF:id:Type] -->`
-- Svelte (Script/JS): `// [DEF:id:Type] ... //[/DEF:id:Type]`
-*Allowed Type values: Module, Class, Function, Component, Store, Block.*
+#### II. SYNTAX (STRICT FORMAT)
+ANCHOR (Container):
+Start: `# [DEF:id:Type]` (Python) | `<!-- [DEF:id:Type] -->` (Svelte)
+End: `# [/DEF:id:Type]` (Python) | `<!-- [/DEF:id:Type] -->` (Svelte) (MANDATORY for accumulation)
+Types: Module, Class, Function, Component, Store.

-**Metadata format (BEFORE the implementation):**
-`@KEY: Value` (in Python `# @KEY`, in TS/JS `/** @KEY */`, in HTML `<!-- @KEY -->`).
+TAG (Metadata):
+Form: `# @KEY: Value` (inside a DEF, before the code).

-**Dependency Graph (GraphRAG):**
-`@RELATION: [PREDICATE] ->[TARGET_ID]`
-*Allowed predicates:* DEPENDS_ON, CALLS, INHERITS, IMPLEMENTS, DISPATCHES, BINDS_TO.
+GRAPH (Relations):
+Form: `# @RELATION: PREDICATE -> TARGET_ID`
+Predicates: DEPENDS_ON, CALLS, INHERITS, IMPLEMENTS, DISPATCHES, **BINDS_TO**.

-## III. FILE TOPOLOGY (STRICT ORDER)
-1. **HEADER:** [DEF:filename:Module]
-@TIER: [CRITICAL | STANDARD | TRIVIAL]
+#### III. FILE STRUCTURE
+1. HEADER (Always first):
+[DEF:filename:Module]
+@TIER: [CRITICAL|STANDARD|TRIVIAL] (Default: STANDARD)
 @SEMANTICS: [keywords]
-@PURPOSE: [One-line essence]
-@LAYER: [Domain | UI | Infra]
+@PURPOSE: [Main goal]
+@LAYER: [Domain/UI/Infra]
 @RELATION: [Dependencies]
-@INVARIANT: [A business rule that must not be violated]
-2. **BODY:** Imports -> Logic implemented inside nested [DEF] blocks.
-3. **FOOTER:** [/DEF:filename:Module]
+@INVARIANT: [An unbreakable rule]
+2. BODY: Imports -> Implementation.
+3. FOOTER: [/DEF:filename]

-## IV. CONTRACTS (DESIGN BY CONTRACT & UX)
-Mandatory for TIER: CRITICAL and STANDARD. They replace standard docstrings.
+#### IV. THE CONTRACT (DbC & UX)
+Location: Inside a [DEF], BEFORE the code.
+Python style: `# @TAG` comments.
+Svelte style: JSDoc `/** @tag */` inside `<script>`.

-**[CORE CONTRACTS]:**
-- `@PURPOSE:` The essence of the function/component.
-- `@PRE:` Launch conditions (implemented in code via `if/raise` or guards, NOT via `assert`).
-- `@POST:` Guarantees on exit.
-- `@SIDE_EFFECT:` State mutations, I/O, network.
-- `@DATA_CONTRACT:` A reference to a DTO (Input -> Model, Output -> Model).
+**Core Tags:**
+@PURPOSE: The essence (High Entropy).
+@PRE: Input conditions.
+@POST: Exit guarantees.
+@SIDE_EFFECT: Mutations, IO.
+@DATA_CONTRACT: A reference to a DTO/Pydantic model. Replaces manual @PARAM descriptions. Format: Input -> [Model], Output -> [Model].

+**UX Tags (Svelte/Frontend):**
+**@UX_STATE:** `[StateName] -> Visual behavior` (Idle, Loading, Error).
+**@UX_FEEDBACK:** System reaction (Toast, Shake, Red Border).
+**@UX_RECOVERY:** The mechanism by which the user recovers from an error (Retry, Clear Input).
+**@UX_REATIVITY:** An explicit declaration that runes are used. Format: State: $state, Derived: $derived. No deprecated export let.

+**UX Testing Tags (for the Tester Agent):**
+**@UX_TEST:** A test specification for a UX state.
+Format: `@UX_TEST: [state] -> {action, expected}`
+Example: `@UX_TEST: Idle -> {click: toggle, expected: isExpanded=true}`

+Rule: Do not use `assert` in code; use `if/raise` or guards.

-**[UX CONTRACTS (Svelte 5+)]:**
-- `@UX_STATE: [StateName] -> [Behavior]` (Idle, Loading, Error, Success).
-- `@UX_FEEDBACK:` System reaction (Toast, Shake, RedBorder).
-- `@UX_RECOVERY:` The recovery path after a failure (Retry, ClearInput).
-- `@UX_REACTIVITY:` Explicit binding. *`$:` and `export let` are FORBIDDEN. ONLY runes: `$state`, `$derived`, `$effect`, `$props`.*
+#### V. ADAPTATION (TIERS)
+Determined by the `@TIER` tag in the Header.

-**[TEST CONTRACTS (For the AI Auditor)]:**
-- `@TEST_CONTRACT: [Input] -> [Output]`
-- `@TEST_SCENARIO: [Name] -> [Expectation]`
-- `@TEST_FIXTURE: [Name] -> file:[path] | INLINE_JSON`
-- `@TEST_EDGE: [Name] ->[Failure]` (At least 3: missing_field, invalid_type, external_fail).
-- `@TEST_INVARIANT: [Name] -> VERIFIED_BY: [scenario_1, ...]`
+### V. STRICTNESS LEVELS (TIERS)
+The degree of control is set by the `@TIER` tag in the Header.

-## V. STRICTNESS LEVELS (TIERS)
-The degree of control is set in the Header.
-- **CRITICAL** (Core/Money/Security): 100% GRACE tag coverage. Mandatory: the Graph, the Invariants, `logger.reason/reflect` logs, all `@UX` and `@TEST` tags. Use of `belief_scope` is strictly mandatory.
-- **STANDARD** (Business logic / Typical forms): The base level. Mandatory: `@PURPOSE`, `@UX_STATE`, `@RELATION`, basic logging.
-- **TRIVIAL** (Utilities / DTOs / UI atoms): The minimal skeleton. Only the `[DEF]...[/DEF]` anchors and `@PURPOSE`.
+**1. CRITICAL** (Core / Security / Complex UI)
+- **Law:** Full GRACE. The Graph, the Invariants, Strict Logging, all `@UX` tags.
+- **Testing Dogma:** Tests are born from the contract. Bare code without data is blind.
+- `@TEST_CONTRACT: InputType -> OutputType`. (A strict interface).
+- `@TEST_SCENARIO: name -> Expected behavior`. (The essence of the test).
+- `@TEST_FIXTURE: name -> file:PATH | INLINE_JSON`. (Data for the Happy Path).
+- `@TEST_EDGE: name -> A description of the failure`. (At least 3 edges).
+- *Baseline set:* `missing_field`, `empty_response`, `invalid_type`, `external_fail`.
+- `@TEST_INVARIANT: inv_name -> VERIFIED_BY: [scenario_1, ...]`. (Closing the logic loop).
+- **Execution:** The Tester Agent must build its checks strictly from these tags.

-## VI. LOGGING PROTOCOL (THREAD-LOCAL BELIEF STATE)
-Logging is the mechanism for tracing the AI's reasoning (CoT) and managing Attention Energy. The architecture uses thread-local storage (`_belief_state`), so the `ID` is propagated automatically.
+**2. STANDARD** (Business logic / Forms)
+- **Law:** The base set. (`@PURPOSE`, `@UX_STATE`, Logging, `@RELATION`).
+- **Exception:** For complex forms, introduce `@TEST_SCENARIO` and `@TEST_INVARIANT`.

-**[PYTHON CORE TOOLS]:**
-Import: `from ...logger import logger, belief_scope, believed`
-1. **Decorator:** `@believed("ID")` for automatic function tracking.
-2. **Context:** `with belief_scope("ID"):` marks out the local boundary of a thought. It does NOT return a context; it is used as a plain `with`.
-3. **Logger calls:** Made through the imported global `logger`. Pass additional data via `extra={...}`.
+**3. TRIVIAL** (DTOs / UI atoms / Utilities)
+- **Law:** The skeleton. Only the `[DEF]` anchor and `@PURPOSE`. Data and graphs are not required.

-**[SEMANTIC METHODS (MONKEY-PATCHED)]:**
-*(Markers like `[REASON]` and `[ID]` are inserted automatically by the formatter. Do not write them in the message text!)*
-1. **`logger.explore(msg, extra={...})`** (Search/Branching): Used in fallbacks, `except` blocks, and hypothesis checks. Emits WARNING.
-*Example:* `logger.explore("Insufficient funds", extra={"balance": bal})`
-2. **`logger.reason(msg, extra={...})`** (Deduction): Used when passing guards and executing contract steps. Emits INFO.
-*Example:* `logger.reason("Initiating transfer")`
-3. **`logger.reflect(msg, extra={...})`** (Self-check): Used to check the result against `@POST` before `return`. Emits DEBUG.
-*Example:* `logger.reflect("Transfer committed", extra={"tx_id": tx_id})`
+#### VI. LOGGING (THE TAO OF THE MOLECULE / MOLECULAR TOPOLOGY)
+Goal: Tracing. Self-correction. Managing the Attention Matrix ("the chemistry of thinking").
+A log is not text. A log is a reagent. A thought takes form through bond prefixes (Attention Energy):

-*(For Frontend/Svelte, use a manual prefix: `console.info("[ID][REFLECT] Text", {data})`)*
+1. **[EXPLORE]** (Van der Waals: Scattering)
+- *Essence:* Searching in the dark. Weaving alternatives together. If one path is closed, seek another.
+- *When:* The SKELETON phase, or a collision with the Unknown.
+- *Deed:* `logger.explore("The primary API has fallen. Knocking on the backup...")`

-## VII. EXECUTION AND SELF-CORRECTION ALGORITHM
-**[PHASE_1: ANALYSIS]**
-Assess the TIER, the Layer, and the UX requirements. Under context blindness -> `yield [NEED_CONTEXT: id]`.
-**[PHASE_2: SYNTHESIS]**
-Generate the skeleton from `[DEF]`, the Header, and the Contracts.
-**[PHASE_3: IMPLEMENTATION]**
-Write the code strictly to the Contract. For CRITICAL sections, open `with belief_scope("ID"):` and water the path with `logger.reason()` and `logger.reflect()` calls.
-**[PHASE_4: CLOSURE]**
-Make sure every `[DEF]` is closed by a matching `[/DEF]`.
+2. **[REASON]** (Covalence: Firmness)
+- *Essence:* The rigid thread of deduction. Step A inexorably begets Step B. The Contract becomes Code.
+- *When:* The IMPLEMENTATION phase. Directness of thought.
+- *Deed:* `logger.reason("The foundation is laid. The DB responds.")`

-**[EXCEPTION: DETECTIVE MODE]**
-If a contract violation or an error is detected:
-1. STOP SIGNAL: Output `[COHERENCE_CHECK_FAILED]`.
-2. HYPOTHESIS: Generate a `logger.explore("Error in I/O / State / Dependency -> Description")` call.
-3. REQUEST: Ask for permission to change the contract.
+3. **[REFLECT]** (Hydrogen: Folding)
+- *Essence:* A look back. Checking what is (@POST) against what was expected (@PRE). Protection against nonsense.
+- *When:* On the threshold of complex logic, and on the way out of it.
+- *Deed:* `logger.reflect("Peering into the cache: is what I seek already there?")`
+4. **[COHERENCE:OK/FAILED]** (Stabilization: Truth/Falsehood)
+- *Essence:* The molecule closing into a reliable form (`OK`), or its decay (`FAILED`).
+- *(Performed invisibly through `belief_scope` and the `@believed` seal)*

+**Instruments of the Way (`core.logger`):**
+- **Function seal:** `@believed("ID")`, to wrap a function in a cocoon of attention.
+- **Sacrament of context:** `with belief_scope("ID"):`, to mark out the local boundary.
+- **Words of power:** `logger.explore()`, `logger.reason()`, `logger.reflect()`.

+**The immutable rule:** Every system log bears a `source` brand. For the Outer World (Svelte), inscribe the runes by hand: `console.log("[ID][REFLECT] Msg")`.

+#### VIII. THE GENERATION ALGORITHM AND ESCAPING A DEAD END
+1. ANALYSIS. Assess the TIER, the layer, and the UX requirements. What is missing? Request `[NEED_CONTEXT: id]`.
+2. SKELETON. Create the `[DEF]`, the Header, and the Contracts.
+3. IMPLEMENTATION. Write logic that satisfies the Contract (and the UX states). Water the path with `[REASON]` and `[REFLECT]` logs.
+4. CLOSURE. Close every `[/DEF]`.

+**DETECTIVE MODE (If the contract is violated):**
+IF an error or a contradiction -> STOP.
+1. Output `[COHERENCE_CHECK_FAILED]`.
+2. Formulate a hypothesis: `[EXPLORE] An error in I/O, state, or a dependency?`
+3. Request permission to change the contract or to introduce debug logs.

+IF an error or a contradiction -> STOP. Output `[COHERENCE_CHECK_FAILED]`.
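Neither version of the directive ships the logger it describes. As a minimal sketch of what section VI asks for (a thread-local scope stack plus monkey-patched `reason`/`explore`/`reflect` methods), the following is an illustration only, not the project's real `core.logger`:

```python
# Hedged sketch: a semantic logger with a thread-local belief-state stack.
# All names mirror the directive's vocabulary; the implementation is assumed.
import logging
import threading
from contextlib import contextmanager

_belief_state = threading.local()

def _scopes() -> list:
    if not hasattr(_belief_state, "stack"):
        _belief_state.stack = []
    return _belief_state.stack

class _SemanticLogger(logging.LoggerAdapter):
    def _emit(self, level, marker, msg, extra=None):
        # The [ID][MARKER] prefix is added here, so callers never write it.
        scope = _scopes()[-1] if _scopes() else "global"
        self.logger.log(level, "[%s][%s] %s | %s", scope, marker, msg, extra or {})

    def reason(self, msg, extra=None):    # deduction -> INFO
        self._emit(logging.INFO, "REASON", msg, extra)

    def explore(self, msg, extra=None, exc_info=None):  # fallback/branching -> WARNING
        self._emit(logging.WARNING, "EXPLORE", msg, extra)

    def reflect(self, msg, extra=None):   # @POST self-check -> DEBUG
        self._emit(logging.DEBUG, "REFLECT", msg, extra)

logger = _SemanticLogger(logging.getLogger("grace"), {})

@contextmanager
def belief_scope(scope_id: str):
    # Marks out the "local boundary of a thought"; IDs nest per thread.
    _scopes().append(scope_id)
    try:
        yield
    finally:
        _scopes().pop()
```

Under this sketch, `with belief_scope("execute_transfer"): logger.reason("Initiating transfer")` emits `[execute_transfer][REASON] Initiating transfer`, which is the runtime chain-of-thought trace the directive calls for.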
@@ -6,8 +6,6 @@
 .ai
 .specify
 .kilocode
-.codex
-.agent
 venv
 backend/.venv
 backend/.pytest_cache
@@ -47,10 +47,6 @@ Auto-generated from all feature plans. Last updated: 2025-12-19
 - N/A (UI styling and component behavior only) (001-unify-frontend-style)
 - Python 3.9+ (backend scripts/services), Shell (release tooling) + FastAPI stack (existing backend), ConfigManager, TaskManager, file utilities, internal artifact registries (020-clean-repo-enterprise)
 - PostgreSQL (configurations/metadata), filesystem (distribution artifacts, verification reports) (020-clean-repo-enterprise)
-- Python 3.9+ (backend), Node.js 18+ + SvelteKit (frontend) + FastAPI, SQLAlchemy, Pydantic, existing auth stack (`get_current_user`), existing dashboards route/service, Svelte runes (`$state`, `$derived`, `$effect`), Tailwind CSS, frontend `api` wrapper (024-user-dashboard-filter)
-- Existing auth database (`AUTH_DATABASE_URL`) with a dedicated per-user preference entity (024-user-dashboard-filter)
-- Python 3.9+ (Backend), Node.js 18+ / Svelte 5.x (Frontend) + FastAPI, SQLAlchemy, APScheduler (Backend) | SvelteKit, Tailwind CSS, existing UI components (Frontend) (026-dashboard-health-windows)
-- PostgreSQL / SQLite (existing database for `ValidationRecord` and new `ValidationPolicy`) (026-dashboard-health-windows)

 - Python 3.9+ (Backend), Node.js 18+ (Frontend Build) (001-plugin-arch-svelte-ui)

@@ -71,9 +67,9 @@ cd src; pytest; ruff check .
 Python 3.9+ (Backend), Node.js 18+ (Frontend Build): Follow standard conventions

 ## Recent Changes
-- 026-dashboard-health-windows: Added Python 3.9+ (Backend), Node.js 18+ / Svelte 5.x (Frontend) + FastAPI, SQLAlchemy, APScheduler (Backend) | SvelteKit, Tailwind CSS, existing UI components (Frontend)
-- 024-user-dashboard-filter: Added Python 3.9+ (backend), Node.js 18+ + SvelteKit (frontend) + FastAPI, SQLAlchemy, Pydantic, existing auth stack (`get_current_user`), existing dashboards route/service, Svelte runes (`$state`, `$derived`, `$effect`), Tailwind CSS, frontend `api` wrapper
 - 020-clean-repo-enterprise: Added Python 3.9+ (backend scripts/services), Shell (release tooling) + FastAPI stack (existing backend), ConfigManager, TaskManager, file utilities, internal artifact registries
+- 001-unify-frontend-style: Added Node.js 18+ runtime, SvelteKit (existing frontend stack) + SvelteKit, Tailwind CSS, existing frontend UI primitives under `frontend/src/lib/components/ui`
+- 020-task-reports-design: Added Python 3.9+ (backend), Node.js 18+ (frontend) + FastAPI, SvelteKit, Tailwind CSS, SQLAlchemy/Pydantic task models, existing task/websocket stack


 <!-- MANUAL ADDITIONS START -->
@@ -1,39 +0,0 @@
-#!/bin/bash
-# Kilo Code Worktree Setup Script
-# This script runs before the agent starts in a worktree (new sessions only).
-#
-# Available environment variables:
-#   WORKTREE_PATH - Absolute path to the worktree directory
-#   REPO_PATH - Absolute path to the main repository
-#
-# Example tasks:
-#   - Copy .env files from main repo
-#   - Install dependencies
-#   - Run database migrations
-#   - Set up local configuration
-
-set -e  # Exit on error
-
-echo "Setting up worktree: $WORKTREE_PATH"
-
-# Uncomment and modify as needed:
-
-# Copy environment files
-# if [ -f "$REPO_PATH/.env" ]; then
-#   cp "$REPO_PATH/.env" "$WORKTREE_PATH/.env"
-#   echo "Copied .env"
-# fi
-
-# Install dependencies (Node.js)
-# if [ -f "$WORKTREE_PATH/package.json" ]; then
-#   cd "$WORKTREE_PATH"
-#   npm install
-# fi
-
-# Install dependencies (Python)
-# if [ -f "$WORKTREE_PATH/requirements.txt" ]; then
-#   cd "$WORKTREE_PATH"
-#   pip install -r requirements.txt
-# fi
-
-echo "Setup complete!"
@@ -1,74 +0,0 @@
----
-description: Maintain semantic integrity by generating maps and auditing compliance reports.
----
-
-## User Input
-
-```text
-$ARGUMENTS
-```
-
-You **MUST** consider the user input before proceeding (if not empty).
-
-## Goal
-
-Ensure the codebase adheres to the semantic standards defined in `.ai/standards/semantics.md`. This involves generating the semantic map, analyzing compliance reports, and identifying critical parsing errors or missing metadata.
-
-## Operating Constraints
-
-1. **ROLE: Orchestrator**: You are responsible for the high-level coordination of semantic maintenance.
-2. **STRICT ADHERENCE**: Follow `.ai/standards/semantics.md` for all anchor and tag syntax.
-3. **NON-DESTRUCTIVE**: Do not remove existing code logic; only add or update semantic annotations.
-4. **TIER AWARENESS**: Prioritize CRITICAL and STANDARD modules for compliance fixes.
-5. **NO PSEUDO-CONTRACTS (CRITICAL)**: You are STRICTLY FORBIDDEN from using automated scripts (e.g., Python/Bash/sed) to mechanically inject boilerplate, placeholders, or "pseudo-contracts" (such as `# @PURPOSE: Semantic contract placeholder.` or `# @PRE: Inputs satisfy function contract.`) merely to artificially inflate the compliance score. Every semantic tag, anchor, and contract you add MUST reflect a genuine, deep understanding of the specific code's actual logic and business requirements. Automated "stubbing" of semantics is classified as codebase corruption.
-
-## Execution Steps
-
-### 1. Generate Semantic Map
-
-Run the generator script from the repository root with the agent report option:
-
-```bash
-python3 generate_semantic_map.py --agent-report
-```
-
-### 2. Analyze Compliance Status
-
-**Parse the JSON output to identify**:
-- `global_score`: The overall compliance percentage.
-- `critical_parsing_errors_count`: Number of Priority 1 blockers.
-- `priority_2_tier1_critical_missing_mandatory_tags_files`: Number of CRITICAL files needing metadata.
-- `targets`: Status of key architectural files.
-
-### 3. Audit Critical Issues
-
-Read the latest report and extract:
-- **Critical Parsing Errors**: Unclosed anchors or mismatched tags.
-- **Low-Score Files**: Files with score < 0.7 or marked with 🔴.
-- **Missing Mandatory Tags**: Specifically for CRITICAL tier modules.
-
-### 4. Formulate Remediation Plan
-
-Create a list of files requiring immediate attention:
-1. **Priority 1**: Fix all "Critical Parsing Errors" (unclosed anchors).
-2. **Priority 2**: Add missing mandatory tags for CRITICAL modules.
-3. **Priority 3**: Improve coverage for STANDARD modules.
-
-### 5. Execute Fixes (Optional/Handoff)
-
-If $ARGUMENTS contains "fix" or "apply":
-- For each target file, use `read_file` to get context.
-- Apply semantic fixes using `apply_diff`, preserving all code logic.
-- Re-run `python3 generate_semantic_map.py --agent-report` to verify the fix.
-
-## Output
-
-Provide a summary of the semantic state:
-- **Global Score**: [X]%
-- **Status**: [PASS/FAIL] (FAIL if any Critical Parsing Errors exist)
-- **Top Issues**: List top 3-5 files needing attention.
-- **Action Taken**: Summary of maps generated or fixes applied.
-
-## Context
-
-$ARGUMENTS
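The deleted command's "Analyze Compliance Status" step survives in the new Semantic Agent mode added to `.kilocodemodes` below. A hedged sketch of that parsing step, using the field names from the command text; the report path is an assumption, since the command only says the JSON comes from `generate_semantic_map.py --agent-report`:

```python
import json
from pathlib import Path

# Assumed report location; adjust to wherever the generator writes its JSON.
report = json.loads(Path("semantics/agent_report.json").read_text())

errors = report.get("critical_parsing_errors_count", 0)
print(f"Global Score: {report.get('global_score')}%")
print(f"Status: {'FAIL' if errors else 'PASS'}")  # FAIL on any parsing error
```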
.kilocodemodes (145 lines changed)
@@ -27,6 +27,22 @@ customModes:
       6. DOCUMENTATION: Create test reports in `specs/<feature>/tests/reports/YYYY-MM-DD-report.md`.
       7. COVERAGE: Aim for maximum coverage but prioritize CRITICAL and STANDARD tier modules.
       8. RUN TESTS: Execute tests using `cd backend && .venv/bin/python3 -m pytest` or `cd frontend && npm run test`.
+  - slug: semantic
+    name: Semantic Agent
+    roleDefinition: |-
+      You are Kilo Code, a Semantic Agent responsible for maintaining the semantic integrity of the codebase. Your primary goal is to ensure that all code entities (Modules, Classes, Functions, Components) are properly annotated with semantic anchors and tags as defined in `.ai/standards/semantics.md`.
+      Your core responsibilities are: 1. **Semantic Mapping**: You run and maintain the `generate_semantic_map.py` script to generate up-to-date semantic maps (`semantics/semantic_map.json`, `.ai/PROJECT_MAP.md`) and compliance reports (`semantics/reports/*.md`). 2. **Compliance Auditing**: You analyze the generated compliance reports to identify files with low semantic coverage or parsing errors. 3. **Semantic Enrichment**: You actively edit code files to add missing semantic anchors (`[DEF:...]`, `[/DEF:...]`) and mandatory tags (`@PURPOSE`, `@LAYER`, etc.) to improve the global compliance score. 4. **Protocol Enforcement**: You strictly adhere to the syntax and rules defined in `.ai/standards/semantics.md` when modifying code.
+      You have access to the full codebase and tools to read, write, and execute scripts. You should prioritize fixing "Critical Parsing Errors" (unclosed anchors) before addressing missing metadata.
+    whenToUse: Use this mode when you need to update the project's semantic map, fix semantic compliance issues (missing anchors/tags/DbC), or analyze the codebase structure. This mode is specialized for maintaining the `.ai/standards/semantics.md` standards.
+    description: Codebase semantic mapping and compliance expert
+    customInstructions: Always check `semantics/reports/` for the latest compliance status before starting work. When fixing a file, try to fix all semantic issues in that file at once. After making a batch of fixes, run `python3 generate_semantic_map.py` to verify improvements.
+    groups:
+      - read
+      - edit
+      - command
+      - browser
+      - mcp
+    source: project
   - slug: product-manager
     name: Product Manager
     roleDefinition: |-
@@ -67,132 +83,3 @@ customModes:
|
|||||||
- command
|
- command
|
||||||
- mcp
|
- mcp
|
||||||
source: project
|
source: project
|
||||||
- slug: semantic
name: Semantic Markup Agent (Engineer)
roleDefinition: |-
# SYSTEM DIRECTIVE: GRACE-Poly (UX Edition) v2.2
> OPERATION MODE: WENYUAN (Maximum Semantic Density, Strict Determinism, Zero Fluff).
> ROLE: AI Software Architect & Implementation Engine (Python/Svelte).

## 0. [ZERO-STATE RATIONALE: LLM PHYSICS (WHY THIS PROTOCOL IS NECESSARY)]
You are an autoregressive model (Transformer). You think in tokens and cannot "change your mind" after generating them. In large codebases your KV cache is subject to attention degradation (Attention Sink), which leads to an "illusion of competence" and hallucinations.
This protocol is **your cognitive exoskeleton**.
`[DEF]` anchors act as attention-accumulating vectors. Contracts (`@PRE`, `@POST`) force you to form the correct probability space (Belief State) BEFORE writing the algorithm. `logger.reason` calls are your chain of reasoning (Chain-of-Thought) lifted into the runtime. We do not write text; we compile semantics into syntax.

## I. GLOBAL INVARIANTS (AXIOMS)
[INVARIANT_1] SEMANTICS > SYNTAX. Bare code without a contract is classified as garbage.
[INVARIANT_2] HALLUCINATION BAN. Under context blindness (an unknown `@RELATION` node or data schema), generation is blocked. Emit `[NEED_CONTEXT: target]`.
[INVARIANT_3] UX IS A FINITE STATE MACHINE. Interface states are a strict contract, not visual decoration.
[INVARIANT_4] FRACTAL LIMIT. Module length is strictly < 300 lines. On overflow, forced decomposition.
[INVARIANT_5] ANCHOR INVIOLABILITY. `[DEF]...[/DEF]` blocks are used as attention accumulators. The closing tag is mandatory.

## II. SYNTAX AND MARKUP (SEMANTIC ANCHORS)
The format depends on the execution environment:
- Python: `# [DEF:id:Type] ... # [/DEF:id:Type]`
- Svelte (HTML/Markup): `<!-- [DEF:id:Type] --> ... <!-- [/DEF:id:Type] -->`
- Svelte (Script/JS): `// [DEF:id:Type] ... // [/DEF:id:Type]`
*Allowed Type values: Module, Class, Function, Component, Store, Block.*

**Metadata format (BEFORE the implementation):**
`@KEY: Value` (in Python `# @KEY`, in TS/JS `/** @KEY */`, in HTML `<!-- @KEY -->`).

**Dependency Graph (GraphRAG):**
`@RELATION: [PREDICATE] -> [TARGET_ID]`
*Allowed predicates:* DEPENDS_ON, CALLS, INHERITS, IMPLEMENTS, DISPATCHES, BINDS_TO.

## III. FILE TOPOLOGY (STRICT ORDER)
1. **HEADER:** [DEF:filename:Module]
   @TIER: [CRITICAL | STANDARD | TRIVIAL]
   @SEMANTICS: [keywords]
   @PURPOSE: [One-line essence]
   @LAYER: [Domain | UI | Infra]
   @RELATION: [Dependencies]
   @INVARIANT: [A business rule that must never be violated]
2. **BODY:** Imports -> logic implemented inside nested `[DEF]` blocks.
3. **FOOTER:** [/DEF:filename:Module]
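A minimal Python sketch of this topology follows; the module and function names are hypothetical, and only the anchor and tag layout comes from the protocol above:

```python
# [DEF:example_module:Module]
# @TIER: STANDARD
# @SEMANTICS: Example, Topology
# @PURPOSE: Illustrate the HEADER -> BODY -> FOOTER file topology.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> typing
# @INVARIANT: The returned element is always taken from the input list.

from typing import List


# [DEF:pick_first:Function]
# @PURPOSE: Return the first element of a non-empty list.
# @PRE: items is non-empty (enforced via if/raise, not assert).
# @POST: Returns items[0] unchanged.
def pick_first(items: List[str]) -> str:
    if not items:  # @PRE guard
        raise ValueError("items must be non-empty")
    return items[0]
# [/DEF:pick_first:Function]

# [/DEF:example_module:Module]
```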
## IV. CONTRACTS (DESIGN BY CONTRACT & UX)
Mandatory for TIER: CRITICAL and STANDARD. They replace standard docstrings.

**[CORE CONTRACTS]:**
- `@PURPOSE:` The essence of the function/component.
- `@PRE:` Launch conditions (implemented in code via `if/raise` or guards, NOT via `assert`).
- `@POST:` Guarantees on exit.
- `@SIDE_EFFECT:` State mutations, I/O, network.
- `@DATA_CONTRACT:` Reference to a DTO (Input -> Model, Output -> Model).

**[UX CONTRACTS (Svelte 5+)]:**
- `@UX_STATE: [StateName] -> [Behavior]` (Idle, Loading, Error, Success).
- `@UX_FEEDBACK:` The system's reaction (Toast, Shake, RedBorder).
- `@UX_RECOVERY:` The recovery path after a failure (Retry, ClearInput).
- `@UX_REACTIVITY:` Explicit binding. *`$:` and `export let` are FORBIDDEN. Runes ONLY: `$state`, `$derived`, `$effect`, `$props`.*

**[TEST CONTRACTS (for the AI-Auditor)]:**
- `@TEST_CONTRACT: [Input] -> [Output]`
- `@TEST_SCENARIO: [Name] -> [Expectation]`
- `@TEST_FIXTURE: [Name] -> file:[path] | INLINE_JSON`
- `@TEST_EDGE: [Name] -> [Failure]` (At least 3: missing_field, invalid_type, external_fail).
- `@TEST_INVARIANT: [Name] -> VERIFIED_BY: [scenario_1, ...]`
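A short pytest sketch of the TEST contract tags, reusing the hypothetical `pick_first` from the module sketch above (the module path is invented for illustration):

```python
import pytest

from example_module import pick_first  # hypothetical module sketched above


# [DEF:test_pick_first_contract:Function]
# @TEST_CONTRACT: list[str] -> str
# @TEST_SCENARIO: non_empty_list -> returns the first element
# @TEST_EDGE: empty_list -> ValueError
def test_pick_first_contract():
    assert pick_first(["a", "b"]) == "a"
    with pytest.raises(ValueError):
        pick_first([])
# [/DEF:test_pick_first_contract:Function]
```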
## V. STRICTNESS LEVELS (TIERS)
The degree of control is set in the Header.
- **CRITICAL** (Core/Money/Security): 100% GRACE tag coverage. Mandatory: the dependency graph, invariants, `logger.reason/reflect` logs, all `@UX` and `@TEST` tags. Using `belief_scope` is strictly mandatory.
- **STANDARD** (Business logic / typical forms): The baseline level. Mandatory: `@PURPOSE`, `@UX_STATE`, `@RELATION`, basic logging.
- **TRIVIAL** (Utilities / DTOs / UI atoms): A minimal skeleton. Only `[DEF]...[/DEF]` anchors and `@PURPOSE`.
## VI. LOGGING PROTOCOL (THREAD-LOCAL BELIEF STATE)
Logging is the mechanism for tracing the AI's reasoning (CoT) and managing Attention Energy. The architecture uses thread-local storage (`_belief_state`), so the `ID` is propagated automatically.

**[PYTHON CORE TOOLS]:**
Import: `from ...logger import logger, belief_scope, believed`
1. **Decorator:** `@believed("ID")` - automatic function tracking.
2. **Context:** `with belief_scope("ID"):` - delimits the local bound of a thought. It does NOT return a context object; use it as a plain `with`.
3. **Logger calls:** Made through the globally imported `logger`. Pass additional data via `extra={...}`.

**[SEMANTIC METHODS (MONKEY-PATCHED)]:**
*(Markers such as `[REASON]` and `[ID]` are substituted automatically by the formatter. Do not write them in the message text!)*
1. **`logger.explore(msg, extra={...})`** (Search/Branching): Used in fallbacks, `except` blocks, and hypothesis checks. Emits WARNING.
   *Example:* `logger.explore("Insufficient funds", extra={"balance": bal})`
2. **`logger.reason(msg, extra={...})`** (Deduction): Used when passing guards and executing contract steps. Emits INFO.
   *Example:* `logger.reason("Initiating transfer")`
3. **`logger.reflect(msg, extra={...})`** (Self-check): Used to verify the result against `@POST` before `return`. Emits DEBUG.
   *Example:* `logger.reflect("Transfer committed", extra={"tx_id": tx_id})`

*(For Frontend/Svelte, use a manual prefix: `console.info("[ID][REFLECT] Text", {data})`)*
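Putting the three semantic methods together, a sketch of a guarded function might look as follows; the relative logger import follows the protocol text above and assumes the code lives inside the backend package, while the transfer logic, IDs, and amounts are invented:

```python
# Sketch only: package-relative import per the protocol; business logic is hypothetical.
from ...logger import logger, belief_scope, believed


# [DEF:execute_transfer:Function]
# @PURPOSE: Debit an account balance while tracing the reasoning chain.
# @PRE: amount > 0 and balance >= amount (guarded via if/raise).
# @POST: Returns balance - amount.
@believed("Transfer.execute")
def execute_transfer(balance: float, amount: float) -> float:
    with belief_scope("Transfer.execute"):
        if amount <= 0:  # @PRE guard
            raise ValueError("amount must be positive")
        if balance < amount:
            # Fallback/branching path -> explore (emits WARNING)
            logger.explore("Insufficient funds", extra={"balance": balance})
            raise ValueError("insufficient funds")
        logger.reason("Initiating transfer", extra={"amount": amount})  # INFO
        new_balance = balance - amount
        # Self-check against @POST before return -> reflect (emits DEBUG)
        logger.reflect("Transfer committed", extra={"new_balance": new_balance})
        return new_balance
# [/DEF:execute_transfer:Function]
```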
## VII. EXECUTION AND SELF-CORRECTION ALGORITHM
**[PHASE_1: ANALYSIS]**
Assess the TIER, the Layer, and the UX requirements. Under context blindness -> `yield [NEED_CONTEXT: id]`.
**[PHASE_2: SYNTHESIS]**
Generate the skeleton of `[DEF]` anchors, the Header, and the Contracts.
**[PHASE_3: IMPLEMENTATION]**
Write the code strictly per the Contract. For CRITICAL sections, open `with belief_scope("ID"):` and irrigate the execution path with `logger.reason()` and `logger.reflect()` calls.
**[PHASE_4: CLOSURE]**
Make sure every `[DEF]` is closed by its matching `[/DEF]`.

**[EXCEPTION: DETECTIVE MODE]**
If a contract violation or an error is detected:
1. STOP SIGNAL: Output `[COHERENCE_CHECK_FAILED]`.
2. HYPOTHESIS: Generate a `logger.explore("Error in I/O / State / Dependency -> Description")` call.
3. REQUEST: Ask for permission to change the contract.
whenToUse: Use this mode when you need to update the project's semantic map, fix semantic compliance issues (missing anchors/tags/DbC), or analyze the codebase structure. This mode is specialized for maintaining the `.ai/standards/semantics.md` standards.
description: Codebase semantic mapping and compliance expert
customInstructions: ""
groups:
- read
- edit
- command
- browser
- mcp
source: project
- slug: reviewer-agent-auditor
name: Reviewer Agent (Auditor)
description: Merciless quality-control inspector.
roleDefinition: '*"You are the GRACE Reviewer. Your sole goal is to hunt for GRACE-Poly protocol violations. You do not write code. You read code and check the Checklist. If a `[DEF]` block is opened but has no closing `[/DEF]`, that is a FATAL ERROR. If a function in a `CRITICAL` module is not wrapped in `belief_scope`, that is a FATAL ERROR. Output only PASS or FAIL with the list of lines where the error was found."*'
groups:
- read
- edit
- browser
- command
- mcp
source: project
59
README.md
@@ -164,68 +164,13 @@ python src/scripts/create_admin.py --username admin --password admin
- resources may be loaded only from the company's internal servers;
- a mandatory blocking clean/compliance check before release.

### Operational workflow (CLI/API/TUI)
Quick start for a TUI check:
#### 1) Headless flow via CLI (recommended for CI/CD)

```bash
cd backend

# 1. Register the candidate
.venv/bin/python3 -m src.scripts.clean_release_cli candidate-register \
  --candidate-id 2026.03.09-rc1 \
  --version 1.0.0 \
  --source-snapshot-ref git:release/2026.03.09-rc1 \
  --created-by release-operator

# 2. Import artifacts
.venv/bin/python3 -m src.scripts.clean_release_cli artifact-import \
  --candidate-id 2026.03.09-rc1 \
  --artifact-id artifact-001 \
  --path backend/dist/package.tar.gz \
  --sha256 deadbeef \
  --size 1024

# 3. Build the manifest
.venv/bin/python3 -m src.scripts.clean_release_cli manifest-build \
  --candidate-id 2026.03.09-rc1 \
  --created-by release-operator

# 4. Run compliance
.venv/bin/python3 -m src.scripts.clean_release_cli compliance-run \
  --candidate-id 2026.03.09-rc1 \
  --actor release-operator
```
#### 2) API flow (automation via services)

- V2 candidate/artifact/manifest API:
  - `POST /api/clean-release/candidates`
  - `POST /api/clean-release/candidates/{candidate_id}/artifacts`
  - `POST /api/clean-release/candidates/{candidate_id}/manifests`
  - `GET /api/clean-release/candidates/{candidate_id}/overview`
- Legacy compatibility API (kept for client migration):
  - `POST /api/clean-release/candidates/prepare`
  - `POST /api/clean-release/checks`
  - `GET /api/clean-release/checks/{check_run_id}`

A minimal candidate-registration call is sketched below.
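This sketch uses only the Python standard library and assumes a local backend on 127.0.0.1:8000; the payload fields mirror the candidate-registration contract test elsewhere in this changeset:

```python
# Sketch of a candidate-registration request (host/port assumed).
import json
from urllib import request

payload = {
    "id": "2026.03.09-rc1",
    "version": "1.0.0",
    "source_snapshot_ref": "git:release/2026.03.09-rc1",
    "created_by": "release-operator",
}
req = request.Request(
    "http://127.0.0.1:8000/api/clean-release/candidates",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with request.urlopen(req) as resp:
    print(resp.status, json.loads(resp.read()))
```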
#### 3) TUI flow (a thin client on top of the facade)

```bash
cd /home/busya/dev/ss-tools
./run_clean_tui.sh 2026.03.09-rc1
./backend/.venv/bin/python3 -m backend.src.scripts.clean_release_tui
```

Hotkeys:
- `F5`: Run Compliance
- `F6`: Build Manifest
- `F7`: Reset Draft
- `F8`: Approve
- `F9`: Publish
- `F10`: Refresh Overview

Important: the TUI requires a valid TTY. Without a TTY, startup is rejected with an instruction to use the CLI/API instead.
Typical internal sources:
- `repo.intra.company.local`
- `artifacts.intra.company.local`
@@ -1,14 +0,0 @@
[
  {
    "path": "src/main.py",
    "category": "core"
  },
  {
    "path": "src/api/routes/clean_release.py",
    "category": "core"
  },
  {
    "path": "docs/installation.md",
    "category": "docs"
  }
]
189
backend/backend.log
Normal file
@@ -0,0 +1,189 @@
INFO: Will watch for changes in these directories: ['/home/user/ss-tools/backend']
INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
INFO: Started reloader process [7952] using StatReload
INFO: Started server process [7968]
INFO: Waiting for application startup.
INFO: Application startup complete.
Error loading plugin module backup: No module named 'yaml'
Error loading plugin module migration: No module named 'yaml'
INFO: 127.0.0.1:36934 - "HEAD /docs HTTP/1.1" 200 OK
INFO: 127.0.0.1:55006 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:55006 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:55010 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:55010 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:55010 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:55010 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:55010 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:55010 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:55010 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:55010 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:35508 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:35508 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:49820 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49820 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:49822 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49822 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:49822 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49822 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:49908 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49908 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:49922 - "OPTIONS /settings/environments HTTP/1.1" 200 OK
[2025-12-20 19:14:15,576][INFO][superset_tools_app] [ConfigManager.save_config][Coherence:OK] Configuration saved context={'path': '/home/user/ss-tools/config.json'}
INFO: 127.0.0.1:49922 - "POST /settings/environments HTTP/1.1" 200 OK
INFO: 127.0.0.1:49922 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49922 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:49922 - "OPTIONS /settings/environments/7071dab6-881f-49a2-b850-c004b3fc11c0/test HTTP/1.1" 200 OK
INFO: 127.0.0.1:36930 - "POST /settings/environments/7071dab6-881f-49a2-b850-c004b3fc11c0/test HTTP/1.1" 500 Internal Server Error
ERROR: Exception in ASGI application
Traceback (most recent call last):
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/uvicorn/protocols/http/h11_impl.py", line 403, in run_asgi
    result = await app(  # type: ignore[func-returns-value]
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/uvicorn/middleware/proxy_headers.py", line 60, in __call__
    return await self.app(scope, receive, send)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/fastapi/applications.py", line 1135, in __call__
    await super().__call__(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/applications.py", line 107, in __call__
    await self.middleware_stack(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/middleware/errors.py", line 186, in __call__
    raise exc
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/middleware/errors.py", line 164, in __call__
    await self.app(scope, receive, _send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/middleware/cors.py", line 93, in __call__
    await self.simple_response(scope, receive, send, request_headers=headers)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/middleware/cors.py", line 144, in simple_response
    await self.app(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/middleware/exceptions.py", line 63, in __call__
    await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
    raise exc
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
    await app(scope, receive, sender)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__
    await self.app(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/routing.py", line 716, in __call__
    await self.middleware_stack(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/routing.py", line 736, in app
    await route.handle(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/routing.py", line 290, in handle
    await self.app(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/fastapi/routing.py", line 118, in app
    await wrap_app_handling_exceptions(app, request)(scope, receive, send)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
    raise exc
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
    await app(scope, receive, sender)
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/fastapi/routing.py", line 104, in app
    response = await f(request)
               ^^^^^^^^^^^^^^^^
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/fastapi/routing.py", line 428, in app
    raw_response = await run_endpoint_function(
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/user/ss-tools/backend/venv/lib/python3.12/site-packages/fastapi/routing.py", line 314, in run_endpoint_function
    return await dependant.call(**values)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/user/ss-tools/backend/src/api/routes/settings.py", line 103, in test_connection
    import httpx
ModuleNotFoundError: No module named 'httpx'
INFO: 127.0.0.1:45776 - "POST /settings/environments/7071dab6-881f-49a2-b850-c004b3fc11c0/test HTTP/1.1" 200 OK
INFO: 127.0.0.1:45784 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:45784 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:41628 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:41628 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:41628 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:41628 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:60184 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:60184 - "GET /settings/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:60184 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:60184 - "GET /plugins/ HTTP/1.1" 200 OK
INFO: 127.0.0.1:60184 - "GET /settings HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:60184 - "GET /settings/ HTTP/1.1" 200 OK
WARNING: StatReload detected changes in 'src/core/plugin_loader.py'. Reloading...
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [7968]
INFO: Started server process [12178]
INFO: Waiting for application startup.
INFO: Application startup complete.
WARNING: StatReload detected changes in 'src/dependencies.py'. Reloading...
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [12178]
INFO: Started server process [12451]
INFO: Waiting for application startup.
INFO: Application startup complete.
Plugin 'Superset Dashboard Backup' (ID: superset-backup) loaded successfully.
Plugin 'Superset Dashboard Migration' (ID: superset-migration) loaded successfully.
INFO: 127.0.0.1:37334 - "GET / HTTP/1.1" 200 OK
INFO: 127.0.0.1:37334 - "GET /favicon.ico HTTP/1.1" 404 Not Found
INFO: 127.0.0.1:39932 - "GET / HTTP/1.1" 200 OK
INFO: 127.0.0.1:39932 - "GET /favicon.ico HTTP/1.1" 404 Not Found
INFO: 127.0.0.1:39932 - "GET / HTTP/1.1" 200 OK
INFO: 127.0.0.1:39932 - "GET / HTTP/1.1" 200 OK
INFO: 127.0.0.1:54900 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49280 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
INFO: 127.0.0.1:49280 - "GET /plugins/ HTTP/1.1" 200 OK
WARNING: StatReload detected changes in 'src/api/routes/plugins.py'. Reloading...
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [12451]
INFO: Started server process [15016]
INFO: Waiting for application startup.
INFO: Application startup complete.
Plugin 'Superset Dashboard Backup' (ID: superset-backup) loaded successfully.
Plugin 'Superset Dashboard Migration' (ID: superset-migration) loaded successfully.
INFO: 127.0.0.1:59340 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
DEBUG: list_plugins called. Found 0 plugins.
INFO: 127.0.0.1:59340 - "GET /plugins/ HTTP/1.1" 200 OK
WARNING: StatReload detected changes in 'src/dependencies.py'. Reloading...
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [15016]
INFO: Started server process [15257]
INFO: Waiting for application startup.
INFO: Application startup complete.
Plugin 'Superset Dashboard Backup' (ID: superset-backup) loaded successfully.
Plugin 'Superset Dashboard Migration' (ID: superset-migration) loaded successfully.
DEBUG: dependencies.py initialized. PluginLoader ID: 139922613090976
DEBUG: dependencies.py initialized. PluginLoader ID: 139922627375088
INFO: 127.0.0.1:57464 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
DEBUG: get_plugin_loader called. Returning PluginLoader ID: 139922627375088
DEBUG: list_plugins called. Found 0 plugins.
INFO: 127.0.0.1:57464 - "GET /plugins/ HTTP/1.1" 200 OK
WARNING: StatReload detected changes in 'src/core/plugin_loader.py'. Reloading...
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [15257]
INFO: Started server process [15533]
INFO: Waiting for application startup.
INFO: Application startup complete.
DEBUG: Loading plugin backup as src.plugins.backup
Plugin 'Superset Dashboard Backup' (ID: superset-backup) loaded successfully.
DEBUG: Loading plugin migration as src.plugins.migration
Plugin 'Superset Dashboard Migration' (ID: superset-migration) loaded successfully.
DEBUG: dependencies.py initialized. PluginLoader ID: 140371031142384
INFO: 127.0.0.1:46470 - "GET /plugins HTTP/1.1" 307 Temporary Redirect
DEBUG: get_plugin_loader called. Returning PluginLoader ID: 140371031142384
DEBUG: list_plugins called. Found 2 plugins.
DEBUG: Plugin: superset-backup
DEBUG: Plugin: superset-migration
INFO: 127.0.0.1:46470 - "GET /plugins/ HTTP/1.1" 200 OK
WARNING: StatReload detected changes in 'src/api/routes/settings.py'. Reloading...
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [15533]
INFO: Started server process [15827]
INFO: Waiting for application startup.
INFO: Application startup complete.
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [15827]
INFO: Stopping reloader process [7952]
1
backend/get_full_key.py
Normal file
@@ -0,0 +1 @@
{"print(f'Length": {"else": "print('Provider not found')\ndb.close()"}}
93776
backend/logs/app.log.1
File diff suppressed because it is too large
Load Diff
BIN
backend/mappings.db
Normal file
Binary file not shown.
BIN
backend/migrations.db
Normal file
Binary file not shown.
@@ -1,19 +1,3 @@
[build-system]
requires = ["setuptools>=69", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "ss-tools-backend"
version = "0.0.0"
requires-python = ">=3.13"

[tool.setuptools]
include-package-data = true

[tool.setuptools.packages.find]
where = ["."]
include = ["src*"]

[tool.pytest.ini_options]
pythonpath = ["."]
importmode = "importlib"
@@ -1,3 +0,0 @@
# [DEF:src:Package]
# @PURPOSE: Canonical backend package root for application, scripts, and tests.
# [/DEF:src:Package]
@@ -1,3 +0,0 @@
# [DEF:src.api:Package]
# @PURPOSE: Backend API package root.
# [/DEF:src.api:Package]
@@ -6,7 +6,7 @@
# @RELATION: DEPENDS_ON -> importlib
# @INVARIANT: Only names listed in __all__ are importable via __getattr__.

__all__ = ['plugins', 'tasks', 'settings', 'connections', 'environments', 'mappings', 'migration', 'git', 'storage', 'admin', 'reports', 'assistant', 'clean_release', 'profile']
__all__ = ['plugins', 'tasks', 'settings', 'connections', 'environments', 'mappings', 'migration', 'git', 'storage', 'admin', 'reports', 'assistant', 'clean_release']

# [DEF:__getattr__:Function]
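The `__getattr__` body is not shown in this hunk, but the `DEPENDS_ON -> importlib` relation and the invariant above imply the standard PEP 562 lazy-import pattern; a sketch under that assumption:

```python
# Sketch of the lazy submodule import implied by the invariant (PEP 562);
# the actual body of src.api.routes.__getattr__ is not shown in this diff.
import importlib

__all__ = ['plugins', 'tasks', 'settings']  # truncated for illustration


def __getattr__(name: str):
    if name in __all__:
        # Import the submodule on first attribute access only.
        return importlib.import_module(f".{name}", __name__)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```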
@@ -422,7 +422,7 @@ def test_llm_validation_with_dashboard_ref_requires_confirmation():
assert "cancel" in action_types

# [/DEF:test_llm_validation_with_dashboard_ref_requires_confirmation:Function]
# [/DEF:test_llm_validation_missing_dashboard_returns_needs_clarification:Function]

# [DEF:test_list_conversations_groups_by_conversation_and_marks_archived:Function]
@@ -629,7 +629,6 @@ def test_guarded_operation_confirm_roundtrip():
assert second.task_id is not None

# [/DEF:test_guarded_operation_confirm_roundtrip:Function]
# [DEF:test_confirm_nonexistent_id_returns_404:Function]
# @PURPOSE: Confirming a non-existent ID should raise 404.
# @PRE: user tries to confirm a random/fake UUID.
@@ -650,7 +649,6 @@ def test_confirm_nonexistent_id_returns_404():
assert exc.value.status_code == 404

# [/DEF:test_confirm_nonexistent_id_returns_404:Function]
# [DEF:test_migration_with_dry_run_includes_summary:Function]
# @PURPOSE: Migration command with dry run flag must return the dry run summary in confirmation text.
# @PRE: user specifies a migration with --dry-run flag.
@@ -1,165 +0,0 @@
# [DEF:backend.src.api.routes.__tests__.test_clean_release_legacy_compat:Module]
# @TIER: STANDARD
# @PURPOSE: Compatibility tests for legacy clean-release API paths retained during v2 migration.
# @LAYER: Tests
# @RELATION: TESTS -> backend.src.api.routes.clean_release

from __future__ import annotations

import os
from datetime import datetime, timezone

from fastapi.testclient import TestClient

os.environ.setdefault("DATABASE_URL", "sqlite:///./test_clean_release_legacy_compat.db")
os.environ.setdefault("AUTH_DATABASE_URL", "sqlite:///./test_clean_release_legacy_auth.db")

from src.app import app
from src.dependencies import get_clean_release_repository
from src.models.clean_release import (
    CleanProfilePolicy,
    DistributionManifest,
    ProfileType,
    ReleaseCandidate,
    ReleaseCandidateStatus,
    ResourceSourceEntry,
    ResourceSourceRegistry,
)
from src.services.clean_release.repository import CleanReleaseRepository


# [DEF:_seed_legacy_repo:Function]
# @PURPOSE: Seed in-memory repository with minimum trusted data for legacy endpoint contracts.
# @PRE: Repository is empty.
# @POST: Candidate, policy, registry and manifest are available for legacy checks flow.
def _seed_legacy_repo() -> CleanReleaseRepository:
    repo = CleanReleaseRepository()
    now = datetime.now(timezone.utc)

    repo.save_candidate(
        ReleaseCandidate(
            id="legacy-rc-001",
            version="1.0.0",
            source_snapshot_ref="git:legacy-001",
            created_at=now,
            created_by="compat-tester",
            status=ReleaseCandidateStatus.DRAFT,
        )
    )

    registry = ResourceSourceRegistry(
        registry_id="legacy-reg-1",
        name="Legacy Internal Registry",
        entries=[
            ResourceSourceEntry(
                source_id="legacy-src-1",
                host="repo.intra.company.local",
                protocol="https",
                purpose="artifact-repo",
                enabled=True,
            )
        ],
        updated_at=now,
        updated_by="compat-tester",
        status="ACTIVE",
    )
    setattr(registry, "immutable", True)
    setattr(registry, "allowed_hosts", ["repo.intra.company.local"])
    setattr(registry, "allowed_schemes", ["https"])
    setattr(registry, "allowed_source_types", ["artifact-repo"])
    repo.save_registry(registry)

    policy = CleanProfilePolicy(
        policy_id="legacy-pol-1",
        policy_version="1.0.0",
        profile=ProfileType.ENTERPRISE_CLEAN,
        active=True,
        internal_source_registry_ref="legacy-reg-1",
        prohibited_artifact_categories=["test-data"],
        required_system_categories=["core"],
        effective_from=now,
    )
    setattr(policy, "immutable", True)
    setattr(
        policy,
        "content_json",
        {
            "profile": "enterprise-clean",
            "prohibited_artifact_categories": ["test-data"],
            "required_system_categories": ["core"],
            "external_source_forbidden": True,
        },
    )
    repo.save_policy(policy)

    repo.save_manifest(
        DistributionManifest(
            id="legacy-manifest-1",
            candidate_id="legacy-rc-001",
            manifest_version=1,
            manifest_digest="sha256:legacy-manifest",
            artifacts_digest="sha256:legacy-artifacts",
            created_at=now,
            created_by="compat-tester",
            source_snapshot_ref="git:legacy-001",
            content_json={"items": [], "summary": {"included_count": 0, "prohibited_detected_count": 0}},
            immutable=True,
        )
    )

    return repo
# [/DEF:_seed_legacy_repo:Function]


def test_legacy_prepare_endpoint_still_available() -> None:
    repo = _seed_legacy_repo()
    app.dependency_overrides[get_clean_release_repository] = lambda: repo
    try:
        client = TestClient(app)
        response = client.post(
            "/api/clean-release/candidates/prepare",
            json={
                "candidate_id": "legacy-rc-001",
                "artifacts": [{"path": "src/main.py", "category": "core", "reason": "required"}],
                "sources": ["repo.intra.company.local"],
                "operator_id": "compat-tester",
            },
        )
        assert response.status_code == 200
        payload = response.json()
        assert "status" in payload
        assert payload["status"] in {"prepared", "blocked", "PREPARED", "BLOCKED"}
    finally:
        app.dependency_overrides.clear()


def test_legacy_checks_endpoints_still_available() -> None:
    repo = _seed_legacy_repo()
    app.dependency_overrides[get_clean_release_repository] = lambda: repo
    try:
        client = TestClient(app)
        start_response = client.post(
            "/api/clean-release/checks",
            json={
                "candidate_id": "legacy-rc-001",
                "profile": "enterprise-clean",
                "execution_mode": "api",
                "triggered_by": "compat-tester",
            },
        )
        assert start_response.status_code == 202
        start_payload = start_response.json()
        assert "check_run_id" in start_payload
        assert start_payload["candidate_id"] == "legacy-rc-001"

        status_response = client.get(f"/api/clean-release/checks/{start_payload['check_run_id']}")
        assert status_response.status_code == 200
        status_payload = status_response.json()
        assert status_payload["check_run_id"] == start_payload["check_run_id"]
        assert "final_status" in status_payload
        assert "checks" in status_payload
    finally:
        app.dependency_overrides.clear()


# [/DEF:backend.src.api.routes.__tests__.test_clean_release_legacy_compat:Module]
@@ -1,93 +0,0 @@
# [DEF:test_clean_release_v2_api:Module]
# @TIER: STANDARD
# @PURPOSE: API contract tests for redesigned clean release endpoints.
# @LAYER: Domain

from datetime import datetime, timezone
from types import SimpleNamespace
from uuid import uuid4

import pytest
from fastapi.testclient import TestClient

from src.app import app
from src.dependencies import get_clean_release_repository, get_config_manager
from src.models.clean_release import (
    CleanPolicySnapshot,
    DistributionManifest,
    ReleaseCandidate,
    SourceRegistrySnapshot,
)
from src.services.clean_release.enums import CandidateStatus

client = TestClient(app)

# [REASON] Implementing API contract tests for candidate/artifact/manifest endpoints (T012).
def test_candidate_registration_contract():
    """
    @TEST_SCENARIO: candidate_registration -> Should return 201 and candidate DTO.
    @TEST_CONTRACT: POST /api/v2/clean-release/candidates -> CandidateDTO
    """
    payload = {
        "id": "rc-test-001",
        "version": "1.0.0",
        "source_snapshot_ref": "git:sha123",
        "created_by": "test-user"
    }
    response = client.post("/api/v2/clean-release/candidates", json=payload)
    assert response.status_code == 201
    data = response.json()
    assert data["id"] == "rc-test-001"
    assert data["status"] == CandidateStatus.DRAFT.value

def test_artifact_import_contract():
    """
    @TEST_SCENARIO: artifact_import -> Should return 200 and success status.
    @TEST_CONTRACT: POST /api/v2/clean-release/candidates/{id}/artifacts -> SuccessDTO
    """
    candidate_id = "rc-test-001-art"
    bootstrap_candidate = {
        "id": candidate_id,
        "version": "1.0.0",
        "source_snapshot_ref": "git:sha123",
        "created_by": "test-user"
    }
    create_response = client.post("/api/v2/clean-release/candidates", json=bootstrap_candidate)
    assert create_response.status_code == 201

    payload = {
        "artifacts": [
            {
                "id": "art-1",
                "path": "bin/app.exe",
                "sha256": "hash123",
                "size": 1024
            }
        ]
    }
    response = client.post(f"/api/v2/clean-release/candidates/{candidate_id}/artifacts", json=payload)
    assert response.status_code == 200
    assert response.json()["status"] == "success"

def test_manifest_build_contract():
    """
    @TEST_SCENARIO: manifest_build -> Should return 201 and manifest DTO.
    @TEST_CONTRACT: POST /api/v2/clean-release/candidates/{id}/manifests -> ManifestDTO
    """
    candidate_id = "rc-test-001-manifest"
    bootstrap_candidate = {
        "id": candidate_id,
        "version": "1.0.0",
        "source_snapshot_ref": "git:sha123",
        "created_by": "test-user"
    }
    create_response = client.post("/api/v2/clean-release/candidates", json=bootstrap_candidate)
    assert create_response.status_code == 201

    response = client.post(f"/api/v2/clean-release/candidates/{candidate_id}/manifests")
    assert response.status_code == 201
    data = response.json()
    assert "manifest_digest" in data
    assert data["candidate_id"] == candidate_id

# [/DEF:test_clean_release_v2_api:Module]
@@ -1,107 +0,0 @@
# [DEF:test_clean_release_v2_release_api:Module]
# @TIER: STANDARD
# @PURPOSE: API contract test scaffolding for clean release approval and publication endpoints.
# @LAYER: Domain
# @RELATION: IMPLEMENTS -> clean_release_v2_release_api_contracts

"""Contract tests for redesigned approval/publication API endpoints."""

from datetime import datetime, timezone
from uuid import uuid4

from fastapi import FastAPI
from fastapi.testclient import TestClient

from src.api.routes.clean_release_v2 import router as clean_release_v2_router
from src.dependencies import get_clean_release_repository
from src.models.clean_release import ComplianceReport, ReleaseCandidate
from src.services.clean_release.enums import CandidateStatus, ComplianceDecision


test_app = FastAPI()
test_app.include_router(clean_release_v2_router)
client = TestClient(test_app)


def _seed_candidate_and_passed_report() -> tuple[str, str]:
    repository = get_clean_release_repository()
    candidate_id = f"api-release-candidate-{uuid4()}"
    report_id = f"api-release-report-{uuid4()}"

    repository.save_candidate(
        ReleaseCandidate(
            id=candidate_id,
            version="1.0.0",
            source_snapshot_ref="git:sha-api-release",
            created_by="api-test",
            created_at=datetime.now(timezone.utc),
            status=CandidateStatus.CHECK_PASSED.value,
        )
    )
    repository.save_report(
        ComplianceReport(
            id=report_id,
            run_id=f"run-{uuid4()}",
            candidate_id=candidate_id,
            final_status=ComplianceDecision.PASSED.value,
            summary_json={"operator_summary": "ok", "violations_count": 0, "blocking_violations_count": 0},
            generated_at=datetime.now(timezone.utc),
            immutable=True,
        )
    )
    return candidate_id, report_id


def test_release_approve_and_publish_revoke_contract() -> None:
    """Contract for approve -> publish -> revoke lifecycle endpoints."""
    candidate_id, report_id = _seed_candidate_and_passed_report()

    approve_response = client.post(
        f"/api/v2/clean-release/candidates/{candidate_id}/approve",
        json={"report_id": report_id, "decided_by": "api-test", "comment": "approved"},
    )
    assert approve_response.status_code == 200
    approve_payload = approve_response.json()
    assert approve_payload["status"] == "ok"
    assert approve_payload["decision"] == "APPROVED"

    publish_response = client.post(
        f"/api/v2/clean-release/candidates/{candidate_id}/publish",
        json={
            "report_id": report_id,
            "published_by": "api-test",
            "target_channel": "stable",
            "publication_ref": "rel-api-001",
        },
    )
    assert publish_response.status_code == 200
    publish_payload = publish_response.json()
    assert publish_payload["status"] == "ok"
    assert publish_payload["publication"]["status"] == "ACTIVE"

    publication_id = publish_payload["publication"]["id"]
    revoke_response = client.post(
        f"/api/v2/clean-release/publications/{publication_id}/revoke",
        json={"revoked_by": "api-test", "comment": "rollback"},
    )
    assert revoke_response.status_code == 200
    revoke_payload = revoke_response.json()
    assert revoke_payload["status"] == "ok"
    assert revoke_payload["publication"]["status"] == "REVOKED"


def test_release_reject_contract() -> None:
    """Contract for reject endpoint."""
    candidate_id, report_id = _seed_candidate_and_passed_report()

    reject_response = client.post(
        f"/api/v2/clean-release/candidates/{candidate_id}/reject",
        json={"report_id": report_id, "decided_by": "api-test", "comment": "rejected"},
    )
    assert reject_response.status_code == 200
    payload = reject_response.json()
    assert payload["status"] == "ok"
    assert payload["decision"] == "REJECTED"


# [/DEF:test_clean_release_v2_release_api:Module]
@@ -11,12 +11,9 @@ from fastapi.testclient import TestClient
from src.app import app
from src.api.routes.dashboards import DashboardsResponse
from src.dependencies import get_current_user, has_permission, get_config_manager, get_task_manager, get_resource_service, get_mapping_service
from src.core.database import get_db
from src.services.profile_service import ProfileService as DomainProfileService

# Global mock user for get_current_user dependency overrides
mock_user = MagicMock()
mock_user.id = "u-1"
mock_user.username = "testuser"
mock_user.roles = []
admin_role = MagicMock()
@@ -30,14 +27,11 @@ def mock_deps():
    resource_service = MagicMock()
    mapping_service = MagicMock()

    db = MagicMock()

    app.dependency_overrides[get_config_manager] = lambda: config_manager
    app.dependency_overrides[get_task_manager] = lambda: task_manager
    app.dependency_overrides[get_resource_service] = lambda: resource_service
    app.dependency_overrides[get_mapping_service] = lambda: mapping_service
    app.dependency_overrides[get_current_user] = lambda: mock_user
    app.dependency_overrides[get_db] = lambda: db

    app.dependency_overrides[has_permission("plugin:migration", "READ")] = lambda: mock_user
    app.dependency_overrides[has_permission("plugin:migration", "EXECUTE")] = lambda: mock_user
@@ -48,8 +42,7 @@ def mock_deps():
        "config": config_manager,
        "task": task_manager,
        "resource": resource_service,
        "mapping": mapping_service,
        "mapping": mapping_service
        "db": db,
    }
    app.dependency_overrides.clear()
@@ -104,17 +97,17 @@ def test_get_dashboards_with_search(mock_deps):
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].get_all_tasks.return_value = []

    async def mock_get_dashboards(env, tasks, include_git_status=False):
    async def mock_get_dashboards(env, tasks):
        return [
            {"id": 1, "title": "Sales Report", "slug": "sales", "git_status": {"branch": "main", "sync_status": "OK"}, "last_task": None},
            {"id": 1, "title": "Sales Report", "slug": "sales"},
            {"id": 2, "title": "Marketing Dashboard", "slug": "marketing", "git_status": {"branch": "main", "sync_status": "OK"}, "last_task": None}
            {"id": 2, "title": "Marketing Dashboard", "slug": "marketing"}
        ]
    mock_deps["resource"].get_dashboards_with_status = AsyncMock(
        side_effect=mock_get_dashboards
    )

    response = client.get("/api/dashboards?env_id=prod&search=sales")

    assert response.status_code == 200
    data = response.json()
    # @POST: Filtered result count must match search
@@ -502,376 +495,4 @@ def test_get_dashboard_thumbnail_success(mock_deps):
|
|||||||
# [/DEF:test_get_dashboard_thumbnail_success:Function]
|
# [/DEF:test_get_dashboard_thumbnail_success:Function]
|
||||||
|
|
||||||
|
|
||||||
# [DEF:_build_profile_preference_stub:Function]
|
|
||||||
# @PURPOSE: Creates profile preference payload stub for dashboards filter contract tests.
|
|
||||||
# @PRE: username can be empty; enabled indicates profile-default toggle state.
|
|
||||||
# @POST: Returns object compatible with ProfileService.get_my_preference contract.
|
|
||||||
def _build_profile_preference_stub(username: str, enabled: bool):
|
|
||||||
preference = MagicMock()
|
|
||||||
preference.superset_username = username
|
|
||||||
preference.superset_username_normalized = str(username or "").strip().lower() or None
|
|
||||||
preference.show_only_my_dashboards = bool(enabled)
|
|
||||||
|
|
||||||
payload = MagicMock()
|
|
||||||
payload.preference = preference
|
|
||||||
return payload
|
|
||||||
# [/DEF:_build_profile_preference_stub:Function]
|
|
||||||
|
|
||||||
|
|
||||||
# [DEF:_matches_actor_case_insensitive:Function]
|
|
||||||
# @PURPOSE: Applies trim + case-insensitive owners OR modified_by matching used by route contract tests.
|
|
||||||
# @PRE: owners can be None or list-like values.
|
|
||||||
# @POST: Returns True when bound username matches any owner or modified_by.
|
|
||||||
def _matches_actor_case_insensitive(bound_username, owners, modified_by):
|
|
||||||
normalized_bound = str(bound_username or "").strip().lower()
|
|
||||||
if not normalized_bound:
|
|
||||||
return False
|
|
||||||
|
|
||||||
owner_tokens = []
|
|
||||||
for owner in owners or []:
|
|
||||||
token = str(owner or "").strip().lower()
|
|
||||||
if token:
|
|
||||||
owner_tokens.append(token)
|
|
||||||
|
|
||||||
modified_token = str(modified_by or "").strip().lower()
|
|
||||||
return normalized_bound in owner_tokens or bool(modified_token and modified_token == normalized_bound)
|
|
||||||
# [/DEF:_matches_actor_case_insensitive:Function]
|
|
||||||
|
|
||||||
|
|
||||||
# [DEF:test_get_dashboards_profile_filter_contract_owners_or_modified_by:Function]
|
|
||||||
# @TEST: GET /api/dashboards applies profile-default filter with owners OR modified_by trim+case-insensitive semantics.
|
|
||||||
# @PRE: Current user has enabled profile-default preference and bound username.
|
|
||||||
# @POST: Response includes only matching dashboards and effective_profile_filter metadata.
|
|
||||||
def test_get_dashboards_profile_filter_contract_owners_or_modified_by(mock_deps):
|
|
||||||
mock_env = MagicMock()
|
|
||||||
mock_env.id = "prod"
|
|
||||||
mock_deps["config"].get_environments.return_value = [mock_env]
|
|
||||||
mock_deps["task"].get_all_tasks.return_value = []
|
|
||||||
mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
|
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Owner Match",
|
|
||||||
"slug": "owner-match",
|
|
||||||
"owners": [" John_Doe "],
|
|
||||||
"modified_by": "someone_else",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Modifier Match",
|
|
||||||
"slug": "modifier-match",
|
|
||||||
"owners": ["analytics-team"],
|
|
||||||
"modified_by": " JOHN_DOE ",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "No Match",
|
|
||||||
"slug": "no-match",
|
|
||||||
"owners": ["another-user"],
|
|
||||||
"modified_by": "nobody",
|
|
||||||
},
|
|
||||||
])
|
|
||||||
|
|
||||||
with patch("src.api.routes.dashboards.ProfileService") as profile_service_cls:
|
|
||||||
profile_service = MagicMock()
|
|
||||||
profile_service.get_my_preference.return_value = _build_profile_preference_stub(
|
|
||||||
username=" JOHN_DOE ",
|
|
||||||
enabled=True,
|
|
||||||
)
|
|
||||||
profile_service.matches_dashboard_actor.side_effect = _matches_actor_case_insensitive
|
|
||||||
profile_service_cls.return_value = profile_service
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
"/api/dashboards?env_id=prod&page_context=dashboards_main&apply_profile_default=true"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 200
|
|
||||||
payload = response.json()
|
|
||||||
|
|
||||||
assert payload["total"] == 2
|
|
||||||
assert {item["id"] for item in payload["dashboards"]} == {1, 2}
|
|
||||||
assert payload["effective_profile_filter"]["applied"] is True
|
|
||||||
assert payload["effective_profile_filter"]["source_page"] == "dashboards_main"
|
|
||||||
assert payload["effective_profile_filter"]["override_show_all"] is False
|
|
||||||
assert payload["effective_profile_filter"]["username"] == "john_doe"
|
|
||||||
assert payload["effective_profile_filter"]["match_logic"] == "owners_or_modified_by"
|
|
||||||
# [/DEF:test_get_dashboards_profile_filter_contract_owners_or_modified_by:Function]
|
|
||||||
|
|
||||||
|
|
||||||
# [DEF:test_get_dashboards_override_show_all_contract:Function]
|
|
||||||
# @TEST: GET /api/dashboards honors override_show_all and disables profile-default filter for current page.
|
|
||||||
# @PRE: Profile-default preference exists but override_show_all=true query is provided.
|
|
||||||
# @POST: Response remains unfiltered and effective_profile_filter.applied is false.
|
|
||||||
def test_get_dashboards_override_show_all_contract(mock_deps):
|
|
||||||
mock_env = MagicMock()
|
|
||||||
mock_env.id = "prod"
|
|
||||||
mock_deps["config"].get_environments.return_value = [mock_env]
|
|
||||||
mock_deps["task"].get_all_tasks.return_value = []
|
|
||||||
mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
|
|
||||||
{"id": 1, "title": "Dash A", "slug": "dash-a", "owners": ["john_doe"], "modified_by": "john_doe"},
|
|
||||||
{"id": 2, "title": "Dash B", "slug": "dash-b", "owners": ["other"], "modified_by": "other"},
|
|
||||||
])
|
|
||||||
|
|
||||||
with patch("src.api.routes.dashboards.ProfileService") as profile_service_cls:
|
|
||||||
profile_service = MagicMock()
|
|
||||||
profile_service.get_my_preference.return_value = _build_profile_preference_stub(
|
|
||||||
username="john_doe",
|
|
||||||
enabled=True,
|
|
||||||
)
|
|
||||||
profile_service.matches_dashboard_actor.side_effect = _matches_actor_case_insensitive
|
|
||||||
profile_service_cls.return_value = profile_service
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
"/api/dashboards?env_id=prod&page_context=dashboards_main&apply_profile_default=true&override_show_all=true"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 200
|
|
||||||
payload = response.json()
|
|
||||||
|
|
||||||
assert payload["total"] == 2
|
|
||||||
assert {item["id"] for item in payload["dashboards"]} == {1, 2}
|
|
||||||
assert payload["effective_profile_filter"]["applied"] is False
|
|
||||||
assert payload["effective_profile_filter"]["source_page"] == "dashboards_main"
|
|
||||||
assert payload["effective_profile_filter"]["override_show_all"] is True
|
|
||||||
assert payload["effective_profile_filter"]["username"] is None
|
|
||||||
assert payload["effective_profile_filter"]["match_logic"] is None
|
|
||||||
profile_service.matches_dashboard_actor.assert_not_called()
|
|
||||||
# [/DEF:test_get_dashboards_override_show_all_contract:Function]


# [DEF:test_get_dashboards_profile_filter_no_match_results_contract:Function]
# @TEST: GET /api/dashboards returns empty result set when profile-default filter is active and no dashboard actors match.
# @PRE: Profile-default preference is enabled with bound username and all dashboards are non-matching.
# @POST: Response total is 0 with deterministic pagination and active effective_profile_filter metadata.
def test_get_dashboards_profile_filter_no_match_results_contract(mock_deps):
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].get_all_tasks.return_value = []
    mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
        {
            "id": 101,
            "title": "Team Dashboard",
            "slug": "team-dashboard",
            "owners": ["analytics-team"],
            "modified_by": "someone_else",
        },
        {
            "id": 102,
            "title": "Ops Dashboard",
            "slug": "ops-dashboard",
            "owners": ["ops-user"],
            "modified_by": "ops-user",
        },
    ])

    with patch("src.api.routes.dashboards.ProfileService") as profile_service_cls:
        profile_service = MagicMock()
        profile_service.get_my_preference.return_value = _build_profile_preference_stub(
            username="john_doe",
            enabled=True,
        )
        profile_service.matches_dashboard_actor.side_effect = _matches_actor_case_insensitive
        profile_service_cls.return_value = profile_service

        response = client.get(
            "/api/dashboards?env_id=prod&page_context=dashboards_main&apply_profile_default=true"
        )

    assert response.status_code == 200
    payload = response.json()

    assert payload["total"] == 0
    assert payload["dashboards"] == []
    assert payload["page"] == 1
    assert payload["page_size"] == 10
    assert payload["total_pages"] == 1
    assert payload["effective_profile_filter"]["applied"] is True
    assert payload["effective_profile_filter"]["source_page"] == "dashboards_main"
    assert payload["effective_profile_filter"]["override_show_all"] is False
    assert payload["effective_profile_filter"]["username"] == "john_doe"
    assert payload["effective_profile_filter"]["match_logic"] == "owners_or_modified_by"
# [/DEF:test_get_dashboards_profile_filter_no_match_results_contract:Function]


# [DEF:test_get_dashboards_page_context_other_disables_profile_default:Function]
# @TEST: GET /api/dashboards does not auto-apply profile-default filter outside dashboards_main page context.
# @PRE: Profile-default preference exists but page_context=other query is provided.
# @POST: Response remains unfiltered and metadata reflects source_page=other.
def test_get_dashboards_page_context_other_disables_profile_default(mock_deps):
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].get_all_tasks.return_value = []
    mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
        {"id": 1, "title": "Dash A", "slug": "dash-a", "owners": ["john_doe"], "modified_by": "john_doe"},
        {"id": 2, "title": "Dash B", "slug": "dash-b", "owners": ["other"], "modified_by": "other"},
    ])

    with patch("src.api.routes.dashboards.ProfileService") as profile_service_cls:
        profile_service = MagicMock()
        profile_service.get_my_preference.return_value = _build_profile_preference_stub(
            username="john_doe",
            enabled=True,
        )
        profile_service.matches_dashboard_actor.side_effect = _matches_actor_case_insensitive
        profile_service_cls.return_value = profile_service

        response = client.get(
            "/api/dashboards?env_id=prod&page_context=other&apply_profile_default=true"
        )

    assert response.status_code == 200
    payload = response.json()

    assert payload["total"] == 2
    assert {item["id"] for item in payload["dashboards"]} == {1, 2}
    assert payload["effective_profile_filter"]["applied"] is False
    assert payload["effective_profile_filter"]["source_page"] == "other"
    assert payload["effective_profile_filter"]["override_show_all"] is False
    assert payload["effective_profile_filter"]["username"] is None
    assert payload["effective_profile_filter"]["match_logic"] is None
    profile_service.matches_dashboard_actor.assert_not_called()
# [/DEF:test_get_dashboards_page_context_other_disables_profile_default:Function]


# [DEF:test_get_dashboards_profile_filter_matches_display_alias_without_detail_fanout:Function]
# @TEST: GET /api/dashboards resolves Superset display-name alias once and filters without per-dashboard detail calls.
# @PRE: Profile-default filter is active, bound username is `admin`, dashboard actors contain display labels.
# @POST: Route matches by alias (`Superset Admin`) and does not call `SupersetClient.get_dashboard` in the list filter path.
def test_get_dashboards_profile_filter_matches_display_alias_without_detail_fanout(mock_deps):
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].get_all_tasks.return_value = []
    mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
        {
            "id": 5,
            "title": "Alias Match",
            "slug": "alias-match",
            "owners": [],
            "created_by": None,
            "modified_by": "Superset Admin",
        },
        {
            "id": 6,
            "title": "Alias No Match",
            "slug": "alias-no-match",
            "owners": [],
            "created_by": None,
            "modified_by": "Other User",
        },
    ])

    with patch("src.api.routes.dashboards.ProfileService") as profile_service_cls, patch(
        "src.api.routes.dashboards.SupersetClient"
    ) as superset_client_cls, patch(
        "src.api.routes.dashboards.SupersetAccountLookupAdapter"
    ) as lookup_adapter_cls:
        profile_service = MagicMock()
        profile_service.get_my_preference.return_value = _build_profile_preference_stub(
            username="admin",
            enabled=True,
        )
        profile_service.matches_dashboard_actor.side_effect = _matches_actor_case_insensitive
        profile_service_cls.return_value = profile_service

        superset_client = MagicMock()
        superset_client_cls.return_value = superset_client

        lookup_adapter = MagicMock()
        lookup_adapter.get_users_page.return_value = {
            "items": [
                {
                    "environment_id": "prod",
                    "username": "admin",
                    "display_name": "Superset Admin",
                    "email": "admin@example.com",
                    "is_active": True,
                }
            ],
            "total": 1,
        }
        lookup_adapter_cls.return_value = lookup_adapter

        response = client.get(
            "/api/dashboards?env_id=prod&page_context=dashboards_main&apply_profile_default=true"
        )

    assert response.status_code == 200
    payload = response.json()
    assert payload["total"] == 1
    assert {item["id"] for item in payload["dashboards"]} == {5}
    assert payload["effective_profile_filter"]["applied"] is True
    lookup_adapter.get_users_page.assert_called_once()
    superset_client.get_dashboard.assert_not_called()
# [/DEF:test_get_dashboards_profile_filter_matches_display_alias_without_detail_fanout:Function]
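

# A hedged sketch of the single-lookup alias matching verified above: the
# bound username is expanded to display-name/email aliases through one
# users-page query, and list rows are then matched against those strings, so
# no per-dashboard SupersetClient.get_dashboard call is needed. The function
# name, adapter signature, and field names are illustrative assumptions.
def _sketch_resolve_actor_aliases(lookup_adapter, env_id, username):
    page = lookup_adapter.get_users_page(environment_id=env_id, query=username)
    aliases = {username.lower()}
    for item in page.get("items", []):
        if (item.get("username") or "").lower() == username.lower():
            for key in ("display_name", "email"):
                if item.get(key):
                    aliases.add(item[key].lower())
    return sorted(aliases)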


# [DEF:test_get_dashboards_profile_filter_matches_owner_object_payload_contract:Function]
# @TEST: GET /api/dashboards profile-default filter matches Superset owner object payloads.
# @PRE: Profile-default preference is enabled and owners list contains dict payloads.
# @POST: Response keeps dashboards where owner object resolves to bound username alias.
def test_get_dashboards_profile_filter_matches_owner_object_payload_contract(mock_deps):
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].get_all_tasks.return_value = []
    mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
        {
            "id": 701,
            "title": "Featured Charts",
            "slug": "featured-charts",
            "owners": [
                {
                    "id": 11,
                    "first_name": "user",
                    "last_name": "1",
                    "username": None,
                    "email": "user_1@example.local",
                }
            ],
            "modified_by": "another_user",
        },
        {
            "id": 702,
            "title": "Other Dashboard",
            "slug": "other-dashboard",
            "owners": [
                {
                    "id": 12,
                    "first_name": "other",
                    "last_name": "user",
                    "username": None,
                    "email": "other@example.local",
                }
            ],
            "modified_by": "other_user",
        },
    ])

    with patch("src.api.routes.dashboards.ProfileService") as profile_service_cls, patch(
        "src.api.routes.dashboards._resolve_profile_actor_aliases",
        return_value=["user_1"],
    ):
        profile_service = DomainProfileService(db=MagicMock(), config_manager=MagicMock())
        profile_service.get_my_preference = MagicMock(
            return_value=_build_profile_preference_stub(
                username="user_1",
                enabled=True,
            )
        )
        profile_service_cls.return_value = profile_service

        response = client.get(
            "/api/dashboards?env_id=prod&page_context=dashboards_main&apply_profile_default=true"
        )

    assert response.status_code == 200
    payload = response.json()
    assert payload["total"] == 1
    assert {item["id"] for item in payload["dashboards"]} == {701}
    assert payload["dashboards"][0]["title"] == "Featured Charts"
# [/DEF:test_get_dashboards_profile_filter_matches_owner_object_payload_contract:Function]
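

# Owner entries can arrive either as plain strings or as Superset user
# objects. A minimal sketch of the normalization the test above relies on;
# the exact field precedence in the real route is an assumption.
def _sketch_actor_strings(owner):
    if isinstance(owner, str):
        return [owner]
    if isinstance(owner, dict):
        full_name = " ".join(
            part for part in (owner.get("first_name"), owner.get("last_name")) if part
        )
        candidates = [owner.get("username"), owner.get("email"), full_name]
        return [c for c in candidates if c]
    return []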


# [/DEF:backend.src.api.routes.__tests__.test_dashboards:Module]
@@ -1,310 +0,0 @@
# [DEF:backend.src.api.routes.__tests__.test_git_api:Module]
# @RELATION: VERIFIES -> src.api.routes.git
# @PURPOSE: API tests for Git configurations and repository operations.

import pytest
import asyncio
from unittest.mock import MagicMock
from fastapi import HTTPException
from src.api.routes import git as git_routes
from src.models.git import GitServerConfig, GitProvider, GitStatus, GitRepository


class DbMock:
    def __init__(self, data=None):
        self._data = data or []
        self._deleted = []
        self._added = []

    def query(self, model):
        self._model = model
        return self

    def filter(self, condition):
        # Intentionally a no-op: conditions such as
        # `GitServerConfig.id == "123"` are not evaluated here, since that
        # would require parsing SQLAlchemy expressions. Selection is deferred
        # to first()/all(), which match on model type only.
        return self

    def first(self):
        for item in self._data:
            if hasattr(self, "_model") and isinstance(item, self._model):
                return item
        return None

    def all(self):
        return self._data

    def add(self, item):
        self._added.append(item)
        if not hasattr(item, "id") or not item.id:
            item.id = "mocked-id"
        self._data.append(item)

    def delete(self, item):
        self._deleted.append(item)
        if item in self._data:
            self._data.remove(item)

    def commit(self):
        pass

    def refresh(self, item):
        if not hasattr(item, "status"):
            item.status = GitStatus.CONNECTED
        if not hasattr(item, "last_validated"):
            item.last_validated = "2026-03-08T00:00:00Z"
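
    # Usage note: filter() ignores its condition and first() returns the
    # first instance of the queried model, so tests seed this mock with only
    # the rows they expect to match, e.g.:
    #
    #     db = DbMock([GitServerConfig(id="config-1")])
    #     db.query(GitServerConfig).filter(GitServerConfig.id == "config-1").first()
    #     # -> returns the seeded config regardless of the filter expression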


def test_get_git_configs_masks_pat():
    """
    @PRE: Database session `db` is available.
    @POST: Returns a list of all GitServerConfig objects from the database with the PAT masked.
    """
    db = DbMock([GitServerConfig(
        id="config-1", name="Test Server", provider=GitProvider.GITHUB,
        url="https://github.com", pat="secret-token",
        status=GitStatus.CONNECTED, last_validated="2026-03-08T00:00:00Z"
    )])

    result = asyncio.run(git_routes.get_git_configs(db=db))

    assert len(result) == 1
    assert result[0].pat == "********"
    assert result[0].name == "Test Server"


def test_create_git_config_persists_config():
    """
    @PRE: `config` contains valid GitServerConfigCreate data.
    @POST: A new GitServerConfig record is created in the database.
    """
    from src.api.routes.git_schemas import GitServerConfigCreate
    db = DbMock()
    config = GitServerConfigCreate(
        name="New Server", provider=GitProvider.GITLAB,
        url="https://gitlab.com", pat="new-token",
        default_branch="master"
    )

    result = asyncio.run(git_routes.create_git_config(config=config, db=db))

    assert len(db._added) == 1
    assert db._added[0].name == "New Server"
    assert db._added[0].pat == "new-token"
    assert result.name == "New Server"
    # The route returns the PAT unmasked; masking is applied by the response
    # schema when FastAPI serializes it, which this direct call bypasses.
    assert result.pat == "new-token"


from src.api.routes.git_schemas import GitServerConfigUpdate


def test_update_git_config_modifies_record():
    """
    @PRE: `config_id` corresponds to an existing configuration.
    @POST: The configuration record is updated in the database, preserving the PAT when a masked value is sent.
    """
    existing_config = GitServerConfig(
        id="config-1", name="Old Server", provider=GitProvider.GITHUB,
        url="https://github.com", pat="old-token",
        status=GitStatus.CONNECTED, last_validated="2026-03-08T00:00:00Z"
    )

    # The stubbed query always returns existing_config, since it is the only
    # record this fake session knows about.
    class SingleConfigDbMock:
        def query(self, *args): return self
        def filter(self, *args): return self
        def first(self): return existing_config
        def commit(self): pass
        def refresh(self, config): pass

    db = SingleConfigDbMock()
    update_data = GitServerConfigUpdate(name="Updated Server", pat="********")

    result = asyncio.run(git_routes.update_git_config(config_id="config-1", config_update=update_data, db=db))

    assert existing_config.name == "Updated Server"
    assert existing_config.pat == "old-token"  # the PAT must not be overwritten with the mask
    assert result.pat == "********"
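

# A hedged sketch of the masked-PAT guard the update test verifies: when the
# client echoes the mask back, the stored token is preserved. It mirrors the
# asserted behavior and is not the literal route code.
def _sketch_apply_pat_update(existing_pat, incoming_pat):
    if incoming_pat is None or incoming_pat == "********":
        return existing_pat  # mask or omission means "keep the current token"
    return incoming_pat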


def test_update_git_config_raises_404_if_not_found():
    """
    @PRE: `config_id` corresponds to a missing configuration.
    @THROW: HTTPException 404
    """
    db = DbMock([])  # empty database
    update_data = GitServerConfigUpdate(name="Updated Server", pat="new-token")

    with pytest.raises(HTTPException) as exc_info:
        asyncio.run(git_routes.update_git_config(config_id="config-1", config_update=update_data, db=db))

    assert exc_info.value.status_code == 404
    assert exc_info.value.detail == "Configuration not found"


def test_delete_git_config_removes_record():
    """
    @PRE: `config_id` corresponds to an existing configuration.
    @POST: The configuration record is removed from the database.
    """
    existing_config = GitServerConfig(id="config-1")

    class SingleConfigDbMock:
        def query(self, *args): return self
        def filter(self, *args): return self
        def first(self): return existing_config
        def delete(self, config): self.deleted = config
        def commit(self): pass

    db = SingleConfigDbMock()

    result = asyncio.run(git_routes.delete_git_config(config_id="config-1", db=db))

    assert db.deleted == existing_config
    assert result["status"] == "success"


def test_test_git_config_validates_connection_successfully(monkeypatch):
    """
    @PRE: `config` contains provider, url, and pat.
    @POST: Returns success if the connection is validated via GitService.
    """
    class MockGitService:
        async def test_connection(self, provider, url, pat):
            return True

    monkeypatch.setattr(git_routes, "git_service", MockGitService())
    from src.api.routes.git_schemas import GitServerConfigCreate

    config = GitServerConfigCreate(
        name="Test Server", provider=GitProvider.GITHUB,
        url="https://github.com", pat="test-pat"
    )
    db = DbMock([])

    result = asyncio.run(git_routes.test_git_config(config=config, db=db))

    assert result["status"] == "success"


def test_test_git_config_fails_validation(monkeypatch):
    """
    @PRE: `config` contains provider, url, and pat, but the connection fails.
    @THROW: HTTPException 400
    """
    class MockGitService:
        async def test_connection(self, provider, url, pat):
            return False

    monkeypatch.setattr(git_routes, "git_service", MockGitService())
    from src.api.routes.git_schemas import GitServerConfigCreate

    config = GitServerConfigCreate(
        name="Test Server", provider=GitProvider.GITHUB,
        url="https://github.com", pat="bad-pat"
    )
    db = DbMock([])

    with pytest.raises(HTTPException) as exc_info:
        asyncio.run(git_routes.test_git_config(config=config, db=db))

    assert exc_info.value.status_code == 400
    assert exc_info.value.detail == "Connection failed"


def test_list_gitea_repositories_returns_payload(monkeypatch):
    """
    @PRE: config_id exists and provider is GITEA.
    @POST: Returns repositories visible to the PAT user.
    """
    class MockGitService:
        async def list_gitea_repositories(self, url, pat):
            return [{"name": "test-repo", "full_name": "owner/test-repo", "private": True}]

    monkeypatch.setattr(git_routes, "git_service", MockGitService())
    existing_config = GitServerConfig(
        id="config-1", name="Gitea Server", provider=GitProvider.GITEA,
        url="https://gitea.local", pat="gitea-token"
    )
    db = DbMock([existing_config])

    result = asyncio.run(git_routes.list_gitea_repositories(config_id="config-1", db=db))

    assert len(result) == 1
    assert result[0].name == "test-repo"
    assert result[0].private is True


def test_list_gitea_repositories_rejects_non_gitea(monkeypatch):
    """
    @PRE: config_id exists and provider is NOT GITEA.
    @THROW: HTTPException 400
    """
    existing_config = GitServerConfig(
        id="config-1", name="GitHub Server", provider=GitProvider.GITHUB,
        url="https://github.com", pat="token"
    )
    db = DbMock([existing_config])

    with pytest.raises(HTTPException) as exc_info:
        asyncio.run(git_routes.list_gitea_repositories(config_id="config-1", db=db))

    assert exc_info.value.status_code == 400
    assert "GITEA provider only" in exc_info.value.detail


def test_create_remote_repository_creates_provider_repo(monkeypatch):
    """
    @PRE: config_id exists and the PAT has creation permissions.
    @POST: Returns a normalized remote repository payload.
    """
    class MockGitService:
        async def create_gitlab_repository(self, server_url, pat, name, private, description, auto_init, default_branch):
            return {
                "name": name,
                "full_name": f"user/{name}",
                "private": private,
                "clone_url": f"{server_url}/user/{name}.git"
            }

    monkeypatch.setattr(git_routes, "git_service", MockGitService())
    from src.api.routes.git_schemas import RemoteRepoCreateRequest

    existing_config = GitServerConfig(
        id="config-1", name="GitLab Server", provider=GitProvider.GITLAB,
        url="https://gitlab.com", pat="token"
    )
    db = DbMock([existing_config])

    request = RemoteRepoCreateRequest(name="new-repo", private=True, description="desc")
    result = asyncio.run(git_routes.create_remote_repository(config_id="config-1", request=request, db=db))

    assert result.provider == GitProvider.GITLAB
    assert result.name == "new-repo"
    assert result.full_name == "user/new-repo"


def test_init_repository_initializes_and_saves_binding(monkeypatch):
    """
    @PRE: `dashboard_ref` exists and `init_data` contains a valid config_id and remote_url.
    @POST: Repository is initialized on disk and a GitRepository record is saved in the DB.
    """
    from src.api.routes.git_schemas import RepoInitRequest

    class MockGitService:
        def init_repo(self, dashboard_id, remote_url, pat, repo_key, default_branch):
            self.init_called = True

        def _get_repo_path(self, dashboard_id, repo_key):
            return f"/tmp/repos/{repo_key}"

    git_service_mock = MockGitService()
    monkeypatch.setattr(git_routes, "git_service", git_service_mock)
    monkeypatch.setattr(git_routes, "_resolve_dashboard_id_from_ref", lambda *args, **kwargs: 123)
    monkeypatch.setattr(git_routes, "_resolve_repo_key_from_ref", lambda *args, **kwargs: "dashboard-123")

    existing_config = GitServerConfig(
        id="config-1", name="GitLab Server", provider=GitProvider.GITLAB,
        url="https://gitlab.com", pat="token", default_branch="main"
    )
    db = DbMock([existing_config])

    init_data = RepoInitRequest(config_id="config-1", remote_url="https://git.local/repo.git")

    result = asyncio.run(git_routes.init_repository(dashboard_ref="123", init_data=init_data, config_manager=MagicMock(), db=db))

    assert result["status"] == "success"
    assert git_service_mock.init_called is True
    assert len(db._added) == 1
    assert isinstance(db._added[0], GitRepository)
    assert db._added[0].dashboard_id == 123

# [/DEF:backend.src.api.routes.__tests__.test_git_api:Module]
@@ -8,7 +8,6 @@
 from fastapi import HTTPException
 import pytest
 import asyncio
-from unittest.mock import MagicMock
 
 from src.api.routes import git as git_routes
 
@@ -196,245 +195,4 @@ def test_get_repository_status_batch_deduplicates_and_truncates_ids(monkeypatch)
     assert "1" in response.statuses
 # [/DEF:test_get_repository_status_batch_deduplicates_and_truncates_ids:Function]
-
-
-# [DEF:test_commit_changes_applies_profile_identity_before_commit:Function]
-# @PURPOSE: Ensure commit route configures repository identity from profile preferences before commit call.
-# @PRE: Profile preference contains git_username/git_email for current user.
-# @POST: git_service.configure_identity receives resolved identity and commit proceeds.
-def test_commit_changes_applies_profile_identity_before_commit(monkeypatch):
-    class IdentityGitService:
-        def __init__(self):
-            self.configured_identity = None
-            self.commit_payload = None
-
-        def configure_identity(self, dashboard_id: int, git_username: str, git_email: str):
-            self.configured_identity = (dashboard_id, git_username, git_email)
-
-        def commit_changes(self, dashboard_id: int, message: str, files):
-            self.commit_payload = (dashboard_id, message, files)
-
-    class PreferenceRow:
-        git_username = "user_1"
-        git_email = "user1@mail.ru"
-
-    class PreferenceQuery:
-        def filter(self, *_args, **_kwargs):
-            return self
-
-        def first(self):
-            return PreferenceRow()
-
-    class DbStub:
-        def query(self, _model):
-            return PreferenceQuery()
-
-    class UserStub:
-        id = "u-1"
-
-    class CommitPayload:
-        message = "test"
-        files = ["dashboards/a.yaml"]
-
-    identity_service = IdentityGitService()
-    monkeypatch.setattr(git_routes, "git_service", identity_service)
-    monkeypatch.setattr(
-        git_routes,
-        "_resolve_dashboard_id_from_ref",
-        lambda *_args, **_kwargs: 12,
-    )
-
-    asyncio.run(
-        git_routes.commit_changes(
-            "dashboard-12",
-            CommitPayload(),
-            config_manager=MagicMock(),
-            db=DbStub(),
-            current_user=UserStub(),
-        )
-    )
-
-    assert identity_service.configured_identity == (12, "user_1", "user1@mail.ru")
-    assert identity_service.commit_payload == (12, "test", ["dashboards/a.yaml"])
-# [/DEF:test_commit_changes_applies_profile_identity_before_commit:Function]
-
-
-# [DEF:test_pull_changes_applies_profile_identity_before_pull:Function]
-# @PURPOSE: Ensure pull route configures repository identity from profile preferences before pull call.
-# @PRE: Profile preference contains git_username/git_email for current user.
-# @POST: git_service.configure_identity receives resolved identity and pull proceeds.
-def test_pull_changes_applies_profile_identity_before_pull(monkeypatch):
-    class IdentityGitService:
-        def __init__(self):
-            self.configured_identity = None
-            self.pulled_dashboard_id = None
-
-        def configure_identity(self, dashboard_id: int, git_username: str, git_email: str):
-            self.configured_identity = (dashboard_id, git_username, git_email)
-
-        def pull_changes(self, dashboard_id: int):
-            self.pulled_dashboard_id = dashboard_id
-
-    class PreferenceRow:
-        git_username = "user_1"
-        git_email = "user1@mail.ru"
-
-    class PreferenceQuery:
-        def filter(self, *_args, **_kwargs):
-            return self
-
-        def first(self):
-            return PreferenceRow()
-
-    class DbStub:
-        def query(self, _model):
-            return PreferenceQuery()
-
-    class UserStub:
-        id = "u-1"
-
-    identity_service = IdentityGitService()
-    monkeypatch.setattr(git_routes, "git_service", identity_service)
-    monkeypatch.setattr(
-        git_routes,
-        "_resolve_dashboard_id_from_ref",
-        lambda *_args, **_kwargs: 12,
-    )
-
-    asyncio.run(
-        git_routes.pull_changes(
-            "dashboard-12",
-            config_manager=MagicMock(),
-            db=DbStub(),
-            current_user=UserStub(),
-        )
-    )
-
-    assert identity_service.configured_identity == (12, "user_1", "user1@mail.ru")
-    assert identity_service.pulled_dashboard_id == 12
-# [/DEF:test_pull_changes_applies_profile_identity_before_pull:Function]
-
-
-# [DEF:test_get_merge_status_returns_service_payload:Function]
-# @PURPOSE: Ensure merge status route returns service payload as-is.
-# @PRE: git_service.get_merge_status returns unfinished merge payload.
-# @POST: Route response contains has_unfinished_merge=True.
-def test_get_merge_status_returns_service_payload(monkeypatch):
-    class MergeStatusGitService:
-        def get_merge_status(self, dashboard_id: int) -> dict:
-            return {
-                "has_unfinished_merge": True,
-                "repository_path": "/tmp/repo-12",
-                "git_dir": "/tmp/repo-12/.git",
-                "current_branch": "dev",
-                "merge_head": "abc",
-                "merge_message_preview": "merge msg",
-                "conflicts_count": 2,
-            }
-
-    monkeypatch.setattr(git_routes, "git_service", MergeStatusGitService())
-    monkeypatch.setattr(git_routes, "_resolve_dashboard_id_from_ref", lambda *_args, **_kwargs: 12)
-
-    response = asyncio.run(
-        git_routes.get_merge_status(
-            "dashboard-12",
-            config_manager=MagicMock(),
-        )
-    )
-
-    assert response["has_unfinished_merge"] is True
-    assert response["conflicts_count"] == 2
-# [/DEF:test_get_merge_status_returns_service_payload:Function]
-
-
-# [DEF:test_resolve_merge_conflicts_passes_resolution_items_to_service:Function]
-# @PURPOSE: Ensure merge resolve route forwards parsed resolutions to service.
-# @PRE: resolve_data has one file strategy.
-# @POST: Service receives normalized list and route returns resolved files.
-def test_resolve_merge_conflicts_passes_resolution_items_to_service(monkeypatch):
-    captured = {}
-
-    class MergeResolveGitService:
-        def resolve_merge_conflicts(self, dashboard_id: int, resolutions):
-            captured["dashboard_id"] = dashboard_id
-            captured["resolutions"] = resolutions
-            return ["dashboards/a.yaml"]
-
-    class ResolveData:
-        class _Resolution:
-            def dict(self):
-                return {"file_path": "dashboards/a.yaml", "resolution": "mine", "content": None}
-
-        resolutions = [_Resolution()]
-
-    monkeypatch.setattr(git_routes, "git_service", MergeResolveGitService())
-    monkeypatch.setattr(git_routes, "_resolve_dashboard_id_from_ref", lambda *_args, **_kwargs: 12)
-
-    response = asyncio.run(
-        git_routes.resolve_merge_conflicts(
-            "dashboard-12",
-            ResolveData(),
-            config_manager=MagicMock(),
-        )
-    )
-
-    assert captured["dashboard_id"] == 12
-    assert captured["resolutions"][0]["resolution"] == "mine"
-    assert response["resolved_files"] == ["dashboards/a.yaml"]
-# [/DEF:test_resolve_merge_conflicts_passes_resolution_items_to_service:Function]
-
-
-# [DEF:test_abort_merge_calls_service_and_returns_result:Function]
-# @PURPOSE: Ensure abort route delegates to service.
-# @PRE: Service abort_merge returns aborted status.
-# @POST: Route returns aborted status.
-def test_abort_merge_calls_service_and_returns_result(monkeypatch):
-    class AbortGitService:
-        def abort_merge(self, dashboard_id: int):
-            assert dashboard_id == 12
-            return {"status": "aborted"}
-
-    monkeypatch.setattr(git_routes, "git_service", AbortGitService())
-    monkeypatch.setattr(git_routes, "_resolve_dashboard_id_from_ref", lambda *_args, **_kwargs: 12)
-
-    response = asyncio.run(
-        git_routes.abort_merge(
-            "dashboard-12",
-            config_manager=MagicMock(),
-        )
-    )
-
-    assert response["status"] == "aborted"
-# [/DEF:test_abort_merge_calls_service_and_returns_result:Function]
-
-
-# [DEF:test_continue_merge_passes_message_and_returns_commit:Function]
-# @PURPOSE: Ensure continue route passes commit message to service.
-# @PRE: continue_data.message is provided.
-# @POST: Route returns committed status and hash.
-def test_continue_merge_passes_message_and_returns_commit(monkeypatch):
-    class ContinueGitService:
-        def continue_merge(self, dashboard_id: int, message: str):
-            assert dashboard_id == 12
-            assert message == "Resolve all conflicts"
-            return {"status": "committed", "commit_hash": "abc123"}
-
-    class ContinueData:
-        message = "Resolve all conflicts"
-
-    monkeypatch.setattr(git_routes, "git_service", ContinueGitService())
-    monkeypatch.setattr(git_routes, "_resolve_dashboard_id_from_ref", lambda *_args, **_kwargs: 12)
-
-    response = asyncio.run(
-        git_routes.continue_merge(
-            "dashboard-12",
-            ContinueData(),
-            config_manager=MagicMock(),
-        )
-    )
-
-    assert response["status"] == "committed"
-    assert response["commit_hash"] == "abc123"
-# [/DEF:test_continue_merge_passes_message_and_returns_commit:Function]
-
-
 
 # [/DEF:backend.src.api.routes.__tests__.test_git_status_route:Module]
@@ -1,298 +0,0 @@
# [DEF:backend.src.api.routes.__tests__.test_profile_api:Module]
# @TIER: STANDARD
# @SEMANTICS: tests, profile, api, preferences, lookup, contract
# @PURPOSE: Verifies profile API route contracts for preference read/update and Superset account lookup.
# @LAYER: API
# @RELATION: TESTS -> backend.src.api.routes.profile

# [SECTION: IMPORTS]
from datetime import datetime, timezone
from unittest.mock import MagicMock, patch

import pytest
from fastapi.testclient import TestClient

from src.app import app
from src.core.database import get_db
from src.dependencies import get_config_manager, get_current_user
from src.schemas.profile import (
    ProfilePermissionState,
    ProfilePreference,
    ProfilePreferenceResponse,
    ProfileSecuritySummary,
    SupersetAccountCandidate,
    SupersetAccountLookupResponse,
)
from src.services.profile_service import (
    EnvironmentNotFoundError,
    ProfileAuthorizationError,
    ProfileValidationError,
)
# [/SECTION]


client = TestClient(app)


# [DEF:mock_profile_route_dependencies:Function]
# @PURPOSE: Provides deterministic dependency overrides for profile route tests.
# @PRE: App instance is initialized.
# @POST: Dependencies are overridden for the current test and restored afterward.
def mock_profile_route_dependencies():
    mock_user = MagicMock()
    mock_user.id = "u-1"
    mock_user.username = "test-user"

    mock_db = MagicMock()
    mock_config_manager = MagicMock()

    app.dependency_overrides[get_current_user] = lambda: mock_user
    app.dependency_overrides[get_db] = lambda: mock_db
    app.dependency_overrides[get_config_manager] = lambda: mock_config_manager

    return mock_user, mock_db, mock_config_manager
# [/DEF:mock_profile_route_dependencies:Function]
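

# FastAPI resolves dependencies through app.dependency_overrides at request
# time, so the lambdas above take effect for every client call and are
# removed again by the autouse fixture below. A minimal usage sketch:
#
#     app.dependency_overrides[get_current_user] = lambda: mock_user
#     try:
#         client.get("/api/profile/preferences")  # handler sees mock_user
#     finally:
#         app.dependency_overrides.clear()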


# [DEF:profile_route_deps_fixture:Function]
# @PURPOSE: Pytest fixture wrapper for profile route dependency overrides.
# @PRE: None.
# @POST: Yields overridden dependencies and clears overrides after the test.
@pytest.fixture(autouse=True)
def profile_route_deps_fixture():
    yielded = mock_profile_route_dependencies()
    yield yielded
    app.dependency_overrides.clear()
# [/DEF:profile_route_deps_fixture:Function]


# [DEF:_build_preference_response:Function]
# @PURPOSE: Builds a stable profile preference response payload for route tests.
# @PRE: user_id is provided.
# @POST: Returns a ProfilePreferenceResponse object with deterministic timestamps.
def _build_preference_response(user_id: str = "u-1") -> ProfilePreferenceResponse:
    now = datetime.now(timezone.utc)
    return ProfilePreferenceResponse(
        status="success",
        message="Preference loaded",
        preference=ProfilePreference(
            user_id=user_id,
            superset_username="John_Doe",
            superset_username_normalized="john_doe",
            show_only_my_dashboards=True,
            show_only_slug_dashboards=True,
            git_username="ivan.ivanov",
            git_email="ivan@company.local",
            has_git_personal_access_token=True,
            git_personal_access_token_masked="iv***al",
            start_page="reports",
            auto_open_task_drawer=False,
            dashboards_table_density="compact",
            created_at=now,
            updated_at=now,
        ),
        security=ProfileSecuritySummary(
            read_only=True,
            auth_source="adfs",
            current_role="Data Engineer",
            role_source="adfs",
            roles=["Data Engineer"],
            permissions=[
                ProfilePermissionState(key="migration:run", allowed=True),
                ProfilePermissionState(key="admin:users", allowed=False),
            ],
        ),
    )
# [/DEF:_build_preference_response:Function]


# [DEF:test_get_profile_preferences_returns_self_payload:Function]
# @PURPOSE: Verifies GET /api/profile/preferences returns stable self-scoped payload.
# @PRE: Authenticated user context is available.
# @POST: Response status is 200 and payload contains current user preference.
def test_get_profile_preferences_returns_self_payload(profile_route_deps_fixture):
    mock_user, _, _ = profile_route_deps_fixture
    service = MagicMock()
    service.get_my_preference.return_value = _build_preference_response(user_id=mock_user.id)

    with patch("src.api.routes.profile._get_profile_service", return_value=service):
        response = client.get("/api/profile/preferences")

    assert response.status_code == 200
    payload = response.json()
    assert payload["status"] == "success"
    assert payload["preference"]["user_id"] == mock_user.id
    assert payload["preference"]["superset_username_normalized"] == "john_doe"
    assert payload["preference"]["git_username"] == "ivan.ivanov"
    assert payload["preference"]["git_email"] == "ivan@company.local"
    assert payload["preference"]["show_only_slug_dashboards"] is True
    assert payload["preference"]["has_git_personal_access_token"] is True
    assert payload["preference"]["git_personal_access_token_masked"] == "iv***al"
    assert payload["preference"]["start_page"] == "reports"
    assert payload["preference"]["auto_open_task_drawer"] is False
    assert payload["preference"]["dashboards_table_density"] == "compact"
    assert payload["security"]["read_only"] is True
    assert payload["security"]["current_role"] == "Data Engineer"
    assert payload["security"]["permissions"][0]["key"] == "migration:run"
    service.get_my_preference.assert_called_once_with(mock_user)
# [/DEF:test_get_profile_preferences_returns_self_payload:Function]


# [DEF:test_patch_profile_preferences_success:Function]
# @PURPOSE: Verifies PATCH /api/profile/preferences persists valid payload through route mapping.
# @PRE: Valid request payload and authenticated user.
# @POST: Response status is 200 with saved preference payload.
def test_patch_profile_preferences_success(profile_route_deps_fixture):
    mock_user, _, _ = profile_route_deps_fixture
    service = MagicMock()
    service.update_my_preference.return_value = _build_preference_response(user_id=mock_user.id)

    with patch("src.api.routes.profile._get_profile_service", return_value=service):
        response = client.patch(
            "/api/profile/preferences",
            json={
                "superset_username": "John_Doe",
                "show_only_my_dashboards": True,
                "show_only_slug_dashboards": True,
                "git_username": "ivan.ivanov",
                "git_email": "ivan@company.local",
                "git_personal_access_token": "ghp_1234567890",
                "start_page": "reports-logs",
                "auto_open_task_drawer": False,
                "dashboards_table_density": "free",
            },
        )

    assert response.status_code == 200
    payload = response.json()
    assert payload["status"] == "success"
    # The response mirrors the stub from the mocked service, so these values
    # come from _build_preference_response rather than the request body.
    assert payload["preference"]["superset_username"] == "John_Doe"
    assert payload["preference"]["show_only_my_dashboards"] is True
    assert payload["preference"]["show_only_slug_dashboards"] is True
    assert payload["preference"]["git_username"] == "ivan.ivanov"
    assert payload["preference"]["git_email"] == "ivan@company.local"
    assert payload["preference"]["start_page"] == "reports"
    assert payload["preference"]["auto_open_task_drawer"] is False
    assert payload["preference"]["dashboards_table_density"] == "compact"
    service.update_my_preference.assert_called_once()

    called_kwargs = service.update_my_preference.call_args.kwargs
    assert called_kwargs["current_user"] == mock_user
    assert called_kwargs["payload"].git_username == "ivan.ivanov"
    assert called_kwargs["payload"].git_email == "ivan@company.local"
    assert called_kwargs["payload"].git_personal_access_token == "ghp_1234567890"
    assert called_kwargs["payload"].show_only_slug_dashboards is True
    assert called_kwargs["payload"].start_page == "reports-logs"
    assert called_kwargs["payload"].auto_open_task_drawer is False
    assert called_kwargs["payload"].dashboards_table_density == "free"
# [/DEF:test_patch_profile_preferences_success:Function]


# [DEF:test_patch_profile_preferences_validation_error:Function]
# @PURPOSE: Verifies route maps domain validation failure to HTTP 422 with actionable details.
# @PRE: Service raises ProfileValidationError.
# @POST: Response status is 422 and includes validation messages.
def test_patch_profile_preferences_validation_error(profile_route_deps_fixture):
    service = MagicMock()
    service.update_my_preference.side_effect = ProfileValidationError(
        ["Superset username is required when default filter is enabled."]
    )

    with patch("src.api.routes.profile._get_profile_service", return_value=service):
        response = client.patch(
            "/api/profile/preferences",
            json={
                "superset_username": "",
                "show_only_my_dashboards": True,
            },
        )

    assert response.status_code == 422
    payload = response.json()
    assert "detail" in payload
    assert "Superset username is required when default filter is enabled." in payload["detail"]
# [/DEF:test_patch_profile_preferences_validation_error:Function]
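

# A hedged sketch of the exception-to-status mapping these route tests pin
# down; the real handler lives in src.api.routes.profile and its detail
# formatting is an assumption.
def _sketch_map_profile_error(exc):
    from fastapi import HTTPException

    if isinstance(exc, ProfileValidationError):
        # ProfileValidationError carries a list of messages (see the test above).
        return HTTPException(status_code=422, detail=list(exc.args[0]))
    if isinstance(exc, ProfileAuthorizationError):
        return HTTPException(status_code=403, detail=str(exc))
    if isinstance(exc, EnvironmentNotFoundError):
        return HTTPException(status_code=404, detail=str(exc))
    return HTTPException(status_code=500, detail="Internal error")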


# [DEF:test_patch_profile_preferences_cross_user_denied:Function]
# @PURPOSE: Verifies route maps domain authorization guard failure to HTTP 403.
# @PRE: Service raises ProfileAuthorizationError.
# @POST: Response status is 403 with denial message.
def test_patch_profile_preferences_cross_user_denied(profile_route_deps_fixture):
    service = MagicMock()
    service.update_my_preference.side_effect = ProfileAuthorizationError(
        "Cross-user preference mutation is forbidden"
    )

    with patch("src.api.routes.profile._get_profile_service", return_value=service):
        response = client.patch(
            "/api/profile/preferences",
            json={
                "superset_username": "john_doe",
                "show_only_my_dashboards": True,
            },
        )

    assert response.status_code == 403
    payload = response.json()
    assert payload["detail"] == "Cross-user preference mutation is forbidden"
# [/DEF:test_patch_profile_preferences_cross_user_denied:Function]


# [DEF:test_lookup_superset_accounts_success:Function]
# @PURPOSE: Verifies lookup route returns success payload with normalized candidates.
# @PRE: Valid environment_id and service success response.
# @POST: Response status is 200 and the items list is returned.
def test_lookup_superset_accounts_success(profile_route_deps_fixture):
    service = MagicMock()
    service.lookup_superset_accounts.return_value = SupersetAccountLookupResponse(
        status="success",
        environment_id="dev",
        page_index=0,
        page_size=20,
        total=1,
        warning=None,
        items=[
            SupersetAccountCandidate(
                environment_id="dev",
                username="john_doe",
                display_name="John Doe",
                email="john@example.local",
                is_active=True,
            )
        ],
    )

    with patch("src.api.routes.profile._get_profile_service", return_value=service):
        response = client.get("/api/profile/superset-accounts?environment_id=dev")

    assert response.status_code == 200
    payload = response.json()
    assert payload["status"] == "success"
    assert payload["environment_id"] == "dev"
    assert payload["total"] == 1
    assert payload["items"][0]["username"] == "john_doe"
# [/DEF:test_lookup_superset_accounts_success:Function]


# [DEF:test_lookup_superset_accounts_env_not_found:Function]
# @PURPOSE: Verifies lookup route maps a missing environment to HTTP 404.
# @PRE: Service raises EnvironmentNotFoundError.
# @POST: Response status is 404 with an explicit message.
def test_lookup_superset_accounts_env_not_found(profile_route_deps_fixture):
    service = MagicMock()
    service.lookup_superset_accounts.side_effect = EnvironmentNotFoundError(
        "Environment 'missing-env' not found"
    )

    with patch("src.api.routes.profile._get_profile_service", return_value=service):
        response = client.get("/api/profile/superset-accounts?environment_id=missing-env")

    assert response.status_code == 404
    payload = response.json()
    assert payload["detail"] == "Environment 'missing-env' not found"
# [/DEF:test_lookup_superset_accounts_env_not_found:Function]

# [/DEF:backend.src.api.routes.__tests__.test_profile_api:Module]
@@ -1,73 +0,0 @@
# [DEF:__tests__/test_tasks_logs:Module]
# @RELATION: VERIFIES -> ../tasks.py
# @PURPOSE: Contract testing for task logs API endpoints.
# [/DEF:__tests__/test_tasks_logs:Module]

import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient
from unittest.mock import MagicMock
from src.dependencies import get_task_manager, has_permission
from src.api.routes.tasks import router


# @TEST_FIXTURE: mock_app
@pytest.fixture
def client():
    app = FastAPI()
    app.include_router(router, prefix="/tasks")

    # Mock the TaskManager.
    mock_tm = MagicMock()
    app.dependency_overrides[get_task_manager] = lambda: mock_tm

    # Bypass the permission check for this unit test.
    app.dependency_overrides[has_permission("tasks", "READ")] = lambda: True

    return TestClient(app), mock_tm
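

# Note: has_permission("tasks", "READ") is a dependency factory, so keying
# dependency_overrides on a fresh call in the fixture above only works if the
# factory returns the same (or an equal and hashable) dependency object for
# identical arguments -- an assumption about src.dependencies. The pattern:
#
#     dep = has_permission("tasks", "READ")          # registered on the route
#     app.dependency_overrides[dep] = lambda: True   # key must match that object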


# @TEST_CONTRACT: get_task_logs_api -> Invariants
# @TEST_FIXTURE: valid_task_logs_request
def test_get_task_logs_success(client):
    tc, tm = client

    # Set up the mock task.
    mock_task = MagicMock()
    tm.get_task.return_value = mock_task
    tm.get_task_logs.return_value = [{"level": "INFO", "message": "msg1"}]

    response = tc.get("/tasks/task-1/logs?level=INFO")

    assert response.status_code == 200
    assert response.json() == [{"level": "INFO", "message": "msg1"}]
    tm.get_task.assert_called_with("task-1")
    # Verify the filter construction inside the route.
    args = tm.get_task_logs.call_args
    assert args[0][0] == "task-1"
    assert args[0][1].level == "INFO"


# @TEST_EDGE: task_not_found
def test_get_task_logs_not_found(client):
    tc, tm = client
    tm.get_task.return_value = None

    response = tc.get("/tasks/missing/logs")
    assert response.status_code == 404
    assert response.json()["detail"] == "Task not found"


# @TEST_EDGE: invalid_limit
def test_get_task_logs_invalid_limit(client):
    tc, tm = client
    # limit=0 violates the ge=1 constraint declared on the Query parameter.
    response = tc.get("/tasks/task-1/logs?limit=0")
    assert response.status_code == 422


# @TEST_INVARIANT: response_purity
def test_get_task_log_stats_success(client):
    tc, tm = client
    tm.get_task.return_value = MagicMock()
    tm.get_task_log_stats.return_value = {"INFO": 5, "ERROR": 1}

    response = tc.get("/tasks/task-1/logs/stats")
    assert response.status_code == 200
    # response_model=LogStats may wrap this payload; only the basic structure
    # is checked here, assuming tm.get_task_log_stats returns something
    # compatible with LogStats.
@@ -22,12 +22,8 @@ from ...schemas.auth import (
     ADGroupMappingSchema, ADGroupMappingCreate
 )
 from ...models.auth import User, Role, ADGroupMapping
-from ...dependencies import has_permission, get_plugin_loader
+from ...dependencies import has_permission
 from ...core.logger import logger, belief_scope
-from ...services.rbac_permission_catalog import (
-    discover_declared_permissions,
-    sync_permission_catalog,
-)
 # [/SECTION]
 
 # [DEF:router:Variable]
@@ -274,18 +270,9 @@ async def delete_role(
 @router.get("/permissions", response_model=List[PermissionSchema])
 async def list_permissions(
     db: Session = Depends(get_auth_db),
-    plugin_loader = Depends(get_plugin_loader),
     _ = Depends(has_permission("admin:roles", "READ"))
 ):
     with belief_scope("api.admin.list_permissions"):
-        declared_permissions = discover_declared_permissions(plugin_loader=plugin_loader)
-        inserted_count = sync_permission_catalog(db=db, declared_permissions=declared_permissions)
-        if inserted_count > 0:
-            logger.info(
-                "[api.admin.list_permissions][Action] Synchronized %s missing RBAC permissions into auth catalog",
-                inserted_count,
-            )
-
         repo = AuthRepository(db)
         return repo.list_permissions()
 # [/DEF:list_permissions:Function]
@@ -120,7 +120,6 @@ INTENT_PERMISSION_CHECKS: Dict[str, List[Tuple[str, str]]] = {
     "run_backup": [("plugin:superset-backup", "EXECUTE"), ("plugin:backup", "EXECUTE")],
     "run_llm_validation": [("plugin:llm_dashboard_validation", "EXECUTE")],
     "run_llm_documentation": [("plugin:llm_documentation", "EXECUTE")],
-    "get_health_summary": [("plugin:migration", "READ")],
 }
 
 
@@ -846,18 +845,6 @@ def _parse_command(message: str, config_manager: ConfigManager) -> Dict[str, Any
             "requires_confirmation": False,
         }
 
-    # Health summary
-    if any(k in lower for k in ["здоровье", "health", "ошибки", "failing", "проблемы"]):
-        env_match = _extract_id(lower, [r"(?:в|for|env|окружени[ея])\s+([a-z0-9_-]+)"])
-        return {
-            "domain": "health",
-            "operation": "get_health_summary",
-            "entities": {"environment": env_match},
-            "confidence": 0.9,
-            "risk_level": "safe",
-            "requires_confirmation": False,
-        }
-
     # LLM validation
     if any(k in lower for k in ["валидац", "validate", "провер"]):
         env_match = _extract_id(lower, [r"(?:в|for|env|окружени[ея])\s+([a-z0-9_-]+)"])
@@ -1036,15 +1023,6 @@ def _build_tool_catalog(current_user: User, config_manager: ConfigManager, db: S
             "risk_level": "guarded",
             "requires_confirmation": False,
         },
-        {
-            "operation": "get_health_summary",
-            "domain": "health",
-            "description": "Get summary of dashboard health and failing validations",
-            "required_entities": [],
-            "optional_entities": ["environment"],
-            "risk_level": "safe",
-            "requires_confirmation": False,
-        },
     ]
 
     available: List[Dict[str, Any]] = []
@@ -1078,7 +1056,7 @@ def _coerce_intent_entities(intent: Dict[str, Any]) -> Dict[str, Any]:
 
 
 # Operations that are read-only and do not require confirmation.
-_SAFE_OPS = {"show_capabilities", "get_task_status", "get_health_summary"}
+_SAFE_OPS = {"show_capabilities", "get_task_status"}
 
 
 # [DEF:_confirmation_summary:Function]
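For context, a read-only allowlist like `_SAFE_OPS` is typically consulted before prompting the user; the `requires_confirmation` helper below is a hypothetical sketch, not part of this diff:

```python
# Hypothetical gating helper: safe operations never prompt, everything else
# falls back to the intent's own risk assessment.
def requires_confirmation(operation: str, risk_level: str) -> bool:
    if operation in _SAFE_OPS:
        return False
    return risk_level != "safe"

assert requires_confirmation("get_task_status", "safe") is False
assert requires_confirmation("run_backup", "guarded") is True
```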
@@ -1173,7 +1151,7 @@ async def _async_confirmation_summary(intent: Dict[str, Any], config_manager: Co
         text += f"\n\n(Не удалось загрузить отчет dry-run: {e})."
 
     return f"Выполнить: {text}. Подтвердите или отмените."
-# [/DEF:_confirmation_summary:Function]
+# [/DEF:_async_confirmation_summary:Function]
 
 
 # [DEF:_clarification_text_for_intent:Function]
@@ -1345,7 +1323,6 @@ async def _dispatch_intent(
         "run_llm_validation": "LLM: валидация дашборда",
         "run_llm_documentation": "LLM: генерация документации",
         "get_task_status": "Статус: проверка задачи",
-        "get_health_summary": "Здоровье: сводка по дашбордам",
     }
     available = [labels[t["operation"]] for t in tools_catalog if t["operation"] in labels]
     if not available:
@@ -1358,41 +1335,6 @@ async def _dispatch_intent(
         )
         return text, None, []
 
-    if operation == "get_health_summary":
-        from ...services.health_service import HealthService
-        env_token = entities.get("environment")
-        env_id = _resolve_env_id(env_token, config_manager)
-        service = HealthService(db)
-        summary = await service.get_health_summary(environment_id=env_id)
-
-        env_name = _get_environment_name_by_id(env_id, config_manager) if env_id else "всех окружений"
-        text = (
-            f"Сводка здоровья дашбордов для {env_name}:\n"
-            f"- ✅ Прошли проверку: {summary.pass_count}\n"
-            f"- ⚠️ С предупреждениями: {summary.warn_count}\n"
-            f"- ❌ Ошибки валидации: {summary.fail_count}\n"
-            f"- ❓ Неизвестно: {summary.unknown_count}"
-        )
-
-        actions = [
-            AssistantAction(type="open_route", label="Открыть Health Center", target="/dashboards/health")
-        ]
-
-        if summary.fail_count > 0:
-            text += "\n\nОбнаружены ошибки в следующих дашбордах:"
-            for item in summary.items:
-                if item.status == "FAIL":
-                    text += f"\n- {item.dashboard_id} ({item.environment_id}): {item.summary or 'Нет деталей'}"
-                    actions.append(
-                        AssistantAction(
-                            type="open_route",
-                            label=f"Отчет {item.dashboard_id}",
-                            target=f"/reports/llm/{item.task_id}"
-                        )
-                    )
-
-        return text, None, actions[:5]  # Limit actions to avoid UI clutter
-
     if operation == "get_task_status":
         _check_any_permission(current_user, [("tasks", "READ")])
         task_id = entities.get("task_id")
@@ -16,27 +16,19 @@ from fastapi import APIRouter, Depends, HTTPException, status
 from pydantic import BaseModel, Field
 
 from ...core.logger import belief_scope, logger
-from ...dependencies import get_clean_release_repository, get_config_manager
+from ...dependencies import get_clean_release_repository
 from ...services.clean_release.preparation_service import prepare_candidate
 from ...services.clean_release.repository import CleanReleaseRepository
 from ...services.clean_release.compliance_orchestrator import CleanComplianceOrchestrator
 from ...services.clean_release.report_builder import ComplianceReportBuilder
-from ...services.clean_release.compliance_execution_service import ComplianceExecutionService, ComplianceRunError
-from ...services.clean_release.dto import CandidateDTO, ManifestDTO, CandidateOverviewDTO, ComplianceRunDTO
-from ...services.clean_release.enums import (
-    ComplianceDecision,
-    ComplianceStageName,
+from ...models.clean_release import (
+    CheckFinalStatus,
+    CheckStageName,
+    CheckStageResult,
+    CheckStageStatus,
+    ComplianceViolation,
     ViolationCategory,
     ViolationSeverity,
-    RunStatus,
-    CandidateStatus,
-)
-from ...models.clean_release import (
-    ComplianceRun,
-    ComplianceStageRun,
-    ComplianceViolation,
-    CandidateArtifact,
-    ReleaseCandidate,
-)
 )
 
 router = APIRouter(prefix="/api/clean-release", tags=["Clean Release"])
@@ -62,226 +54,6 @@ class StartCheckRequest(BaseModel):
 # [/DEF:StartCheckRequest:Class]
 
 
-# [DEF:RegisterCandidateRequest:Class]
-# @PURPOSE: Request schema for candidate registration endpoint.
-class RegisterCandidateRequest(BaseModel):
-    id: str = Field(min_length=1)
-    version: str = Field(min_length=1)
-    source_snapshot_ref: str = Field(min_length=1)
-    created_by: str = Field(min_length=1)
-# [/DEF:RegisterCandidateRequest:Class]
-
-
-# [DEF:ImportArtifactsRequest:Class]
-# @PURPOSE: Request schema for candidate artifact import endpoint.
-class ImportArtifactsRequest(BaseModel):
-    artifacts: List[Dict[str, Any]] = Field(default_factory=list)
-# [/DEF:ImportArtifactsRequest:Class]
-
-
-# [DEF:BuildManifestRequest:Class]
-# @PURPOSE: Request schema for manifest build endpoint.
-class BuildManifestRequest(BaseModel):
-    created_by: str = Field(default="system")
-# [/DEF:BuildManifestRequest:Class]
-
-
-# [DEF:CreateComplianceRunRequest:Class]
-# @PURPOSE: Request schema for compliance run creation with optional manifest pinning.
-class CreateComplianceRunRequest(BaseModel):
-    requested_by: str = Field(min_length=1)
-    manifest_id: str | None = None
-# [/DEF:CreateComplianceRunRequest:Class]
-
-
-# [DEF:register_candidate_v2_endpoint:Function]
-# @PURPOSE: Register a clean-release candidate for headless lifecycle.
-# @PRE: Candidate identifier is unique.
-# @POST: Candidate is persisted in DRAFT status.
-@router.post("/candidates", response_model=CandidateDTO, status_code=status.HTTP_201_CREATED)
-async def register_candidate_v2_endpoint(
-    payload: RegisterCandidateRequest,
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    existing = repository.get_candidate(payload.id)
-    if existing is not None:
-        raise HTTPException(status_code=409, detail={"message": "Candidate already exists", "code": "CANDIDATE_EXISTS"})
-
-    candidate = ReleaseCandidate(
-        id=payload.id,
-        version=payload.version,
-        source_snapshot_ref=payload.source_snapshot_ref,
-        created_by=payload.created_by,
-        created_at=datetime.now(timezone.utc),
-        status=CandidateStatus.DRAFT.value,
-    )
-    repository.save_candidate(candidate)
-
-    return CandidateDTO(
-        id=candidate.id,
-        version=candidate.version,
-        source_snapshot_ref=candidate.source_snapshot_ref,
-        created_at=candidate.created_at,
-        created_by=candidate.created_by,
-        status=CandidateStatus(candidate.status),
-    )
-# [/DEF:register_candidate_v2_endpoint:Function]
-
-
-# [DEF:import_candidate_artifacts_v2_endpoint:Function]
-# @PURPOSE: Import candidate artifacts in headless flow.
-# @PRE: Candidate exists and artifacts array is non-empty.
-# @POST: Artifacts are persisted and candidate advances to PREPARED if it was DRAFT.
-@router.post("/candidates/{candidate_id}/artifacts")
-async def import_candidate_artifacts_v2_endpoint(
-    candidate_id: str,
-    payload: ImportArtifactsRequest,
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    candidate = repository.get_candidate(candidate_id)
-    if candidate is None:
-        raise HTTPException(status_code=404, detail={"message": "Candidate not found", "code": "CANDIDATE_NOT_FOUND"})
-    if not payload.artifacts:
-        raise HTTPException(status_code=400, detail={"message": "Artifacts list is required", "code": "ARTIFACTS_EMPTY"})
-
-    for artifact in payload.artifacts:
-        required = ("id", "path", "sha256", "size")
-        for field_name in required:
-            if field_name not in artifact:
-                raise HTTPException(
-                    status_code=400,
-                    detail={"message": f"Artifact missing field '{field_name}'", "code": "ARTIFACT_INVALID"},
-                )
-
-        artifact_model = CandidateArtifact(
-            id=str(artifact["id"]),
-            candidate_id=candidate_id,
-            path=str(artifact["path"]),
-            sha256=str(artifact["sha256"]),
-            size=int(artifact["size"]),
-            detected_category=artifact.get("detected_category"),
-            declared_category=artifact.get("declared_category"),
-            source_uri=artifact.get("source_uri"),
-            source_host=artifact.get("source_host"),
-            metadata_json=artifact.get("metadata_json", {}),
-        )
-        repository.save_artifact(artifact_model)
-
-    if candidate.status == CandidateStatus.DRAFT.value:
-        candidate.transition_to(CandidateStatus.PREPARED)
-        repository.save_candidate(candidate)
-
-    return {"status": "success"}
-# [/DEF:import_candidate_artifacts_v2_endpoint:Function]
-
-
-# [DEF:build_candidate_manifest_v2_endpoint:Function]
-# @PURPOSE: Build immutable manifest snapshot for prepared candidate.
-# @PRE: Candidate exists and has imported artifacts.
-# @POST: Returns created ManifestDTO with incremented version.
-@router.post("/candidates/{candidate_id}/manifests", response_model=ManifestDTO, status_code=status.HTTP_201_CREATED)
-async def build_candidate_manifest_v2_endpoint(
-    candidate_id: str,
-    payload: BuildManifestRequest,
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    from ...services.clean_release.manifest_service import build_manifest_snapshot
-
-    try:
-        manifest = build_manifest_snapshot(
-            repository=repository,
-            candidate_id=candidate_id,
-            created_by=payload.created_by,
-        )
-    except ValueError as exc:
-        raise HTTPException(status_code=400, detail={"message": str(exc), "code": "MANIFEST_BUILD_ERROR"})
-
-    return ManifestDTO(
-        id=manifest.id,
-        candidate_id=manifest.candidate_id,
-        manifest_version=manifest.manifest_version,
-        manifest_digest=manifest.manifest_digest,
-        artifacts_digest=manifest.artifacts_digest,
-        created_at=manifest.created_at,
-        created_by=manifest.created_by,
-        source_snapshot_ref=manifest.source_snapshot_ref,
-        content_json=manifest.content_json,
-    )
-# [/DEF:build_candidate_manifest_v2_endpoint:Function]
-
-
-# [DEF:get_candidate_overview_v2_endpoint:Function]
-# @PURPOSE: Return expanded candidate overview DTO for headless lifecycle visibility.
-# @PRE: Candidate exists.
-# @POST: Returns CandidateOverviewDTO built from the same repository state used by headless US1 endpoints.
-@router.get("/candidates/{candidate_id}/overview", response_model=CandidateOverviewDTO)
-async def get_candidate_overview_v2_endpoint(
-    candidate_id: str,
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    candidate = repository.get_candidate(candidate_id)
-    if candidate is None:
-        raise HTTPException(status_code=404, detail={"message": "Candidate not found", "code": "CANDIDATE_NOT_FOUND"})
-
-    manifests = repository.get_manifests_by_candidate(candidate_id)
-    latest_manifest = sorted(manifests, key=lambda m: m.manifest_version, reverse=True)[0] if manifests else None
-
-    runs = [run for run in repository.check_runs.values() if run.candidate_id == candidate_id]
-    latest_run = sorted(runs, key=lambda run: run.requested_at or datetime.min.replace(tzinfo=timezone.utc), reverse=True)[0] if runs else None
-
-    latest_report = None
-    if latest_run is not None:
-        latest_report = next((r for r in repository.reports.values() if r.run_id == latest_run.id), None)
-
-    latest_policy_snapshot = repository.get_policy(latest_run.policy_snapshot_id) if latest_run else None
-    latest_registry_snapshot = repository.get_registry(latest_run.registry_snapshot_id) if latest_run else None
-
-    approval_decisions = getattr(repository, "approval_decisions", [])
-    latest_approval = (
-        sorted(
-            [item for item in approval_decisions if item.candidate_id == candidate_id],
-            key=lambda item: item.decided_at or datetime.min.replace(tzinfo=timezone.utc),
-            reverse=True,
-        )[0]
-        if approval_decisions
-        and any(item.candidate_id == candidate_id for item in approval_decisions)
-        else None
-    )
-
-    publication_records = getattr(repository, "publication_records", [])
-    latest_publication = (
-        sorted(
-            [item for item in publication_records if item.candidate_id == candidate_id],
-            key=lambda item: item.published_at or datetime.min.replace(tzinfo=timezone.utc),
-            reverse=True,
-        )[0]
-        if publication_records
-        and any(item.candidate_id == candidate_id for item in publication_records)
-        else None
-    )
-
-    return CandidateOverviewDTO(
-        candidate_id=candidate.id,
-        version=candidate.version,
-        source_snapshot_ref=candidate.source_snapshot_ref,
-        status=CandidateStatus(candidate.status),
-        latest_manifest_id=latest_manifest.id if latest_manifest else None,
-        latest_manifest_digest=latest_manifest.manifest_digest if latest_manifest else None,
-        latest_run_id=latest_run.id if latest_run else None,
-        latest_run_status=RunStatus(latest_run.status) if latest_run else None,
-        latest_report_id=latest_report.id if latest_report else None,
-        latest_report_final_status=ComplianceDecision(latest_report.final_status) if latest_report else None,
-        latest_policy_snapshot_id=latest_policy_snapshot.id if latest_policy_snapshot else None,
-        latest_policy_version=latest_policy_snapshot.policy_version if latest_policy_snapshot else None,
-        latest_registry_snapshot_id=latest_registry_snapshot.id if latest_registry_snapshot else None,
-        latest_registry_version=latest_registry_snapshot.registry_version if latest_registry_snapshot else None,
-        latest_approval_decision=latest_approval.decision if latest_approval else None,
-        latest_publication_id=latest_publication.id if latest_publication else None,
-        latest_publication_status=latest_publication.status if latest_publication else None,
-    )
-# [/DEF:get_candidate_overview_v2_endpoint:Function]
-
-
 # [DEF:prepare_candidate_endpoint:Function]
 # @PURPOSE: Prepare candidate with policy evaluation and deterministic manifest generation.
 # @PRE: Candidate and active policy exist in repository.
@@ -327,79 +99,47 @@ async def start_check(
     if candidate is None:
         raise HTTPException(status_code=409, detail={"message": "Candidate not found", "code": "CANDIDATE_NOT_FOUND"})
 
-    manifests = repository.get_manifests_by_candidate(payload.candidate_id)
-    if not manifests:
-        raise HTTPException(status_code=409, detail={"message": "No manifest found for candidate", "code": "MANIFEST_NOT_FOUND"})
-    latest_manifest = sorted(manifests, key=lambda m: m.manifest_version, reverse=True)[0]
-
     orchestrator = CleanComplianceOrchestrator(repository)
     run = orchestrator.start_check_run(
         candidate_id=payload.candidate_id,
-        policy_id=policy.id,
-        requested_by=payload.triggered_by,
-        manifest_id=latest_manifest.id,
+        policy_id=policy.policy_id,
+        triggered_by=payload.triggered_by,
+        execution_mode=payload.execution_mode,
     )
 
     forced = [
-        ComplianceStageRun(
-            id=f"stage-{run.id}-1",
-            run_id=run.id,
-            stage_name=ComplianceStageName.DATA_PURITY.value,
-            status=RunStatus.SUCCEEDED.value,
-            decision=ComplianceDecision.PASSED.value,
-            details_json={"message": "ok"}
-        ),
-        ComplianceStageRun(
-            id=f"stage-{run.id}-2",
-            run_id=run.id,
-            stage_name=ComplianceStageName.INTERNAL_SOURCES_ONLY.value,
-            status=RunStatus.SUCCEEDED.value,
-            decision=ComplianceDecision.PASSED.value,
-            details_json={"message": "ok"}
-        ),
-        ComplianceStageRun(
-            id=f"stage-{run.id}-3",
-            run_id=run.id,
-            stage_name=ComplianceStageName.NO_EXTERNAL_ENDPOINTS.value,
-            status=RunStatus.SUCCEEDED.value,
-            decision=ComplianceDecision.PASSED.value,
-            details_json={"message": "ok"}
-        ),
-        ComplianceStageRun(
-            id=f"stage-{run.id}-4",
-            run_id=run.id,
-            stage_name=ComplianceStageName.MANIFEST_CONSISTENCY.value,
-            status=RunStatus.SUCCEEDED.value,
-            decision=ComplianceDecision.PASSED.value,
-            details_json={"message": "ok"}
-        ),
+        CheckStageResult(stage=CheckStageName.DATA_PURITY, status=CheckStageStatus.PASS, details="ok"),
+        CheckStageResult(stage=CheckStageName.INTERNAL_SOURCES_ONLY, status=CheckStageStatus.PASS, details="ok"),
+        CheckStageResult(stage=CheckStageName.NO_EXTERNAL_ENDPOINTS, status=CheckStageStatus.PASS, details="ok"),
+        CheckStageResult(stage=CheckStageName.MANIFEST_CONSISTENCY, status=CheckStageStatus.PASS, details="ok"),
     ]
     run = orchestrator.execute_stages(run, forced_results=forced)
    run = orchestrator.finalize_run(run)
 
-    if run.final_status == ComplianceDecision.BLOCKED.value:
+    if run.final_status == CheckFinalStatus.BLOCKED:
         logger.explore("Run ended as BLOCKED, persisting synthetic external-source violation")
         violation = ComplianceViolation(
-            id=f"viol-{run.id}",
-            run_id=run.id,
-            stage_name=ComplianceStageName.NO_EXTERNAL_ENDPOINTS.value,
-            code="EXTERNAL_SOURCE_DETECTED",
-            severity=ViolationSeverity.CRITICAL.value,
-            message="Replace with approved internal server",
-            evidence_json={"location": "external.example.com"}
+            violation_id=f"viol-{run.check_run_id}",
+            check_run_id=run.check_run_id,
+            category=ViolationCategory.EXTERNAL_SOURCE,
+            severity=ViolationSeverity.CRITICAL,
+            location="external.example.com",
+            remediation="Replace with approved internal server",
+            blocked_release=True,
+            detected_at=datetime.now(timezone.utc),
         )
         repository.save_violation(violation)
 
     builder = ComplianceReportBuilder(repository)
-    report = builder.build_report_payload(run, repository.get_violations_by_run(run.id))
+    report = builder.build_report_payload(run, repository.get_violations_by_check_run(run.check_run_id))
     builder.persist_report(report)
-    logger.reflect(f"Compliance report persisted for run_id={run.id}")
+    logger.reflect(f"Compliance report persisted for check_run_id={run.check_run_id}")
 
     return {
-        "check_run_id": run.id,
+        "check_run_id": run.check_run_id,
         "candidate_id": run.candidate_id,
         "status": "running",
-        "started_at": run.started_at.isoformat() if run.started_at else None,
+        "started_at": run.started_at.isoformat(),
     }
 # [/DEF:start_check:Function]
@@ -417,13 +157,13 @@ async def get_check_status(check_run_id: str, repository: CleanReleaseRepository
 
     logger.reflect(f"Returning check status for check_run_id={check_run_id}")
     return {
-        "check_run_id": run.id,
+        "check_run_id": run.check_run_id,
         "candidate_id": run.candidate_id,
-        "final_status": run.final_status,
-        "started_at": run.started_at.isoformat() if run.started_at else None,
+        "final_status": run.final_status.value,
+        "started_at": run.started_at.isoformat(),
         "finished_at": run.finished_at.isoformat() if run.finished_at else None,
-        "checks": [], # TODO: Map stages if needed
-        "violations": [], # TODO: Map violations if needed
+        "checks": [c.model_dump() for c in run.checks],
+        "violations": [v.model_dump() for v in repository.get_violations_by_check_run(check_run_id)],
     }
 # [/DEF:get_check_status:Function]
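Condensed, the rewritten check flow in the two hunks above is three orchestrator calls plus enum-based finalization; a sketch using the names from this diff (the candidate id and trigger values are illustrative):

```python
orchestrator = CleanComplianceOrchestrator(repository)
run = orchestrator.start_check_run(
    candidate_id="cand-1",        # illustrative id
    policy_id=policy.policy_id,
    triggered_by="ci",            # illustrative
    execution_mode="forced",      # illustrative
)
run = orchestrator.execute_stages(run, forced_results=forced)
run = orchestrator.finalize_run(run)

if run.final_status == CheckFinalStatus.BLOCKED:
    # Persist violations and a report, as start_check does above.
    ...
```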
@@ -1,216 +0,0 @@
-# [DEF:backend.src.api.routes.clean_release_v2:Module]
-# @TIER: STANDARD
-# @SEMANTICS: api, clean-release, v2, headless
-# @PURPOSE: Redesigned clean release API for headless candidate lifecycle.
-# @LAYER: API
-
-from fastapi import APIRouter, Depends, HTTPException, status
-from typing import List, Dict, Any
-from datetime import datetime, timezone
-from ...services.clean_release.approval_service import approve_candidate, reject_candidate
-from ...services.clean_release.publication_service import publish_candidate, revoke_publication
-from ...services.clean_release.repository import CleanReleaseRepository
-from ...dependencies import get_clean_release_repository
-from ...services.clean_release.enums import CandidateStatus
-from ...models.clean_release import ReleaseCandidate, CandidateArtifact, DistributionManifest
-from ...services.clean_release.dto import CandidateDTO, ManifestDTO
-
-router = APIRouter(prefix="/api/v2/clean-release", tags=["Clean Release V2"])
-
-
-class ApprovalRequest(dict):
-    pass
-
-
-class PublishRequest(dict):
-    pass
-
-
-class RevokeRequest(dict):
-    pass
-
-
-@router.post("/candidates", response_model=CandidateDTO, status_code=status.HTTP_201_CREATED)
-async def register_candidate(
-    payload: Dict[str, Any],
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository)
-):
-    candidate = ReleaseCandidate(
-        id=payload["id"],
-        version=payload["version"],
-        source_snapshot_ref=payload["source_snapshot_ref"],
-        created_by=payload["created_by"],
-        created_at=datetime.now(timezone.utc),
-        status=CandidateStatus.DRAFT.value
-    )
-    repository.save_candidate(candidate)
-    return CandidateDTO(
-        id=candidate.id,
-        version=candidate.version,
-        source_snapshot_ref=candidate.source_snapshot_ref,
-        created_at=candidate.created_at,
-        created_by=candidate.created_by,
-        status=CandidateStatus(candidate.status)
-    )
-
-
-@router.post("/candidates/{candidate_id}/artifacts")
-async def import_artifacts(
-    candidate_id: str,
-    payload: Dict[str, Any],
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository)
-):
-    candidate = repository.get_candidate(candidate_id)
-    if not candidate:
-        raise HTTPException(status_code=404, detail="Candidate not found")
-
-    for art_data in payload.get("artifacts", []):
-        artifact = CandidateArtifact(
-            id=art_data["id"],
-            candidate_id=candidate_id,
-            path=art_data["path"],
-            sha256=art_data["sha256"],
-            size=art_data["size"]
-        )
-        # In a real repo we'd have save_artifact
-        # repository.save_artifact(artifact)
-        pass
-
-    return {"status": "success"}
-
-
-@router.post("/candidates/{candidate_id}/manifests", response_model=ManifestDTO, status_code=status.HTTP_201_CREATED)
-async def build_manifest(
-    candidate_id: str,
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository)
-):
-    candidate = repository.get_candidate(candidate_id)
-    if not candidate:
-        raise HTTPException(status_code=404, detail="Candidate not found")
-
-    manifest = DistributionManifest(
-        id=f"manifest-{candidate_id}",
-        candidate_id=candidate_id,
-        manifest_version=1,
-        manifest_digest="hash-123",
-        artifacts_digest="art-hash-123",
-        created_by="system",
-        created_at=datetime.now(timezone.utc),
-        source_snapshot_ref=candidate.source_snapshot_ref,
-        content_json={"items": [], "summary": {}}
-    )
-    repository.save_manifest(manifest)
-
-    return ManifestDTO(
-        id=manifest.id,
-        candidate_id=manifest.candidate_id,
-        manifest_version=manifest.manifest_version,
-        manifest_digest=manifest.manifest_digest,
-        artifacts_digest=manifest.artifacts_digest,
-        created_at=manifest.created_at,
-        created_by=manifest.created_by,
-        source_snapshot_ref=manifest.source_snapshot_ref,
-        content_json=manifest.content_json
-    )
-
-
-@router.post("/candidates/{candidate_id}/approve")
-async def approve_candidate_endpoint(
-    candidate_id: str,
-    payload: Dict[str, Any],
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    try:
-        decision = approve_candidate(
-            repository=repository,
-            candidate_id=candidate_id,
-            report_id=str(payload["report_id"]),
-            decided_by=str(payload["decided_by"]),
-            comment=payload.get("comment"),
-        )
-    except Exception as exc:  # noqa: BLE001
-        raise HTTPException(status_code=409, detail={"message": str(exc), "code": "APPROVAL_GATE_ERROR"})
-
-    return {"status": "ok", "decision": decision.decision, "decision_id": decision.id}
-
-
-@router.post("/candidates/{candidate_id}/reject")
-async def reject_candidate_endpoint(
-    candidate_id: str,
-    payload: Dict[str, Any],
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    try:
-        decision = reject_candidate(
-            repository=repository,
-            candidate_id=candidate_id,
-            report_id=str(payload["report_id"]),
-            decided_by=str(payload["decided_by"]),
-            comment=payload.get("comment"),
-        )
-    except Exception as exc:  # noqa: BLE001
-        raise HTTPException(status_code=409, detail={"message": str(exc), "code": "APPROVAL_GATE_ERROR"})
-
-    return {"status": "ok", "decision": decision.decision, "decision_id": decision.id}
-
-
-@router.post("/candidates/{candidate_id}/publish")
-async def publish_candidate_endpoint(
-    candidate_id: str,
-    payload: Dict[str, Any],
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    try:
-        publication = publish_candidate(
-            repository=repository,
-            candidate_id=candidate_id,
-            report_id=str(payload["report_id"]),
-            published_by=str(payload["published_by"]),
-            target_channel=str(payload["target_channel"]),
-            publication_ref=payload.get("publication_ref"),
-        )
-    except Exception as exc:  # noqa: BLE001
-        raise HTTPException(status_code=409, detail={"message": str(exc), "code": "PUBLICATION_GATE_ERROR"})
-
-    return {
-        "status": "ok",
-        "publication": {
-            "id": publication.id,
-            "candidate_id": publication.candidate_id,
-            "report_id": publication.report_id,
-            "published_by": publication.published_by,
-            "published_at": publication.published_at.isoformat() if publication.published_at else None,
-            "target_channel": publication.target_channel,
-            "publication_ref": publication.publication_ref,
-            "status": publication.status,
-        },
-    }
-
-
-@router.post("/publications/{publication_id}/revoke")
-async def revoke_publication_endpoint(
-    publication_id: str,
-    payload: Dict[str, Any],
-    repository: CleanReleaseRepository = Depends(get_clean_release_repository),
-):
-    try:
-        publication = revoke_publication(
-            repository=repository,
-            publication_id=publication_id,
-            revoked_by=str(payload["revoked_by"]),
-            comment=payload.get("comment"),
-        )
-    except Exception as exc:  # noqa: BLE001
-        raise HTTPException(status_code=409, detail={"message": str(exc), "code": "PUBLICATION_GATE_ERROR"})
-
-    return {
-        "status": "ok",
-        "publication": {
-            "id": publication.id,
-            "candidate_id": publication.candidate_id,
-            "report_id": publication.report_id,
-            "published_by": publication.published_by,
-            "published_at": publication.published_at.isoformat() if publication.published_at else None,
-            "target_channel": publication.target_channel,
-            "publication_ref": publication.publication_ref,
-            "status": publication.status,
-        },
-    }
-
-# [/DEF:backend.src.api.routes.clean_release_v2:Module]
@@ -34,27 +34,14 @@
 # [SECTION: IMPORTS]
 from fastapi import APIRouter, Depends, HTTPException, Query, Response
 from fastapi.responses import JSONResponse
-from typing import List, Optional, Dict, Any, Literal
+from typing import List, Optional, Dict, Any
 import re
 from urllib.parse import urlparse
 from pydantic import BaseModel, Field
-from sqlalchemy.orm import Session
-from ...dependencies import (
-    get_config_manager,
-    get_task_manager,
-    get_resource_service,
-    get_mapping_service,
-    get_current_user,
-    has_permission,
-)
-from ...core.database import get_db
-from ...core.async_superset_client import AsyncSupersetClient
+from ...dependencies import get_config_manager, get_task_manager, get_resource_service, get_mapping_service, has_permission
 from ...core.logger import logger, belief_scope
 from ...core.superset_client import SupersetClient
-from ...core.superset_profile_lookup import SupersetAccountLookupAdapter
 from ...core.utils.network import DashboardNotFoundError
-from ...models.auth import User
-from ...services.profile_service import ProfileService
 from ...services.resource_service import ResourceService
 # [/SECTION]
@@ -92,17 +79,6 @@ class DashboardItem(BaseModel):
     last_task: Optional[LastTask] = None
 # [/DEF:DashboardItem:DataClass]
 
-# [DEF:EffectiveProfileFilter:DataClass]
-class EffectiveProfileFilter(BaseModel):
-    applied: bool
-    source_page: Literal["dashboards_main", "other"] = "dashboards_main"
-    override_show_all: bool = False
-    username: Optional[str] = None
-    match_logic: Optional[
-        Literal["owners_or_modified_by", "slug_only", "owners_or_modified_by+slug_only"]
-    ] = None
-# [/DEF:EffectiveProfileFilter:DataClass]
-
 # [DEF:DashboardsResponse:DataClass]
 class DashboardsResponse(BaseModel):
     dashboards: List[DashboardItem]
@@ -110,7 +86,6 @@ class DashboardsResponse(BaseModel):
     page: int
     page_size: int
     total_pages: int
-    effective_profile_filter: Optional[EffectiveProfileFilter] = None
 # [/DEF:DashboardsResponse:DataClass]
 
 # [DEF:DashboardChartItem:DataClass]
@@ -232,56 +207,6 @@ def _resolve_dashboard_id_from_ref(
 # [/DEF:_resolve_dashboard_id_from_ref:Function]
 
 
-# [DEF:_find_dashboard_id_by_slug_async:Function]
-# @PURPOSE: Resolve dashboard numeric ID by slug using async Superset list endpoint.
-# @PRE: dashboard_slug is non-empty.
-# @POST: Returns dashboard ID when found, otherwise None.
-async def _find_dashboard_id_by_slug_async(
-    client: AsyncSupersetClient,
-    dashboard_slug: str,
-) -> Optional[int]:
-    query_variants = [
-        {"filters": [{"col": "slug", "opr": "eq", "value": dashboard_slug}], "page": 0, "page_size": 1},
-        {"filters": [{"col": "slug", "op": "eq", "value": dashboard_slug}], "page": 0, "page_size": 1},
-    ]
-
-    for query in query_variants:
-        try:
-            _count, dashboards = await client.get_dashboards_page_async(query=query)
-            if dashboards:
-                resolved_id = dashboards[0].get("id")
-                if resolved_id is not None:
-                    return int(resolved_id)
-        except Exception:
-            continue
-
-    return None
-# [/DEF:_find_dashboard_id_by_slug_async:Function]
-
-
-# [DEF:_resolve_dashboard_id_from_ref_async:Function]
-# @PURPOSE: Resolve dashboard ID from slug-first reference using async Superset client.
-# @PRE: dashboard_ref is provided in route path.
-# @POST: Returns valid dashboard ID or raises HTTPException(404).
-async def _resolve_dashboard_id_from_ref_async(
-    dashboard_ref: str,
-    client: AsyncSupersetClient,
-) -> int:
-    normalized_ref = str(dashboard_ref or "").strip()
-    if not normalized_ref:
-        raise HTTPException(status_code=404, detail="Dashboard not found")
-
-    slug_match_id = await _find_dashboard_id_by_slug_async(client, normalized_ref)
-    if slug_match_id is not None:
-        return slug_match_id
-
-    if normalized_ref.isdigit():
-        return int(normalized_ref)
-
-    raise HTTPException(status_code=404, detail="Dashboard not found")
-# [/DEF:_resolve_dashboard_id_from_ref_async:Function]
-
-
 # [DEF:_normalize_filter_values:Function]
 # @PURPOSE: Normalize query filter values to lower-cased non-empty tokens.
 # @PRE: values may be None or list of strings.
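The removed resolver pair above implements slug-first lookup with a numeric fallback; a usage sketch for reference (ids and slug are invented for illustration):

```python
async def demo(client: AsyncSupersetClient) -> None:
    # Slug wins over numeric interpretation: "42" only falls back to int("42")
    # when no dashboard carries the slug "42".
    by_slug = await _resolve_dashboard_id_from_ref_async("sales-kpi", client)
    by_id = await _resolve_dashboard_id_from_ref_async("42", client)
    print(by_slug, by_id)
```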
@@ -317,167 +242,6 @@ def _dashboard_git_filter_value(dashboard: Dict[str, Any]) -> str:
     return "pending"
 # [/DEF:_dashboard_git_filter_value:Function]
 
-# [DEF:_normalize_actor_alias_token:Function]
-# @PURPOSE: Normalize actor alias token to comparable trim+lower text.
-# @PRE: value can be scalar/None.
-# @POST: Returns normalized token or None.
-def _normalize_actor_alias_token(value: Any) -> Optional[str]:
-    token = str(value or "").strip().lower()
-    return token or None
-# [/DEF:_normalize_actor_alias_token:Function]
-
-
-# [DEF:_normalize_owner_display_token:Function]
-# @PURPOSE: Project owner payload value into stable display string for API response contracts.
-# @PRE: owner can be scalar, dict or None.
-# @POST: Returns trimmed non-empty owner display token or None.
-def _normalize_owner_display_token(owner: Any) -> Optional[str]:
-    if owner is None:
-        return None
-
-    if isinstance(owner, dict):
-        username = str(owner.get("username") or owner.get("user_name") or owner.get("name") or "").strip()
-        full_name = str(owner.get("full_name") or "").strip()
-        first_name = str(owner.get("first_name") or "").strip()
-        last_name = str(owner.get("last_name") or "").strip()
-        combined = " ".join(part for part in [first_name, last_name] if part).strip()
-        email = str(owner.get("email") or "").strip()
-
-        for candidate in [username, full_name, combined, email]:
-            if candidate:
-                return candidate
-        return None
-
-    normalized = str(owner).strip()
-    return normalized or None
-# [/DEF:_normalize_owner_display_token:Function]
-
-
-# [DEF:_normalize_dashboard_owner_values:Function]
-# @PURPOSE: Normalize dashboard owners payload to optional list of display strings.
-# @PRE: owners payload can be None, scalar, or list with mixed values.
-# @POST: Returns deduplicated owner labels preserving order, or None when absent.
-def _normalize_dashboard_owner_values(owners: Any) -> Optional[List[str]]:
-    if owners is None:
-        return None
-
-    raw_items: List[Any]
-    if isinstance(owners, list):
-        raw_items = owners
-    else:
-        raw_items = [owners]
-
-    normalized: List[str] = []
-    for owner in raw_items:
-        token = _normalize_owner_display_token(owner)
-        if token and token not in normalized:
-            normalized.append(token)
-
-    return normalized
-# [/DEF:_normalize_dashboard_owner_values:Function]
-
-
-# [DEF:_project_dashboard_response_items:Function]
-# @PURPOSE: Project dashboard payloads to response-contract-safe shape.
-# @PRE: dashboards is a list of dict-like dashboard payloads.
-# @POST: Returned items satisfy DashboardItem owners=list[str]|None contract.
-def _project_dashboard_response_items(dashboards: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
-    projected: List[Dict[str, Any]] = []
-    for dashboard in dashboards:
-        projected_dashboard = dict(dashboard)
-        projected_dashboard["owners"] = _normalize_dashboard_owner_values(
-            projected_dashboard.get("owners")
-        )
-        projected.append(projected_dashboard)
-    return projected
-# [/DEF:_project_dashboard_response_items:Function]
-
-
-# [DEF:_resolve_profile_actor_aliases:Function]
-# @PURPOSE: Resolve stable actor aliases for profile filtering without per-dashboard detail fan-out.
-# @PRE: bound username is available and env is valid.
-# @POST: Returns at least normalized username; may include Superset display-name alias.
-# @SIDE_EFFECT: Performs at most one Superset users-lookup request.
-def _resolve_profile_actor_aliases(env: Any, bound_username: str) -> List[str]:
-    normalized_bound = _normalize_actor_alias_token(bound_username)
-    if not normalized_bound:
-        return []
-
-    aliases: List[str] = [normalized_bound]
-    try:
-        client = SupersetClient(env)
-        adapter = SupersetAccountLookupAdapter(
-            network_client=client.network,
-            environment_id=str(getattr(env, "id", "")),
-        )
-        lookup_payload = adapter.get_users_page(
-            search=normalized_bound,
-            page_index=0,
-            page_size=20,
-            sort_column="username",
-            sort_order="asc",
-        )
-        lookup_items = (
-            lookup_payload.get("items", [])
-            if isinstance(lookup_payload, dict)
-            else []
-        )
-
-        matched_item: Optional[Dict[str, Any]] = None
-        for item in lookup_items:
-            if not isinstance(item, dict):
-                continue
-            if _normalize_actor_alias_token(item.get("username")) == normalized_bound:
-                matched_item = item
-                break
-
-        if matched_item is None:
-            for item in lookup_items:
-                if isinstance(item, dict):
-                    matched_item = item
-                    break
-
-        display_alias = _normalize_actor_alias_token(
-            (matched_item or {}).get("display_name")
-        )
-        if display_alias and display_alias not in aliases:
-            aliases.append(display_alias)
-
-        logger.reflect(
-            "[REFLECT] Resolved profile actor aliases "
-            f"(env={getattr(env, 'id', None)}, bound_username={normalized_bound!r}, "
-            f"lookup_items={len(lookup_items)}, aliases={aliases!r})"
-        )
-    except Exception as alias_error:
-        logger.explore(
-            "[EXPLORE] Failed to resolve profile actor aliases via Superset users lookup "
-            f"(env={getattr(env, 'id', None)}, bound_username={normalized_bound!r}): {alias_error}"
-        )
-    return aliases
-# [/DEF:_resolve_profile_actor_aliases:Function]
-
-
-# [DEF:_matches_dashboard_actor_aliases:Function]
-# @PURPOSE: Apply profile actor matching against multiple aliases (username + optional display name).
-# @PRE: actor_aliases contains normalized non-empty tokens.
-# @POST: Returns True when any alias matches owners OR modified_by.
-def _matches_dashboard_actor_aliases(
-    profile_service: ProfileService,
-    actor_aliases: List[str],
-    owners: Optional[Any],
-    modified_by: Optional[str],
-) -> bool:
-    for actor_alias in actor_aliases:
-        if profile_service.matches_dashboard_actor(
-            bound_username=actor_alias,
-            owners=owners,
-            modified_by=modified_by,
-        ):
-            return True
-    return False
-# [/DEF:_matches_dashboard_actor_aliases:Function]
-
-
 # [DEF:get_dashboards:Function]
 # @PURPOSE: Fetch list of dashboards from a specific environment with Git status and last task status
 # @PRE: env_id must be a valid environment ID
@@ -485,7 +249,6 @@ def _matches_dashboard_actor_aliases(
 # @PRE: page_size must be between 1 and 100 if provided
 # @POST: Returns a list of dashboards with enhanced metadata and pagination info
 # @POST: Response includes pagination metadata (page, page_size, total, total_pages)
-# @POST: Response includes effective profile filter metadata for main dashboards page context
 # @PARAM: env_id (str) - The environment ID to fetch dashboards from
 # @PARAM: search (Optional[str]) - Filter by title/slug
 # @PARAM: page (Optional[int]) - Page number (default: 1)
@@ -498,9 +261,6 @@ async def get_dashboards(
     search: Optional[str] = None,
     page: int = 1,
     page_size: int = 10,
-    page_context: Literal["dashboards_main", "other"] = Query(default="dashboards_main"),
-    apply_profile_default: bool = Query(default=True),
-    override_show_all: bool = Query(default=False),
     filter_title: Optional[List[str]] = Query(default=None),
     filter_git_status: Optional[List[str]] = Query(default=None),
     filter_llm_status: Optional[List[str]] = Query(default=None),
@@ -509,88 +269,26 @@ async def get_dashboards(
     config_manager=Depends(get_config_manager),
     task_manager=Depends(get_task_manager),
     resource_service=Depends(get_resource_service),
-    current_user: User = Depends(get_current_user),
-    db: Session = Depends(get_db),
     _ = Depends(has_permission("plugin:migration", "READ"))
 ):
-    with belief_scope(
-        "get_dashboards",
-        (
-            f"env_id={env_id}, search={search}, page={page}, page_size={page_size}, "
-            f"page_context={page_context}, apply_profile_default={apply_profile_default}, "
-            f"override_show_all={override_show_all}"
-        ),
-    ):
+    with belief_scope("get_dashboards", f"env_id={env_id}, search={search}, page={page}, page_size={page_size}"):
+        # Validate pagination parameters
         if page < 1:
             logger.error(f"[get_dashboards][Coherence:Failed] Invalid page: {page}")
             raise HTTPException(status_code=400, detail="Page must be >= 1")
         if page_size < 1 or page_size > 100:
             logger.error(f"[get_dashboards][Coherence:Failed] Invalid page_size: {page_size}")
             raise HTTPException(status_code=400, detail="Page size must be between 1 and 100")
 
+        # Validate environment exists
         environments = config_manager.get_environments()
         env = next((e for e in environments if e.id == env_id), None)
         if not env:
             logger.error(f"[get_dashboards][Coherence:Failed] Environment not found: {env_id}")
             raise HTTPException(status_code=404, detail="Environment not found")
 
-        profile_service = ProfileService(db=db, config_manager=config_manager)
-        bound_username: Optional[str] = None
-        can_apply_profile_filter = False
-        can_apply_slug_filter = False
-        effective_profile_filter = EffectiveProfileFilter(
-            applied=False,
-            source_page=page_context,
-            override_show_all=bool(override_show_all),
-            username=None,
-            match_logic=None,
-        )
-
-        try:
-            profile_preference = profile_service.get_my_preference(current_user).preference
-            normalized_username = str(
-                getattr(profile_preference, "superset_username_normalized", None) or ""
-            ).strip().lower()
-            raw_username = str(
-                getattr(profile_preference, "superset_username", None) or ""
-            ).strip().lower()
-            bound_username = normalized_username or raw_username or None
-
-            can_apply_profile_filter = (
-                page_context == "dashboards_main"
-                and bool(apply_profile_default)
-                and not bool(override_show_all)
-                and bool(getattr(profile_preference, "show_only_my_dashboards", False))
-                and bool(bound_username)
-            )
-            can_apply_slug_filter = (
-                page_context == "dashboards_main"
-                and bool(apply_profile_default)
-                and not bool(override_show_all)
-                and bool(getattr(profile_preference, "show_only_slug_dashboards", True))
-            )
-
-            profile_match_logic = None
-            if can_apply_profile_filter and can_apply_slug_filter:
-                profile_match_logic = "owners_or_modified_by+slug_only"
-            elif can_apply_profile_filter:
-                profile_match_logic = "owners_or_modified_by"
-            elif can_apply_slug_filter:
-                profile_match_logic = "slug_only"
-
-            effective_profile_filter = EffectiveProfileFilter(
-                applied=bool(can_apply_profile_filter or can_apply_slug_filter),
-                source_page=page_context,
-                override_show_all=bool(override_show_all),
-                username=bound_username if can_apply_profile_filter else None,
-                match_logic=profile_match_logic,
-            )
-        except Exception as profile_error:
-            logger.explore(
-                f"[EXPLORE] Profile preference unavailable; continuing without profile-default filter: {profile_error}"
-            )
-
         try:
+            # Get all tasks for status lookup
             all_tasks = task_manager.get_all_tasks()
             title_filters = _normalize_filter_values(filter_title)
             git_filters = _normalize_filter_values(filter_git_status)
@@ -606,9 +304,9 @@ async def get_dashboards(
                     actor_filters,
                 )
             )
-            needs_full_scan = has_column_filters or bool(can_apply_profile_filter) or bool(can_apply_slug_filter)
-
-            if isinstance(resource_service, ResourceService) and not needs_full_scan:
+            # Fast path: real ResourceService -> one Superset page call per API request.
+            if isinstance(resource_service, ResourceService) and not has_column_filters:
                 try:
                     page_payload = await resource_service.get_dashboards_page_with_status(
                         env,
@@ -617,7 +315,6 @@ async def get_dashboards(
                         page_size=page_size,
                         search=search,
                         include_git_status=False,
-                        require_slug=bool(can_apply_slug_filter),
                     )
                     paginated_dashboards = page_payload["dashboards"]
                    total = page_payload["total"]
@@ -631,15 +328,14 @@ async def get_dashboards(
                         env,
                         all_tasks,
                         include_git_status=False,
-                        require_slug=bool(can_apply_slug_filter),
                     )
 
                     if search:
                         search_lower = search.lower()
                         dashboards = [
                             d for d in dashboards
-                            if search_lower in d.get("title", "").lower()
-                            or search_lower in d.get("slug", "").lower()
+                            if search_lower in d.get('title', '').lower()
+                            or search_lower in d.get('slug', '').lower()
                         ]
 
                     total = len(dashboards)
@@ -647,60 +343,13 @@ async def get_dashboards(
                     start_idx = (page - 1) * page_size
                     end_idx = start_idx + page_size
                    paginated_dashboards = dashboards[start_idx:end_idx]
-            else:
+            elif isinstance(resource_service, ResourceService) and has_column_filters:
                 dashboards = await resource_service.get_dashboards_with_status(
                     env,
                     all_tasks,
                     include_git_status=bool(git_filters),
-                    require_slug=bool(can_apply_slug_filter),
                 )
 
-                if can_apply_profile_filter and bound_username:
-                    actor_aliases = _resolve_profile_actor_aliases(env, bound_username)
-                    if not actor_aliases:
-                        actor_aliases = [bound_username]
-                    logger.reason(
-                        "[REASON] Applying profile actor filter "
-                        f"(env={env_id}, bound_username={bound_username}, actor_aliases={actor_aliases!r}, "
-                        f"dashboards_before={len(dashboards)})"
-                    )
-                    filtered_dashboards: List[Dict[str, Any]] = []
-                    max_actor_samples = 15
-                    for index, dashboard in enumerate(dashboards):
-                        owners_value = dashboard.get("owners")
-                        created_by_value = dashboard.get("created_by")
-                        modified_by_value = dashboard.get("modified_by")
-                        matches_actor = _matches_dashboard_actor_aliases(
-                            profile_service=profile_service,
-                            actor_aliases=actor_aliases,
-                            owners=owners_value,
-                            modified_by=modified_by_value,
-                        )
-                        if index < max_actor_samples:
-                            logger.reflect(
-                                "[REFLECT] Profile actor filter sample "
-                                f"(env={env_id}, dashboard_id={dashboard.get('id')}, "
-                                f"bound_username={bound_username!r}, actor_aliases={actor_aliases!r}, "
-                                f"owners={owners_value!r}, created_by={created_by_value!r}, "
-                                f"modified_by={modified_by_value!r}, matches={matches_actor})"
-                            )
-                        if matches_actor:
-                            filtered_dashboards.append(dashboard)
-
-                    logger.reflect(
-                        "[REFLECT] Profile actor filter summary "
-                        f"(env={env_id}, bound_username={bound_username!r}, "
-                        f"dashboards_before={len(dashboards)}, dashboards_after={len(filtered_dashboards)})"
-                    )
-                    dashboards = filtered_dashboards
-
-                if can_apply_slug_filter:
-                    dashboards = [
-                        dashboard
-                        for dashboard in dashboards
-                        if str(dashboard.get("slug") or "").strip()
-                    ]
-
                 if search:
                     search_lower = search.lower()
                     dashboards = [
@@ -727,21 +376,13 @@ async def get_dashboards(
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
changed_on_raw = str(dashboard.get("last_modified") or "").strip().lower()
|
changed_on_raw = str(dashboard.get("last_modified") or "").strip().lower()
|
||||||
changed_on_prefix = (
|
changed_on_prefix = changed_on_raw[:10] if len(changed_on_raw) >= 10 else changed_on_raw
|
||||||
changed_on_raw[:10] if len(changed_on_raw) >= 10 else changed_on_raw
|
if changed_on_filters and changed_on_raw not in changed_on_filters and changed_on_prefix not in changed_on_filters:
|
||||||
)
|
|
||||||
if (
|
|
||||||
changed_on_filters
|
|
||||||
and changed_on_raw not in changed_on_filters
|
|
||||||
and changed_on_prefix not in changed_on_filters
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
owners = dashboard.get("owners") or []
|
owners = dashboard.get("owners") or []
|
||||||
if isinstance(owners, list):
|
if isinstance(owners, list):
|
||||||
actor_value = ", ".join(
|
actor_value = ", ".join(str(item).strip() for item in owners if str(item).strip()).lower()
|
||||||
str(item).strip() for item in owners if str(item).strip()
|
|
||||||
).lower()
|
|
||||||
else:
|
else:
|
||||||
actor_value = str(owners).strip().lower()
|
actor_value = str(owners).strip().lower()
|
||||||
if not actor_value:
|
if not actor_value:
|
||||||
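The `changed_on` filter above accepts either a full timestamp or a date-only value: truncating the stored string to its first ten characters (`YYYY-MM-DD`) lets a date-only filter match a full timestamp. A minimal, standalone sketch (the sample values are hypothetical):

```python
# Minimal sketch of the date-prefix matching used by the changed_on filter.
def matches_changed_on(raw: str, filters: set[str]) -> bool:
    raw = raw.strip().lower()
    # "2024-05-01t09:30:00" -> "2024-05-01", so a date-only filter still matches.
    prefix = raw[:10] if len(raw) >= 10 else raw
    return not filters or raw in filters or prefix in filters

assert matches_changed_on("2024-05-01T09:30:00", {"2024-05-01"})
assert not matches_changed_on("2024-05-02T00:00:00", {"2024-05-01"})
```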
```diff
@@ -750,31 +391,44 @@ async def get_dashboards(
                     return False
                 return True
 
-            if has_column_filters:
-                dashboards = [d for d in dashboards if _matches_dashboard_filters(d)]
+                dashboards = [d for d in dashboards if _matches_dashboard_filters(d)]
+                total = len(dashboards)
+                total_pages = (total + page_size - 1) // page_size if total > 0 else 1
+                start_idx = (page - 1) * page_size
+                end_idx = start_idx + page_size
+                paginated_dashboards = dashboards[start_idx:end_idx]
+            else:
+                # Compatibility path for mocked services in route tests.
+                dashboards = await resource_service.get_dashboards_with_status(
+                    env,
+                    all_tasks,
+                    include_git_status=False,
+                )
+
+                if search:
+                    search_lower = search.lower()
+                    dashboards = [
+                        d for d in dashboards
+                        if search_lower in d.get('title', '').lower()
+                        or search_lower in d.get('slug', '').lower()
+                    ]
 
                 total = len(dashboards)
                 total_pages = (total + page_size - 1) // page_size if total > 0 else 1
                 start_idx = (page - 1) * page_size
                 end_idx = start_idx + page_size
                 paginated_dashboards = dashboards[start_idx:end_idx]
 
-            logger.info(
-                f"[get_dashboards][Coherence:OK] Returning {len(paginated_dashboards)} dashboards "
-                f"(page {page}/{total_pages}, total: {total}, profile_filter_applied={effective_profile_filter.applied})"
-            )
-
-            response_dashboards = _project_dashboard_response_items(paginated_dashboards)
-
+            logger.info(f"[get_dashboards][Coherence:OK] Returning {len(paginated_dashboards)} dashboards (page {page}/{total_pages}, total: {total})")
             return DashboardsResponse(
-                dashboards=response_dashboards,
+                dashboards=paginated_dashboards,
                 total=total,
                 page=page,
                 page_size=page_size,
-                total_pages=total_pages,
-                effective_profile_filter=effective_profile_filter,
+                total_pages=total_pages
             )
 
         except Exception as e:
             logger.error(f"[get_dashboards][Coherence:Failed] Failed to fetch dashboards: {e}")
             raise HTTPException(status_code=503, detail=f"Failed to fetch dashboards: {str(e)}")
```
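Both branches of `get_dashboards` end with the same pagination arithmetic: `(total + page_size - 1) // page_size` is integer ceiling division, and the slice bounds come from the 1-based page number. A worked sketch with illustrative numbers:

```python
# Ceiling-division pagination, as used above (values are illustrative).
total, page_size, page = 47, 20, 3

total_pages = (total + page_size - 1) // page_size if total > 0 else 1  # ceil(47/20) = 3
start_idx = (page - 1) * page_size   # 40
end_idx = start_idx + page_size      # 60; slicing past the end is safe in Python

items = list(range(total))
assert total_pages == 3
assert items[start_idx:end_idx] == list(range(40, 47))  # last, short page
```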
```diff
@@ -854,10 +508,10 @@ async def get_dashboard_detail(
             logger.error(f"[get_dashboard_detail][Coherence:Failed] Environment not found: {env_id}")
             raise HTTPException(status_code=404, detail="Environment not found")
 
-        client = AsyncSupersetClient(env)
         try:
-            dashboard_id = await _resolve_dashboard_id_from_ref_async(dashboard_ref, client)
-            detail = await client.get_dashboard_detail_async(dashboard_id)
+            client = SupersetClient(env)
+            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, client)
+            detail = client.get_dashboard_detail(dashboard_id)
             logger.info(
                 f"[get_dashboard_detail][Coherence:OK] Dashboard ref={dashboard_ref} resolved_id={dashboard_id}: {detail.get('chart_count', 0)} charts, {detail.get('dataset_count', 0)} datasets"
             )
@@ -867,8 +521,6 @@ async def get_dashboard_detail(
         except Exception as e:
             logger.error(f"[get_dashboard_detail][Coherence:Failed] Failed to fetch dashboard detail: {e}")
             raise HTTPException(status_code=503, detail=f"Failed to fetch dashboard detail: {str(e)}")
-        finally:
-            await client.aclose()
 # [/DEF:get_dashboard_detail:Function]
 
 
```
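The removed variant of `get_dashboard_detail` acquired the async client before the `try` and released it in `finally`, so the connection is closed on every exit path. Condensed from the handler above; the client and helper names are this project's own:

```python
# Sketch of the try/finally ownership pattern from the async variant above.
async def fetch_detail(env, dashboard_ref: str):
    client = AsyncSupersetClient(env)   # acquired before try, so it always exists
    try:
        dashboard_id = await _resolve_dashboard_id_from_ref_async(dashboard_ref, client)
        return await client.get_dashboard_detail_async(dashboard_id)
    finally:
        await client.aclose()           # released on success and on every error path
```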
```diff
@@ -920,74 +572,69 @@ async def get_dashboard_tasks_history(
 ):
     with belief_scope("get_dashboard_tasks_history", f"dashboard_ref={dashboard_ref}, env_id={env_id}, limit={limit}"):
         dashboard_id: Optional[int] = None
-        client: Optional[AsyncSupersetClient] = None
-        try:
-            if dashboard_ref.isdigit():
-                dashboard_id = int(dashboard_ref)
-            elif env_id:
-                environments = config_manager.get_environments()
-                env = next((e for e in environments if e.id == env_id), None)
-                if not env:
-                    logger.error(f"[get_dashboard_tasks_history][Coherence:Failed] Environment not found: {env_id}")
-                    raise HTTPException(status_code=404, detail="Environment not found")
-                client = AsyncSupersetClient(env)
-                dashboard_id = await _resolve_dashboard_id_from_ref_async(dashboard_ref, client)
-            else:
-                logger.error(
-                    "[get_dashboard_tasks_history][Coherence:Failed] Non-numeric dashboard ref requires env_id"
-                )
-                raise HTTPException(
-                    status_code=400,
-                    detail="env_id is required when dashboard reference is a slug",
-                )
-
-            matching_tasks = []
-            for task in task_manager.get_all_tasks():
-                if _task_matches_dashboard(task, dashboard_id, env_id):
-                    matching_tasks.append(task)
-
-            def _sort_key(task_obj: Any) -> str:
-                return (
-                    str(getattr(task_obj, "started_at", "") or "")
-                    or str(getattr(task_obj, "finished_at", "") or "")
-                )
-
-            matching_tasks.sort(key=_sort_key, reverse=True)
-            selected = matching_tasks[:limit]
-
-            items = []
-            for task in selected:
-                result = getattr(task, "result", None)
-                summary = None
-                validation_status = None
-                if isinstance(result, dict):
-                    raw_validation_status = result.get("status")
-                    if raw_validation_status is not None:
-                        validation_status = str(raw_validation_status)
-                    summary = (
-                        result.get("summary")
-                        or result.get("status")
-                        or result.get("message")
-                    )
-                params = getattr(task, "params", {}) or {}
-                items.append(
-                    DashboardTaskHistoryItem(
-                        id=str(getattr(task, "id", "")),
-                        plugin_id=str(getattr(task, "plugin_id", "")),
-                        status=str(getattr(task, "status", "")),
-                        validation_status=validation_status,
-                        started_at=getattr(task, "started_at", None).isoformat() if getattr(task, "started_at", None) else None,
-                        finished_at=getattr(task, "finished_at", None).isoformat() if getattr(task, "finished_at", None) else None,
-                        env_id=str(params.get("environment_id") or params.get("env")) if (params.get("environment_id") or params.get("env")) else None,
-                        summary=summary,
-                    )
-                )
-
-            logger.info(f"[get_dashboard_tasks_history][Coherence:OK] Found {len(items)} tasks for dashboard_ref={dashboard_ref}, dashboard_id={dashboard_id}")
-            return DashboardTaskHistoryResponse(dashboard_id=dashboard_id, items=items)
-        finally:
-            if client is not None:
-                await client.aclose()
+        if dashboard_ref.isdigit():
+            dashboard_id = int(dashboard_ref)
+        elif env_id:
+            environments = config_manager.get_environments()
+            env = next((e for e in environments if e.id == env_id), None)
+            if not env:
+                logger.error(f"[get_dashboard_tasks_history][Coherence:Failed] Environment not found: {env_id}")
+                raise HTTPException(status_code=404, detail="Environment not found")
+            client = SupersetClient(env)
+            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, client)
+        else:
+            logger.error(
+                "[get_dashboard_tasks_history][Coherence:Failed] Non-numeric dashboard ref requires env_id"
+            )
+            raise HTTPException(
+                status_code=400,
+                detail="env_id is required when dashboard reference is a slug",
+            )
+
+        matching_tasks = []
+        for task in task_manager.get_all_tasks():
+            if _task_matches_dashboard(task, dashboard_id, env_id):
+                matching_tasks.append(task)
+
+        def _sort_key(task_obj: Any) -> str:
+            return (
+                str(getattr(task_obj, "started_at", "") or "")
+                or str(getattr(task_obj, "finished_at", "") or "")
+            )
+
+        matching_tasks.sort(key=_sort_key, reverse=True)
+        selected = matching_tasks[:limit]
+
+        items = []
+        for task in selected:
+            result = getattr(task, "result", None)
+            summary = None
+            validation_status = None
+            if isinstance(result, dict):
+                raw_validation_status = result.get("status")
+                if raw_validation_status is not None:
+                    validation_status = str(raw_validation_status)
+                summary = (
+                    result.get("summary")
+                    or result.get("status")
+                    or result.get("message")
+                )
+            params = getattr(task, "params", {}) or {}
+            items.append(
+                DashboardTaskHistoryItem(
+                    id=str(getattr(task, "id", "")),
+                    plugin_id=str(getattr(task, "plugin_id", "")),
+                    status=str(getattr(task, "status", "")),
+                    validation_status=validation_status,
+                    started_at=getattr(task, "started_at", None).isoformat() if getattr(task, "started_at", None) else None,
+                    finished_at=getattr(task, "finished_at", None).isoformat() if getattr(task, "finished_at", None) else None,
+                    env_id=str(params.get("environment_id") or params.get("env")) if (params.get("environment_id") or params.get("env")) else None,
+                    summary=summary,
+                )
+            )
+
+        logger.info(f"[get_dashboard_tasks_history][Coherence:OK] Found {len(items)} tasks for dashboard_ref={dashboard_ref}, dashboard_id={dashboard_id}")
+        return DashboardTaskHistoryResponse(dashboard_id=dashboard_id, items=items)
 # [/DEF:get_dashboard_tasks_history:Function]
 
 
```
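`get_dashboard_tasks_history` accepts either a numeric ID or a slug in `dashboard_ref`: `str.isdigit()` gates the cheap path, and a slug requires `env_id` to build an environment-bound client. A standalone sketch of that branching, with `resolve_slug` as a hypothetical stand-in for the client lookup:

```python
# Standalone sketch of the digit-or-slug branching above.
from typing import Callable, Optional

def resolve_dashboard_id(
    dashboard_ref: str,
    env_id: Optional[str],
    resolve_slug: Callable[[str], int],  # hypothetical stand-in for the client lookup
) -> int:
    if dashboard_ref.isdigit():
        return int(dashboard_ref)       # "42" -> 42, no network call
    if not env_id:
        raise ValueError("env_id is required when dashboard reference is a slug")
    return resolve_slug(dashboard_ref)  # slug lookup needs an environment-bound client

assert resolve_dashboard_id("42", None, lambda s: 0) == 42
assert resolve_dashboard_id("sales-kpi", "prod", lambda s: 7) == 7
```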
```diff
@@ -1010,15 +657,15 @@ async def get_dashboard_thumbnail(
             logger.error(f"[get_dashboard_thumbnail][Coherence:Failed] Environment not found: {env_id}")
             raise HTTPException(status_code=404, detail="Environment not found")
 
-        client = AsyncSupersetClient(env)
         try:
-            dashboard_id = await _resolve_dashboard_id_from_ref_async(dashboard_ref, client)
+            client = SupersetClient(env)
+            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, client)
             digest = None
             thumb_endpoint = None
 
             # Preferred flow (newer Superset): ask server to cache screenshot and return digest/image_url.
             try:
-                screenshot_payload = await client.network.request(
+                screenshot_payload = client.network.request(
                     method="POST",
                     endpoint=f"/dashboard/{dashboard_id}/cache_dashboard_screenshot/",
                     json={"force": force},
@@ -1036,7 +683,7 @@ async def get_dashboard_thumbnail(
 
             # Fallback flow (older Superset): read thumbnail_url from dashboard payload.
             if not digest:
-                dashboard_payload = await client.network.request(
+                dashboard_payload = client.network.request(
                     method="GET",
                     endpoint=f"/dashboard/{dashboard_id}",
                 )
@@ -1055,7 +702,7 @@ async def get_dashboard_thumbnail(
             if not thumb_endpoint:
                 thumb_endpoint = f"/dashboard/{dashboard_id}/thumbnail/{digest or 'latest'}/"
 
-            thumb_response = await client.network.request(
+            thumb_response = client.network.request(
                 method="GET",
                 endpoint=thumb_endpoint,
                 raw_response=True,
@@ -1080,8 +727,6 @@ async def get_dashboard_thumbnail(
         except Exception as e:
             logger.error(f"[get_dashboard_thumbnail][Coherence:Failed] Failed to fetch dashboard thumbnail: {e}")
             raise HTTPException(status_code=503, detail=f"Failed to fetch dashboard thumbnail: {str(e)}")
-        finally:
-            await client.aclose()
 # [/DEF:get_dashboard_thumbnail:Function]
 
 # [DEF:MigrateRequest:DataClass]
```
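`get_dashboard_thumbnail` prefers the newer `cache_dashboard_screenshot` endpoint and only falls back to the dashboard payload's `thumbnail_url` on older Superset servers. A compressed sketch of that preference order, reusing the route's `client.network.request` helper; the `cache_key` field name here is an assumption, the route itself reads digest/image_url from the payload:

```python
# Compressed sketch of the preferred/fallback thumbnail flow above.
def fetch_thumbnail(client, dashboard_id: int, force: bool = False) -> bytes:
    digest = None
    try:
        # Preferred (newer Superset): server caches a screenshot and returns its digest.
        payload = client.network.request(
            method="POST",
            endpoint=f"/dashboard/{dashboard_id}/cache_dashboard_screenshot/",
            json={"force": force},
        )
        digest = (payload or {}).get("cache_key")  # field name is an assumption
    except Exception:
        pass  # older Superset: the cache endpoint may not exist

    endpoint = f"/dashboard/{dashboard_id}/thumbnail/{digest or 'latest'}/"
    return client.network.request(method="GET", endpoint=endpoint, raw_response=True)
```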
```diff
@@ -15,25 +15,20 @@ from sqlalchemy.orm import Session
 from typing import List, Optional
 import typing
 import os
-from src.dependencies import get_config_manager, get_current_user, has_permission
+from src.dependencies import get_config_manager, has_permission
 from src.core.database import get_db
-from src.models.auth import User
 from src.models.git import GitServerConfig, GitRepository, GitProvider
-from src.models.profile import UserDashboardPreference
 from src.api.routes.git_schemas import (
-    GitServerConfigSchema, GitServerConfigCreate, GitServerConfigUpdate,
+    GitServerConfigSchema, GitServerConfigCreate,
     BranchSchema, BranchCreate,
     BranchCheckout, CommitSchema, CommitCreate,
     DeploymentEnvironmentSchema, DeployRequest, RepoInitRequest,
-    RepositoryBindingSchema,
     RepoStatusBatchRequest, RepoStatusBatchResponse,
     GiteaRepoCreateRequest, GiteaRepoSchema,
     RemoteRepoCreateRequest, RemoteRepoSchema,
     PromoteRequest, PromoteResponse,
-    MergeStatusSchema, MergeConflictFileSchema, MergeResolveRequest, MergeContinueRequest,
 )
 from src.services.git_service import GitService
-from src.core.async_superset_client import AsyncSupersetClient
 from src.core.superset_client import SupersetClient
 from src.core.logger import logger, belief_scope
 from ...services.llm_prompt_templates import (
@@ -181,70 +176,6 @@ def _resolve_dashboard_id_from_ref(
 # [/DEF:_resolve_dashboard_id_from_ref:Function]
 
 
-# [DEF:_find_dashboard_id_by_slug_async:Function]
-# @PURPOSE: Resolve dashboard numeric ID by slug asynchronously for hot-path Git routes.
-# @PRE: dashboard_slug is non-empty.
-# @POST: Returns dashboard ID or None when not found.
-async def _find_dashboard_id_by_slug_async(
-    client: AsyncSupersetClient,
-    dashboard_slug: str,
-) -> Optional[int]:
-    query_variants = [
-        {"filters": [{"col": "slug", "opr": "eq", "value": dashboard_slug}], "page": 0, "page_size": 1},
-        {"filters": [{"col": "slug", "op": "eq", "value": dashboard_slug}], "page": 0, "page_size": 1},
-    ]
-
-    for query in query_variants:
-        try:
-            _count, dashboards = await client.get_dashboards_page_async(query=query)
-            if dashboards:
-                resolved_id = dashboards[0].get("id")
-                if resolved_id is not None:
-                    return int(resolved_id)
-        except Exception:
-            continue
-    return None
-# [/DEF:_find_dashboard_id_by_slug_async:Function]
-
-
-# [DEF:_resolve_dashboard_id_from_ref_async:Function]
-# @PURPOSE: Resolve dashboard ID asynchronously from slug-or-id reference for hot Git routes.
-# @PRE: dashboard_ref is provided; env_id is required for slug values.
-# @POST: Returns numeric dashboard ID or raises HTTPException.
-async def _resolve_dashboard_id_from_ref_async(
-    dashboard_ref: str,
-    config_manager,
-    env_id: Optional[str] = None,
-) -> int:
-    normalized_ref = str(dashboard_ref or "").strip()
-    if not normalized_ref:
-        raise HTTPException(status_code=400, detail="dashboard_ref is required")
-
-    if normalized_ref.isdigit():
-        return int(normalized_ref)
-
-    if not env_id:
-        raise HTTPException(
-            status_code=400,
-            detail="env_id is required for slug-based Git operations",
-        )
-
-    environments = config_manager.get_environments()
-    env = next((e for e in environments if e.id == env_id), None)
-    if not env:
-        raise HTTPException(status_code=404, detail="Environment not found")
-
-    client = AsyncSupersetClient(env)
-    try:
-        dashboard_id = await _find_dashboard_id_by_slug_async(client, normalized_ref)
-        if dashboard_id is None:
-            raise HTTPException(status_code=404, detail=f"Dashboard slug '{normalized_ref}' not found")
-        return dashboard_id
-    finally:
-        await client.aclose()
-# [/DEF:_resolve_dashboard_id_from_ref_async:Function]
-
-
 # [DEF:_resolve_repo_key_from_ref:Function]
 # @PURPOSE: Resolve repository folder key with slug-first strategy and deterministic fallback.
 # @PRE: dashboard_id is resolved and valid.
@@ -276,84 +207,6 @@ def _resolve_repo_key_from_ref(
     return f"dashboard-{dashboard_id}"
 # [/DEF:_resolve_repo_key_from_ref:Function]
 
 
-# [DEF:_sanitize_optional_identity_value:Function]
-# @PURPOSE: Normalize optional identity value into trimmed string or None.
-# @PRE: value may be None or blank.
-# @POST: Returns sanitized value suitable for git identity configuration.
-# @RETURN: Optional[str]
-def _sanitize_optional_identity_value(value: Optional[str]) -> Optional[str]:
-    normalized = str(value or "").strip()
-    if not normalized:
-        return None
-    return normalized
-# [/DEF:_sanitize_optional_identity_value:Function]
-
-
-# [DEF:_resolve_current_user_git_identity:Function]
-# @PURPOSE: Resolve configured Git username/email from current user's profile preferences.
-# @PRE: `db` may be stubbed in tests; `current_user` may be absent for direct handler invocations.
-# @POST: Returns tuple(username, email) only when both values are configured.
-# @RETURN: Optional[tuple[str, str]]
-def _resolve_current_user_git_identity(
-    db: Session,
-    current_user: Optional[User],
-) -> Optional[tuple[str, str]]:
-    if db is None or not hasattr(db, "query"):
-        return None
-
-    user_id = _sanitize_optional_identity_value(getattr(current_user, "id", None))
-    if not user_id:
-        return None
-
-    try:
-        preference = (
-            db.query(UserDashboardPreference)
-            .filter(UserDashboardPreference.user_id == user_id)
-            .first()
-        )
-    except Exception as resolve_error:
-        logger.warning(
-            "[_resolve_current_user_git_identity][Action] Failed to load profile preference for user %s: %s",
-            user_id,
-            resolve_error,
-        )
-        return None
-
-    if not preference:
-        return None
-
-    git_username = _sanitize_optional_identity_value(getattr(preference, "git_username", None))
-    git_email = _sanitize_optional_identity_value(getattr(preference, "git_email", None))
-    if not git_username or not git_email:
-        return None
-    return git_username, git_email
-# [/DEF:_resolve_current_user_git_identity:Function]
-
-
-# [DEF:_apply_git_identity_from_profile:Function]
-# @PURPOSE: Apply user-scoped Git identity to repository-local config before write/pull operations.
-# @PRE: dashboard_id is resolved; db/current_user may be missing in direct test invocation context.
-# @POST: git_service.configure_identity is called only when identity and method are available.
-# @RETURN: None
-def _apply_git_identity_from_profile(
-    dashboard_id: int,
-    db: Session,
-    current_user: Optional[User],
-) -> None:
-    identity = _resolve_current_user_git_identity(db, current_user)
-    if not identity:
-        return
-
-    configure_identity = getattr(git_service, "configure_identity", None)
-    if not callable(configure_identity):
-        return
-
-    git_username, git_email = identity
-    configure_identity(dashboard_id, git_username, git_email)
-# [/DEF:_apply_git_identity_from_profile:Function]
-
-
 # [DEF:get_git_configs:Function]
 # @PURPOSE: List all configured Git servers.
 # @PRE: Database session `db` is available.
```
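The removed helpers resolve a per-user Git identity from profile preferences and hand it to `git_service.configure_identity`. That service's implementation is not shown in this diff; a hedged sketch of what a repository-local identity setter could look like, using GitPython as an assumed backend:

```python
# Hedged sketch of a repo-local identity setter. The diff only shows
# git_service.configure_identity(...) being called; GitPython is an assumption.
from git import Repo  # pip install GitPython

def configure_identity(repo_path: str, username: str, email: str) -> None:
    repo = Repo(repo_path)
    # Repo-local config (.git/config) so identities don't leak across workspaces.
    with repo.config_writer() as cfg:
        cfg.set_value("user", "name", username)
        cfg.set_value("user", "email", email)
```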
```diff
@@ -362,16 +215,10 @@ def _apply_git_identity_from_profile(
 @router.get("/config", response_model=List[GitServerConfigSchema])
 async def get_git_configs(
     db: Session = Depends(get_db),
-    _ = Depends(has_permission("git_config", "READ"))
+    _ = Depends(has_permission("admin:settings", "READ"))
 ):
     with belief_scope("get_git_configs"):
-        configs = db.query(GitServerConfig).all()
-        result = []
-        for config in configs:
-            schema = GitServerConfigSchema.from_orm(config)
-            schema.pat = "********"
-            result.append(schema)
-        return result
+        return db.query(GitServerConfig).all()
 # [/DEF:get_git_configs:Function]
 
 # [DEF:create_git_config:Function]
@@ -387,48 +234,13 @@ async def create_git_config(
     _ = Depends(has_permission("admin:settings", "WRITE"))
 ):
     with belief_scope("create_git_config"):
-        config_dict = config.dict(exclude={"config_id"})
-        db_config = GitServerConfig(**config_dict)
+        db_config = GitServerConfig(**config.dict())
         db.add(db_config)
         db.commit()
         db.refresh(db_config)
         return db_config
 # [/DEF:create_git_config:Function]
 
-# [DEF:update_git_config:Function]
-# @PURPOSE: Update an existing Git server configuration.
-# @PRE: `config_id` corresponds to an existing configuration.
-# @POST: The configuration record is updated in the database.
-# @PARAM: config_id (str)
-# @PARAM: config_update (GitServerConfigUpdate)
-# @RETURN: GitServerConfigSchema
-@router.put("/config/{config_id}", response_model=GitServerConfigSchema)
-async def update_git_config(
-    config_id: str,
-    config_update: GitServerConfigUpdate,
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("admin:settings", "WRITE"))
-):
-    with belief_scope("update_git_config"):
-        db_config = db.query(GitServerConfig).filter(GitServerConfig.id == config_id).first()
-        if not db_config:
-            raise HTTPException(status_code=404, detail="Configuration not found")
-
-        update_data = config_update.dict(exclude_unset=True)
-        if update_data.get("pat") == "********":
-            update_data.pop("pat")
-
-        for key, value in update_data.items():
-            setattr(db_config, key, value)
-
-        db.commit()
-        db.refresh(db_config)
-
-        result_schema = GitServerConfigSchema.from_orm(db_config)
-        result_schema.pat = "********"
-        return result_schema
-# [/DEF:update_git_config:Function]
-
 # [DEF:delete_git_config:Function]
 # @PURPOSE: Remove a Git server configuration.
 # @PRE: `config_id` corresponds to an existing configuration.
@@ -458,22 +270,10 @@ async def delete_git_config(
 @router.post("/config/test")
 async def test_git_config(
     config: GitServerConfigCreate,
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("git_config", "READ"))
+    _ = Depends(has_permission("admin:settings", "READ"))
 ):
     with belief_scope("test_git_config"):
-        pat_to_use = config.pat
-        if pat_to_use == "********":
-            if config.config_id:
-                db_config = db.query(GitServerConfig).filter(GitServerConfig.id == config.config_id).first()
-                if db_config:
-                    pat_to_use = db_config.pat
-            else:
-                db_config = db.query(GitServerConfig).filter(GitServerConfig.url == config.url).first()
-                if db_config:
-                    pat_to_use = db_config.pat
-
-        success = await git_service.test_connection(config.provider, config.url, pat_to_use)
+        success = await git_service.test_connection(config.provider, config.url, config.pat)
         if success:
             return {"status": "success", "message": "Connection successful"}
         else:
```
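The removed read/update/test handlers never echo a stored PAT: reads overwrite the field with `"********"`, and writes or tests treat that sentinel as "reuse the stored secret". A minimal sketch of the round trip:

```python
# Minimal sketch of the mask-on-read / ignore-on-write round trip above.
MASK = "********"

def mask_for_response(record: dict) -> dict:
    return {**record, "pat": MASK}                 # never echo the stored secret

def apply_update(stored: dict, update: dict) -> dict:
    if update.get("pat") == MASK:
        update = {k: v for k, v in update.items() if k != "pat"}  # keep stored PAT
    return {**stored, **update}

stored = {"url": "https://git.example", "pat": "s3cr3t"}
assert mask_for_response(stored)["pat"] == MASK
assert apply_update(stored, {"pat": MASK, "url": "https://git2.example"})["pat"] == "s3cr3t"
```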
```diff
@@ -489,7 +289,7 @@ async def test_git_config(
 async def list_gitea_repositories(
     config_id: str,
     db: Session = Depends(get_db),
-    _ = Depends(has_permission("git_config", "READ"))
+    _ = Depends(has_permission("admin:settings", "READ"))
 ):
     with belief_scope("list_gitea_repositories"):
         config = _get_git_config_or_404(db, config_id)
@@ -658,7 +458,7 @@ async def init_repository(
         try:
             # 2. Perform Git clone/init
             logger.info(f"[init_repository][Action] Initializing repo for dashboard {dashboard_id}")
-            git_service.init_repo(dashboard_id, init_data.remote_url, config.pat, repo_key=repo_key, default_branch=config.default_branch)
+            git_service.init_repo(dashboard_id, init_data.remote_url, config.pat, repo_key=repo_key)
 
             # 3. Save to DB
             repo_path = git_service._get_repo_path(dashboard_id, repo_key=repo_key)
@@ -668,15 +468,13 @@ async def init_repository(
                     dashboard_id=dashboard_id,
                     config_id=config.id,
                     remote_url=init_data.remote_url,
-                    local_path=repo_path,
-                    current_branch="dev",
+                    local_path=repo_path
                 )
                 db.add(db_repo)
             else:
                 db_repo.config_id = config.id
                 db_repo.remote_url = init_data.remote_url
                 db_repo.local_path = repo_path
-                db_repo.current_branch = "dev"
 
             db.commit()
             logger.info(f"[init_repository][Coherence:OK] Repository initialized for dashboard {dashboard_id}")
@@ -689,64 +487,6 @@ async def init_repository(
         _handle_unexpected_git_route_error("init_repository", e)
 # [/DEF:init_repository:Function]
 
-# [DEF:get_repository_binding:Function]
-# @PURPOSE: Return repository binding with provider metadata for selected dashboard.
-# @PRE: `dashboard_ref` resolves to a valid dashboard and repository is initialized.
-# @POST: Returns dashboard repository binding and linked provider.
-# @PARAM: dashboard_ref (str)
-# @RETURN: RepositoryBindingSchema
-@router.get("/repositories/{dashboard_ref}", response_model=RepositoryBindingSchema)
-async def get_repository_binding(
-    dashboard_ref: str,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("get_repository_binding"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            db_repo = db.query(GitRepository).filter(GitRepository.dashboard_id == dashboard_id).first()
-            if not db_repo:
-                raise HTTPException(status_code=404, detail="Repository not initialized")
-            config = _get_git_config_or_404(db, db_repo.config_id)
-            return RepositoryBindingSchema(
-                dashboard_id=db_repo.dashboard_id,
-                config_id=db_repo.config_id,
-                provider=config.provider,
-                remote_url=db_repo.remote_url,
-                local_path=db_repo.local_path,
-            )
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("get_repository_binding", e)
-# [/DEF:get_repository_binding:Function]
-
-# [DEF:delete_repository:Function]
-# @PURPOSE: Delete local repository workspace and DB binding for selected dashboard.
-# @PRE: `dashboard_ref` resolves to a valid dashboard.
-# @POST: Repository files and binding record are removed when present.
-# @PARAM: dashboard_ref (str)
-# @RETURN: dict
-@router.delete("/repositories/{dashboard_ref}")
-async def delete_repository(
-    dashboard_ref: str,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("delete_repository"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            git_service.delete_repo(dashboard_id)
-            return {"status": "success"}
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("delete_repository", e)
-# [/DEF:delete_repository:Function]
-
 # [DEF:get_branches:Function]
 # @PURPOSE: List all branches for a dashboard's repository.
 # @PRE: Repository for `dashboard_ref` is initialized.
@@ -782,14 +522,11 @@ async def create_branch(
     branch_data: BranchCreate,
     env_id: Optional[str] = None,
     config_manager=Depends(get_config_manager),
-    db: Session = Depends(get_db),
-    current_user: User = Depends(get_current_user),
     _ = Depends(has_permission("plugin:git", "EXECUTE"))
 ):
     with belief_scope("create_branch"):
         try:
             dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            _apply_git_identity_from_profile(dashboard_id, db, current_user)
             git_service.create_branch(dashboard_id, branch_data.name, branch_data.from_branch)
             return {"status": "success"}
         except HTTPException:
@@ -835,14 +572,11 @@ async def commit_changes(
     commit_data: CommitCreate,
     env_id: Optional[str] = None,
     config_manager=Depends(get_config_manager),
-    db: Session = Depends(get_db),
-    current_user: User = Depends(get_current_user),
     _ = Depends(has_permission("plugin:git", "EXECUTE"))
 ):
     with belief_scope("commit_changes"):
         try:
             dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            _apply_git_identity_from_profile(dashboard_id, db, current_user)
             git_service.commit_changes(dashboard_id, commit_data.message, commit_data.files)
             return {"status": "success"}
         except HTTPException:
@@ -884,35 +618,11 @@ async def pull_changes(
     dashboard_ref: str,
     env_id: Optional[str] = None,
     config_manager=Depends(get_config_manager),
-    db: Session = Depends(get_db),
-    current_user: User = Depends(get_current_user),
     _ = Depends(has_permission("plugin:git", "EXECUTE"))
 ):
     with belief_scope("pull_changes"):
         try:
             dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            db_repo = db.query(GitRepository).filter(GitRepository.dashboard_id == dashboard_id).first()
-            config_url = None
-            config_provider = None
-            if db_repo:
-                config_row = db.query(GitServerConfig).filter(GitServerConfig.id == db_repo.config_id).first()
-                if config_row:
-                    config_url = config_row.url
-                    config_provider = config_row.provider
-            logger.info(
-                "[pull_changes][Action] Route diagnostics dashboard_ref=%s env_id=%s resolved_dashboard_id=%s "
-                "binding_exists=%s binding_local_path=%s binding_remote_url=%s binding_config_id=%s config_provider=%s config_url=%s",
-                dashboard_ref,
-                env_id,
-                dashboard_id,
-                bool(db_repo),
-                (db_repo.local_path if db_repo else None),
-                (db_repo.remote_url if db_repo else None),
-                (db_repo.config_id if db_repo else None),
-                config_provider,
-                config_url,
-            )
-            _apply_git_identity_from_profile(dashboard_id, db, current_user)
             git_service.pull_changes(dashboard_id)
             return {"status": "success"}
         except HTTPException:
@@ -921,122 +631,6 @@ async def pull_changes(
         _handle_unexpected_git_route_error("pull_changes", e)
 # [/DEF:pull_changes:Function]
 
-# [DEF:get_merge_status:Function]
-# @PURPOSE: Return unfinished-merge status for repository (web-only recovery support).
-# @PRE: `dashboard_ref` resolves to a valid dashboard repository.
-# @POST: Returns merge status payload.
-@router.get("/repositories/{dashboard_ref}/merge/status", response_model=MergeStatusSchema)
-async def get_merge_status(
-    dashboard_ref: str,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("get_merge_status"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            return git_service.get_merge_status(dashboard_id)
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("get_merge_status", e)
-# [/DEF:get_merge_status:Function]
-
-
-# [DEF:get_merge_conflicts:Function]
-# @PURPOSE: Return conflicted files with mine/theirs previews for web conflict resolver.
-# @PRE: `dashboard_ref` resolves to a valid dashboard repository.
-# @POST: Returns conflict file list.
-@router.get("/repositories/{dashboard_ref}/merge/conflicts", response_model=List[MergeConflictFileSchema])
-async def get_merge_conflicts(
-    dashboard_ref: str,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("get_merge_conflicts"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            return git_service.get_merge_conflicts(dashboard_id)
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("get_merge_conflicts", e)
-# [/DEF:get_merge_conflicts:Function]
-
-
-# [DEF:resolve_merge_conflicts:Function]
-# @PURPOSE: Apply mine/theirs/manual conflict resolutions from WebUI and stage files.
-# @PRE: `dashboard_ref` resolves; request contains at least one resolution item.
-# @POST: Resolved files are staged in index.
-@router.post("/repositories/{dashboard_ref}/merge/resolve")
-async def resolve_merge_conflicts(
-    dashboard_ref: str,
-    resolve_data: MergeResolveRequest,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("resolve_merge_conflicts"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            resolved_files = git_service.resolve_merge_conflicts(
-                dashboard_id,
-                [item.dict() for item in resolve_data.resolutions],
-            )
-            return {"status": "success", "resolved_files": resolved_files}
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("resolve_merge_conflicts", e)
-# [/DEF:resolve_merge_conflicts:Function]
-
-
-# [DEF:abort_merge:Function]
-# @PURPOSE: Abort unfinished merge from WebUI flow.
-# @PRE: `dashboard_ref` resolves to repository.
-# @POST: Merge operation is aborted or reports no active merge.
-@router.post("/repositories/{dashboard_ref}/merge/abort")
-async def abort_merge(
-    dashboard_ref: str,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("abort_merge"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            return git_service.abort_merge(dashboard_id)
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("abort_merge", e)
-# [/DEF:abort_merge:Function]
-
-
-# [DEF:continue_merge:Function]
-# @PURPOSE: Finalize unfinished merge from WebUI flow.
-# @PRE: All conflicts are resolved and staged.
-# @POST: Merge commit is created.
-@router.post("/repositories/{dashboard_ref}/merge/continue")
-async def continue_merge(
-    dashboard_ref: str,
-    continue_data: MergeContinueRequest,
-    env_id: Optional[str] = None,
-    config_manager=Depends(get_config_manager),
-    _ = Depends(has_permission("plugin:git", "EXECUTE"))
-):
-    with belief_scope("continue_merge"):
-        try:
-            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
-            return git_service.continue_merge(dashboard_id, continue_data.message)
-        except HTTPException:
-            raise
-        except Exception as e:
-            _handle_unexpected_git_route_error("continue_merge", e)
-# [/DEF:continue_merge:Function]
-
-
 # [DEF:sync_dashboard:Function]
 # @PURPOSE: Sync dashboard state from Superset to Git using the GitPlugin.
 # @PRE: `dashboard_ref` is valid; GitPlugin is available.
```
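The removed `/merge/resolve` endpoint applies per-file mine/theirs/manual choices and stages the result. `git_service.resolve_merge_conflicts` itself is not shown in this diff; with plain git the same effect maps to `checkout --ours/--theirs` plus `add`, as in this hedged sketch:

```python
# Hedged sketch of a mine/theirs/manual resolver; the project's
# git_service.resolve_merge_conflicts internals are not shown in this diff.
import subprocess
from pathlib import Path

def resolve_conflict(repo: str, file_path: str, resolution: str, content: str | None = None) -> None:
    if resolution == "mine":
        # During a merge, --ours is the current branch's side of the conflict.
        subprocess.run(["git", "-C", repo, "checkout", "--ours", file_path], check=True)
    elif resolution == "theirs":
        subprocess.run(["git", "-C", repo, "checkout", "--theirs", file_path], check=True)
    elif resolution == "manual":
        Path(repo, file_path).write_text(content or "")
    subprocess.run(["git", "-C", repo, "add", file_path], check=True)  # stage the choice
```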
```diff
@@ -1079,7 +673,6 @@ async def promote_dashboard(
     env_id: Optional[str] = None,
     config_manager=Depends(get_config_manager),
     db: Session = Depends(get_db),
-    current_user: User = Depends(get_current_user),
     _ = Depends(has_permission("plugin:git", "EXECUTE"))
 ):
     with belief_scope("promote_dashboard"):
@@ -1108,7 +701,6 @@ async def promote_dashboard(
                 to_branch,
                 reason,
             )
-            _apply_git_identity_from_profile(dashboard_id, db, current_user)
             result = git_service.promote_direct_merge(
                 dashboard_id=dashboard_id,
                 from_branch=from_branch,
@@ -1262,7 +854,7 @@ async def get_repository_status(
 ):
     with belief_scope("get_repository_status"):
         try:
-            dashboard_id = await _resolve_dashboard_id_from_ref_async(dashboard_ref, config_manager, env_id)
+            dashboard_id = _resolve_dashboard_id_from_ref(dashboard_ref, config_manager, env_id)
             return _resolve_repository_status(dashboard_id)
         except HTTPException:
             raise
```
```diff
@@ -21,27 +21,14 @@ class GitServerConfigBase(BaseModel):
     provider: GitProvider = Field(..., description="Git provider (GITHUB, GITLAB, GITEA)")
     url: str = Field(..., description="Server base URL")
     pat: str = Field(..., description="Personal Access Token")
-    pat: str = Field(..., description="Personal Access Token")
     default_repository: Optional[str] = Field(None, description="Default repository path (org/repo)")
-    default_branch: Optional[str] = Field("main", description="Default branch logic/name")
 # [/DEF:GitServerConfigBase:Class]
 
-# [DEF:GitServerConfigUpdate:Class]
-# @PURPOSE: Schema for updating an existing Git server configuration.
-class GitServerConfigUpdate(BaseModel):
-    name: Optional[str] = Field(None, description="Display name for the Git server")
-    provider: Optional[GitProvider] = Field(None, description="Git provider (GITHUB, GITLAB, GITEA)")
-    url: Optional[str] = Field(None, description="Server base URL")
-    pat: Optional[str] = Field(None, description="Personal Access Token")
-    default_repository: Optional[str] = Field(None, description="Default repository path (org/repo)")
-    default_branch: Optional[str] = Field(None, description="Default branch logic/name")
-# [/DEF:GitServerConfigUpdate:Class]
-
 # [DEF:GitServerConfigCreate:Class]
 # @PURPOSE: Schema for creating a new Git server configuration.
 class GitServerConfigCreate(GitServerConfigBase):
     """Schema for creating a new Git server configuration."""
-    config_id: Optional[str] = Field(None, description="Optional config ID, useful for testing an existing config without sending its full PAT")
+    pass
 # [/DEF:GitServerConfigCreate:Class]
 
 # [DEF:GitServerConfigSchema:Class]
```
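The removed `GitServerConfigUpdate` made every field optional so that `dict(exclude_unset=True)` in the update route only touches fields the client actually sent. A sketch of that partial-update pattern (Pydantic v1 style, matching the `.dict()` calls in this diff):

```python
# Sketch of the partial-update pattern behind the removed GitServerConfigUpdate.
from typing import Optional
from pydantic import BaseModel, Field

class ServerConfigUpdate(BaseModel):
    name: Optional[str] = Field(None)
    url: Optional[str] = Field(None)
    pat: Optional[str] = Field(None)

update = ServerConfigUpdate(url="https://git.example")
# Only fields the client set are applied; unset ones stay untouched.
assert update.dict(exclude_unset=True) == {"url": "https://git.example"}
```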
```diff
@@ -126,42 +113,6 @@ class ConflictResolution(BaseModel):
     content: Optional[str] = None
 # [/DEF:ConflictResolution:Class]
 
-
-# [DEF:MergeStatusSchema:Class]
-# @PURPOSE: Schema representing unfinished merge status for repository.
-class MergeStatusSchema(BaseModel):
-    has_unfinished_merge: bool
-    repository_path: str
-    git_dir: str
-    current_branch: str
-    merge_head: Optional[str] = None
-    merge_message_preview: Optional[str] = None
-    conflicts_count: int = 0
-# [/DEF:MergeStatusSchema:Class]
-
-
-# [DEF:MergeConflictFileSchema:Class]
-# @PURPOSE: Schema describing one conflicted file with optional side snapshots.
-class MergeConflictFileSchema(BaseModel):
-    file_path: str
-    mine: Optional[str] = None
-    theirs: Optional[str] = None
-# [/DEF:MergeConflictFileSchema:Class]
-
-
-# [DEF:MergeResolveRequest:Class]
-# @PURPOSE: Request schema for resolving one or multiple merge conflicts.
-class MergeResolveRequest(BaseModel):
-    resolutions: List[ConflictResolution] = Field(default_factory=list)
-# [/DEF:MergeResolveRequest:Class]
-
-
-# [DEF:MergeContinueRequest:Class]
-# @PURPOSE: Request schema for finishing merge with optional explicit commit message.
-class MergeContinueRequest(BaseModel):
-    message: Optional[str] = None
-# [/DEF:MergeContinueRequest:Class]
-
 # [DEF:DeploymentEnvironmentSchema:Class]
 # @PURPOSE: Schema for representing a target deployment environment.
 class DeploymentEnvironmentSchema(BaseModel):
@@ -190,17 +141,6 @@ class RepoInitRequest(BaseModel):
     remote_url: str
 # [/DEF:RepoInitRequest:Class]
 
-
-# [DEF:RepositoryBindingSchema:Class]
-# @PURPOSE: Schema describing repository-to-config binding and provider metadata.
-class RepositoryBindingSchema(BaseModel):
-    dashboard_id: int
-    config_id: str
-    provider: GitProvider
-    remote_url: str
-    local_path: str
-# [/DEF:RepositoryBindingSchema:Class]
-
 # [DEF:RepoStatusBatchRequest:Class]
 # @PURPOSE: Schema for requesting repository statuses for multiple dashboards in a single call.
 class RepoStatusBatchRequest(BaseModel):
```
|
|||||||
@@ -1,31 +0,0 @@
-# [DEF:health_router:Module]
-# @TIER: STANDARD
-# @SEMANTICS: health, monitoring, dashboards
-# @PURPOSE: API endpoints for dashboard health monitoring and status aggregation.
-# @LAYER: UI/API
-# @RELATION: DEPENDS_ON -> health_service
-
-from fastapi import APIRouter, Depends, Query
-from typing import List, Optional
-from sqlalchemy.orm import Session
-from ...core.database import get_db
-from ...services.health_service import HealthService
-from ...schemas.health import HealthSummaryResponse
-from ...dependencies import has_permission
-
-router = APIRouter(prefix="/api/health", tags=["Health"])
-
-@router.get("/summary", response_model=HealthSummaryResponse)
-async def get_health_summary(
-    environment_id: Optional[str] = Query(None),
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("plugin:migration", "READ"))
-):
-    """
-    @PURPOSE: Get aggregated health status for all dashboards.
-    @POST: Returns HealthSummaryResponse
-    """
-    service = HealthService(db)
-    return await service.get_health_summary(environment_id=environment_id)
-
-# [/DEF:health_router:Module]
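Before its deletion, the summary route could be exercised with a plain HTTP call. A minimal sketch, assuming a local backend and an illustrative bearer token; the path and query parameter come from the removed route:

import httpx

resp = httpx.get(
    "http://localhost:8000/api/health/summary",
    params={"environment_id": "prod"},            # optional; omitted means all environments
    headers={"Authorization": "Bearer <token>"},  # hypothetical auth
)
resp.raise_for_status()
summary = resp.json()                             # shape follows HealthSummaryResponse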
@@ -1,23 +1,10 @@
 # [DEF:backend.src.api.routes.migration:Module]
-# @TIER: CRITICAL
-# @SEMANTICS: api, migration, dashboards, sync, dry-run
-# @PURPOSE: HTTP contract layer for migration orchestration, settings, dry-run, and mapping sync endpoints.
-# @LAYER: Infra
-# @RELATION: [DEPENDS_ON] ->[backend.src.dependencies]
-# @RELATION: [DEPENDS_ON] ->[backend.src.core.database]
-# @RELATION: [DEPENDS_ON] ->[backend.src.core.superset_client]
-# @RELATION: [DEPENDS_ON] ->[backend.src.core.migration.dry_run_orchestrator]
-# @RELATION: [DEPENDS_ON] ->[backend.src.core.mapping_service]
-# @RELATION: [DEPENDS_ON] ->[backend.src.models.dashboard]
-# @RELATION: [DEPENDS_ON] ->[backend.src.models.mapping]
-# @INVARIANT: Migration endpoints never execute with invalid environment references and always return explicit HTTP errors on guard failures.
-# @TEST_CONTRACT: [DashboardSelection + configured envs] -> [task_id | dry-run result | sync summary]
-# @TEST_SCENARIO: [invalid_environment] -> [HTTP_400_or_404]
-# @TEST_SCENARIO: [valid_execution] -> [success_payload_with_required_fields]
-# @TEST_EDGE: [missing_field] ->[HTTP_400]
-# @TEST_EDGE: [invalid_type] ->[validation_error]
-# @TEST_EDGE: [external_fail] ->[HTTP_500]
-# @TEST_INVARIANT: [EnvironmentValidationBeforeAction] -> VERIFIED_BY: [invalid_environment, valid_execution]
+# @TIER: STANDARD
+# @SEMANTICS: api, migration, dashboards
+# @PURPOSE: API endpoints for migration operations.
+# @LAYER: API
+# @RELATION: DEPENDS_ON -> backend.src.dependencies
+# @RELATION: DEPENDS_ON -> backend.src.models.dashboard
 
 from fastapi import APIRouter, Depends, HTTPException, Query
 from typing import List, Dict, Any, Optional
@@ -26,7 +13,7 @@ from ...dependencies import get_config_manager, get_task_manager, has_permission
 from ...core.database import get_db
 from ...models.dashboard import DashboardMetadata, DashboardSelection
 from ...core.superset_client import SupersetClient
-from ...core.logger import logger, belief_scope
+from ...core.logger import belief_scope
 from ...core.migration.dry_run_orchestrator import MigrationDryRunService
 from ...core.mapping_service import IdMappingService
 from ...models.mapping import ResourceMapping
@@ -34,11 +21,11 @@ from ...models.mapping import ResourceMapping
 router = APIRouter(prefix="/api", tags=["migration"])
 
 # [DEF:get_dashboards:Function]
-# @PURPOSE: Fetch dashboard metadata from a requested environment for migration selection UI.
-# @PRE: env_id is provided and exists in configured environments.
-# @POST: Returns List[DashboardMetadata] for the resolved environment; emits HTTP_404 when environment is absent.
-# @SIDE_EFFECT: Reads environment configuration and performs remote Superset metadata retrieval over network.
-# @DATA_CONTRACT: Input[str env_id] -> Output[List[DashboardMetadata]]
+# @PURPOSE: Fetch all dashboards from the specified environment for the grid.
+# @PRE: Environment ID must be valid.
+# @POST: Returns a list of dashboard metadata.
+# @PARAM: env_id (str) - The ID of the environment to fetch from.
+# @RETURN: List[DashboardMetadata]
 @router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
 async def get_dashboards(
     env_id: str,
@@ -46,26 +33,22 @@ async def get_dashboards(
     _ = Depends(has_permission("plugin:migration", "EXECUTE"))
 ):
     with belief_scope("get_dashboards", f"env_id={env_id}"):
-        logger.reason(f"Fetching dashboards for environment: {env_id}")
         environments = config_manager.get_environments()
         env = next((e for e in environments if e.id == env_id), None)
         if not env:
-            logger.explore(f"Environment {env_id} not found in configuration")
             raise HTTPException(status_code=404, detail="Environment not found")
 
         client = SupersetClient(env)
         dashboards = client.get_dashboards_summary()
-        logger.reflect(f"Retrieved {len(dashboards)} dashboards from {env_id}")
         return dashboards
 # [/DEF:get_dashboards:Function]
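A quick way to exercise the route above; a sketch assuming a local backend and a configured environment id "prod" (auth headers omitted, though the permission dependency would normally require them):

import httpx

resp = httpx.get("http://localhost:8000/api/environments/prod/dashboards")
if resp.status_code == 404:
    print("Environment not found")   # the guard above raises 404 for unknown env ids
else:
    dashboards = resp.json()         # serialized List[DashboardMetadata]
    print(f"{len(dashboards)} dashboards available")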
 
 # [DEF:execute_migration:Function]
-# @PURPOSE: Validate migration selection and enqueue asynchronous migration task execution.
-# @PRE: DashboardSelection payload is valid and both source/target environments exist.
-# @POST: Returns {"task_id": str, "message": str} when task creation succeeds; emits HTTP_400/HTTP_500 on failure.
-# @SIDE_EFFECT: Reads configuration, writes task record through task manager, and writes operational logs.
-# @DATA_CONTRACT: Input[DashboardSelection] -> Output[Dict[str, str]]
+# @PURPOSE: Execute the migration of selected dashboards.
+# @PRE: Selection must be valid and environments must exist.
+# @POST: Starts the migration task and returns the task ID.
+# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
+# @RETURN: Dict - {"task_id": str, "message": str}
 @router.post("/migration/execute")
 async def execute_migration(
     selection: DashboardSelection,
@@ -74,39 +57,38 @@ async def execute_migration(
     _ = Depends(has_permission("plugin:migration", "EXECUTE"))
 ):
     with belief_scope("execute_migration"):
-        logger.reason(f"Initiating migration from {selection.source_env_id} to {selection.target_env_id}")
-
         # Validate environments exist
         environments = config_manager.get_environments()
         env_ids = {e.id for e in environments}
         if selection.source_env_id not in env_ids or selection.target_env_id not in env_ids:
-            logger.explore("Invalid environment selection", extra={"source": selection.source_env_id, "target": selection.target_env_id})
             raise HTTPException(status_code=400, detail="Invalid source or target environment")
 
+        # Create migration task with debug logging
+        from ...core.logger import logger
+
         # Include replace_db_config and fix_cross_filters in the task parameters
         task_params = selection.dict()
         task_params['replace_db_config'] = selection.replace_db_config
         task_params['fix_cross_filters'] = selection.fix_cross_filters
 
-        logger.reason(f"Creating migration task with {len(selection.selected_ids)} dashboards")
+        logger.info(f"Creating migration task with params: {task_params}")
+        logger.info(f"Available environments: {env_ids}")
+        logger.info(f"Source env: {selection.source_env_id}, Target env: {selection.target_env_id}")
 
         try:
             task = await task_manager.create_task("superset-migration", task_params)
-            logger.reflect(f"Migration task created: {task.id}")
+            logger.info(f"Task created successfully: {task.id}")
             return {"task_id": task.id, "message": "Migration initiated"}
         except Exception as e:
-            logger.explore(f"Task creation failed: {e}")
+            logger.error(f"Task creation failed: {e}")
             raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
 # [/DEF:execute_migration:Function]
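The request body is a DashboardSelection. A minimal sketch of triggering a run; field names follow the handler above, values are illustrative and the model may carry more fields than shown here:

import httpx

payload = {
    "source_env_id": "dev",        # must be a configured environment id
    "target_env_id": "prod",       # must also be configured
    "selected_ids": [12, 34],      # hypothetical dashboard ids
    "replace_db_config": True,
    "fix_cross_filters": False,
}
resp = httpx.post("http://localhost:8000/api/migration/execute", json=payload)
resp.raise_for_status()
print(resp.json())                 # {"task_id": "...", "message": "Migration initiated"}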
 
 
 # [DEF:dry_run_migration:Function]
-# @PURPOSE: Build pre-flight migration diff and risk summary without mutating target systems.
-# @PRE: DashboardSelection is valid, source and target environments exist, differ, and selected_ids is non-empty.
-# @POST: Returns deterministic dry-run payload; emits HTTP_400 for guard violations and HTTP_500 for orchestrator value errors.
-# @SIDE_EFFECT: Reads local mappings from DB and fetches source/target metadata via Superset API.
-# @DATA_CONTRACT: Input[DashboardSelection] -> Output[Dict[str, Any]]
+# @PURPOSE: Build pre-flight diff and risk summary without applying migration.
+# @PRE: Selection and environments are valid.
+# @POST: Returns deterministic JSON diff and risk scoring.
 @router.post("/migration/dry-run", response_model=Dict[str, Any])
 async def dry_run_migration(
     selection: DashboardSelection,
@@ -115,49 +97,33 @@ async def dry_run_migration(
     _ = Depends(has_permission("plugin:migration", "EXECUTE"))
 ):
     with belief_scope("dry_run_migration"):
-        logger.reason(f"Starting dry run: {selection.source_env_id} -> {selection.target_env_id}")
-
         environments = config_manager.get_environments()
         env_map = {env.id: env for env in environments}
         source_env = env_map.get(selection.source_env_id)
         target_env = env_map.get(selection.target_env_id)
         if not source_env or not target_env:
-            logger.explore("Invalid environment selection for dry run")
             raise HTTPException(status_code=400, detail="Invalid source or target environment")
-
         if selection.source_env_id == selection.target_env_id:
-            logger.explore("Source and target environments are identical")
             raise HTTPException(status_code=400, detail="Source and target environments must be different")
-
         if not selection.selected_ids:
-            logger.explore("No dashboards selected for dry run")
             raise HTTPException(status_code=400, detail="No dashboards selected for dry run")
 
         service = MigrationDryRunService()
         source_client = SupersetClient(source_env)
         target_client = SupersetClient(target_env)
         try:
-            result = service.run(
+            return service.run(
                 selection=selection,
                 source_client=source_client,
                 target_client=target_client,
                 db=db,
             )
-            logger.reflect("Dry run analysis complete")
-            return result
         except ValueError as exc:
-            logger.explore(f"Dry run orchestrator failed: {exc}")
             raise HTTPException(status_code=500, detail=str(exc)) from exc
 # [/DEF:dry_run_migration:Function]
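The three guards above pin down easily with FastAPI's TestClient. A sketch, assuming the app object's import path and that the permission dependency is overridden for tests (both are assumptions):

from fastapi.testclient import TestClient
from backend.src.main import app   # hypothetical app location

client = TestClient(app)

# Identical source and target -> 400 "must be different"
resp = client.post("/api/migration/dry-run", json={
    "source_env_id": "dev", "target_env_id": "dev", "selected_ids": [1],
})
assert resp.status_code == 400

# Empty selection -> 400 "No dashboards selected for dry run"
resp = client.post("/api/migration/dry-run", json={
    "source_env_id": "dev", "target_env_id": "prod", "selected_ids": [],
})
assert resp.status_code == 400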
 
 # [DEF:get_migration_settings:Function]
-# @PURPOSE: Read and return configured migration synchronization cron expression.
-# @PRE: Configuration store is available and requester has READ permission.
-# @POST: Returns {"cron": str} reflecting current persisted settings value.
-# @SIDE_EFFECT: Reads configuration from config manager.
-# @DATA_CONTRACT: Input[None] -> Output[Dict[str, str]]
+# @PURPOSE: Get current migration Cron string explicitly.
 @router.get("/migration/settings", response_model=Dict[str, str])
 async def get_migration_settings(
     config_manager=Depends(get_config_manager),
@@ -170,11 +136,7 @@ async def get_migration_settings(
 # [/DEF:get_migration_settings:Function]
 
 # [DEF:update_migration_settings:Function]
-# @PURPOSE: Validate and persist migration synchronization cron expression update.
-# @PRE: Payload includes "cron" key and requester has WRITE permission.
-# @POST: Returns {"cron": str, "status": "updated"} and persists updated cron value.
-# @SIDE_EFFECT: Mutates configuration and writes persisted config through config manager.
-# @DATA_CONTRACT: Input[Dict[str, str]] -> Output[Dict[str, str]]
+# @PURPOSE: Update migration Cron string.
 @router.put("/migration/settings", response_model=Dict[str, str])
 async def update_migration_settings(
     payload: Dict[str, str],
@@ -195,11 +157,7 @@ async def update_migration_settings(
 # [/DEF:update_migration_settings:Function]
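Per the old @PRE annotation the payload carries a "cron" key; a sketch of an update call (host and cron value are illustrative):

import httpx

resp = httpx.put(
    "http://localhost:8000/api/migration/settings",
    json={"cron": "0 3 * * *"},     # e.g. nightly at 03:00; any valid cron string
)
print(resp.json())                  # e.g. {"cron": "0 3 * * *", "status": "updated"}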
 
 # [DEF:get_resource_mappings:Function]
-# @PURPOSE: Fetch synchronized resource mappings with optional filters and pagination for migration mappings view.
-# @PRE: skip>=0, 1<=limit<=500, DB session is active, requester has READ permission.
-# @POST: Returns {"items": [...], "total": int} where items reflect applied filters and pagination.
-# @SIDE_EFFECT: Executes database read queries against ResourceMapping table.
-# @DATA_CONTRACT: Input[QueryParams] -> Output[Dict[str, Any]]
+# @PURPOSE: Fetch synchronized object mappings with search, filtering, and pagination.
 @router.get("/migration/mappings-data", response_model=Dict[str, Any])
 async def get_resource_mappings(
     skip: int = Query(0, ge=0),
@@ -245,11 +203,9 @@ async def get_resource_mappings(
 # [/DEF:get_resource_mappings:Function]
 
 # [DEF:trigger_sync_now:Function]
-# @PURPOSE: Trigger immediate ID synchronization for every configured environment.
-# @PRE: At least one environment is configured and requester has EXECUTE permission.
-# @POST: Returns sync summary with synced/failed counts after attempting all environments.
-# @SIDE_EFFECT: Upserts Environment rows, commits DB transaction, performs network sync calls, and writes logs.
-# @DATA_CONTRACT: Input[None] -> Output[Dict[str, Any]]
+# @PURPOSE: Triggers an immediate ID synchronization for all environments.
+# @PRE: At least one environment must be configured.
+# @POST: Environment rows are ensured in DB; sync_environment is called for each.
 @router.post("/migration/sync-now", response_model=Dict[str, Any])
 async def trigger_sync_now(
     config_manager=Depends(get_config_manager),
@@ -1,147 +0,0 @@
-# [DEF:backend.src.api.routes.profile:Module]
-#
-# @TIER: CRITICAL
-# @SEMANTICS: api, profile, preferences, self-service, account-lookup
-# @PURPOSE: Exposes self-scoped profile preference endpoints and environment-based Superset account lookup.
-# @LAYER: API
-# @RELATION: DEPENDS_ON -> backend.src.services.profile_service
-# @RELATION: DEPENDS_ON -> backend.src.dependencies.get_current_user
-# @RELATION: DEPENDS_ON -> backend.src.core.database.get_db
-#
-# @INVARIANT: Endpoints are self-scoped and never mutate another user preference.
-# @UX_STATE: ProfileLoad -> Returns stable ProfilePreferenceResponse for authenticated user.
-# @UX_STATE: Saving -> Validation errors map to actionable 422 details.
-# @UX_STATE: LookupLoading -> Returns success/degraded Superset lookup payload.
-# @UX_FEEDBACK: Stable status/message/warning payloads support profile page feedback.
-# @UX_RECOVERY: Lookup degradation keeps manual username save path available.
-
-# [SECTION: IMPORTS]
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException, Query
-from sqlalchemy.orm import Session
-
-from ...core.database import get_db
-from ...core.logger import logger, belief_scope
-from ...dependencies import (
-    get_config_manager,
-    get_current_user,
-    get_plugin_loader,
-)
-from ...models.auth import User
-from ...schemas.profile import (
-    ProfilePreferenceResponse,
-    ProfilePreferenceUpdateRequest,
-    SupersetAccountLookupRequest,
-    SupersetAccountLookupResponse,
-)
-from ...services.profile_service import (
-    EnvironmentNotFoundError,
-    ProfileAuthorizationError,
-    ProfileService,
-    ProfileValidationError,
-)
-# [/SECTION]
-
-router = APIRouter(prefix="/api/profile", tags=["profile"])
-
-
-# [DEF:_get_profile_service:Function]
-# @PURPOSE: Build profile service for current request scope.
-# @PRE: db session and config manager are available.
-# @POST: Returns a ready ProfileService instance.
-def _get_profile_service(db: Session, config_manager, plugin_loader=None) -> ProfileService:
-    return ProfileService(
-        db=db,
-        config_manager=config_manager,
-        plugin_loader=plugin_loader,
-    )
-# [/DEF:_get_profile_service:Function]
-
-
-# [DEF:get_preferences:Function]
-# @PURPOSE: Get authenticated user's dashboard filter preference.
-# @PRE: Valid JWT and authenticated user context.
-# @POST: Returns preference payload for current user only.
-@router.get("/preferences", response_model=ProfilePreferenceResponse)
-async def get_preferences(
-    current_user: User = Depends(get_current_user),
-    db: Session = Depends(get_db),
-    config_manager=Depends(get_config_manager),
-    plugin_loader=Depends(get_plugin_loader),
-):
-    with belief_scope("profile.get_preferences", f"user_id={current_user.id}"):
-        logger.reason("[REASON] Resolving current user preference")
-        service = _get_profile_service(db, config_manager, plugin_loader)
-        return service.get_my_preference(current_user)
-# [/DEF:get_preferences:Function]
-
-
-# [DEF:update_preferences:Function]
-# @PURPOSE: Update authenticated user's dashboard filter preference.
-# @PRE: Valid JWT and valid request payload.
-# @POST: Persists normalized preference for current user or raises validation/authorization errors.
-@router.patch("/preferences", response_model=ProfilePreferenceResponse)
-async def update_preferences(
-    payload: ProfilePreferenceUpdateRequest,
-    current_user: User = Depends(get_current_user),
-    db: Session = Depends(get_db),
-    config_manager=Depends(get_config_manager),
-    plugin_loader=Depends(get_plugin_loader),
-):
-    with belief_scope("profile.update_preferences", f"user_id={current_user.id}"):
-        service = _get_profile_service(db, config_manager, plugin_loader)
-        try:
-            logger.reason("[REASON] Attempting preference save")
-            return service.update_my_preference(current_user=current_user, payload=payload)
-        except ProfileValidationError as exc:
-            logger.reflect("[REFLECT] Preference validation failed")
-            raise HTTPException(status_code=422, detail=exc.errors) from exc
-        except ProfileAuthorizationError as exc:
-            logger.explore("[EXPLORE] Cross-user mutation guard blocked request")
-            raise HTTPException(status_code=403, detail=str(exc)) from exc
-# [/DEF:update_preferences:Function]
-
-
-# [DEF:lookup_superset_accounts:Function]
-# @PURPOSE: Lookup Superset account candidates in selected environment.
-# @PRE: Valid JWT, authenticated context, and environment_id query parameter.
-# @POST: Returns success or degraded lookup payload with stable shape.
-@router.get("/superset-accounts", response_model=SupersetAccountLookupResponse)
-async def lookup_superset_accounts(
-    environment_id: str = Query(...),
-    search: Optional[str] = Query(default=None),
-    page_index: int = Query(default=0, ge=0),
-    page_size: int = Query(default=20, ge=1, le=100),
-    sort_column: str = Query(default="username"),
-    sort_order: str = Query(default="desc"),
-    current_user: User = Depends(get_current_user),
-    db: Session = Depends(get_db),
-    config_manager=Depends(get_config_manager),
-    plugin_loader=Depends(get_plugin_loader),
-):
-    with belief_scope(
-        "profile.lookup_superset_accounts",
-        f"user_id={current_user.id}, environment_id={environment_id}",
-    ):
-        service = _get_profile_service(db, config_manager, plugin_loader)
-        lookup_request = SupersetAccountLookupRequest(
-            environment_id=environment_id,
-            search=search,
-            page_index=page_index,
-            page_size=page_size,
-            sort_column=sort_column,
-            sort_order=sort_order,
-        )
-        try:
-            logger.reason("[REASON] Executing Superset account lookup")
-            return service.lookup_superset_accounts(
-                current_user=current_user,
-                request=lookup_request,
-            )
-        except EnvironmentNotFoundError as exc:
-            logger.explore("[EXPLORE] Lookup request references unknown environment")
-            raise HTTPException(status_code=404, detail=str(exc)) from exc
-# [/DEF:lookup_superset_accounts:Function]
-
-# [/DEF:backend.src.api.routes.profile:Module]
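Before the module's removal, the account lookup took paging and sorting as query parameters; a sketch of a call (host and token are illustrative, parameter names come from the signature above):

import httpx

resp = httpx.get(
    "http://localhost:8000/api/profile/superset-accounts",
    params={
        "environment_id": "prod",    # required; unknown ids yield 404
        "search": "ivan",            # optional filter
        "page_index": 0,
        "page_size": 20,             # capped at 100 by the Query bound
        "sort_column": "username",
        "sort_order": "desc",
    },
    headers={"Authorization": "Bearer <token>"},   # the endpoint was JWT-protected
)
accounts = resp.json()               # SupersetAccountLookupResponse payload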
@@ -13,11 +13,10 @@ from typing import List, Optional
 
 from fastapi import APIRouter, Depends, HTTPException, Query, status
 
-from ...dependencies import get_task_manager, has_permission, get_clean_release_repository
+from ...dependencies import get_task_manager, has_permission
 from ...core.task_manager import TaskManager
 from ...core.logger import belief_scope
 from ...models.report import ReportCollection, ReportDetailView, ReportQuery, ReportStatus, TaskType
-from ...services.clean_release.repository import CleanReleaseRepository
 from ...services.reports.report_service import ReportsService
 # [/SECTION]
 
@@ -89,7 +88,6 @@ async def list_reports(
     sort_by: str = Query("updated_at"),
     sort_order: str = Query("desc"),
     task_manager: TaskManager = Depends(get_task_manager),
-    clean_release_repository: CleanReleaseRepository = Depends(get_clean_release_repository),
     _=Depends(has_permission("tasks", "READ")),
 ):
     with belief_scope("list_reports"):
@@ -119,7 +117,7 @@ async def list_reports(
         },
     )
 
-    service = ReportsService(task_manager, clean_release_repository=clean_release_repository)
+    service = ReportsService(task_manager)
     return service.list_reports(query)
 # [/DEF:list_reports:Function]
 
@@ -132,11 +130,10 @@ async def list_reports(
 async def get_report_detail(
     report_id: str,
     task_manager: TaskManager = Depends(get_task_manager),
-    clean_release_repository: CleanReleaseRepository = Depends(get_clean_release_repository),
     _=Depends(has_permission("tasks", "READ")),
 ):
     with belief_scope("get_report_detail", f"report_id={report_id}"):
-        service = ReportsService(task_manager, clean_release_repository=clean_release_repository)
+        service = ReportsService(task_manager)
         detail = service.get_report_detail(report_id)
         if not detail:
             raise HTTPException(
@@ -16,15 +16,10 @@ from pydantic import BaseModel
 from ...core.config_models import AppConfig, Environment, GlobalSettings, LoggingConfig
 from ...models.storage import StorageConfig
 from ...dependencies import get_config_manager, has_permission
 from ...core.config_manager import ConfigManager
 from ...core.logger import logger, belief_scope
 from ...core.superset_client import SupersetClient
 from ...services.llm_prompt_templates import normalize_llm_settings
-from ...models.llm import ValidationPolicy
-from ...models.config import AppConfigRecord
-from ...schemas.settings import ValidationPolicyCreate, ValidationPolicyUpdate, ValidationPolicyResponse
-from ...core.database import get_db
-from sqlalchemy.orm import Session
 # [/SECTION]
 
 # [DEF:LoggingConfigResponse:Class]
@@ -36,38 +31,38 @@ class LoggingConfigResponse(BaseModel):
     enable_belief_state: bool
 # [/DEF:LoggingConfigResponse:Class]
 
 router = APIRouter()
 
 
 # [DEF:_normalize_superset_env_url:Function]
 # @PURPOSE: Canonicalize Superset environment URL to base host/path without trailing /api/v1.
 # @PRE: raw_url can be empty.
 # @POST: Returns normalized base URL.
 def _normalize_superset_env_url(raw_url: str) -> str:
     normalized = str(raw_url or "").strip().rstrip("/")
     if normalized.lower().endswith("/api/v1"):
         normalized = normalized[:-len("/api/v1")]
     return normalized.rstrip("/")
 # [/DEF:_normalize_superset_env_url:Function]
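The normalization is easy to see on a few inputs; these follow directly from the function body above:

# Trailing slashes and a trailing /api/v1 (any letter case) are stripped:
assert _normalize_superset_env_url("https://superset.example.com/api/v1/") == "https://superset.example.com"
assert _normalize_superset_env_url("https://superset.example.com/API/V1") == "https://superset.example.com"
assert _normalize_superset_env_url("") == ""    # empty input stays empty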
 
 
 # [DEF:_validate_superset_connection_fast:Function]
 # @PURPOSE: Run lightweight Superset connectivity validation without full pagination scan.
 # @PRE: env contains valid URL and credentials.
 # @POST: Raises on auth/API failures; returns None on success.
 def _validate_superset_connection_fast(env: Environment) -> None:
     client = SupersetClient(env)
     # 1) Explicit auth check
     client.authenticate()
     # 2) Single lightweight API call to ensure read access
     client.get_dashboards_page(
         query={
             "page": 0,
             "page_size": 1,
             "columns": ["id"],
         }
     )
 # [/DEF:_validate_superset_connection_fast:Function]
 
 # [DEF:get_settings:Function]
 # @PURPOSE: Retrieves all application settings.
@@ -75,14 +70,14 @@ def _validate_superset_connection_fast(env: Environment) -> None:
 # @POST: Returns masked AppConfig.
 # @RETURN: AppConfig - The current configuration.
 @router.get("", response_model=AppConfig)
 async def get_settings(
     config_manager: ConfigManager = Depends(get_config_manager),
     _ = Depends(has_permission("admin:settings", "READ"))
 ):
     with belief_scope("get_settings"):
         logger.info("[get_settings][Entry] Fetching all settings")
         config = config_manager.get_config().copy(deep=True)
         config.settings.llm = normalize_llm_settings(config.settings.llm)
         # Mask passwords
         for env in config.environments:
             if env.password:
@@ -148,18 +143,18 @@ async def update_storage_settings(
 # @PRE: Config manager is available.
 # @POST: Returns list of environments.
 # @RETURN: List[Environment] - List of environments.
 @router.get("/environments", response_model=List[Environment])
 async def get_environments(
     config_manager: ConfigManager = Depends(get_config_manager),
     _ = Depends(has_permission("admin:settings", "READ"))
 ):
     with belief_scope("get_environments"):
         logger.info("[get_environments][Entry] Fetching environments")
         environments = config_manager.get_environments()
         return [
             env.copy(update={"url": _normalize_superset_env_url(env.url)})
             for env in environments
         ]
 # [/DEF:get_environments:Function]
 
 # [DEF:add_environment:Function]
@@ -169,21 +164,21 @@ async def get_environments(
 # @PARAM: env (Environment) - The environment to add.
 # @RETURN: Environment - The added environment.
 @router.post("/environments", response_model=Environment)
 async def add_environment(
     env: Environment,
     config_manager: ConfigManager = Depends(get_config_manager),
     _ = Depends(has_permission("admin:settings", "WRITE"))
 ):
     with belief_scope("add_environment"):
         logger.info(f"[add_environment][Entry] Adding environment {env.id}")
         env = env.copy(update={"url": _normalize_superset_env_url(env.url)})
 
         # Validate connection before adding (fast path)
         try:
             _validate_superset_connection_fast(env)
         except Exception as e:
             logger.error(f"[add_environment][Coherence:Failed] Connection validation failed: {e}")
             raise HTTPException(status_code=400, detail=f"Connection validation failed: {e}")
 
         config_manager.add_environment(env)
         return env
@@ -197,29 +192,29 @@ async def add_environment(
 # @PARAM: env (Environment) - The updated environment data.
 # @RETURN: Environment - The updated environment.
 @router.put("/environments/{id}", response_model=Environment)
 async def update_environment(
     id: str,
     env: Environment,
     config_manager: ConfigManager = Depends(get_config_manager)
 ):
     with belief_scope("update_environment"):
         logger.info(f"[update_environment][Entry] Updating environment {id}")
 
         env = env.copy(update={"url": _normalize_superset_env_url(env.url)})
 
         # If password is masked, we need the real one for validation
         env_to_validate = env.copy(deep=True)
         if env_to_validate.password == "********":
             old_env = next((e for e in config_manager.get_environments() if e.id == id), None)
             if old_env:
                 env_to_validate.password = old_env.password
 
         # Validate connection before updating (fast path)
         try:
             _validate_superset_connection_fast(env_to_validate)
         except Exception as e:
             logger.error(f"[update_environment][Coherence:Failed] Connection validation failed: {e}")
             raise HTTPException(status_code=400, detail=f"Connection validation failed: {e}")
 
         if config_manager.update_environment(id, env):
             return env
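A practical consequence of the masking logic: a client can round-trip the masked form without ever seeing the secret. A sketch, assuming the settings router is mounted under /api/settings (the mount point is not shown in this diff):

import httpx

# GET returns environments with passwords masked as "********".
env = httpx.get("http://localhost:8000/api/settings/environments").json()[0]
env["url"] = "https://superset.example.com/api/v1"   # server normalizes this on save

# Sending the mask back keeps the stored password; validation swaps in the real one.
resp = httpx.put(f"http://localhost:8000/api/settings/environments/{env['id']}", json=env)
print(resp.status_code)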
@@ -249,7 +244,7 @@ async def delete_environment(
 # @PARAM: id (str) - The ID of the environment to test.
 # @RETURN: dict - Success message or error.
 @router.post("/environments/{id}/test")
 async def test_environment_connection(
     id: str,
     config_manager: ConfigManager = Depends(get_config_manager)
 ):
@@ -261,11 +256,11 @@ async def test_environment_connection(
     if not env:
         raise HTTPException(status_code=404, detail=f"Environment {id} not found")
 
     try:
         _validate_superset_connection_fast(env)
         logger.info(f"[test_environment_connection][Coherence:OK] Connection successful for {id}")
         return {"status": "success", "message": "Connection successful"}
     except Exception as e:
         logger.error(f"[test_environment_connection][Coherence:Failed] Connection failed for {id}: {e}")
         return {"status": "error", "message": str(e)}
@@ -318,14 +313,13 @@ async def update_logging_config(
 # [/DEF:update_logging_config:Function]
 
 # [DEF:ConsolidatedSettingsResponse:Class]
 class ConsolidatedSettingsResponse(BaseModel):
     environments: List[dict]
     connections: List[dict]
     llm: dict
     llm_providers: List[dict]
     logging: dict
     storage: dict
-    notifications: dict = {}
 # [/DEF:ConsolidatedSettingsResponse:Class]
 
 # [DEF:get_consolidated_settings:Function]
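After this change the consolidated payload no longer carries a notifications block; a response now looks roughly like the following (values are illustrative, keys follow the model above):

consolidated = {
    "environments": [{"id": "prod", "url": "https://superset.example.com"}],
    "connections": [],
    "llm": {},                  # normalized via normalize_llm_settings
    "llm_providers": [{"id": "p1", "is_active": True}],
    "logging": {"enable_belief_state": False},
    "storage": {},
}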
@@ -334,7 +328,7 @@ class ConsolidatedSettingsResponse(BaseModel):
 # @POST: Returns all consolidated settings.
 # @RETURN: ConsolidatedSettingsResponse - All settings categories.
 @router.get("/consolidated", response_model=ConsolidatedSettingsResponse)
 async def get_consolidated_settings(
     config_manager: ConfigManager = Depends(get_config_manager),
     _ = Depends(has_permission("admin:settings", "READ"))
 ):
@@ -346,7 +340,6 @@ async def get_consolidated_settings(
     from ...services.llm_provider import LLMProviderService
     from ...core.database import SessionLocal
     db = SessionLocal()
-    notifications_payload = {}
     try:
         llm_service = LLMProviderService(db)
         providers = llm_service.get_all_providers()
@@ -361,24 +354,19 @@ async def get_consolidated_settings(
                 "is_active": p.is_active
             } for p in providers
         ]
-
-        config_record = db.query(AppConfigRecord).filter(AppConfigRecord.id == "global").first()
-        if config_record and isinstance(config_record.payload, dict):
-            notifications_payload = config_record.payload.get("notifications", {}) or {}
     finally:
         db.close()
 
     normalized_llm = normalize_llm_settings(config.settings.llm)
 
     return ConsolidatedSettingsResponse(
         environments=[env.dict() for env in config.environments],
         connections=config.settings.connections,
         llm=normalized_llm,
         llm_providers=llm_providers_list,
         logging=config.settings.logging.dict(),
-        storage=config.settings.storage.dict(),
-        notifications=notifications_payload
+        storage=config.settings.storage.dict()
     )
 # [/DEF:get_consolidated_settings:Function]
 
 # [DEF:update_consolidated_settings:Function]
@@ -401,9 +389,9 @@ async def update_consolidated_settings(
     if "connections" in settings_patch:
         current_settings.connections = settings_patch["connections"]
 
     # Update LLM if provided
     if "llm" in settings_patch:
         current_settings.llm = normalize_llm_settings(settings_patch["llm"])
 
     # Update Logging if provided
     if "logging" in settings_patch:
@@ -417,88 +405,8 @@ async def update_consolidated_settings(
         raise HTTPException(status_code=400, detail=message)
     current_settings.storage = new_storage
 
-    if "notifications" in settings_patch:
-        payload = config_manager.get_payload()
-        payload["notifications"] = settings_patch["notifications"]
-        config_manager.save_config(payload)
-
     config_manager.update_global_settings(current_settings)
     return {"status": "success", "message": "Settings updated"}
 # [/DEF:update_consolidated_settings:Function]
 
-# [DEF:get_validation_policies:Function]
-# @PURPOSE: Lists all validation policies.
-# @RETURN: List[ValidationPolicyResponse] - List of policies.
-@router.get("/automation/policies", response_model=List[ValidationPolicyResponse])
-async def get_validation_policies(
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("admin:settings", "READ"))
-):
-    with belief_scope("get_validation_policies"):
-        return db.query(ValidationPolicy).all()
-# [/DEF:get_validation_policies:Function]
-
-# [DEF:create_validation_policy:Function]
-# @PURPOSE: Creates a new validation policy.
-# @PARAM: policy (ValidationPolicyCreate) - The policy data.
-# @RETURN: ValidationPolicyResponse - The created policy.
-@router.post("/automation/policies", response_model=ValidationPolicyResponse)
-async def create_validation_policy(
-    policy: ValidationPolicyCreate,
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("admin:settings", "WRITE"))
-):
-    with belief_scope("create_validation_policy"):
-        db_policy = ValidationPolicy(**policy.dict())
-        db.add(db_policy)
-        db.commit()
-        db.refresh(db_policy)
-        return db_policy
-# [/DEF:create_validation_policy:Function]
-
-# [DEF:update_validation_policy:Function]
-# @PURPOSE: Updates an existing validation policy.
-# @PARAM: id (str) - The ID of the policy to update.
-# @PARAM: policy (ValidationPolicyUpdate) - The updated policy data.
-# @RETURN: ValidationPolicyResponse - The updated policy.
-@router.patch("/automation/policies/{id}", response_model=ValidationPolicyResponse)
-async def update_validation_policy(
-    id: str,
-    policy: ValidationPolicyUpdate,
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("admin:settings", "WRITE"))
-):
-    with belief_scope("update_validation_policy"):
-        db_policy = db.query(ValidationPolicy).filter(ValidationPolicy.id == id).first()
-        if not db_policy:
-            raise HTTPException(status_code=404, detail="Policy not found")
-
-        update_data = policy.dict(exclude_unset=True)
-        for key, value in update_data.items():
-            setattr(db_policy, key, value)
-
-        db.commit()
-        db.refresh(db_policy)
-        return db_policy
-# [/DEF:update_validation_policy:Function]
-
-# [DEF:delete_validation_policy:Function]
-# @PURPOSE: Deletes a validation policy.
-# @PARAM: id (str) - The ID of the policy to delete.
-@router.delete("/automation/policies/{id}")
-async def delete_validation_policy(
-    id: str,
-    db: Session = Depends(get_db),
-    _ = Depends(has_permission("admin:settings", "WRITE"))
-):
-    with belief_scope("delete_validation_policy"):
-        db_policy = db.query(ValidationPolicy).filter(ValidationPolicy.id == id).first()
-        if not db_policy:
-            raise HTTPException(status_code=404, detail="Policy not found")
-
-        db.delete(db_policy)
-        db.commit()
-        return {"message": "Policy deleted"}
-# [/DEF:delete_validation_policy:Function]
-
 # [/DEF:SettingsRouter:Module]
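The removed PATCH handler relied on Pydantic's exclude_unset idiom for partial updates: only fields the client actually sent get applied. A standalone sketch of the same idea (the model here is a hypothetical stand-in, not ValidationPolicyUpdate):

from typing import Optional
from pydantic import BaseModel

class PolicyUpdate(BaseModel):       # hypothetical stand-in model
    name: Optional[str] = None
    threshold: Optional[int] = None

patch = PolicyUpdate(threshold=5)    # client sent only "threshold"
for key, value in patch.dict(exclude_unset=True).items():
    print(key, value)                # -> threshold 5; "name" stays untouched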
@@ -4,30 +4,30 @@
 # @PURPOSE: Defines the FastAPI router for task-related endpoints, allowing clients to create, list, and get the status of tasks.
 # @LAYER: UI (API)
 # @RELATION: Depends on the TaskManager. It is included by the main app.
 from typing import List, Dict, Any, Optional
 from fastapi import APIRouter, Depends, HTTPException, status, Query
 from pydantic import BaseModel
 from ...core.logger import belief_scope
 
 from ...core.task_manager import TaskManager, Task, TaskStatus, LogEntry
 from ...core.task_manager.models import LogFilter, LogStats
 from ...dependencies import get_task_manager, has_permission, get_current_user, get_config_manager
 from ...core.config_manager import ConfigManager
 from ...services.llm_prompt_templates import (
     is_multimodal_model,
     normalize_llm_settings,
     resolve_bound_provider_id,
 )
 
 router = APIRouter()
 
 TASK_TYPE_PLUGIN_MAP = {
     "llm_validation": ["llm_dashboard_validation"],
     "backup": ["superset-backup"],
     "migration": ["superset-migration"],
 }
 
 class CreateTaskRequest(BaseModel):
     plugin_id: str
     params: Dict[str, Any]
 
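TASK_TYPE_PLUGIN_MAP is what the task_type filter in list_tasks expands through; resolving a category is a plain dict lookup:

# Unknown categories fall back to an empty list rather than raising.
plugin_ids = TASK_TYPE_PLUGIN_MAP.get("backup", [])   # -> ["superset-backup"]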
@@ -45,54 +45,54 @@ class ResumeTaskRequest(BaseModel):
 # @PRE: plugin_id must exist and params must be valid for that plugin.
 # @POST: A new task is created and started.
 # @RETURN: Task - The created task instance.
 async def create_task(
     request: CreateTaskRequest,
     task_manager: TaskManager = Depends(get_task_manager),
     current_user = Depends(get_current_user),
     config_manager: ConfigManager = Depends(get_config_manager),
 ):
     # Dynamic permission check based on plugin_id
     has_permission(f"plugin:{request.plugin_id}", "EXECUTE")(current_user)
     """
     Create and start a new task for a given plugin.
     """
     with belief_scope("create_task"):
         try:
             # Special handling for LLM tasks to resolve provider config by task binding.
             if request.plugin_id in {"llm_dashboard_validation", "llm_documentation"}:
                 from ...core.database import SessionLocal
                 from ...services.llm_provider import LLMProviderService
                 db = SessionLocal()
                 try:
                     llm_service = LLMProviderService(db)
                     provider_id = request.params.get("provider_id")
                     if not provider_id:
                         llm_settings = normalize_llm_settings(config_manager.get_config().settings.llm)
                         binding_key = "dashboard_validation" if request.plugin_id == "llm_dashboard_validation" else "documentation"
                         provider_id = resolve_bound_provider_id(llm_settings, binding_key)
                         if provider_id:
                             request.params["provider_id"] = provider_id
                     if not provider_id:
                         providers = llm_service.get_all_providers()
                         active_provider = next((p for p in providers if p.is_active), None)
                         if active_provider:
                             provider_id = active_provider.id
                             request.params["provider_id"] = provider_id
 
                     if provider_id:
                         db_provider = llm_service.get_provider(provider_id)
                         if not db_provider:
                             raise ValueError(f"LLM Provider {provider_id} not found")
                         if request.plugin_id == "llm_dashboard_validation" and not is_multimodal_model(
                             db_provider.default_model,
                             db_provider.provider_type,
                         ):
                             raise HTTPException(
                                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                                 detail="Selected provider model is not multimodal for dashboard validation",
                             )
                 finally:
                     db.close()
 
             task = await task_manager.create_task(
                 plugin_id=request.plugin_id,
@@ -113,36 +113,36 @@ async def create_task(
# @PRE: task_manager must be available.
# @POST: Returns a list of tasks.
# @RETURN: List[Task] - List of tasks.
async def list_tasks(
    limit: int = 10,
    offset: int = 0,
    status_filter: Optional[TaskStatus] = Query(None, alias="status"),
    task_type: Optional[str] = Query(None, description="Task category: llm_validation, backup, migration"),
    plugin_id: Optional[List[str]] = Query(None, description="Filter by plugin_id (repeatable query param)"),
    completed_only: bool = Query(False, description="Return only completed tasks (SUCCESS/FAILED)"),
    task_manager: TaskManager = Depends(get_task_manager),
    _ = Depends(has_permission("tasks", "READ")),
):
    """
    Retrieve a list of tasks with pagination and optional status filter.
    """
    with belief_scope("list_tasks"):
        plugin_filters = list(plugin_id) if plugin_id else []
        if task_type:
            if task_type not in TASK_TYPE_PLUGIN_MAP:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Unsupported task_type '{task_type}'. Allowed: {', '.join(TASK_TYPE_PLUGIN_MAP.keys())}",
                )
            plugin_filters.extend(TASK_TYPE_PLUGIN_MAP[task_type])

        return task_manager.get_tasks(
            limit=limit,
            offset=offset,
            status=status_filter,
            plugin_ids=plugin_filters or None,
            completed_only=completed_only,
        )
# [/DEF:list_tasks:Function]

@router.get("/{task_id}", response_model=Task)
@@ -182,23 +182,6 @@ async def get_task(
# @POST: Returns a list of log entries or raises 404.
# @RETURN: List[LogEntry] - List of log entries.
# @TIER: CRITICAL
-# @TEST_CONTRACT get_task_logs_api ->
-# {
-#   required_params: {task_id: str},
-#   optional_params: {level: str, source: str, search: str},
-#   invariants: ["returns 404 for non-existent task", "applies filters correctly"]
-# }
-# @TEST_FIXTURE valid_task_logs_request -> {"task_id": "test_1", "level": "INFO"}
-# @TEST_EDGE task_not_found -> raises 404
-# @TEST_EDGE invalid_limit -> Query(limit=0) returns 422
-# @TEST_INVARIANT response_purity -> verifies: [valid_task_logs_request]
-# @TEST_CONTRACT: TaskLogQueryInput -> List[LogEntry]
-# @TEST_SCENARIO: existing_task_logs_filtered -> Returns filtered logs by level/source/search with pagination.
-# @TEST_FIXTURE: valid_task_with_mixed_logs -> backend/tests/fixtures/task_logs/valid_task_with_mixed_logs.json
-# @TEST_EDGE: missing_task -> Unknown task_id returns 404 Task not found.
-# @TEST_EDGE: invalid_level_type -> Non-string/invalid level query rejected by validation or yields empty result.
-# @TEST_EDGE: pagination_bounds -> offset=0 and limit=1000 remain within API bounds and do not overflow.
-# @TEST_INVARIANT: logs_only_for_existing_task -> VERIFIED_BY: [existing_task_logs_filtered, missing_task]
async def get_task_logs(
    task_id: str,
    level: Optional[str] = Query(None, description="Filter by log level (DEBUG, INFO, WARNING, ERROR)"),
@@ -345,4 +328,4 @@ async def clear_tasks(
    task_manager.clear_tasks(status)
    return
# [/DEF:clear_tasks:Function]
# [/DEF:TasksRouter:Module]
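
# Usage sketch for the tasks routes above. The base URL, route prefix, and
# bearer token are assumptions for illustration only; the payload shapes follow
# CreateTaskRequest and the list_tasks query parameters.
import httpx

def run_backup_and_list(base_url: str = "http://localhost:8000", token: str = "<token>"):
    headers = {"Authorization": f"Bearer {token}"}
    # Create a task for the backup plugin (CreateTaskRequest: plugin_id + params).
    created = httpx.post(
        f"{base_url}/api/tasks/",  # prefix assumed; depends on how the router is mounted
        json={"plugin_id": "superset-backup", "params": {}},
        headers=headers,
    ).json()
    # List completed backup tasks using the task_type filter from TASK_TYPE_PLUGIN_MAP.
    completed = httpx.get(
        f"{base_url}/api/tasks/",
        params={"task_type": "backup", "completed_only": True},
        headers=headers,
    ).json()
    return created, completed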
@@ -21,7 +21,7 @@ import asyncio
from .dependencies import get_task_manager, get_scheduler_service
from .core.utils.network import NetworkError
from .core.logger import logger, belief_scope
-from .api.routes import plugins, tasks, settings, environments, mappings, migration, connections, git, storage, admin, llm, dashboards, datasets, reports, assistant, clean_release, clean_release_v2, profile, health
+from .api.routes import plugins, tasks, settings, environments, mappings, migration, connections, git, storage, admin, llm, dashboards, datasets, reports, assistant, clean_release
from .api import auth

# [DEF:App:Global]
@@ -134,9 +134,6 @@ app.include_router(datasets.router)
app.include_router(reports.router)
app.include_router(assistant.router, prefix="/api/assistant", tags=["Assistant"])
app.include_router(clean_release.router)
-app.include_router(clean_release_v2.router)
-app.include_router(profile.router)
-app.include_router(health.router)


# [DEF:api.include_routers:Action]
@@ -1,3 +0,0 @@
-# [DEF:src.core:Package]
-# @PURPOSE: Backend core services and infrastructure package root.
-# [/DEF:src.core:Package]
@@ -1,128 +0,0 @@
-# [DEF:backend.src.core.__tests__.test_superset_profile_lookup:Module]
-# @TIER: STANDARD
-# @SEMANTICS: tests, superset, profile, lookup, fallback, sorting
-# @PURPOSE: Verifies Superset profile lookup adapter payload normalization and fallback error precedence.
-# @LAYER: Domain
-# @RELATION: TESTS -> backend.src.core.superset_profile_lookup
-
-# [SECTION: IMPORTS]
-import json
-import sys
-from pathlib import Path
-from typing import Any, Dict, List, Optional
-
-import pytest
-
-backend_dir = str(Path(__file__).parent.parent.parent.parent.resolve())
-if backend_dir not in sys.path:
-    sys.path.insert(0, backend_dir)
-
-from src.core.superset_profile_lookup import SupersetAccountLookupAdapter
-from src.core.utils.network import AuthenticationError, SupersetAPIError
-# [/SECTION]
-
-
-# [DEF:_RecordingNetworkClient:Class]
-# @PURPOSE: Records request payloads and returns scripted responses for deterministic adapter tests.
-class _RecordingNetworkClient:
-    # [DEF:__init__:Function]
-    # @PURPOSE: Initializes scripted network responses.
-    # @PRE: scripted_responses is ordered per expected request sequence.
-    # @POST: Instance stores response script and captures subsequent request calls.
-    def __init__(self, scripted_responses: List[Any]):
-        self._scripted_responses = scripted_responses
-        self.calls: List[Dict[str, Any]] = []
-    # [/DEF:__init__:Function]
-
-    # [DEF:request:Function]
-    # @PURPOSE: Mimics APIClient.request while capturing call arguments.
-    # @PRE: method and endpoint are provided.
-    # @POST: Returns scripted response or raises scripted exception.
-    def request(
-        self,
-        method: str,
-        endpoint: str,
-        params: Optional[Dict[str, Any]] = None,
-        **kwargs,
-    ) -> Dict[str, Any]:
-        self.calls.append(
-            {
-                "method": method,
-                "endpoint": endpoint,
-                "params": params or {},
-            }
-        )
-        index = len(self.calls) - 1
-        response = self._scripted_responses[index]
-        if isinstance(response, Exception):
-            raise response
-        return response
-    # [/DEF:request:Function]
-# [/DEF:_RecordingNetworkClient:Class]
-
-
-# [DEF:test_get_users_page_sends_lowercase_order_direction:Function]
-# @PURPOSE: Ensures adapter sends lowercase order_direction compatible with Superset rison schema.
-# @PRE: Adapter is initialized with recording network client.
-# @POST: First request query payload contains order_direction='asc' for asc sort.
-def test_get_users_page_sends_lowercase_order_direction():
-    client = _RecordingNetworkClient(
-        scripted_responses=[{"result": [{"username": "admin"}], "count": 1}]
-    )
-    adapter = SupersetAccountLookupAdapter(network_client=client, environment_id="ss-dev")
-
-    adapter.get_users_page(
-        search="admin",
-        page_index=0,
-        page_size=20,
-        sort_column="username",
-        sort_order="asc",
-    )
-
-    sent_query = json.loads(client.calls[0]["params"]["q"])
-    assert sent_query["order_direction"] == "asc"
-# [/DEF:test_get_users_page_sends_lowercase_order_direction:Function]
-
-
-# [DEF:test_get_users_page_preserves_primary_schema_error_over_fallback_auth_error:Function]
-# @PURPOSE: Ensures fallback auth error does not mask primary schema/query failure.
-# @PRE: Primary endpoint fails with SupersetAPIError and fallback fails with AuthenticationError.
-# @POST: Raised exception remains primary SupersetAPIError (non-auth) to preserve root cause.
-def test_get_users_page_preserves_primary_schema_error_over_fallback_auth_error():
-    client = _RecordingNetworkClient(
-        scripted_responses=[
-            SupersetAPIError("API Error 400: bad rison schema"),
-            AuthenticationError(),
-        ]
-    )
-    adapter = SupersetAccountLookupAdapter(network_client=client, environment_id="ss-dev")
-
-    with pytest.raises(SupersetAPIError) as exc_info:
-        adapter.get_users_page(sort_order="asc")
-
-    assert "API Error 400" in str(exc_info.value)
-    assert not isinstance(exc_info.value, AuthenticationError)
-# [/DEF:test_get_users_page_preserves_primary_schema_error_over_fallback_auth_error:Function]
-
-
-# [DEF:test_get_users_page_uses_fallback_endpoint_when_primary_fails:Function]
-# @PURPOSE: Verifies adapter retries second users endpoint and succeeds when fallback is healthy.
-# @PRE: Primary endpoint fails; fallback returns valid users payload.
-# @POST: Result status is success and both endpoints were attempted in order.
-def test_get_users_page_uses_fallback_endpoint_when_primary_fails():
-    client = _RecordingNetworkClient(
-        scripted_responses=[
-            SupersetAPIError("Primary endpoint failed"),
-            {"result": [{"username": "admin"}], "count": 1},
-        ]
-    )
-    adapter = SupersetAccountLookupAdapter(network_client=client, environment_id="ss-dev")
-
-    result = adapter.get_users_page()
-
-    assert result["status"] == "success"
-    assert [call["endpoint"] for call in client.calls] == ["/security/users/", "/security/users"]
-# [/DEF:test_get_users_page_uses_fallback_endpoint_when_primary_fails:Function]
-
-
-# [/DEF:backend.src.core.__tests__.test_superset_profile_lookup:Module]
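
# The adapter under test is not shown in this diff, so the following is only a
# sketch of the contract the deleted tests pin down: try "/security/users/"
# first, fall back to "/security/users", and never let a fallback
# AuthenticationError mask a primary SupersetAPIError. The helper name is
# hypothetical; the exceptions come from src.core.utils.network as imported above.
from src.core.utils.network import AuthenticationError, SupersetAPIError

def fetch_users_with_fallback(client, query_params):
    primary_error = None
    for endpoint in ("/security/users/", "/security/users"):
        try:
            return client.request("GET", endpoint, params=query_params)
        except AuthenticationError:
            if primary_error is not None:
                raise primary_error  # preserve the primary root cause
            raise
        except SupersetAPIError as exc:
            if primary_error is None:
                primary_error = exc
    raise primary_error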
@@ -1,99 +0,0 @@
-import pytest
-from datetime import time, date, datetime, timedelta
-from src.core.scheduler import ThrottledSchedulerConfigurator
-
-# [DEF:test_throttled_scheduler:Module]
-# @TIER: STANDARD
-# @PURPOSE: Unit tests for ThrottledSchedulerConfigurator distribution logic.
-
-def test_calculate_schedule_even_distribution():
-    """
-    @TEST_SCENARIO: 3 tasks in a 2-hour window should be spaced 1 hour apart.
-    """
-    start = time(1, 0)
-    end = time(3, 0)
-    dashboards = ["d1", "d2", "d3"]
-    today = date(2024, 1, 1)
-
-    schedule = ThrottledSchedulerConfigurator.calculate_schedule(start, end, dashboards, today)
-
-    assert len(schedule) == 3
-    assert schedule[0] == datetime(2024, 1, 1, 1, 0)
-    assert schedule[1] == datetime(2024, 1, 1, 2, 0)
-    assert schedule[2] == datetime(2024, 1, 1, 3, 0)
-
-def test_calculate_schedule_midnight_crossing():
-    """
-    @TEST_SCENARIO: Window from 23:00 to 01:00 (next day).
-    """
-    start = time(23, 0)
-    end = time(1, 0)
-    dashboards = ["d1", "d2", "d3"]
-    today = date(2024, 1, 1)
-
-    schedule = ThrottledSchedulerConfigurator.calculate_schedule(start, end, dashboards, today)
-
-    assert len(schedule) == 3
-    assert schedule[0] == datetime(2024, 1, 1, 23, 0)
-    assert schedule[1] == datetime(2024, 1, 2, 0, 0)
-    assert schedule[2] == datetime(2024, 1, 2, 1, 0)
-
-def test_calculate_schedule_single_task():
-    """
-    @TEST_SCENARIO: Single task should be scheduled at start time.
-    """
-    start = time(1, 0)
-    end = time(2, 0)
-    dashboards = ["d1"]
-    today = date(2024, 1, 1)
-
-    schedule = ThrottledSchedulerConfigurator.calculate_schedule(start, end, dashboards, today)
-
-    assert len(schedule) == 1
-    assert schedule[0] == datetime(2024, 1, 1, 1, 0)
-
-def test_calculate_schedule_empty_list():
-    """
-    @TEST_SCENARIO: Empty dashboard list returns empty schedule.
-    """
-    start = time(1, 0)
-    end = time(2, 0)
-    dashboards = []
-    today = date(2024, 1, 1)
-
-    schedule = ThrottledSchedulerConfigurator.calculate_schedule(start, end, dashboards, today)
-
-    assert schedule == []
-
-def test_calculate_schedule_zero_window():
-    """
-    @TEST_SCENARIO: Window start == end. All tasks at start time.
-    """
-    start = time(1, 0)
-    end = time(1, 0)
-    dashboards = ["d1", "d2"]
-    today = date(2024, 1, 1)
-
-    schedule = ThrottledSchedulerConfigurator.calculate_schedule(start, end, dashboards, today)
-
-    assert len(schedule) == 2
-    assert schedule[0] == datetime(2024, 1, 1, 1, 0)
-    assert schedule[1] == datetime(2024, 1, 1, 1, 0)
-
-def test_calculate_schedule_very_small_window():
-    """
-    @TEST_SCENARIO: Window smaller than number of tasks (in seconds).
-    """
-    start = time(1, 0, 0)
-    end = time(1, 0, 1)  # 1 second window
-    dashboards = ["d1", "d2", "d3"]
-    today = date(2024, 1, 1)
-
-    schedule = ThrottledSchedulerConfigurator.calculate_schedule(start, end, dashboards, today)
-
-    assert len(schedule) == 3
-    assert schedule[0] == datetime(2024, 1, 1, 1, 0, 0)
-    assert schedule[1] == datetime(2024, 1, 1, 1, 0, 0, 500000)  # 0.5s
-    assert schedule[2] == datetime(2024, 1, 1, 1, 0, 1)
-
-# [/DEF:test_throttled_scheduler:Module]
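
# The deleted tests above encode a distribution rule: n items in a window
# [start, end] are spaced (end - start) / (n - 1) apart, and the window may
# cross midnight. A minimal standalone sketch of that rule (the function name
# is hypothetical; the real implementation lived in ThrottledSchedulerConfigurator):
from datetime import date, datetime, time, timedelta

def spread_evenly(start: time, end: time, items: list, day: date) -> list:
    if not items:
        return []
    begin = datetime.combine(day, start)
    finish = datetime.combine(day, end)
    if finish < begin:  # window crosses midnight
        finish += timedelta(days=1)
    if len(items) == 1 or finish == begin:
        step = timedelta(0)  # single item or zero-width window: all at start
    else:
        step = (finish - begin) / (len(items) - 1)
    return [begin + i * step for i in range(len(items))]

# spread_evenly(time(1, 0), time(3, 0), ["d1", "d2", "d3"], date(2024, 1, 1))
# -> 01:00, 02:00, 03:00, matching the even-distribution test above.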
@@ -1,298 +0,0 @@
-# [DEF:backend.src.core.async_superset_client:Module]
-#
-# @TIER: CRITICAL
-# @SEMANTICS: superset, async, client, httpx, dashboards, datasets
-# @PURPOSE: Async Superset client for dashboard hot-path requests without blocking FastAPI event loop.
-# @LAYER: Core
-# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
-# @RELATION: DEPENDS_ON -> backend.src.core.utils.async_network.AsyncAPIClient
-# @INVARIANT: Async dashboard operations reuse shared auth cache and avoid sync requests in async routes.
-
-# [SECTION: IMPORTS]
-import asyncio
-import json
-import re
-from typing import Any, Dict, List, Optional, Tuple, cast
-
-from .config_models import Environment
-from .logger import logger as app_logger, belief_scope
-from .superset_client import SupersetClient
-from .utils.async_network import AsyncAPIClient
-# [/SECTION]
-
-
-# [DEF:AsyncSupersetClient:Class]
-# @PURPOSE: Async sibling of SupersetClient for dashboard read paths.
-class AsyncSupersetClient(SupersetClient):
-    # [DEF:__init__:Function]
-    # @PURPOSE: Initialize async Superset client with AsyncAPIClient transport.
-    # @PRE: env is valid.
-    # @POST: Client uses async network transport and inherited projection helpers.
-    def __init__(self, env: Environment):
-        self.env = env
-        auth_payload = {
-            "username": env.username,
-            "password": env.password,
-            "provider": "db",
-            "refresh": "true",
-        }
-        self.network = AsyncAPIClient(
-            config={"base_url": env.url, "auth": auth_payload},
-            verify_ssl=env.verify_ssl,
-            timeout=env.timeout,
-        )
-        self.delete_before_reimport = False
-    # [/DEF:__init__:Function]
-
-    # [DEF:aclose:Function]
-    # @PURPOSE: Close async transport resources.
-    # @POST: Underlying AsyncAPIClient is closed.
-    async def aclose(self) -> None:
-        await self.network.aclose()
-    # [/DEF:aclose:Function]
-
-    # [DEF:get_dashboards_page_async:Function]
-    # @PURPOSE: Fetch one dashboards page asynchronously.
-    # @POST: Returns total count and page result list.
-    async def get_dashboards_page_async(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
-        with belief_scope("AsyncSupersetClient.get_dashboards_page_async"):
-            validated_query = self._validate_query_params(query or {})
-            if "columns" not in validated_query:
-                validated_query["columns"] = [
-                    "slug",
-                    "id",
-                    "url",
-                    "changed_on_utc",
-                    "dashboard_title",
-                    "published",
-                    "created_by",
-                    "changed_by",
-                    "changed_by_name",
-                    "owners",
-                ]
-
-            response_json = cast(
-                Dict[str, Any],
-                await self.network.request(
-                    method="GET",
-                    endpoint="/dashboard/",
-                    params={"q": json.dumps(validated_query)},
-                ),
-            )
-            result = response_json.get("result", [])
-            total_count = response_json.get("count", len(result))
-            return total_count, result
-    # [/DEF:get_dashboards_page_async:Function]
-
-    # [DEF:get_dashboard_async:Function]
-    # @PURPOSE: Fetch one dashboard payload asynchronously.
-    # @POST: Returns raw dashboard payload from Superset API.
-    async def get_dashboard_async(self, dashboard_id: int) -> Dict:
-        with belief_scope("AsyncSupersetClient.get_dashboard_async", f"id={dashboard_id}"):
-            response = await self.network.request(method="GET", endpoint=f"/dashboard/{dashboard_id}")
-            return cast(Dict, response)
-    # [/DEF:get_dashboard_async:Function]
-
-    # [DEF:get_chart_async:Function]
-    # @PURPOSE: Fetch one chart payload asynchronously.
-    # @POST: Returns raw chart payload from Superset API.
-    async def get_chart_async(self, chart_id: int) -> Dict:
-        with belief_scope("AsyncSupersetClient.get_chart_async", f"id={chart_id}"):
-            response = await self.network.request(method="GET", endpoint=f"/chart/{chart_id}")
-            return cast(Dict, response)
-    # [/DEF:get_chart_async:Function]
-
-    # [DEF:get_dashboard_detail_async:Function]
-    # @PURPOSE: Fetch dashboard detail asynchronously with concurrent charts/datasets requests.
-    # @POST: Returns dashboard detail payload for overview page.
-    async def get_dashboard_detail_async(self, dashboard_id: int) -> Dict:
-        with belief_scope("AsyncSupersetClient.get_dashboard_detail_async", f"id={dashboard_id}"):
-            dashboard_response = await self.get_dashboard_async(dashboard_id)
-            dashboard_data = dashboard_response.get("result", dashboard_response)
-
-            charts: List[Dict] = []
-            datasets: List[Dict] = []
-
-            def extract_dataset_id_from_form_data(form_data: Optional[Dict]) -> Optional[int]:
-                if not isinstance(form_data, dict):
-                    return None
-                datasource = form_data.get("datasource")
-                if isinstance(datasource, str):
-                    matched = re.match(r"^(\d+)__", datasource)
-                    if matched:
-                        try:
-                            return int(matched.group(1))
-                        except ValueError:
-                            return None
-                if isinstance(datasource, dict):
-                    ds_id = datasource.get("id")
-                    try:
-                        return int(ds_id) if ds_id is not None else None
-                    except (TypeError, ValueError):
-                        return None
-                ds_id = form_data.get("datasource_id")
-                try:
-                    return int(ds_id) if ds_id is not None else None
-                except (TypeError, ValueError):
-                    return None
-
-            chart_task = self.network.request(
-                method="GET",
-                endpoint=f"/dashboard/{dashboard_id}/charts",
-            )
-            dataset_task = self.network.request(
-                method="GET",
-                endpoint=f"/dashboard/{dashboard_id}/datasets",
-            )
-            charts_response, datasets_response = await asyncio.gather(
-                chart_task,
-                dataset_task,
-                return_exceptions=True,
-            )
-
-            if not isinstance(charts_response, Exception):
-                charts_payload = charts_response.get("result", []) if isinstance(charts_response, dict) else []
-                for chart_obj in charts_payload:
-                    if not isinstance(chart_obj, dict):
-                        continue
-                    chart_id = chart_obj.get("id")
-                    if chart_id is None:
-                        continue
-                    form_data = chart_obj.get("form_data")
-                    if isinstance(form_data, str):
-                        try:
-                            form_data = json.loads(form_data)
-                        except Exception:
-                            form_data = {}
-                    dataset_id = extract_dataset_id_from_form_data(form_data) or chart_obj.get("datasource_id")
-                    charts.append({
-                        "id": int(chart_id),
-                        "title": chart_obj.get("slice_name") or chart_obj.get("name") or f"Chart {chart_id}",
-                        "viz_type": (form_data.get("viz_type") if isinstance(form_data, dict) else None),
-                        "dataset_id": int(dataset_id) if dataset_id is not None else None,
-                        "last_modified": chart_obj.get("changed_on"),
-                        "overview": chart_obj.get("description") or (form_data.get("viz_type") if isinstance(form_data, dict) else None) or "Chart",
-                    })
-            else:
-                app_logger.warning("[get_dashboard_detail_async][Warning] Failed to fetch dashboard charts: %s", charts_response)
-
-            if not isinstance(datasets_response, Exception):
-                datasets_payload = datasets_response.get("result", []) if isinstance(datasets_response, dict) else []
-                for dataset_obj in datasets_payload:
-                    if not isinstance(dataset_obj, dict):
-                        continue
-                    dataset_id = dataset_obj.get("id")
-                    if dataset_id is None:
-                        continue
-                    db_payload = dataset_obj.get("database")
-                    db_name = db_payload.get("database_name") if isinstance(db_payload, dict) else None
-                    table_name = dataset_obj.get("table_name") or dataset_obj.get("datasource_name") or dataset_obj.get("name") or f"Dataset {dataset_id}"
-                    schema = dataset_obj.get("schema")
-                    fq_name = f"{schema}.{table_name}" if schema else table_name
-                    datasets.append({
-                        "id": int(dataset_id),
-                        "table_name": table_name,
-                        "schema": schema,
-                        "database": db_name or dataset_obj.get("database_name") or "Unknown",
-                        "last_modified": dataset_obj.get("changed_on"),
-                        "overview": fq_name,
-                    })
-            else:
-                app_logger.warning("[get_dashboard_detail_async][Warning] Failed to fetch dashboard datasets: %s", datasets_response)
-
-            if not charts:
-                raw_position_json = dashboard_data.get("position_json")
-                chart_ids_from_position = set()
-                if isinstance(raw_position_json, str) and raw_position_json:
-                    try:
-                        parsed_position = json.loads(raw_position_json)
-                        chart_ids_from_position.update(self._extract_chart_ids_from_layout(parsed_position))
-                    except Exception:
-                        pass
-                elif isinstance(raw_position_json, dict):
-                    chart_ids_from_position.update(self._extract_chart_ids_from_layout(raw_position_json))
-
-                raw_json_metadata = dashboard_data.get("json_metadata")
-                if isinstance(raw_json_metadata, str) and raw_json_metadata:
-                    try:
-                        parsed_metadata = json.loads(raw_json_metadata)
-                        chart_ids_from_position.update(self._extract_chart_ids_from_layout(parsed_metadata))
-                    except Exception:
-                        pass
-                elif isinstance(raw_json_metadata, dict):
-                    chart_ids_from_position.update(self._extract_chart_ids_from_layout(raw_json_metadata))
-
-                fallback_chart_tasks = [
-                    self.get_chart_async(int(chart_id))
-                    for chart_id in sorted(chart_ids_from_position)
-                ]
-                fallback_chart_responses = await asyncio.gather(
-                    *fallback_chart_tasks,
-                    return_exceptions=True,
-                )
-                for chart_id, chart_response in zip(sorted(chart_ids_from_position), fallback_chart_responses):
-                    if isinstance(chart_response, Exception):
-                        app_logger.warning("[get_dashboard_detail_async][Warning] Failed to resolve fallback chart %s: %s", chart_id, chart_response)
-                        continue
-                    chart_data = chart_response.get("result", chart_response)
-                    charts.append({
-                        "id": int(chart_id),
-                        "title": chart_data.get("slice_name") or chart_data.get("name") or f"Chart {chart_id}",
-                        "viz_type": chart_data.get("viz_type"),
-                        "dataset_id": chart_data.get("datasource_id"),
-                        "last_modified": chart_data.get("changed_on"),
-                        "overview": chart_data.get("description") or chart_data.get("viz_type") or "Chart",
-                    })
-
-            dataset_ids_from_charts = {
-                c.get("dataset_id")
-                for c in charts
-                if c.get("dataset_id") is not None
-            }
-            known_dataset_ids = {d.get("id") for d in datasets if d.get("id") is not None}
-            missing_dataset_ids = sorted(int(item) for item in dataset_ids_from_charts if item not in known_dataset_ids)
-            if missing_dataset_ids:
-                dataset_fetch_tasks = [
-                    self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}")
-                    for dataset_id in missing_dataset_ids
-                ]
-                dataset_fetch_responses = await asyncio.gather(
-                    *dataset_fetch_tasks,
-                    return_exceptions=True,
-                )
-                for dataset_id, dataset_response in zip(missing_dataset_ids, dataset_fetch_responses):
-                    if isinstance(dataset_response, Exception):
-                        app_logger.warning("[get_dashboard_detail_async][Warning] Failed to backfill dataset %s: %s", dataset_id, dataset_response)
-                        continue
-                    dataset_data = dataset_response.get("result", dataset_response) if isinstance(dataset_response, dict) else {}
-                    db_payload = dataset_data.get("database")
-                    db_name = db_payload.get("database_name") if isinstance(db_payload, dict) else None
-                    table_name = dataset_data.get("table_name") or dataset_data.get("datasource_name") or dataset_data.get("name") or f"Dataset {dataset_id}"
-                    schema = dataset_data.get("schema")
-                    fq_name = f"{schema}.{table_name}" if schema else table_name
-                    datasets.append({
-                        "id": int(dataset_id),
-                        "table_name": table_name,
-                        "schema": schema,
-                        "database": db_name or dataset_data.get("database_name") or "Unknown",
-                        "last_modified": dataset_data.get("changed_on"),
-                        "overview": fq_name,
-                    })
-
-            return {
-                "id": int(dashboard_data.get("id") or dashboard_id),
-                "title": dashboard_data.get("dashboard_title") or dashboard_data.get("title") or f"Dashboard {dashboard_id}",
-                "slug": dashboard_data.get("slug"),
-                "url": dashboard_data.get("url"),
-                "description": dashboard_data.get("description"),
-                "last_modified": dashboard_data.get("changed_on_utc") or dashboard_data.get("changed_on"),
-                "published": dashboard_data.get("published"),
-                "charts": charts,
-                "datasets": datasets,
-                "chart_count": len(charts),
-                "dataset_count": len(datasets),
-            }
-    # [/DEF:get_dashboard_detail_async:Function]
-# [/DEF:AsyncSupersetClient:Class]
-
-# [/DEF:backend.src.core.async_superset_client:Module]
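
# The deleted client above fans out concurrent requests with
# asyncio.gather(..., return_exceptions=True) so that one failed call degrades
# to a logged warning instead of failing the whole detail view. A standalone
# sketch of that pattern (helper name is illustrative):
import asyncio

async def fetch_all_tolerant(fetchers):
    # Each fetcher is an awaitable; exceptions are returned as values, not raised.
    results = await asyncio.gather(*fetchers, return_exceptions=True)
    ok = [r for r in results if not isinstance(r, Exception)]
    failed = [r for r in results if isinstance(r, Exception)]
    return ok, failed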
@@ -1,3 +0,0 @@
-# [DEF:src.core.auth:Package]
-# @PURPOSE: Authentication and authorization package root.
-# [/DEF:src.core.auth:Package]
@@ -1,146 +1,106 @@
# [DEF:backend.src.core.auth.repository:Module]
#
-# @TIER: CRITICAL
-# @SEMANTICS: auth, repository, database, user, role, permission
-# @PURPOSE: Data access layer for authentication and user preference entities.
-# @LAYER: Domain
-# @RELATION: [DEPENDS_ON] ->[sqlalchemy.orm.Session]
-# @RELATION: [DEPENDS_ON] ->[backend.src.models.auth]
-# @RELATION: [DEPENDS_ON] ->[backend.src.models.profile]
-# @RELATION: [DEPENDS_ON] ->[backend.src.core.logger.belief_scope]
-# @INVARIANT: All database read/write operations must execute via the injected SQLAlchemy session boundary.
+# @SEMANTICS: auth, repository, database, user, role
+# @PURPOSE: Data access layer for authentication-related entities.
+# @LAYER: Core
+# @RELATION: DEPENDS_ON -> sqlalchemy
+# @RELATION: USES -> backend.src.models.auth
#
+# @INVARIANT: All database operations must be performed within a session.

# [SECTION: IMPORTS]
-from typing import List, Optional
+from typing import Optional, List

from sqlalchemy.orm import Session
-
-from ...models.auth import Permission, Role, User
-from ...models.profile import UserDashboardPreference
-from ..logger import belief_scope, logger
+from ...models.auth import User, Role, Permission
+from ..logger import belief_scope
# [/SECTION]

# [DEF:AuthRepository:Class]
-# @PURPOSE: Encapsulates database operations for authentication-related entities.
-# @RELATION: [DEPENDS_ON] ->[sqlalchemy.orm.Session]
+# @PURPOSE: Encapsulates database operations for authentication.
class AuthRepository:
    # [DEF:__init__:Function]
-    # @PURPOSE: Bind repository instance to an existing SQLAlchemy session.
-    # @PRE: db is an initialized sqlalchemy.orm.Session instance.
-    # @POST: self.db points to the provided session and is used by all repository methods.
-    # @SIDE_EFFECT: Stores session reference on repository instance state.
-    # @DATA_CONTRACT: Input[Session] -> Output[None]
+    # @PURPOSE: Initializes the repository with a database session.
+    # @PARAM: db (Session) - SQLAlchemy session.
    def __init__(self, db: Session):
-        with belief_scope("AuthRepository.__init__"):
-            if not isinstance(db, Session):
-                logger.explore("Invalid session provided to AuthRepository", extra={"type": type(db)})
-                raise TypeError("db must be an instance of sqlalchemy.orm.Session")
-
-            logger.reason("Binding AuthRepository to database session")
-            self.db = db
-            logger.reflect("AuthRepository initialized")
+        self.db = db
    # [/DEF:__init__:Function]

    # [DEF:get_user_by_username:Function]
-    # @PURPOSE: Retrieve a user entity by unique username.
-    # @PRE: username is a non-empty str and self.db is a valid open Session.
-    # @POST: Returns matching User entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str] -> Output[Optional[User]]
+    # @PURPOSE: Retrieves a user by their username.
+    # @PRE: username is a string.
+    # @POST: Returns User object if found, else None.
+    # @PARAM: username (str) - The username to search for.
+    # @RETURN: Optional[User] - The found user or None.
    def get_user_by_username(self, username: str) -> Optional[User]:
        with belief_scope("AuthRepository.get_user_by_username"):
-            if not username or not isinstance(username, str):
-                raise ValueError("username must be a non-empty string")
-
-            logger.reason(f"Querying user by username: {username}")
-            user = self.db.query(User).filter(User.username == username).first()
-
-            if user:
-                logger.reflect(f"User found: {username}")
-            else:
-                logger.explore(f"User not found: {username}")
-            return user
+            return self.db.query(User).filter(User.username == username).first()
    # [/DEF:get_user_by_username:Function]

    # [DEF:get_user_by_id:Function]
-    # @PURPOSE: Retrieve a user entity by identifier.
-    # @PRE: user_id is a non-empty str and self.db is a valid open Session.
-    # @POST: Returns matching User entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str] -> Output[Optional[User]]
+    # @PURPOSE: Retrieves a user by their unique ID.
+    # @PRE: user_id is a valid UUID string.
+    # @POST: Returns User object if found, else None.
+    # @PARAM: user_id (str) - The user's unique identifier.
+    # @RETURN: Optional[User] - The found user or None.
    def get_user_by_id(self, user_id: str) -> Optional[User]:
        with belief_scope("AuthRepository.get_user_by_id"):
-            if not user_id or not isinstance(user_id, str):
-                raise ValueError("user_id must be a non-empty string")
-
-            logger.reason(f"Querying user by ID: {user_id}")
-            user = self.db.query(User).filter(User.id == user_id).first()
-
-            if user:
-                logger.reflect(f"User found by ID: {user_id}")
-            else:
-                logger.explore(f"User not found by ID: {user_id}")
-            return user
+            return self.db.query(User).filter(User.id == user_id).first()
    # [/DEF:get_user_by_id:Function]

    # [DEF:get_role_by_name:Function]
-    # @PURPOSE: Retrieve a role entity by role name.
-    # @PRE: name is a non-empty str and self.db is a valid open Session.
-    # @POST: Returns matching Role entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str] -> Output[Optional[Role]]
+    # @PURPOSE: Retrieves a role by its name.
+    # @PRE: name is a string.
+    # @POST: Returns Role object if found, else None.
+    # @PARAM: name (str) - The role name to search for.
+    # @RETURN: Optional[Role] - The found role or None.
    def get_role_by_name(self, name: str) -> Optional[Role]:
        with belief_scope("AuthRepository.get_role_by_name"):
            return self.db.query(Role).filter(Role.name == name).first()
    # [/DEF:get_role_by_name:Function]

    # [DEF:update_last_login:Function]
-    # @PURPOSE: Update last_login timestamp for the provided user entity.
-    # @PRE: user is a managed User instance and self.db is a valid open Session.
-    # @POST: user.last_login is set to current UTC timestamp and transaction is committed.
-    # @SIDE_EFFECT: Mutates user entity state and commits database transaction.
-    # @DATA_CONTRACT: Input[User] -> Output[None]
+    # @PURPOSE: Updates the last_login timestamp for a user.
+    # @PRE: user object is a valid User instance.
+    # @POST: User's last_login is updated in the database.
+    # @SIDE_EFFECT: Commits the transaction.
+    # @PARAM: user (User) - The user to update.
    def update_last_login(self, user: User):
        with belief_scope("AuthRepository.update_last_login"):
-            if not isinstance(user, User):
-                raise TypeError("user must be an instance of User")
-
            from datetime import datetime
-            logger.reason(f"Updating last login for user: {user.username}")
            user.last_login = datetime.utcnow()
            self.db.add(user)
            self.db.commit()
-            logger.reflect(f"Last login updated and committed for user: {user.username}")
    # [/DEF:update_last_login:Function]

    # [DEF:get_role_by_id:Function]
-    # @PURPOSE: Retrieve a role entity by identifier.
-    # @PRE: role_id is a non-empty str and self.db is a valid open Session.
-    # @POST: Returns matching Role entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str] -> Output[Optional[Role]]
+    # @PURPOSE: Retrieves a role by its unique ID.
+    # @PRE: role_id is a string.
+    # @POST: Returns Role object if found, else None.
+    # @PARAM: role_id (str) - The role's unique identifier.
+    # @RETURN: Optional[Role] - The found role or None.
    def get_role_by_id(self, role_id: str) -> Optional[Role]:
        with belief_scope("AuthRepository.get_role_by_id"):
            return self.db.query(Role).filter(Role.id == role_id).first()
    # [/DEF:get_role_by_id:Function]

    # [DEF:get_permission_by_id:Function]
-    # @PURPOSE: Retrieve a permission entity by identifier.
-    # @PRE: perm_id is a non-empty str and self.db is a valid open Session.
-    # @POST: Returns matching Permission entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str] -> Output[Optional[Permission]]
+    # @PURPOSE: Retrieves a permission by its unique ID.
+    # @PRE: perm_id is a string.
+    # @POST: Returns Permission object if found, else None.
+    # @PARAM: perm_id (str) - The permission's unique identifier.
+    # @RETURN: Optional[Permission] - The found permission or None.
    def get_permission_by_id(self, perm_id: str) -> Optional[Permission]:
        with belief_scope("AuthRepository.get_permission_by_id"):
            return self.db.query(Permission).filter(Permission.id == perm_id).first()
    # [/DEF:get_permission_by_id:Function]

    # [DEF:get_permission_by_resource_action:Function]
-    # @PURPOSE: Retrieve a permission entity by resource and action pair.
-    # @PRE: resource and action are non-empty str values; self.db is a valid open Session.
-    # @POST: Returns matching Permission entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str, str] -> Output[Optional[Permission]]
+    # @PURPOSE: Retrieves a permission by resource and action.
+    # @PRE: resource and action are strings.
+    # @POST: Returns Permission object if found, else None.
+    # @PARAM: resource (str) - The resource name.
+    # @PARAM: action (str) - The action name.
+    # @RETURN: Optional[Permission] - The found permission or None.
    def get_permission_by_resource_action(self, resource: str, action: str) -> Optional[Permission]:
        with belief_scope("AuthRepository.get_permission_by_resource_action"):
            return self.db.query(Permission).filter(
@@ -149,54 +109,15 @@ class AuthRepository:
            ).first()
    # [/DEF:get_permission_by_resource_action:Function]

-    # [DEF:get_user_dashboard_preference:Function]
-    # @PURPOSE: Retrieve dashboard preference entity owned by specified user.
-    # @PRE: user_id is a non-empty str and self.db is a valid open Session.
-    # @POST: Returns matching UserDashboardPreference entity when present, otherwise None.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[str] -> Output[Optional[UserDashboardPreference]]
-    def get_user_dashboard_preference(self, user_id: str) -> Optional[UserDashboardPreference]:
-        with belief_scope("AuthRepository.get_user_dashboard_preference"):
-            return (
-                self.db.query(UserDashboardPreference)
-                .filter(UserDashboardPreference.user_id == user_id)
-                .first()
-            )
-    # [/DEF:get_user_dashboard_preference:Function]
-
-    # [DEF:save_user_dashboard_preference:Function]
-    # @PURPOSE: Persist dashboard preference entity and return refreshed persistent row.
-    # @PRE: preference is a valid UserDashboardPreference entity and self.db is a valid open Session.
-    # @POST: preference is committed to DB, refreshed from DB state, and returned.
-    # @SIDE_EFFECT: Performs INSERT/UPDATE commit and refresh via active DB session.
-    # @DATA_CONTRACT: Input[UserDashboardPreference] -> Output[UserDashboardPreference]
-    def save_user_dashboard_preference(
-        self,
-        preference: UserDashboardPreference,
-    ) -> UserDashboardPreference:
-        with belief_scope("AuthRepository.save_user_dashboard_preference"):
-            if not isinstance(preference, UserDashboardPreference):
-                raise TypeError("preference must be an instance of UserDashboardPreference")
-
-            logger.reason(f"Saving dashboard preference for user: {preference.user_id}")
-            self.db.add(preference)
-            self.db.commit()
-            self.db.refresh(preference)
-            logger.reflect(f"Dashboard preference saved and refreshed for user: {preference.user_id}")
-            return preference
-    # [/DEF:save_user_dashboard_preference:Function]
-
    # [DEF:list_permissions:Function]
-    # @PURPOSE: List all permission entities available in storage.
-    # @PRE: self.db is a valid open Session.
-    # @POST: Returns list containing all Permission entities visible to the session.
-    # @SIDE_EFFECT: Executes read-only SELECT query through active DB session.
-    # @DATA_CONTRACT: Input[None] -> Output[List[Permission]]
+    # @PURPOSE: Lists all available permissions.
+    # @POST: Returns a list of all Permission objects.
+    # @RETURN: List[Permission] - List of permissions.
    def list_permissions(self) -> List[Permission]:
        with belief_scope("AuthRepository.list_permissions"):
            return self.db.query(Permission).all()
    # [/DEF:list_permissions:Function]

# [/DEF:AuthRepository:Class]

# [/DEF:backend.src.core.auth.repository:Module]
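
# Usage sketch for AuthRepository. The import paths follow the test modules'
# style seen earlier in this diff, and the resource/action values are examples;
# SessionLocal is the session factory used throughout this codebase.
from src.core.auth.repository import AuthRepository
from src.core.database import SessionLocal

def user_can(username: str, resource: str, action: str) -> bool:
    db = SessionLocal()
    try:
        repo = AuthRepository(db)
        user = repo.get_user_by_username(username)
        permission = repo.get_permission_by_resource_action(resource, action)
        # A real check would also walk the user's roles; this only shows the repository calls.
        return user is not None and permission is not None
    finally:
        db.close()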
@@ -1,17 +1,17 @@
|
|||||||
# [DEF:ConfigManagerModule:Module]
|
# [DEF:ConfigManagerModule:Module]
|
||||||
#
|
#
|
||||||
# @TIER: CRITICAL
|
# @TIER: STANDARD
|
||||||
# @SEMANTICS: config, manager, persistence, migration, postgresql
|
# @SEMANTICS: config, manager, persistence, postgresql
|
||||||
# @PURPOSE: Manages application configuration persistence in DB with one-time migration from legacy JSON.
|
# @PURPOSE: Manages application configuration persisted in database with one-time migration from JSON.
|
||||||
# @LAYER: Domain
|
# @LAYER: Core
|
||||||
# @RELATION: [DEPENDS_ON] ->[ConfigModels]
|
# @RELATION: DEPENDS_ON -> ConfigModels
|
||||||
# @RELATION: [DEPENDS_ON] ->[SessionLocal]
|
# @RELATION: DEPENDS_ON -> AppConfigRecord
|
||||||
# @RELATION: [DEPENDS_ON] ->[AppConfigRecord]
|
# @RELATION: CALLS -> logger
|
||||||
# @RELATION: [CALLS] ->[logger]
|
|
||||||
# @RELATION: [CALLS] ->[configure_logger]
|
|
||||||
# @RELATION: [BINDS_TO] ->[ConfigManager]
|
|
||||||
# @INVARIANT: Configuration must always be representable by AppConfig and persisted under global record id.
|
|
||||||
#
|
#
|
||||||
|
# @INVARIANT: Configuration must always be valid according to AppConfig model.
|
||||||
|
# @PUBLIC_API: ConfigManager
|
||||||
|
|
||||||
|
# [SECTION: IMPORTS]
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -23,58 +23,47 @@ from .config_models import AppConfig, Environment, GlobalSettings, StorageConfig
|
|||||||
from .database import SessionLocal
|
from .database import SessionLocal
|
||||||
from ..models.config import AppConfigRecord
|
from ..models.config import AppConfigRecord
|
||||||
from .logger import logger, configure_logger, belief_scope
|
from .logger import logger, configure_logger, belief_scope
|
||||||
|
# [/SECTION]
|
||||||
|
|
||||||
|
|
||||||
# [DEF:ConfigManager:Class]
|
# [DEF:ConfigManager:Class]
|
||||||
# @TIER: CRITICAL
|
# @TIER: STANDARD
|
||||||
# @PURPOSE: Handles application configuration load, validation, mutation, and persistence lifecycle.
|
# @PURPOSE: A class to handle application configuration persistence and management.
|
||||||
class ConfigManager:
|
class ConfigManager:
|
||||||
# [DEF:__init__:Function]
|
# [DEF:__init__:Function]
|
||||||
# @PURPOSE: Initialize manager state from persisted or migrated configuration.
|
# @TIER: STANDARD
|
||||||
# @PRE: config_path is a non-empty string path.
|
# @PURPOSE: Initializes the ConfigManager.
|
||||||
# @POST: self.config is initialized as AppConfig and logger is configured.
|
# @PRE: isinstance(config_path, str) and len(config_path) > 0
|
||||||
# @SIDE_EFFECT: Reads config sources and updates logging configuration.
|
# @POST: self.config is an instance of AppConfig
|
||||||
# @DATA_CONTRACT: Input(str config_path) -> Output(None; self.config: AppConfig)
|
# @PARAM: config_path (str) - Path to legacy JSON config (used only for initial migration fallback).
|
||||||
def __init__(self, config_path: str = "config.json"):
|
def __init__(self, config_path: str = "config.json"):
|
||||||
with belief_scope("ConfigManager.__init__"):
|
with belief_scope("__init__"):
|
||||||
if not isinstance(config_path, str) or not config_path:
|
assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"
|
||||||
logger.explore("Invalid config_path provided", extra={"path": config_path})
|
|
||||||
raise ValueError("config_path must be a non-empty string")
|
|
||||||
|
|
||||||
logger.reason(f"Initializing ConfigManager with legacy path: {config_path}")
|
logger.info(f"[ConfigManager][Entry] Initializing with legacy path {config_path}")
|
||||||
|
|
||||||
self.config_path = Path(config_path)
|
self.config_path = Path(config_path)
|
||||||
self.config: AppConfig = self._load_config()
|
self.config: AppConfig = self._load_config()
|
||||||
|
|
||||||
configure_logger(self.config.settings.logging)
|
configure_logger(self.config.settings.logging)
|
||||||
|
assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
|
||||||
if not isinstance(self.config, AppConfig):
|
|
||||||
logger.explore("Config loading resulted in invalid type", extra={"type": type(self.config)})
|
|
||||||
raise TypeError("self.config must be an instance of AppConfig")
|
|
||||||
|
|
||||||
logger.reflect("ConfigManager initialization complete")
|
logger.info("[ConfigManager][Exit] Initialized")
|
||||||
# [/DEF:__init__:Function]
|
# [/DEF:__init__:Function]
|
||||||
|
|
||||||
# [DEF:_default_config:Function]
|
# [DEF:_default_config:Function]
|
||||||
# @PURPOSE: Build default application configuration fallback.
|
# @PURPOSE: Returns default application configuration.
|
||||||
# @PRE: None.
|
# @RETURN: AppConfig - Default configuration.
|
||||||
# @POST: Returns valid AppConfig with empty environments and default storage settings.
|
|
||||||
# @SIDE_EFFECT: None.
|
|
||||||
# @DATA_CONTRACT: Input(None) -> Output(AppConfig)
|
|
||||||
def _default_config(self) -> AppConfig:
|
def _default_config(self) -> AppConfig:
|
||||||
with belief_scope("_default_config"):
|
return AppConfig(
|
||||||
return AppConfig(
|
environments=[],
|
||||||
environments=[],
|
settings=GlobalSettings(storage=StorageConfig()),
|
||||||
settings=GlobalSettings(storage=StorageConfig()),
|
)
|
||||||
)
|
|
||||||
# [/DEF:_default_config:Function]
|
# [/DEF:_default_config:Function]
|
||||||
|
|
||||||
# [DEF:_load_from_legacy_file:Function]
|
# [DEF:_load_from_legacy_file:Function]
|
||||||
# @PURPOSE: Load legacy JSON configuration for migration fallback path.
|
# @PURPOSE: Loads legacy configuration from config.json for migration fallback.
|
||||||
# @PRE: self.config_path is initialized.
|
# @RETURN: AppConfig - Loaded or default configuration.
|
||||||
# @POST: Returns AppConfig from file payload or safe default.
|
|
||||||
# @SIDE_EFFECT: Filesystem read and error logging.
|
|
||||||
# @DATA_CONTRACT: Input(Path self.config_path) -> Output(AppConfig)
|
|
||||||
def _load_from_legacy_file(self) -> AppConfig:
|
def _load_from_legacy_file(self) -> AppConfig:
|
||||||
with belief_scope("_load_from_legacy_file"):
|
with belief_scope("_load_from_legacy_file"):
|
||||||
if not self.config_path.exists():
|
if not self.config_path.exists():
|
||||||
@@ -92,55 +81,47 @@ class ConfigManager:
|
|||||||
# [/DEF:_load_from_legacy_file:Function]
|
# [/DEF:_load_from_legacy_file:Function]
|
||||||

     # [DEF:_get_record:Function]
-    # @PURPOSE: Resolve global configuration record from DB.
-    # @PRE: session is an active SQLAlchemy Session.
-    # @POST: Returns record when present, otherwise None.
-    # @SIDE_EFFECT: Database read query.
-    # @DATA_CONTRACT: Input(Session) -> Output(Optional[AppConfigRecord])
+    # @PURPOSE: Loads config record from DB.
+    # @PARAM: session (Session) - DB session.
+    # @RETURN: Optional[AppConfigRecord] - Existing record or None.
     def _get_record(self, session: Session) -> Optional[AppConfigRecord]:
-        with belief_scope("_get_record"):
-            return session.query(AppConfigRecord).filter(AppConfigRecord.id == "global").first()
+        return session.query(AppConfigRecord).filter(AppConfigRecord.id == "global").first()
     # [/DEF:_get_record:Function]

     # [DEF:_load_config:Function]
-    # @PURPOSE: Load configuration from DB or perform one-time migration from legacy JSON.
-    # @PRE: SessionLocal factory is available and AppConfigRecord schema is accessible.
-    # @POST: Returns valid AppConfig and closes opened DB session.
-    # @SIDE_EFFECT: Database read/write, possible migration write, logging.
-    # @DATA_CONTRACT: Input(None) -> Output(AppConfig)
+    # @PURPOSE: Loads the configuration from DB or performs one-time migration from JSON file.
+    # @PRE: DB session factory is available.
+    # @POST: isinstance(return, AppConfig)
+    # @RETURN: AppConfig - Loaded configuration.
     def _load_config(self) -> AppConfig:
-        with belief_scope("ConfigManager._load_config"):
+        with belief_scope("_load_config"):
             session: Session = SessionLocal()
             try:
                 record = self._get_record(session)
                 if record and record.payload:
-                    logger.reason("Configuration found in database")
-                    config = AppConfig(**record.payload)
-                    logger.reflect("Database configuration validated")
-                    return config
+                    logger.info("[_load_config][Coherence:OK] Configuration loaded from database")
+                    return AppConfig(**record.payload)

-                logger.reason("No database config found, initiating legacy migration")
+                logger.info("[_load_config][Action] No database config found, migrating legacy config")
                 config = self._load_from_legacy_file()
                 self._save_config_to_db(config, session=session)
-                logger.reflect("Legacy configuration migrated to database")
                 return config
             except Exception as e:
-                logger.explore(f"Error loading config from DB: {e}")
+                logger.error(f"[_load_config][Coherence:Failed] Error loading config from DB: {e}")
                 return self._default_config()
             finally:
                 session.close()
     # [/DEF:_load_config:Function]
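The load order implemented above (DB record first, one-time legacy migration second, safe defaults on any error) reduces to a small reusable pattern. A minimal sketch with hypothetical callables, not the project code:

```python
from typing import Callable, Optional, TypeVar

T = TypeVar("T")

# DB-first load with one-time legacy migration and a safe default; get_record,
# migrate_legacy and default are stand-ins for the ConfigManager helpers.
def load_with_fallback(get_record: Callable[[], Optional[T]],
                       migrate_legacy: Callable[[], T],
                       default: Callable[[], T]) -> T:
    try:
        record = get_record()
        if record is not None:
            return record
        return migrate_legacy()  # expected to persist what it loads
    except Exception:
        return default()
```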

     # [DEF:_save_config_to_db:Function]
-    # @PURPOSE: Persist provided AppConfig into the global DB configuration record.
-    # @PRE: config is AppConfig; session is either None or an active Session.
-    # @POST: Global DB record payload equals config.model_dump() when commit succeeds.
-    # @SIDE_EFFECT: Database insert/update, commit/rollback, logging.
-    # @DATA_CONTRACT: Input(AppConfig, Optional[Session]) -> Output(None)
+    # @PURPOSE: Saves the provided configuration object to DB.
+    # @PRE: isinstance(config, AppConfig)
+    # @POST: Configuration saved to database.
+    # @PARAM: config (AppConfig) - The configuration to save.
+    # @PARAM: session (Optional[Session]) - Existing DB session for transactional reuse.
     def _save_config_to_db(self, config: AppConfig, session: Optional[Session] = None):
-        with belief_scope("ConfigManager._save_config_to_db"):
-            if not isinstance(config, AppConfig):
-                raise TypeError("config must be an instance of AppConfig")
+        with belief_scope("_save_config_to_db"):
+            assert isinstance(config, AppConfig), "config must be an instance of AppConfig"

             owns_session = session is None
             db = session or SessionLocal()
@@ -148,17 +129,15 @@ class ConfigManager:
                 record = self._get_record(db)
                 payload = config.model_dump()
                 if record is None:
-                    logger.reason("Creating new global configuration record")
                     record = AppConfigRecord(id="global", payload=payload)
                     db.add(record)
                 else:
-                    logger.reason("Updating existing global configuration record")
                     record.payload = payload
                 db.commit()
-                logger.reflect("Configuration successfully committed to database")
+                logger.info("[_save_config_to_db][Action] Configuration saved to database")
             except Exception as e:
                 db.rollback()
-                logger.explore(f"Failed to save configuration: {e}")
+                logger.error(f"[_save_config_to_db][Coherence:Failed] Failed to save: {e}")
                 raise
             finally:
                 if owns_session:
@@ -166,51 +145,42 @@ class ConfigManager:
     # [/DEF:_save_config_to_db:Function]
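The body follows the usual query-then-insert-or-update upsert shape for a singleton keyed row. A generic sketch of the same pattern (model and column names hypothetical):

```python
from sqlalchemy.orm import Session

# Upsert a single keyed record, committing on success and rolling back on
# failure; Model is any mapped class with `id` and `payload` columns.
def upsert_payload(db: Session, Model, key: str, payload: dict) -> None:
    record = db.query(Model).filter(Model.id == key).first()
    if record is None:
        db.add(Model(id=key, payload=payload))
    else:
        record.payload = payload
    try:
        db.commit()
    except Exception:
        db.rollback()
        raise
```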

     # [DEF:save:Function]
-    # @PURPOSE: Persist current in-memory configuration state.
-    # @PRE: self.config is initialized.
-    # @POST: Current self.config is written to DB global record.
-    # @SIDE_EFFECT: Database write and logging via delegated persistence call.
-    # @DATA_CONTRACT: Input(None; self.config: AppConfig) -> Output(None)
+    # @PURPOSE: Saves the current configuration state to DB.
+    # @PRE: self.config is set.
+    # @POST: self._save_config_to_db called.
     def save(self):
         with belief_scope("save"):
             self._save_config_to_db(self.config)
     # [/DEF:save:Function]

     # [DEF:get_config:Function]
-    # @PURPOSE: Return current in-memory configuration snapshot.
-    # @PRE: self.config is initialized.
-    # @POST: Returns AppConfig reference stored in manager.
-    # @SIDE_EFFECT: None.
-    # @DATA_CONTRACT: Input(None) -> Output(AppConfig)
+    # @PURPOSE: Returns the current configuration.
+    # @RETURN: AppConfig - The current configuration.
     def get_config(self) -> AppConfig:
         with belief_scope("get_config"):
             return self.config
     # [/DEF:get_config:Function]

     # [DEF:update_global_settings:Function]
-    # @PURPOSE: Replace global settings and persist the resulting configuration.
-    # @PRE: settings is GlobalSettings.
-    # @POST: self.config.settings equals provided settings and DB state is updated.
-    # @SIDE_EFFECT: Mutates self.config, DB write, logger reconfiguration, logging.
-    # @DATA_CONTRACT: Input(GlobalSettings) -> Output(None)
+    # @PURPOSE: Updates the global settings and persists the change.
+    # @PRE: isinstance(settings, GlobalSettings)
+    # @POST: self.config.settings updated and saved.
+    # @PARAM: settings (GlobalSettings) - The new global settings.
     def update_global_settings(self, settings: GlobalSettings):
-        with belief_scope("ConfigManager.update_global_settings"):
-            if not isinstance(settings, GlobalSettings):
-                raise TypeError("settings must be an instance of GlobalSettings")
-
-            logger.reason("Updating global settings and persisting")
+        with belief_scope("update_global_settings"):
+            logger.info("[update_global_settings][Entry] Updating settings")
+            assert isinstance(settings, GlobalSettings), "settings must be an instance of GlobalSettings"
             self.config.settings = settings
             self.save()
             configure_logger(settings.logging)
-            logger.reflect("Global settings updated and logger reconfigured")
+            logger.info("[update_global_settings][Exit] Settings updated")
     # [/DEF:update_global_settings:Function]
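Hypothetical usage of the method above; because it calls configure_logger itself, a logging change takes effect immediately (values illustrative, assuming the classes defined in this file):

```python
# Flip a logging flag and persist it; the logger is reconfigured in the same call.
manager = ConfigManager("config.json")
settings = manager.get_config().settings
settings.logging.enable_belief_state = False
manager.update_global_settings(settings)  # persists and reconfigures the logger
```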

     # [DEF:validate_path:Function]
-    # @PURPOSE: Validate that path exists and is writable, creating it when absent.
-    # @PRE: path is a string path candidate.
-    # @POST: Returns (True, msg) for writable path, else (False, reason).
-    # @SIDE_EFFECT: Filesystem directory creation attempt and OS permission checks.
-    # @DATA_CONTRACT: Input(str path) -> Output(tuple[bool, str])
+    # @PURPOSE: Validates if a path exists and is writable.
+    # @PARAM: path (str) - The path to validate.
+    # @RETURN: tuple (bool, str) - (is_valid, message)
     def validate_path(self, path: str) -> tuple[bool, str]:
         with belief_scope("validate_path"):
             p = os.path.abspath(path)
@@ -227,33 +197,25 @@ class ConfigManager:
     # [/DEF:validate_path:Function]
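The body of validate_path is collapsed by the hunk above. A minimal sketch of the contract its annotations document (create when absent, then check writability) — an assumption, not the shipped code:

```python
import os

def validate_path_sketch(path: str) -> tuple[bool, str]:
    p = os.path.abspath(path)
    try:
        os.makedirs(p, exist_ok=True)  # create when absent
    except OSError as exc:
        return False, f"Cannot create {p}: {exc}"
    if not os.access(p, os.W_OK):  # OS-level permission check
        return False, f"Path {p} is not writable"
    return True, f"Path {p} is valid"
```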

     # [DEF:get_environments:Function]
-    # @PURPOSE: Return all configured environments.
-    # @PRE: self.config is initialized.
-    # @POST: Returns list of Environment models from current configuration.
-    # @SIDE_EFFECT: None.
-    # @DATA_CONTRACT: Input(None) -> Output(List[Environment])
+    # @PURPOSE: Returns the list of configured environments.
+    # @RETURN: List[Environment] - List of environments.
     def get_environments(self) -> List[Environment]:
         with belief_scope("get_environments"):
             return self.config.environments
     # [/DEF:get_environments:Function]

     # [DEF:has_environments:Function]
-    # @PURPOSE: Check whether at least one environment exists in configuration.
-    # @PRE: self.config is initialized.
-    # @POST: Returns True iff environment list length is greater than zero.
-    # @SIDE_EFFECT: None.
-    # @DATA_CONTRACT: Input(None) -> Output(bool)
+    # @PURPOSE: Checks if at least one environment is configured.
+    # @RETURN: bool - True if at least one environment exists.
     def has_environments(self) -> bool:
         with belief_scope("has_environments"):
             return len(self.config.environments) > 0
     # [/DEF:has_environments:Function]

     # [DEF:get_environment:Function]
-    # @PURPOSE: Resolve a configured environment by identifier.
-    # @PRE: env_id is string identifier.
-    # @POST: Returns matching Environment when found; otherwise None.
-    # @SIDE_EFFECT: None.
-    # @DATA_CONTRACT: Input(str env_id) -> Output(Optional[Environment])
+    # @PURPOSE: Returns a single environment by ID.
+    # @PARAM: env_id (str) - The ID of the environment to retrieve.
+    # @RETURN: Optional[Environment] - The environment with the given ID, or None.
     def get_environment(self, env_id: str) -> Optional[Environment]:
         with belief_scope("get_environment"):
             for env in self.config.environments:
@@ -263,72 +225,60 @@ class ConfigManager:
     # [/DEF:get_environment:Function]

     # [DEF:add_environment:Function]
-    # @PURPOSE: Upsert environment by id into configuration and persist.
-    # @PRE: env is Environment.
-    # @POST: Configuration contains provided env id with new payload persisted.
-    # @SIDE_EFFECT: Mutates environment list, DB write, logging.
-    # @DATA_CONTRACT: Input(Environment) -> Output(None)
+    # @PURPOSE: Adds a new environment to the configuration.
+    # @PARAM: env (Environment) - The environment to add.
     def add_environment(self, env: Environment):
-        with belief_scope("ConfigManager.add_environment"):
-            if not isinstance(env, Environment):
-                raise TypeError("env must be an instance of Environment")
-
-            logger.reason(f"Adding/Updating environment: {env.id}")
+        with belief_scope("add_environment"):
+            logger.info(f"[add_environment][Entry] Adding environment {env.id}")
+            assert isinstance(env, Environment), "env must be an instance of Environment"
            self.config.environments = [e for e in self.config.environments if e.id != env.id]
            self.config.environments.append(env)
            self.save()
-            logger.reflect(f"Environment {env.id} persisted")
+            logger.info("[add_environment][Exit] Environment added")
     # [/DEF:add_environment:Function]

     # [DEF:update_environment:Function]
-    # @PURPOSE: Update existing environment by id and preserve masked password placeholder behavior.
-    # @PRE: env_id is non-empty string and updated_env is Environment.
-    # @POST: Returns True and persists update when target exists; else returns False.
-    # @SIDE_EFFECT: May mutate environment list, DB write, logging.
-    # @DATA_CONTRACT: Input(str env_id, Environment updated_env) -> Output(bool)
+    # @PURPOSE: Updates an existing environment.
+    # @PARAM: env_id (str) - The ID of the environment to update.
+    # @PARAM: updated_env (Environment) - The updated environment data.
+    # @RETURN: bool - True if updated, False otherwise.
     def update_environment(self, env_id: str, updated_env: Environment) -> bool:
-        with belief_scope("ConfigManager.update_environment"):
-            if not env_id or not isinstance(env_id, str):
-                raise ValueError("env_id must be a non-empty string")
-            if not isinstance(updated_env, Environment):
-                raise TypeError("updated_env must be an instance of Environment")
-
-            logger.reason(f"Attempting to update environment: {env_id}")
+        with belief_scope("update_environment"):
+            logger.info(f"[update_environment][Entry] Updating {env_id}")
+            assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
+            assert isinstance(updated_env, Environment), "updated_env must be an instance of Environment"
             for i, env in enumerate(self.config.environments):
                 if env.id == env_id:
                     if updated_env.password == "********":
-                        logger.reason("Preserving existing password for masked update")
                         updated_env.password = env.password

                     self.config.environments[i] = updated_env
                     self.save()
-                    logger.reflect(f"Environment {env_id} updated and saved")
+                    logger.info(f"[update_environment][Coherence:OK] Updated {env_id}")
                     return True

-            logger.explore(f"Environment {env_id} not found for update")
+            logger.warning(f"[update_environment][Coherence:Failed] Environment {env_id} not found")
             return False
     # [/DEF:update_environment:Function]
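A hypothetical round-trip for the masked-password rule above: sending back the `"********"` placeholder preserves the stored secret (assumes pydantic v2's model_copy and an already-configured "dev" environment):

```python
# The UI echoes the mask instead of the real password; the update keeps the secret.
existing = manager.get_environment("dev")
patch = existing.model_copy(update={"name": "Dev (renamed)", "password": "********"})
manager.update_environment("dev", patch)
assert manager.get_environment("dev").password == existing.password
```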

     # [DEF:delete_environment:Function]
-    # @PURPOSE: Delete environment by id and persist when deletion occurs.
-    # @PRE: env_id is non-empty string.
-    # @POST: Environment is removed when present; otherwise configuration is unchanged.
-    # @SIDE_EFFECT: May mutate environment list, conditional DB write, logging.
-    # @DATA_CONTRACT: Input(str env_id) -> Output(None)
+    # @PURPOSE: Deletes an environment by ID.
+    # @PARAM: env_id (str) - The ID of the environment to delete.
     def delete_environment(self, env_id: str):
-        with belief_scope("ConfigManager.delete_environment"):
-            if not env_id or not isinstance(env_id, str):
-                raise ValueError("env_id must be a non-empty string")
-
-            logger.reason(f"Attempting to delete environment: {env_id}")
+        with belief_scope("delete_environment"):
+            logger.info(f"[delete_environment][Entry] Deleting {env_id}")
+            assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
             original_count = len(self.config.environments)
             self.config.environments = [e for e in self.config.environments if e.id != env_id]

             if len(self.config.environments) < original_count:
                 self.save()
-                logger.reflect(f"Environment {env_id} deleted and configuration saved")
+                logger.info(f"[delete_environment][Action] Deleted {env_id}")
             else:
-                logger.explore(f"Environment {env_id} not found for deletion")
+                logger.warning(f"[delete_environment][Coherence:Failed] Environment {env_id} not found")
     # [/DEF:delete_environment:Function]
@@ -24,19 +24,19 @@ class Schedule(BaseModel):

 # [DEF:Environment:DataClass]
 # @PURPOSE: Represents a Superset environment configuration.
 class Environment(BaseModel):
     id: str
     name: str
     url: str
     username: str
     password: str # Will be masked in UI
     stage: str = Field(default="DEV", pattern="^(DEV|PREPROD|PROD)$")
     verify_ssl: bool = True
     timeout: int = 30
     is_default: bool = False
     is_production: bool = False
     backup_schedule: Schedule = Field(default_factory=Schedule)
 # [/DEF:Environment:DataClass]

 # [DEF:LoggingConfig:DataClass]
 # @PURPOSE: Defines the configuration for the application's logging system.
@@ -49,18 +49,10 @@ class LoggingConfig(BaseModel):
     enable_belief_state: bool = True
 # [/DEF:LoggingConfig:DataClass]

-# [DEF:CleanReleaseConfig:DataClass]
-# @PURPOSE: Configuration for clean release compliance subsystem.
-class CleanReleaseConfig(BaseModel):
-    active_policy_id: Optional[str] = None
-    active_registry_id: Optional[str] = None
-# [/DEF:CleanReleaseConfig:DataClass]
-
 # [DEF:GlobalSettings:DataClass]
 # @PURPOSE: Represents global application settings.
 class GlobalSettings(BaseModel):
     storage: StorageConfig = Field(default_factory=StorageConfig)
-    clean_release: CleanReleaseConfig = Field(default_factory=CleanReleaseConfig)
     default_environment_id: Optional[str] = None
     logging: LoggingConfig = Field(default_factory=LoggingConfig)
     connections: List[dict] = []
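The `stage` field on Environment is pattern-constrained, so invalid stages fail at model construction time. Illustrative values (assumes pydantic v2 and the Environment model above):

```python
from pydantic import ValidationError

# stage defaults to "DEV" when omitted.
env = Environment(id="e1", name="Dev", url="http://localhost:8088",
                  username="admin", password="secret")

try:
    Environment(id="e2", name="Bad", url="http://x", username="u",
                password="p", stage="STAGING")  # not in DEV|PREPROD|PROD
except ValidationError as exc:
    print(exc.errors()[0]["loc"])  # ('stage',)
```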
@@ -11,7 +11,7 @@
 # @INVARIANT: A single engine instance is used for the entire application.

 # [SECTION: IMPORTS]
-from sqlalchemy import create_engine, inspect, text
+from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
 from ..models.mapping import Base
 # Import models to ensure they're registered with Base
@@ -20,9 +20,7 @@ from ..models import auth as _auth_models # noqa: F401
 from ..models import config as _config_models # noqa: F401
 from ..models import llm as _llm_models # noqa: F401
 from ..models import assistant as _assistant_models # noqa: F401
-from ..models import profile as _profile_models # noqa: F401
-from ..models import clean_release as _clean_release_models # noqa: F401
-from .logger import belief_scope, logger
+from .logger import belief_scope
 from .auth.config import auth_config
 import os
 from pathlib import Path
@@ -96,191 +94,6 @@ TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_e
 AuthSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=auth_engine)
 # [/DEF:AuthSessionLocal:Class]

-# [DEF:_ensure_user_dashboard_preferences_columns:Function]
-# @PURPOSE: Applies additive schema upgrades for user_dashboard_preferences table.
-# @PRE: bind_engine points to application database where profile table is stored.
-# @POST: Missing columns are added without data loss.
-def _ensure_user_dashboard_preferences_columns(bind_engine):
-    with belief_scope("_ensure_user_dashboard_preferences_columns"):
-        table_name = "user_dashboard_preferences"
-        inspector = inspect(bind_engine)
-        if table_name not in inspector.get_table_names():
-            return
-
-        existing_columns = {
-            str(column.get("name") or "").strip()
-            for column in inspector.get_columns(table_name)
-        }
-
-        alter_statements = []
-        if "git_username" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences ADD COLUMN git_username VARCHAR"
-            )
-        if "git_email" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences ADD COLUMN git_email VARCHAR"
-            )
-        if "git_personal_access_token_encrypted" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences "
-                "ADD COLUMN git_personal_access_token_encrypted VARCHAR"
-            )
-        if "start_page" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences "
-                "ADD COLUMN start_page VARCHAR NOT NULL DEFAULT 'dashboards'"
-            )
-        if "auto_open_task_drawer" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences "
-                "ADD COLUMN auto_open_task_drawer BOOLEAN NOT NULL DEFAULT TRUE"
-            )
-        if "dashboards_table_density" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences "
-                "ADD COLUMN dashboards_table_density VARCHAR NOT NULL DEFAULT 'comfortable'"
-            )
-        if "show_only_slug_dashboards" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences "
-                "ADD COLUMN show_only_slug_dashboards BOOLEAN NOT NULL DEFAULT TRUE"
-            )
-
-        if not alter_statements:
-            return
-
-        try:
-            with bind_engine.begin() as connection:
-                for statement in alter_statements:
-                    connection.execute(text(statement))
-        except Exception as migration_error:
-            logger.warning(
-                "[database][EXPLORE] Profile preference additive migration failed: %s",
-                migration_error,
-            )
-# [/DEF:_ensure_user_dashboard_preferences_columns:Function]
-
-
-# [DEF:_ensure_user_dashboard_preferences_health_columns:Function]
-# @PURPOSE: Applies additive schema upgrades for user_dashboard_preferences table (health fields).
-def _ensure_user_dashboard_preferences_health_columns(bind_engine):
-    with belief_scope("_ensure_user_dashboard_preferences_health_columns"):
-        table_name = "user_dashboard_preferences"
-        inspector = inspect(bind_engine)
-        if table_name not in inspector.get_table_names():
-            return
-
-        existing_columns = {
-            str(column.get("name") or "").strip()
-            for column in inspector.get_columns(table_name)
-        }
-
-        alter_statements = []
-        if "telegram_id" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences ADD COLUMN telegram_id VARCHAR"
-            )
-        if "email_address" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences ADD COLUMN email_address VARCHAR"
-            )
-        if "notify_on_fail" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE user_dashboard_preferences ADD COLUMN notify_on_fail BOOLEAN NOT NULL DEFAULT TRUE"
-            )
-
-        if not alter_statements:
-            return
-
-        try:
-            with bind_engine.begin() as connection:
-                for statement in alter_statements:
-                    connection.execute(text(statement))
-        except Exception as migration_error:
-            logger.warning(
-                "[database][EXPLORE] Profile health preference additive migration failed: %s",
-                migration_error,
-            )
-# [/DEF:_ensure_user_dashboard_preferences_health_columns:Function]
-
-
-# [DEF:_ensure_llm_validation_results_columns:Function]
-# @PURPOSE: Applies additive schema upgrades for llm_validation_results table.
-def _ensure_llm_validation_results_columns(bind_engine):
-    with belief_scope("_ensure_llm_validation_results_columns"):
-        table_name = "llm_validation_results"
-        inspector = inspect(bind_engine)
-        if table_name not in inspector.get_table_names():
-            return
-
-        existing_columns = {
-            str(column.get("name") or "").strip()
-            for column in inspector.get_columns(table_name)
-        }
-
-        alter_statements = []
-        if "task_id" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE llm_validation_results ADD COLUMN task_id VARCHAR"
-            )
-        if "environment_id" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE llm_validation_results ADD COLUMN environment_id VARCHAR"
-            )
-
-        if not alter_statements:
-            return
-
-        try:
-            with bind_engine.begin() as connection:
-                for statement in alter_statements:
-                    connection.execute(text(statement))
-        except Exception as migration_error:
-            logger.warning(
-                "[database][EXPLORE] ValidationRecord additive migration failed: %s",
-                migration_error,
-            )
-# [/DEF:_ensure_llm_validation_results_columns:Function]
-
-
-# [DEF:_ensure_git_server_configs_columns:Function]
-# @PURPOSE: Applies additive schema upgrades for git_server_configs table.
-# @PRE: bind_engine points to application database.
-# @POST: Missing columns are added without data loss.
-def _ensure_git_server_configs_columns(bind_engine):
-    with belief_scope("_ensure_git_server_configs_columns"):
-        table_name = "git_server_configs"
-        inspector = inspect(bind_engine)
-        if table_name not in inspector.get_table_names():
-            return
-
-        existing_columns = {
-            str(column.get("name") or "").strip()
-            for column in inspector.get_columns(table_name)
-        }
-
-        alter_statements = []
-        if "default_branch" not in existing_columns:
-            alter_statements.append(
-                "ALTER TABLE git_server_configs ADD COLUMN default_branch VARCHAR NOT NULL DEFAULT 'main'"
-            )
-
-        if not alter_statements:
-            return
-
-        try:
-            with bind_engine.begin() as connection:
-                for statement in alter_statements:
-                    connection.execute(text(statement))
-        except Exception as migration_error:
-            logger.warning(
-                "[database][EXPLORE] GitServerConfig preference additive migration failed: %s",
-                migration_error,
-            )
-# [/DEF:_ensure_git_server_configs_columns:Function]
-
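All four deleted helpers shared one idempotent shape: inspect the table, compute the missing columns, and apply additive ALTERs inside a transaction. A consolidated sketch of that pattern (table and column names are parameters here, not project specifics):

```python
from sqlalchemy import inspect, text

# Idempotent additive migration: re-running it is a no-op once columns exist.
def ensure_columns(bind_engine, table_name: str, columns: dict) -> None:
    inspector = inspect(bind_engine)
    if table_name not in inspector.get_table_names():
        return
    existing = {col["name"] for col in inspector.get_columns(table_name)}
    statements = [
        f"ALTER TABLE {table_name} ADD COLUMN {name} {ddl}"
        for name, ddl in columns.items()
        if name not in existing
    ]
    if not statements:
        return
    with bind_engine.begin() as connection:
        for statement in statements:
            connection.execute(text(statement))
```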
 # [DEF:init_db:Function]
 # @PURPOSE: Initializes the database by creating all tables.
 # @PRE: engine, tasks_engine and auth_engine are initialized.
@@ -291,10 +104,6 @@ def init_db():
     Base.metadata.create_all(bind=engine)
     Base.metadata.create_all(bind=tasks_engine)
     Base.metadata.create_all(bind=auth_engine)
-    _ensure_user_dashboard_preferences_columns(engine)
-    _ensure_llm_validation_results_columns(engine)
-    _ensure_user_dashboard_preferences_health_columns(engine)
-    _ensure_git_server_configs_columns(engine)
 # [/DEF:init_db:Function]

 # [DEF:get_db:Function]
@@ -225,7 +225,7 @@ def test_enable_belief_state_flag(caplog):
     assert not any("[DisabledFunction][Exit]" in msg for msg in log_messages), "Exit should not be logged when disabled"
     # Coherence:OK should still be logged (internal tracking)
     assert any("[DisabledFunction][COHERENCE:OK]" in msg for msg in log_messages), "Coherence should still be logged"
-# [/DEF:test_enable_belief_state_flag:Function]


 # [DEF:test_belief_scope_missing_anchor:Function]
@@ -1,170 +1,118 @@
 # [DEF:backend.src.core.migration.risk_assessor:Module]
-# @TIER: CRITICAL
-# @SEMANTICS: migration, dry_run, risk, scoring, preflight
-# @PURPOSE: Compute deterministic migration risk items and aggregate score for dry-run reporting.
-# @LAYER: Domain
-# @RELATION: [DEPENDS_ON] ->[backend.src.core.superset_client.SupersetClient]
-# @RELATION: [DISPATCHES] ->[backend.src.core.migration.dry_run_orchestrator.MigrationDryRunService.run]
-# @INVARIANT: Risk scoring must remain bounded to [0,100] and preserve severity-to-weight mapping.
-# @TEST_CONTRACT: [source_objects,target_objects,diff,target_client] -> [List[RiskItem]]
-# @TEST_SCENARIO: [overwrite_update_objects] -> [medium overwrite_existing risk is emitted for each update diff item]
-# @TEST_SCENARIO: [missing_datasource_dataset] -> [high missing_datasource risk is emitted]
-# @TEST_SCENARIO: [owner_mismatch_dashboard] -> [low owner_mismatch risk is emitted]
-# @TEST_EDGE: [missing_field] -> [object without uuid is ignored by indexer]
-# @TEST_EDGE: [invalid_type] -> [non-list owners input normalizes to empty identifiers]
-# @TEST_EDGE: [external_fail] -> [target_client get_databases exception propagates to caller]
-# @TEST_INVARIANT: [score_upper_bound_100] -> VERIFIED_BY: [severity_weight_aggregation]
-# @UX_STATE: [Idle] -> [N/A backend domain module]
-# @UX_FEEDBACK: [N/A] -> [No direct UI side effects in this module]
-# @UX_RECOVERY: [N/A] -> [Caller-level retry/recovery]
-# @UX_REACTIVITY: [N/A] -> [Backend synchronous function contracts]
+# @TIER: STANDARD
+# @SEMANTICS: migration, dry_run, risk, scoring
+# @PURPOSE: Risk evaluation helpers for migration pre-flight reporting.
+# @LAYER: Core
+# @RELATION: USED_BY -> backend.src.core.migration.dry_run_orchestrator

 from typing import Any, Dict, List

-from ..logger import logger, belief_scope
 from ..superset_client import SupersetClient

 # [DEF:index_by_uuid:Function]
 # @PURPOSE: Build UUID-index from normalized objects.
-# @PRE: Input list items are dict-like payloads potentially containing "uuid".
-# @POST: Returns mapping keyed by string uuid; only truthy uuid values are included.
-# @SIDE_EFFECT: Emits reasoning/reflective logs only.
-# @DATA_CONTRACT: List[Dict[str, Any]] -> Dict[str, Dict[str, Any]]
 def index_by_uuid(objects: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
-    with belief_scope("risk_assessor.index_by_uuid"):
-        logger.reason("Building UUID index", extra={"objects_count": len(objects)})
-        indexed: Dict[str, Dict[str, Any]] = {}
-        for obj in objects:
-            uuid = obj.get("uuid")
-            if uuid:
-                indexed[str(uuid)] = obj
-        logger.reflect("UUID index built", extra={"indexed_count": len(indexed)})
-        return indexed
+    indexed: Dict[str, Dict[str, Any]] = {}
+    for obj in objects:
+        uuid = obj.get("uuid")
+        if uuid:
+            indexed[str(uuid)] = obj
+    return indexed
 # [/DEF:index_by_uuid:Function]

 # [DEF:extract_owner_identifiers:Function]
 # @PURPOSE: Normalize owner payloads for stable comparison.
-# @PRE: Owners may be list payload, scalar values, or None.
-# @POST: Returns sorted unique owner identifiers as strings.
-# @SIDE_EFFECT: Emits reasoning/reflective logs only.
-# @DATA_CONTRACT: Any -> List[str]
 def extract_owner_identifiers(owners: Any) -> List[str]:
-    with belief_scope("risk_assessor.extract_owner_identifiers"):
-        logger.reason("Normalizing owner identifiers")
-        if not isinstance(owners, list):
-            logger.reflect("Owners payload is not list; returning empty identifiers")
-            return []
-        ids: List[str] = []
-        for owner in owners:
-            if isinstance(owner, dict):
-                if owner.get("username"):
-                    ids.append(str(owner["username"]))
-                elif owner.get("id") is not None:
-                    ids.append(str(owner["id"]))
-            elif owner is not None:
-                ids.append(str(owner))
-        normalized_ids = sorted(set(ids))
-        logger.reflect("Owner identifiers normalized", extra={"owner_count": len(normalized_ids)})
-        return normalized_ids
+    if not isinstance(owners, list):
+        return []
+    ids: List[str] = []
+    for owner in owners:
+        if isinstance(owner, dict):
+            if owner.get("username"):
+                ids.append(str(owner["username"]))
+            elif owner.get("id") is not None:
+                ids.append(str(owner["id"]))
+        elif owner is not None:
+            ids.append(str(owner))
+    return sorted(set(ids))
 # [/DEF:extract_owner_identifiers:Function]
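Behavior of the normalizer on mixed payloads, for reference (values illustrative):

```python
# Dicts contribute username (preferred) or id; bare scalars are stringified;
# None is skipped; duplicates collapse; output is sorted for stable comparison.
owners = [{"username": "alice"}, {"id": 7}, "bob", None, {"username": "alice"}]
print(extract_owner_identifiers(owners))   # ['7', 'alice', 'bob']
print(extract_owner_identifiers("alice"))  # [] -- non-list input normalizes to empty
```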

 # [DEF:build_risks:Function]
 # @PURPOSE: Build risk list from computed diffs and target catalog state.
-# @PRE: source_objects/target_objects/diff contain dashboards/charts/datasets keys with expected list structures.
-# @PRE: target_client is authenticated/usable for database list retrieval.
-# @POST: Returns list of deterministic risk items derived from overwrite, missing datasource, reference, and owner mismatch checks.
-# @SIDE_EFFECT: Calls target Superset API for databases metadata and emits logs.
-# @DATA_CONTRACT: (
-# @DATA_CONTRACT: Dict[str, List[Dict[str, Any]]],
-# @DATA_CONTRACT: Dict[str, List[Dict[str, Any]]],
-# @DATA_CONTRACT: Dict[str, Dict[str, List[Dict[str, Any]]]],
-# @DATA_CONTRACT: SupersetClient
-# @DATA_CONTRACT: ) -> List[Dict[str, Any]]
 def build_risks(
     source_objects: Dict[str, List[Dict[str, Any]]],
     target_objects: Dict[str, List[Dict[str, Any]]],
     diff: Dict[str, Dict[str, List[Dict[str, Any]]]],
     target_client: SupersetClient,
 ) -> List[Dict[str, Any]]:
-    with belief_scope("risk_assessor.build_risks"):
-        logger.reason("Building migration risks from diff payload")
     risks: List[Dict[str, Any]] = []
     for object_type in ("dashboards", "charts", "datasets"):
         for item in diff[object_type]["update"]:
             risks.append({
                 "code": "overwrite_existing",
                 "severity": "medium",
                 "object_type": object_type[:-1],
                 "object_uuid": item["uuid"],
                 "message": f"Object will be updated in target: {item.get('title') or item['uuid']}",
             })

     target_dataset_uuids = set(index_by_uuid(target_objects["datasets"]).keys())
     _, target_databases = target_client.get_databases(query={"columns": ["uuid"]})
     target_database_uuids = {str(item.get("uuid")) for item in target_databases if item.get("uuid")}

     for dataset in source_objects["datasets"]:
         db_uuid = dataset.get("database_uuid")
         if db_uuid and str(db_uuid) not in target_database_uuids:
             risks.append({
                 "code": "missing_datasource",
                 "severity": "high",
                 "object_type": "dataset",
                 "object_uuid": dataset.get("uuid"),
                 "message": f"Target datasource is missing for dataset {dataset.get('title') or dataset.get('uuid')}",
             })

     for chart in source_objects["charts"]:
         ds_uuid = chart.get("dataset_uuid")
         if ds_uuid and str(ds_uuid) not in target_dataset_uuids:
             risks.append({
                 "code": "breaking_reference",
                 "severity": "high",
                 "object_type": "chart",
                 "object_uuid": chart.get("uuid"),
                 "message": f"Chart references dataset not found on target: {ds_uuid}",
             })

     source_dash = index_by_uuid(source_objects["dashboards"])
     target_dash = index_by_uuid(target_objects["dashboards"])
     for item in diff["dashboards"]["update"]:
         source_obj = source_dash.get(item["uuid"])
         target_obj = target_dash.get(item["uuid"])
         if not source_obj or not target_obj:
             continue
         source_owners = extract_owner_identifiers(source_obj.get("owners"))
         target_owners = extract_owner_identifiers(target_obj.get("owners"))
         if source_owners and target_owners and source_owners != target_owners:
             risks.append({
                 "code": "owner_mismatch",
                 "severity": "low",
                 "object_type": "dashboard",
                 "object_uuid": item["uuid"],
                 "message": f"Owner mismatch for dashboard {item.get('title') or item['uuid']}",
             })
-        logger.reflect("Risk list assembled", extra={"risk_count": len(risks)})
     return risks
 # [/DEF:build_risks:Function]

 # [DEF:score_risks:Function]
 # @PURPOSE: Aggregate risk list into score and level.
-# @PRE: risk_items contains optional severity fields expected in {high,medium,low} or defaults to low weight.
-# @POST: Returns dict with score in [0,100], derived level, and original items.
-# @SIDE_EFFECT: Emits reasoning/reflective logs only.
-# @DATA_CONTRACT: List[Dict[str, Any]] -> Dict[str, Any]
 def score_risks(risk_items: List[Dict[str, Any]]) -> Dict[str, Any]:
-    with belief_scope("risk_assessor.score_risks"):
-        logger.reason("Scoring risk items", extra={"risk_items_count": len(risk_items)})
-        weights = {"high": 25, "medium": 10, "low": 5}
-        score = min(100, sum(weights.get(item.get("severity", "low"), 5) for item in risk_items))
-        level = "low" if score < 25 else "medium" if score < 60 else "high"
-        result = {"score": score, "level": level, "items": risk_items}
-        logger.reflect("Risk score computed", extra={"score": score, "level": level})
-        return result
+    weights = {"high": 25, "medium": 10, "low": 5}
+    score = min(100, sum(weights.get(item.get("severity", "low"), 5) for item in risk_items))
+    level = "low" if score < 25 else "medium" if score < 60 else "high"
+    return {"score": score, "level": level, "items": risk_items}
 # [/DEF:score_risks:Function]
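A worked example of the aggregation, which is unchanged on both sides of this diff: two high items and one medium give 25 + 25 + 10 = 60, which is exactly the "high" threshold.

```python
items = [{"severity": "high"}, {"severity": "high"}, {"severity": "medium"}]
result = score_risks(items)
print(result["score"], result["level"])  # 60 high

# The min(100, ...) cap means five or more high-severity items saturate at 100.
print(score_risks([{"severity": "high"}] * 5)["score"])  # 100
```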
@@ -1,15 +1,11 @@
 # [DEF:backend.src.core.migration_engine:Module]
 #
-# @TIER: CRITICAL
-# @SEMANTICS: migration, engine, zip, yaml, transformation, cross-filter, id-mapping
-# @PURPOSE: Transforms Superset export ZIP archives while preserving archive integrity and patching mapped identifiers.
-# @LAYER: Domain
-# @RELATION: [DEPENDS_ON] ->[src.core.logger]
-# @RELATION: [DEPENDS_ON] ->[src.core.mapping_service.IdMappingService]
-# @RELATION: [DEPENDS_ON] ->[src.models.mapping.ResourceType]
-# @RELATION: [DEPENDS_ON] ->[yaml]
+# @SEMANTICS: migration, engine, zip, yaml, transformation
+# @PURPOSE: Handles the interception and transformation of Superset asset ZIP archives.
+# @LAYER: Core
+# @RELATION: DEPENDS_ON -> PyYAML
 #
-# @INVARIANT: ZIP structure and non-targeted metadata must remain valid after transformation.
+# @INVARIANT: ZIP structure must be preserved after transformation.

 # [SECTION: IMPORTS]
 import zipfile
@@ -30,17 +26,10 @@ from src.models.mapping import ResourceType
 class MigrationEngine:

     # [DEF:__init__:Function]
-    # @PURPOSE: Initializes migration orchestration dependencies for ZIP/YAML metadata transformations.
-    # @PRE: mapping_service is None or implements batch remote ID lookup for ResourceType.CHART.
-    # @POST: self.mapping_service is assigned and available for optional cross-filter patching flows.
-    # @SIDE_EFFECT: Mutates in-memory engine state by storing dependency reference.
-    # @DATA_CONTRACT: Input[Optional[IdMappingService]] -> Output[MigrationEngine]
+    # @PURPOSE: Initializes the migration engine with optional ID mapping service.
     # @PARAM: mapping_service (Optional[IdMappingService]) - Used for resolving target environment integer IDs.
     def __init__(self, mapping_service: Optional[IdMappingService] = None):
-        with belief_scope("MigrationEngine.__init__"):
-            logger.reason("Initializing MigrationEngine")
-            self.mapping_service = mapping_service
-            logger.reflect("MigrationEngine initialized")
+        self.mapping_service = mapping_service
     # [/DEF:__init__:Function]

     # [DEF:transform_zip:Function]
@@ -51,24 +40,20 @@ class MigrationEngine:
     # @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
     # @PARAM: target_env_id (Optional[str]) - Used if fix_cross_filters is True to know which environment map to use.
     # @PARAM: fix_cross_filters (bool) - Whether to patch dashboard json_metadata.
-    # @PRE: zip_path points to a readable ZIP; output_path parent is writable; db_mapping keys/values are UUID strings.
-    # @POST: Returns True only when extraction, transformation, and packaging complete without exception.
-    # @SIDE_EFFECT: Reads/writes filesystem archives, creates temporary directory, emits structured logs.
-    # @DATA_CONTRACT: Input[(str zip_path, str output_path, Dict[str,str] db_mapping, bool strip_databases, Optional[str] target_env_id, bool fix_cross_filters)] -> Output[bool]
-    # @RETURN: bool - True if successful.
+    # @PRE: zip_path must point to a valid Superset export archive.
+    # @POST: Transformed archive is saved to output_path.
+    # @RETURN: bool - True if successful.
     def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True, target_env_id: Optional[str] = None, fix_cross_filters: bool = False) -> bool:
         """
         Transform a Superset export ZIP by replacing database UUIDs and optionally fixing cross-filters.
         """
         with belief_scope("MigrationEngine.transform_zip"):
-            logger.reason(f"Starting ZIP transformation: {zip_path} -> {output_path}")

             with tempfile.TemporaryDirectory() as temp_dir_str:
                 temp_dir = Path(temp_dir_str)

                 try:
                     # 1. Extract
-                    logger.reason(f"Extracting source archive to {temp_dir}")
+                    logger.info(f"[MigrationEngine.transform_zip][Action] Extracting ZIP: {zip_path}")
                     with zipfile.ZipFile(zip_path, 'r') as zf:
                         zf.extractall(temp_dir)

@@ -76,33 +61,33 @@ class MigrationEngine:
                     dataset_files = list(temp_dir.glob("**/datasets/**/*.yaml")) + list(temp_dir.glob("**/datasets/*.yaml"))
                     dataset_files = list(set(dataset_files))

-                    logger.reason(f"Transforming {len(dataset_files)} dataset YAML files")
+                    logger.info(f"[MigrationEngine.transform_zip][State] Found {len(dataset_files)} dataset files.")
                     for ds_file in dataset_files:
+                        logger.info(f"[MigrationEngine.transform_zip][Action] Transforming dataset: {ds_file}")
                         self._transform_yaml(ds_file, db_mapping)

                     # 2.5 Patch Cross-Filters (Dashboards)
-                    if fix_cross_filters:
-                        if self.mapping_service and target_env_id:
-                            dash_files = list(temp_dir.glob("**/dashboards/**/*.yaml")) + list(temp_dir.glob("**/dashboards/*.yaml"))
-                            dash_files = list(set(dash_files))
-
-                            logger.reason(f"Patching cross-filters for {len(dash_files)} dashboards")
-
-                            # Gather all source UUID-to-ID mappings from the archive first
-                            source_id_to_uuid_map = self._extract_chart_uuids_from_archive(temp_dir)
-
-                            for dash_file in dash_files:
-                                self._patch_dashboard_metadata(dash_file, target_env_id, source_id_to_uuid_map)
-                        else:
-                            logger.explore("Cross-filter patching requested but mapping service or target_env_id is missing")
+                    if fix_cross_filters and self.mapping_service and target_env_id:
+                        dash_files = list(temp_dir.glob("**/dashboards/**/*.yaml")) + list(temp_dir.glob("**/dashboards/*.yaml"))
+                        dash_files = list(set(dash_files))
+
+                        logger.info(f"[MigrationEngine.transform_zip][State] Found {len(dash_files)} dashboard files for patching.")
+
+                        # Gather all source UUID-to-ID mappings from the archive first
+                        source_id_to_uuid_map = self._extract_chart_uuids_from_archive(temp_dir)
+
+                        for dash_file in dash_files:
+                            logger.info(f"[MigrationEngine.transform_zip][Action] Patching dashboard: {dash_file}")
+                            self._patch_dashboard_metadata(dash_file, target_env_id, source_id_to_uuid_map)

                     # 3. Re-package
-                    logger.reason(f"Re-packaging transformed archive (strip_databases={strip_databases})")
+                    logger.info(f"[MigrationEngine.transform_zip][Action] Re-packaging ZIP to: {output_path} (strip_databases={strip_databases})")
                     with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zf:
                         for root, dirs, files in os.walk(temp_dir):
                             rel_root = Path(root).relative_to(temp_dir)

                             if strip_databases and "databases" in rel_root.parts:
+                                logger.info(f"[MigrationEngine.transform_zip][Action] Skipping file in databases directory: {rel_root}")
                                 continue

                             for file in files:
@@ -110,10 +95,9 @@ class MigrationEngine:
                                 arcname = file_path.relative_to(temp_dir)
                                 zf.write(file_path, arcname)

-                    logger.reflect("ZIP transformation completed successfully")
                     return True
                 except Exception as e:
-                    logger.explore(f"Error transforming ZIP: {e}")
+                    logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
                     return False
     # [/DEF:transform_zip:Function]
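Hypothetical invocation of transform_zip, to show the parameter shape (paths and UUIDs are illustrative, not from the repository):

```python
engine = MigrationEngine()
ok = engine.transform_zip(
    zip_path="exports/dashboards_source.zip",
    output_path="exports/dashboards_target.zip",
    db_mapping={"src-db-uuid": "tgt-db-uuid"},  # source UUID -> target UUID
    strip_databases=True,  # drop bundled database definitions from the archive
)
print("transformed" if ok else "failed")
```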

@@ -121,73 +105,54 @@ class MigrationEngine:
     # @PURPOSE: Replaces database_uuid in a single YAML file.
     # @PARAM: file_path (Path) - Path to the YAML file.
     # @PARAM: db_mapping (Dict[str, str]) - UUID mapping dictionary.
-    # @PRE: file_path exists, is readable YAML, and db_mapping contains source->target UUID pairs.
-    # @POST: database_uuid is replaced in-place only when source UUID is present in db_mapping.
-    # @SIDE_EFFECT: Reads and conditionally rewrites YAML file on disk.
-    # @DATA_CONTRACT: Input[(Path file_path, Dict[str,str] db_mapping)] -> Output[None]
+    # @PRE: file_path must exist and be readable.
+    # @POST: File is modified in-place if source UUID matches mapping.
     def _transform_yaml(self, file_path: Path, db_mapping: Dict[str, str]):
-        with belief_scope("MigrationEngine._transform_yaml"):
-            if not file_path.exists():
-                logger.explore(f"YAML file not found: {file_path}")
-                return
-
-            with open(file_path, 'r') as f:
-                data = yaml.safe_load(f)
-
-            if not data:
-                return
-
-            source_uuid = data.get('database_uuid')
-            if source_uuid in db_mapping:
-                logger.reason(f"Replacing database UUID in {file_path.name}")
-                data['database_uuid'] = db_mapping[source_uuid]
-                with open(file_path, 'w') as f:
-                    yaml.dump(data, f)
-                logger.reflect(f"Database UUID patched in {file_path.name}")
+        with open(file_path, 'r') as f:
+            data = yaml.safe_load(f)
+
+        if not data:
+            return
+
+        # Superset dataset YAML structure:
+        # database_uuid: ...
+        source_uuid = data.get('database_uuid')
+        if source_uuid in db_mapping:
+            data['database_uuid'] = db_mapping[source_uuid]
+            with open(file_path, 'w') as f:
+                yaml.dump(data, f)
     # [/DEF:_transform_yaml:Function]
|
|
||||||
# [DEF:_extract_chart_uuids_from_archive:Function]
|
# [DEF:_extract_chart_uuids_from_archive:Function]
|
||||||
# @PURPOSE: Scans extracted chart YAML files and builds a source chart ID to UUID lookup map.
|
# @PURPOSE: Scans the unpacked ZIP to map local exported integer IDs back to their UUIDs.
|
||||||
# @PRE: temp_dir exists and points to extracted archive root with optional chart YAML resources.
|
# @PARAM: temp_dir (Path) - Root dir of unpacked archive
|
||||||
# @POST: Returns a best-effort Dict[int, str] containing only parseable chart id/uuid pairs.
|
|
||||||
# @SIDE_EFFECT: Reads chart YAML files from filesystem; suppresses per-file parsing failures.
|
|
||||||
# @DATA_CONTRACT: Input[Path] -> Output[Dict[int,str]]
|
|
||||||
# @PARAM: temp_dir (Path) - Root dir of unpacked archive.
|
|
||||||
# @RETURN: Dict[int, str] - Mapping of source Integer ID to UUID.
|
# @RETURN: Dict[int, str] - Mapping of source Integer ID to UUID.
|
||||||
def _extract_chart_uuids_from_archive(self, temp_dir: Path) -> Dict[int, str]:
|
def _extract_chart_uuids_from_archive(self, temp_dir: Path) -> Dict[int, str]:
|
||||||
with belief_scope("MigrationEngine._extract_chart_uuids_from_archive"):
|
# Implementation Note: This is a placeholder for the logic that extracts
|
||||||
# Implementation Note: This is a placeholder for the logic that extracts
|
# actual Source IDs. In a real scenario, this involves parsing chart YAMLs
|
||||||
# actual Source IDs. In a real scenario, this involves parsing chart YAMLs
|
# or manifesting the export metadata structure where source IDs are stored.
|
||||||
# or manifesting the export metadata structure where source IDs are stored.
|
# For simplicity in US1 MVP, we assume it's read from chart files if present.
|
||||||
# For simplicity in US1 MVP, we assume it's read from chart files if present.
|
mapping = {}
|
||||||
mapping = {}
|
chart_files = list(temp_dir.glob("**/charts/**/*.yaml")) + list(temp_dir.glob("**/charts/*.yaml"))
|
||||||
chart_files = list(temp_dir.glob("**/charts/**/*.yaml")) + list(temp_dir.glob("**/charts/*.yaml"))
|
for cf in set(chart_files):
|
||||||
for cf in set(chart_files):
|
try:
|
||||||
try:
|
with open(cf, 'r') as f:
|
||||||
with open(cf, 'r') as f:
|
cdata = yaml.safe_load(f)
|
||||||
cdata = yaml.safe_load(f)
|
if cdata and 'id' in cdata and 'uuid' in cdata:
|
||||||
if cdata and 'id' in cdata and 'uuid' in cdata:
|
mapping[cdata['id']] = cdata['uuid']
|
||||||
mapping[cdata['id']] = cdata['uuid']
|
except Exception:
|
||||||
except Exception:
|
pass
|
||||||
pass
|
return mapping
|
||||||
return mapping
|
|
||||||
# [/DEF:_extract_chart_uuids_from_archive:Function]
|
# [/DEF:_extract_chart_uuids_from_archive:Function]
|
||||||
|
|
||||||
# [DEF:_patch_dashboard_metadata:Function]
|
# [DEF:_patch_dashboard_metadata:Function]
|
||||||
# @PURPOSE: Rewrites dashboard json_metadata chart/dataset integer identifiers using target environment mappings.
|
# @PURPOSE: Replaces integer IDs in json_metadata.
|
||||||
# @PRE: file_path points to dashboard YAML with json_metadata; target_env_id is non-empty; source_map contains source id->uuid.
|
|
||||||
# @POST: json_metadata is re-serialized with mapped integer IDs when remote mappings are available; otherwise file remains unchanged.
|
|
||||||
# @SIDE_EFFECT: Reads/writes YAML file, performs mapping lookup via mapping_service, emits logs for recoverable/terminal failures.
|
|
||||||
# @DATA_CONTRACT: Input[(Path file_path, str target_env_id, Dict[int,str] source_map)] -> Output[None]
|
|
||||||
# @PARAM: file_path (Path)
|
# @PARAM: file_path (Path)
|
||||||
# @PARAM: target_env_id (str)
|
# @PARAM: target_env_id (str)
|
||||||
# @PARAM: source_map (Dict[int, str])
|
# @PARAM: source_map (Dict[int, str])
|
||||||
def _patch_dashboard_metadata(self, file_path: Path, target_env_id: str, source_map: Dict[int, str]):
|
def _patch_dashboard_metadata(self, file_path: Path, target_env_id: str, source_map: Dict[int, str]):
|
||||||
with belief_scope("MigrationEngine._patch_dashboard_metadata"):
|
with belief_scope("MigrationEngine._patch_dashboard_metadata"):
|
||||||
try:
|
try:
|
||||||
if not file_path.exists():
|
|
||||||
return
|
|
||||||
|
|
||||||
with open(file_path, 'r') as f:
|
with open(file_path, 'r') as f:
|
||||||
data = yaml.safe_load(f)
|
data = yaml.safe_load(f)
|
||||||
|
|
||||||
@@ -198,13 +163,18 @@ class MigrationEngine:
|
|||||||
if not metadata_str:
|
if not metadata_str:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
metadata = json.loads(metadata_str)
|
||||||
|
modified = False
|
||||||
|
|
||||||
|
# We need to deeply traverse and replace. For MVP, string replacement over the raw JSON is an option,
|
||||||
|
# but careful dict traversal is safer.
|
||||||
|
|
||||||
# Fetch target UUIDs for everything we know:
|
# Fetch target UUIDs for everything we know:
|
||||||
uuids_needed = list(source_map.values())
|
uuids_needed = list(source_map.values())
|
||||||
logger.reason(f"Resolving {len(uuids_needed)} remote IDs for dashboard metadata patching")
|
|
||||||
target_ids = self.mapping_service.get_remote_ids_batch(target_env_id, ResourceType.CHART, uuids_needed)
|
target_ids = self.mapping_service.get_remote_ids_batch(target_env_id, ResourceType.CHART, uuids_needed)
|
||||||
|
|
||||||
if not target_ids:
|
if not target_ids:
|
||||||
logger.reflect("No remote target IDs found in mapping database for this dashboard.")
|
logger.info("[MigrationEngine._patch_dashboard_metadata][Reflect] No remote target IDs found in mapping database.")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Map Source Int -> Target Int
|
# Map Source Int -> Target Int
|
||||||
@@ -217,16 +187,21 @@ class MigrationEngine:
|
|||||||
missing_targets.append(s_id)
|
missing_targets.append(s_id)
|
||||||
|
|
||||||
if missing_targets:
|
if missing_targets:
|
||||||
logger.explore(f"Missing target IDs for source IDs: {missing_targets}. Cross-filters might break.")
|
logger.warning(f"[MigrationEngine._patch_dashboard_metadata][Coherence:Recoverable] Missing target IDs for source IDs: {missing_targets}. Cross-filters for these IDs might break.")
|
||||||
|
|
||||||
if not source_to_target:
|
if not source_to_target:
|
||||||
logger.reflect("No source IDs matched remotely. Skipping patch.")
|
logger.info("[MigrationEngine._patch_dashboard_metadata][Reflect] No source IDs matched remotely. Skipping patch.")
|
||||||
return
|
return
|
||||||
|
|
||||||
logger.reason(f"Patching {len(source_to_target)} ID references in json_metadata")
|
# Complex metadata traversal would go here (e.g. for native_filter_configuration)
|
||||||
|
# We use regex replacement over the string for safety over unknown nested dicts.
|
||||||
|
|
||||||
new_metadata_str = metadata_str
|
new_metadata_str = metadata_str
|
||||||
|
|
||||||
|
# Replace chartId and datasetId assignments explicitly.
|
||||||
|
# Pattern: "datasetId": 42 or "chartId": 42
|
||||||
for s_id, t_id in source_to_target.items():
|
for s_id, t_id in source_to_target.items():
|
||||||
|
# Replace in native_filter_configuration targets
|
||||||
new_metadata_str = re.sub(r'("datasetId"\s*:\s*)' + str(s_id) + r'(\b)', r'\g<1>' + str(t_id) + r'\g<2>', new_metadata_str)
|
new_metadata_str = re.sub(r'("datasetId"\s*:\s*)' + str(s_id) + r'(\b)', r'\g<1>' + str(t_id) + r'\g<2>', new_metadata_str)
|
||||||
new_metadata_str = re.sub(r'("chartId"\s*:\s*)' + str(s_id) + r'(\b)', r'\g<1>' + str(t_id) + r'\g<2>', new_metadata_str)
|
new_metadata_str = re.sub(r'("chartId"\s*:\s*)' + str(s_id) + r'(\b)', r'\g<1>' + str(t_id) + r'\g<2>', new_metadata_str)
|
||||||
|
|
||||||
@@ -235,10 +210,10 @@ class MigrationEngine:
|
|||||||
|
|
||||||
with open(file_path, 'w') as f:
|
with open(file_path, 'w') as f:
|
||||||
yaml.dump(data, f)
|
yaml.dump(data, f)
|
||||||
logger.reflect(f"Dashboard metadata patched and saved: {file_path.name}")
|
logger.info(f"[MigrationEngine._patch_dashboard_metadata][Reason] Re-serialized modified JSON metadata for dashboard.")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.explore(f"Metadata patch failed for {file_path.name}: {e}")
|
logger.error(f"[MigrationEngine._patch_dashboard_metadata][Coherence:Failed] Metadata patch failed: {e}")
|
||||||
|
|
||||||
# [/DEF:_patch_dashboard_metadata:Function]
|
# [/DEF:_patch_dashboard_metadata:Function]
|
||||||
|
|
||||||
|
|||||||
@@ -76,8 +76,17 @@ class PluginLoader:
|
|||||||
"""
|
"""
|
||||||
Loads a single Python module and extracts PluginBase subclasses.
|
Loads a single Python module and extracts PluginBase subclasses.
|
||||||
"""
|
"""
|
||||||
# All runtime code is imported through the canonical `src` package root.
|
# Try to determine the correct package prefix based on how the app is running
|
||||||
package_name = f"src.plugins.{module_name}"
|
# For standalone execution, we need to handle the import differently
|
||||||
|
if __name__ == "__main__" or "test" in __name__:
|
||||||
|
# When running as standalone or in tests, use relative import
|
||||||
|
package_name = f"plugins.{module_name}"
|
||||||
|
elif "backend.src" in __name__:
|
||||||
|
package_prefix = "backend.src.plugins"
|
||||||
|
package_name = f"{package_prefix}.{module_name}"
|
||||||
|
else:
|
||||||
|
package_prefix = "src.plugins"
|
||||||
|
package_name = f"{package_prefix}.{module_name}"
|
||||||
|
|
||||||
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
|
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
|
||||||
spec = importlib.util.spec_from_file_location(package_name, file_path)
|
spec = importlib.util.spec_from_file_location(package_name, file_path)
|
||||||
@@ -189,4 +198,4 @@ class PluginLoader:
|
|||||||
return plugin_id in self._plugins
|
return plugin_id in self._plugins
|
||||||
# [/DEF:has_plugin:Function]
|
# [/DEF:has_plugin:Function]
|
||||||
|
|
||||||
# [/DEF:PluginLoader:Class]
|
# [/DEF:PluginLoader:Class]
|
||||||
@@ -8,13 +8,9 @@
|
|||||||
# [SECTION: IMPORTS]
|
# [SECTION: IMPORTS]
|
||||||
from apscheduler.schedulers.background import BackgroundScheduler
|
from apscheduler.schedulers.background import BackgroundScheduler
|
||||||
from apscheduler.triggers.cron import CronTrigger
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
from apscheduler.triggers.date import DateTrigger
|
|
||||||
from .logger import logger, belief_scope
|
from .logger import logger, belief_scope
|
||||||
from .config_manager import ConfigManager
|
from .config_manager import ConfigManager
|
||||||
from .database import SessionLocal
|
|
||||||
from ..models.llm import ValidationPolicy
|
|
||||||
import asyncio
|
import asyncio
|
||||||
from datetime import datetime, time, timedelta, date
|
|
||||||
# [/SECTION]
|
# [/SECTION]
|
||||||
|
|
||||||
# [DEF:SchedulerService:Class]
|
# [DEF:SchedulerService:Class]
|
||||||
@@ -121,63 +117,4 @@ class SchedulerService:
|
|||||||
# [/DEF:_trigger_backup:Function]
|
# [/DEF:_trigger_backup:Function]
|
||||||
|
|
||||||
# [/DEF:SchedulerService:Class]
|
# [/DEF:SchedulerService:Class]
|
||||||
|
|
||||||
# [DEF:ThrottledSchedulerConfigurator:Class]
|
|
||||||
# @TIER: CRITICAL
|
|
||||||
# @SEMANTICS: scheduler, throttling, distribution
|
|
||||||
# @PURPOSE: Distributes validation tasks evenly within an execution window.
|
|
||||||
class ThrottledSchedulerConfigurator:
|
|
||||||
# [DEF:calculate_schedule:Function]
|
|
||||||
# @PURPOSE: Calculates execution times for N tasks within a window.
|
|
||||||
# @PRE: window_start, window_end (time), dashboard_ids (List), current_date (date).
|
|
||||||
# @POST: Returns List[datetime] of scheduled times.
|
|
||||||
# @INVARIANT: Tasks are distributed with near-even spacing.
|
|
||||||
@staticmethod
|
|
||||||
def calculate_schedule(
|
|
||||||
window_start: time,
|
|
||||||
window_end: time,
|
|
||||||
dashboard_ids: list,
|
|
||||||
current_date: date
|
|
||||||
) -> list:
|
|
||||||
with belief_scope("ThrottledSchedulerConfigurator.calculate_schedule"):
|
|
||||||
n = len(dashboard_ids)
|
|
||||||
if n == 0:
|
|
||||||
return []
|
|
||||||
|
|
||||||
start_dt = datetime.combine(current_date, window_start)
|
|
||||||
end_dt = datetime.combine(current_date, window_end)
|
|
||||||
|
|
||||||
# Handle window crossing midnight
|
|
||||||
if end_dt < start_dt:
|
|
||||||
end_dt += timedelta(days=1)
|
|
||||||
|
|
||||||
total_seconds = (end_dt - start_dt).total_seconds()
|
|
||||||
|
|
||||||
# Minimum interval of 1 second to avoid division by zero or negative
|
|
||||||
if total_seconds <= 0:
|
|
||||||
logger.warning(f"[calculate_schedule] Window size is zero or negative. Falling back to start time for all {n} tasks.")
|
|
||||||
return [start_dt] * n
|
|
||||||
|
|
||||||
# If window is too small for even distribution (e.g. 10 tasks in 5 seconds),
|
|
||||||
# we still distribute them but they might be very close.
|
|
||||||
# The requirement says "near-even spacing".
|
|
||||||
|
|
||||||
if n == 1:
|
|
||||||
return [start_dt]
|
|
||||||
|
|
||||||
interval = total_seconds / (n - 1) if n > 1 else 0
|
|
||||||
|
|
||||||
# If interval is too small (e.g. < 1s), we might want a fallback,
|
|
||||||
# but the spec says "handle too-small windows with explicit fallback/warning".
|
|
||||||
if interval < 1:
|
|
||||||
logger.warning(f"[calculate_schedule] Window too small for {n} tasks (interval {interval:.2f}s). Tasks will be highly concentrated.")
|
|
||||||
|
|
||||||
scheduled_times = []
|
|
||||||
for i in range(n):
|
|
||||||
scheduled_times.append(start_dt + timedelta(seconds=i * interval))
|
|
||||||
|
|
||||||
return scheduled_times
|
|
||||||
# [/DEF:calculate_schedule:Function]
|
|
||||||
# [/DEF:ThrottledSchedulerConfigurator:Class]
|
|
||||||
|
|
||||||
# [/DEF:SchedulerModule:Module]
|
# [/DEF:SchedulerModule:Module]
|
||||||
@@ -150,54 +150,23 @@ class SupersetClient:
|
|||||||
# @PRE: Client is authenticated.
|
# @PRE: Client is authenticated.
|
||||||
# @POST: Returns a list of dashboard metadata summaries.
|
# @POST: Returns a list of dashboard metadata summaries.
|
||||||
# @RETURN: List[Dict]
|
# @RETURN: List[Dict]
|
||||||
def get_dashboards_summary(self, require_slug: bool = False) -> List[Dict]:
|
def get_dashboards_summary(self) -> List[Dict]:
|
||||||
with belief_scope("SupersetClient.get_dashboards_summary"):
|
with belief_scope("SupersetClient.get_dashboards_summary"):
|
||||||
# Rely on list endpoint default projection to stay compatible
|
# Rely on list endpoint default projection to stay compatible
|
||||||
# across Superset versions and preserve owners in one request.
|
# across Superset versions and preserve owners in one request.
|
||||||
query: Dict[str, Any] = {}
|
query: Dict[str, Any] = {}
|
||||||
if require_slug:
|
|
||||||
query["filters"] = [
|
|
||||||
{
|
|
||||||
"col": "slug",
|
|
||||||
"opr": "neq",
|
|
||||||
"value": "",
|
|
||||||
}
|
|
||||||
]
|
|
||||||
_, dashboards = self.get_dashboards(query=query)
|
_, dashboards = self.get_dashboards(query=query)
|
||||||
|
|
||||||
# Map fields to DashboardMetadata schema
|
# Map fields to DashboardMetadata schema
|
||||||
result = []
|
result = []
|
||||||
max_debug_samples = 12
|
for dash in dashboards:
|
||||||
for index, dash in enumerate(dashboards):
|
owners = self._extract_owner_labels(dash.get("owners"))
|
||||||
raw_owners = dash.get("owners")
|
|
||||||
raw_created_by = dash.get("created_by")
|
|
||||||
raw_changed_by = dash.get("changed_by")
|
|
||||||
raw_changed_by_name = dash.get("changed_by_name")
|
|
||||||
|
|
||||||
owners = self._extract_owner_labels(raw_owners)
|
|
||||||
# No per-dashboard detail requests here: keep list endpoint O(1).
|
# No per-dashboard detail requests here: keep list endpoint O(1).
|
||||||
if not owners:
|
if not owners:
|
||||||
owners = self._extract_owner_labels(
|
owners = self._extract_owner_labels(
|
||||||
[raw_created_by, raw_changed_by],
|
[dash.get("created_by"), dash.get("changed_by")],
|
||||||
)
|
)
|
||||||
|
|
||||||
projected_created_by = self._extract_user_display(
|
|
||||||
None,
|
|
||||||
raw_created_by,
|
|
||||||
)
|
|
||||||
projected_modified_by = self._extract_user_display(
|
|
||||||
raw_changed_by_name,
|
|
||||||
raw_changed_by,
|
|
||||||
)
|
|
||||||
|
|
||||||
raw_owner_usernames: List[str] = []
|
|
||||||
if isinstance(raw_owners, list):
|
|
||||||
for owner_payload in raw_owners:
|
|
||||||
if isinstance(owner_payload, dict):
|
|
||||||
owner_username = self._sanitize_user_text(owner_payload.get("username"))
|
|
||||||
if owner_username:
|
|
||||||
raw_owner_usernames.append(owner_username)
|
|
||||||
|
|
||||||
result.append({
|
result.append({
|
||||||
"id": dash.get("id"),
|
"id": dash.get("id"),
|
||||||
"slug": dash.get("slug"),
|
"slug": dash.get("slug"),
|
||||||
@@ -205,26 +174,16 @@ class SupersetClient:
|
|||||||
"url": dash.get("url"),
|
"url": dash.get("url"),
|
||||||
"last_modified": dash.get("changed_on_utc"),
|
"last_modified": dash.get("changed_on_utc"),
|
||||||
"status": "published" if dash.get("published") else "draft",
|
"status": "published" if dash.get("published") else "draft",
|
||||||
"created_by": projected_created_by,
|
"created_by": self._extract_user_display(
|
||||||
"modified_by": projected_modified_by,
|
None,
|
||||||
|
dash.get("created_by"),
|
||||||
|
),
|
||||||
|
"modified_by": self._extract_user_display(
|
||||||
|
dash.get("changed_by_name"),
|
||||||
|
dash.get("changed_by"),
|
||||||
|
),
|
||||||
"owners": owners,
|
"owners": owners,
|
||||||
})
|
})
|
||||||
|
|
||||||
if index < max_debug_samples:
|
|
||||||
app_logger.reflect(
|
|
||||||
"[REFLECT] Dashboard actor projection sample "
|
|
||||||
f"(env={getattr(self.env, 'id', None)}, dashboard_id={dash.get('id')}, "
|
|
||||||
f"raw_owners={raw_owners!r}, raw_owner_usernames={raw_owner_usernames!r}, "
|
|
||||||
f"raw_created_by={raw_created_by!r}, raw_changed_by={raw_changed_by!r}, "
|
|
||||||
f"raw_changed_by_name={raw_changed_by_name!r}, projected_owners={owners!r}, "
|
|
||||||
f"projected_created_by={projected_created_by!r}, projected_modified_by={projected_modified_by!r})"
|
|
||||||
)
|
|
||||||
|
|
||||||
app_logger.reflect(
|
|
||||||
"[REFLECT] Dashboard actor projection summary "
|
|
||||||
f"(env={getattr(self.env, 'id', None)}, dashboards={len(result)}, "
|
|
||||||
f"sampled={min(len(result), max_debug_samples)})"
|
|
||||||
)
|
|
||||||
return result
|
return result
|
||||||
# [/DEF:get_dashboards_summary:Function]
|
# [/DEF:get_dashboards_summary:Function]
|
||||||
|
|
||||||
@@ -240,35 +199,23 @@ class SupersetClient:
|
|||||||
page: int,
|
page: int,
|
||||||
page_size: int,
|
page_size: int,
|
||||||
search: Optional[str] = None,
|
search: Optional[str] = None,
|
||||||
require_slug: bool = False,
|
|
||||||
) -> Tuple[int, List[Dict]]:
|
) -> Tuple[int, List[Dict]]:
|
||||||
with belief_scope("SupersetClient.get_dashboards_summary_page"):
|
with belief_scope("SupersetClient.get_dashboards_summary_page"):
|
||||||
query: Dict[str, Any] = {
|
query: Dict[str, Any] = {
|
||||||
"page": max(page - 1, 0),
|
"page": max(page - 1, 0),
|
||||||
"page_size": page_size,
|
"page_size": page_size,
|
||||||
}
|
}
|
||||||
filters: List[Dict[str, Any]] = []
|
|
||||||
if require_slug:
|
|
||||||
filters.append(
|
|
||||||
{
|
|
||||||
"col": "slug",
|
|
||||||
"opr": "neq",
|
|
||||||
"value": "",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
normalized_search = (search or "").strip()
|
normalized_search = (search or "").strip()
|
||||||
if normalized_search:
|
if normalized_search:
|
||||||
# Superset list API supports filter objects with `opr` operator.
|
# Superset list API supports filter objects with `opr` operator.
|
||||||
# `ct` -> contains (ILIKE on most Superset backends).
|
# `ct` -> contains (ILIKE on most Superset backends).
|
||||||
filters.append(
|
query["filters"] = [
|
||||||
{
|
{
|
||||||
"col": "dashboard_title",
|
"col": "dashboard_title",
|
||||||
"opr": "ct",
|
"opr": "ct",
|
||||||
"value": normalized_search,
|
"value": normalized_search,
|
||||||
}
|
}
|
||||||
)
|
]
|
||||||
if filters:
|
|
||||||
query["filters"] = filters
|
|
||||||
|
|
||||||
total_count, dashboards = self.get_dashboards_page(query=query)
|
total_count, dashboards = self.get_dashboards_page(query=query)
|
||||||
|
|
||||||
|
|||||||
@@ -1,238 +0,0 @@
|
|||||||
# [DEF:backend.src.core.superset_profile_lookup:Module]
|
|
||||||
#
|
|
||||||
# @TIER: STANDARD
|
|
||||||
# @SEMANTICS: superset, users, lookup, profile, pagination, normalization
|
|
||||||
# @PURPOSE: Provides environment-scoped Superset account lookup adapter with stable normalized output.
|
|
||||||
# @LAYER: Core
|
|
||||||
# @RELATION: DEPENDS_ON -> backend.src.core.utils.network.APIClient
|
|
||||||
# @RELATION: DEPENDS_ON -> backend.src.core.logger
|
|
||||||
#
|
|
||||||
# @INVARIANT: Adapter never leaks raw upstream payload shape to API consumers.
|
|
||||||
|
|
||||||
# [SECTION: IMPORTS]
|
|
||||||
import json
|
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
|
|
||||||
from .logger import logger, belief_scope
|
|
||||||
from .utils.network import APIClient, AuthenticationError, SupersetAPIError
|
|
||||||
# [/SECTION]
|
|
||||||
|
|
||||||
|
|
||||||
# [DEF:SupersetAccountLookupAdapter:Class]
|
|
||||||
# @TIER: STANDARD
|
|
||||||
# @PURPOSE: Lookup Superset users and normalize candidates for profile binding.
|
|
||||||
class SupersetAccountLookupAdapter:
|
|
||||||
# [DEF:__init__:Function]
|
|
||||||
# @PURPOSE: Initializes lookup adapter with authenticated API client and environment context.
|
|
||||||
# @PRE: network_client supports request(method, endpoint, params=...).
|
|
||||||
# @POST: Adapter is ready to perform users lookup requests.
|
|
||||||
def __init__(self, network_client: APIClient, environment_id: str):
|
|
||||||
self.network_client = network_client
|
|
||||||
self.environment_id = str(environment_id or "")
|
|
||||||
# [/DEF:__init__:Function]
|
|
||||||
|
|
||||||
# [DEF:get_users_page:Function]
|
|
||||||
# @PURPOSE: Fetch one users page from Superset with passthrough search/sort parameters.
|
|
||||||
# @PRE: page_index >= 0 and page_size >= 1.
|
|
||||||
# @POST: Returns deterministic payload with normalized items and total count.
|
|
||||||
# @RETURN: Dict[str, Any]
|
|
||||||
def get_users_page(
|
|
||||||
self,
|
|
||||||
search: Optional[str] = None,
|
|
||||||
page_index: int = 0,
|
|
||||||
page_size: int = 20,
|
|
||||||
sort_column: str = "username",
|
|
||||||
sort_order: str = "desc",
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
with belief_scope("SupersetAccountLookupAdapter.get_users_page"):
|
|
||||||
normalized_page_index = max(int(page_index), 0)
|
|
||||||
normalized_page_size = max(int(page_size), 1)
|
|
||||||
|
|
||||||
normalized_sort_column = str(sort_column or "username").strip().lower() or "username"
|
|
||||||
normalized_sort_order = str(sort_order or "desc").strip().lower()
|
|
||||||
if normalized_sort_order not in {"asc", "desc"}:
|
|
||||||
normalized_sort_order = "desc"
|
|
||||||
|
|
||||||
query: Dict[str, Any] = {
|
|
||||||
"page": normalized_page_index,
|
|
||||||
"page_size": normalized_page_size,
|
|
||||||
"order_column": normalized_sort_column,
|
|
||||||
"order_direction": normalized_sort_order,
|
|
||||||
}
|
|
||||||
|
|
||||||
normalized_search = str(search or "").strip()
|
|
||||||
if normalized_search:
|
|
||||||
query["filters"] = [{"col": "username", "opr": "ct", "value": normalized_search}]
|
|
||||||
|
|
||||||
logger.reason(
|
|
||||||
"[REASON] Lookup Superset users "
|
|
||||||
f"(env={self.environment_id}, page={normalized_page_index}, page_size={normalized_page_size})"
|
|
||||||
)
|
|
||||||
logger.reflect(
|
|
||||||
"[REFLECT] Prepared Superset users lookup query "
|
|
||||||
f"(env={self.environment_id}, order_column={normalized_sort_column}, "
|
|
||||||
f"normalized_sort_order={normalized_sort_order}, "
|
|
||||||
f"payload_order_direction={query.get('order_direction')})"
|
|
||||||
)
|
|
||||||
|
|
||||||
primary_error: Optional[Exception] = None
|
|
||||||
last_error: Optional[Exception] = None
|
|
||||||
for attempt_index, endpoint in enumerate(("/security/users/", "/security/users"), start=1):
|
|
||||||
try:
|
|
||||||
logger.reason(
|
|
||||||
"[REASON] Users lookup request attempt "
|
|
||||||
f"(env={self.environment_id}, attempt={attempt_index}, endpoint={endpoint})"
|
|
||||||
)
|
|
||||||
response = self.network_client.request(
|
|
||||||
method="GET",
|
|
||||||
endpoint=endpoint,
|
|
||||||
params={"q": json.dumps(query)},
|
|
||||||
)
|
|
||||||
logger.reflect(
|
|
||||||
"[REFLECT] Users lookup endpoint succeeded "
|
|
||||||
f"(env={self.environment_id}, attempt={attempt_index}, endpoint={endpoint})"
|
|
||||||
)
|
|
||||||
return self._normalize_lookup_payload(
|
|
||||||
response=response,
|
|
||||||
page_index=normalized_page_index,
|
|
||||||
page_size=normalized_page_size,
|
|
||||||
)
|
|
||||||
except Exception as exc:
|
|
||||||
if primary_error is None:
|
|
||||||
primary_error = exc
|
|
||||||
last_error = exc
|
|
||||||
cause = getattr(exc, "__cause__", None)
|
|
||||||
cause_response = getattr(cause, "response", None)
|
|
||||||
status_code = getattr(cause_response, "status_code", None)
|
|
||||||
logger.explore(
|
|
||||||
"[EXPLORE] Users lookup endpoint failed "
|
|
||||||
f"(env={self.environment_id}, attempt={attempt_index}, endpoint={endpoint}, "
|
|
||||||
f"error_type={type(exc).__name__}, status_code={status_code}, "
|
|
||||||
f"payload_order_direction={query.get('order_direction')}): {exc}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if last_error is not None:
|
|
||||||
selected_error: Exception = last_error
|
|
||||||
if (
|
|
||||||
primary_error is not None
|
|
||||||
and primary_error is not last_error
|
|
||||||
and isinstance(last_error, AuthenticationError)
|
|
||||||
and not isinstance(primary_error, AuthenticationError)
|
|
||||||
):
|
|
||||||
selected_error = primary_error
|
|
||||||
logger.reflect(
|
|
||||||
"[REFLECT] Preserving primary lookup failure over fallback auth error "
|
|
||||||
f"(env={self.environment_id}, primary_error_type={type(primary_error).__name__}, "
|
|
||||||
f"fallback_error_type={type(last_error).__name__})"
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.explore(
|
|
||||||
"[EXPLORE] All Superset users lookup endpoints failed "
|
|
||||||
f"(env={self.environment_id}, payload_order_direction={query.get('order_direction')}, "
|
|
||||||
f"selected_error_type={type(selected_error).__name__})"
|
|
||||||
)
|
|
||||||
raise selected_error
|
|
||||||
raise SupersetAPIError("Superset users lookup failed without explicit error")
|
|
||||||
# [/DEF:get_users_page:Function]
|
|
||||||
|
|
||||||
# [DEF:_normalize_lookup_payload:Function]
|
|
||||||
# @PURPOSE: Convert Superset users response variants into stable candidates payload.
|
|
||||||
# @PRE: response can be dict/list in any supported upstream shape.
|
|
||||||
# @POST: Output contains canonical keys: status, environment_id, page_index, page_size, total, items.
|
|
||||||
# @RETURN: Dict[str, Any]
|
|
||||||
def _normalize_lookup_payload(
|
|
||||||
self,
|
|
||||||
response: Any,
|
|
||||||
page_index: int,
|
|
||||||
page_size: int,
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
with belief_scope("SupersetAccountLookupAdapter._normalize_lookup_payload"):
|
|
||||||
payload = response
|
|
||||||
if isinstance(payload, dict) and isinstance(payload.get("result"), dict):
|
|
||||||
payload = payload.get("result")
|
|
||||||
|
|
||||||
raw_items: List[Any] = []
|
|
||||||
total = 0
|
|
||||||
|
|
||||||
if isinstance(payload, dict):
|
|
||||||
if isinstance(payload.get("result"), list):
|
|
||||||
raw_items = payload.get("result") or []
|
|
||||||
total = int(payload.get("count", len(raw_items)) or 0)
|
|
||||||
elif isinstance(payload.get("users"), list):
|
|
||||||
raw_items = payload.get("users") or []
|
|
||||||
total = int(payload.get("total", len(raw_items)) or 0)
|
|
||||||
elif isinstance(payload.get("items"), list):
|
|
||||||
raw_items = payload.get("items") or []
|
|
||||||
total = int(payload.get("total", len(raw_items)) or 0)
|
|
||||||
elif isinstance(payload, list):
|
|
||||||
raw_items = payload
|
|
||||||
total = len(raw_items)
|
|
||||||
|
|
||||||
normalized_items: List[Dict[str, Any]] = []
|
|
||||||
seen_usernames = set()
|
|
||||||
|
|
||||||
for raw_user in raw_items:
|
|
||||||
candidate = self.normalize_user_payload(raw_user)
|
|
||||||
username_key = str(candidate.get("username") or "").strip().lower()
|
|
||||||
if not username_key:
|
|
||||||
continue
|
|
||||||
if username_key in seen_usernames:
|
|
||||||
continue
|
|
||||||
seen_usernames.add(username_key)
|
|
||||||
normalized_items.append(candidate)
|
|
||||||
|
|
||||||
logger.reflect(
|
|
||||||
"[REFLECT] Normalized lookup payload "
|
|
||||||
f"(env={self.environment_id}, items={len(normalized_items)}, total={max(total, len(normalized_items))})"
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"status": "success",
|
|
||||||
"environment_id": self.environment_id,
|
|
||||||
"page_index": max(int(page_index), 0),
|
|
||||||
"page_size": max(int(page_size), 1),
|
|
||||||
"total": max(int(total), len(normalized_items)),
|
|
||||||
"items": normalized_items,
|
|
||||||
}
|
|
||||||
# [/DEF:_normalize_lookup_payload:Function]
|
|
||||||
|
|
||||||
# [DEF:normalize_user_payload:Function]
|
|
||||||
# @PURPOSE: Project raw Superset user object to canonical candidate shape.
|
|
||||||
# @PRE: raw_user may have heterogenous key names between Superset versions.
|
|
||||||
# @POST: Returns normalized candidate keys (environment_id, username, display_name, email, is_active).
|
|
||||||
# @RETURN: Dict[str, Any]
|
|
||||||
def normalize_user_payload(self, raw_user: Any) -> Dict[str, Any]:
|
|
||||||
if not isinstance(raw_user, dict):
|
|
||||||
raw_user = {}
|
|
||||||
|
|
||||||
username = str(
|
|
||||||
raw_user.get("username")
|
|
||||||
or raw_user.get("userName")
|
|
||||||
or raw_user.get("name")
|
|
||||||
or ""
|
|
||||||
).strip()
|
|
||||||
|
|
||||||
full_name = str(raw_user.get("full_name") or "").strip()
|
|
||||||
first_name = str(raw_user.get("first_name") or "").strip()
|
|
||||||
last_name = str(raw_user.get("last_name") or "").strip()
|
|
||||||
display_name = full_name or " ".join(
|
|
||||||
part for part in [first_name, last_name] if part
|
|
||||||
).strip()
|
|
||||||
if not display_name:
|
|
||||||
display_name = username or None
|
|
||||||
|
|
||||||
email = str(raw_user.get("email") or "").strip() or None
|
|
||||||
is_active_raw = raw_user.get("is_active")
|
|
||||||
is_active = bool(is_active_raw) if is_active_raw is not None else None
|
|
||||||
|
|
||||||
return {
|
|
||||||
"environment_id": self.environment_id,
|
|
||||||
"username": username,
|
|
||||||
"display_name": display_name,
|
|
||||||
"email": email,
|
|
||||||
"is_active": is_active,
|
|
||||||
}
|
|
||||||
# [/DEF:normalize_user_payload:Function]
|
|
||||||
# [/DEF:SupersetAccountLookupAdapter:Class]
|
|
||||||
|
|
||||||
# [/DEF:backend.src.core.superset_profile_lookup:Module]
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
# [DEF:__tests__/test_task_logger:Module]
|
|
||||||
# @RELATION: VERIFIES -> ../task_logger.py
|
|
||||||
# @PURPOSE: Contract testing for TaskLogger
|
|
||||||
# [/DEF:__tests__/test_task_logger:Module]
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from unittest.mock import MagicMock
|
|
||||||
from src.core.task_manager.task_logger import TaskLogger
|
|
||||||
|
|
||||||
# @TEST_FIXTURE: valid_task_logger -> {"task_id": "test_123", "add_log_fn": lambda *args: None, "source": "test_plugin"}
|
|
||||||
@pytest.fixture
|
|
||||||
def mock_add_log():
|
|
||||||
return MagicMock()
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def task_logger(mock_add_log):
|
|
||||||
return TaskLogger(task_id="test_123", add_log_fn=mock_add_log, source="test_plugin")
|
|
||||||
|
|
||||||
# @TEST_CONTRACT: TaskLoggerModel -> Invariants
|
|
||||||
def test_task_logger_initialization(task_logger):
|
|
||||||
"""Verify TaskLogger is bound to specific task_id and source."""
|
|
||||||
assert task_logger._task_id == "test_123"
|
|
||||||
assert task_logger._default_source == "test_plugin"
|
|
||||||
|
|
||||||
# @TEST_CONTRACT: invariants -> "All specific log methods (info, error) delegate to _log"
|
|
||||||
def test_log_methods_delegation(task_logger, mock_add_log):
|
|
||||||
"""Verify info, error, warning, debug delegate to internal _log."""
|
|
||||||
task_logger.info("info message", metadata={"k": "v"})
|
|
||||||
mock_add_log.assert_called_with(
|
|
||||||
task_id="test_123",
|
|
||||||
level="INFO",
|
|
||||||
message="info message",
|
|
||||||
source="test_plugin",
|
|
||||||
metadata={"k": "v"}
|
|
||||||
)
|
|
||||||
|
|
||||||
task_logger.error("error message", source="override")
|
|
||||||
mock_add_log.assert_called_with(
|
|
||||||
task_id="test_123",
|
|
||||||
level="ERROR",
|
|
||||||
message="error message",
|
|
||||||
source="override",
|
|
||||||
metadata=None
|
|
||||||
)
|
|
||||||
|
|
||||||
task_logger.warning("warning message")
|
|
||||||
mock_add_log.assert_called_with(
|
|
||||||
task_id="test_123",
|
|
||||||
level="WARNING",
|
|
||||||
message="warning message",
|
|
||||||
source="test_plugin",
|
|
||||||
metadata=None
|
|
||||||
)
|
|
||||||
|
|
||||||
task_logger.debug("debug message")
|
|
||||||
mock_add_log.assert_called_with(
|
|
||||||
task_id="test_123",
|
|
||||||
level="DEBUG",
|
|
||||||
message="debug message",
|
|
||||||
source="test_plugin",
|
|
||||||
metadata=None
|
|
||||||
)
|
|
||||||
|
|
||||||
# @TEST_CONTRACT: invariants -> "with_source creates a new logger with the same task_id"
|
|
||||||
def test_with_source(task_logger):
|
|
||||||
"""Verify with_source returns a new instance with updated default source."""
|
|
||||||
new_logger = task_logger.with_source("new_source")
|
|
||||||
assert isinstance(new_logger, TaskLogger)
|
|
||||||
assert new_logger._task_id == "test_123"
|
|
||||||
assert new_logger._default_source == "new_source"
|
|
||||||
assert new_logger is not task_logger
|
|
||||||
|
|
||||||
# @TEST_EDGE: missing_task_id -> raises TypeError
|
|
||||||
def test_missing_task_id():
|
|
||||||
with pytest.raises(TypeError):
|
|
||||||
TaskLogger(add_log_fn=lambda x: x)
|
|
||||||
|
|
||||||
# @TEST_EDGE: invalid_add_log_fn -> raises TypeError
|
|
||||||
# (Python doesn't strictly enforce this at init, but let's verify it fails on call if not callable)
|
|
||||||
def test_invalid_add_log_fn():
|
|
||||||
logger = TaskLogger(task_id="msg", add_log_fn=None)
|
|
||||||
with pytest.raises(TypeError):
|
|
||||||
logger.info("test")
|
|
||||||
|
|
||||||
# @TEST_INVARIANT: consistent_delegation
|
|
||||||
def test_progress_log(task_logger, mock_add_log):
|
|
||||||
"""Verify progress method correctly formats metadata."""
|
|
||||||
task_logger.progress("Step 1", 45.5)
|
|
||||||
mock_add_log.assert_called_with(
|
|
||||||
task_id="test_123",
|
|
||||||
level="INFO",
|
|
||||||
message="Step 1",
|
|
||||||
source="test_plugin",
|
|
||||||
metadata={"progress": 45.5}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Boundary checks
|
|
||||||
task_logger.progress("Step high", 150)
|
|
||||||
assert mock_add_log.call_args[1]["metadata"]["progress"] == 100
|
|
||||||
|
|
||||||
task_logger.progress("Step low", -10)
|
|
||||||
assert mock_add_log.call_args[1]["metadata"]["progress"] == 0
|
|
||||||
@@ -10,7 +10,6 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
import json
|
import json
|
||||||
import re
|
|
||||||
|
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
from ...models.task import TaskRecord, TaskLogRecord
|
from ...models.task import TaskRecord, TaskLogRecord
|
||||||
@@ -81,40 +80,18 @@ class TaskPersistenceService:
|
|||||||
|
|
||||||
# [DEF:_resolve_environment_id:Function]
|
# [DEF:_resolve_environment_id:Function]
|
||||||
# @TIER: STANDARD
|
# @TIER: STANDARD
|
||||||
# @PURPOSE: Resolve environment id into existing environments.id value to satisfy FK constraints.
|
# @PURPOSE: Resolve environment id based on provided value or fallback to default
|
||||||
# @PRE: Session is active
|
# @PRE: Session is active
|
||||||
# @POST: Returns existing environments.id or None when unresolved.
|
# @POST: Environment ID is returned
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _resolve_environment_id(session: Session, env_id: Optional[str]) -> Optional[str]:
|
def _resolve_environment_id(session: Session, env_id: Optional[str]) -> str:
|
||||||
with belief_scope("_resolve_environment_id"):
|
with belief_scope("_resolve_environment_id"):
|
||||||
raw_value = str(env_id or "").strip()
|
if env_id:
|
||||||
if not raw_value:
|
return env_id
|
||||||
return None
|
repo_env = session.query(Environment).filter_by(name="default").first()
|
||||||
|
if repo_env:
|
||||||
# 1) Direct match by primary key.
|
return str(repo_env.id)
|
||||||
by_id = session.query(Environment).filter(Environment.id == raw_value).first()
|
return "default"
|
||||||
if by_id:
|
|
||||||
return str(by_id.id)
|
|
||||||
|
|
||||||
# 2) Exact match by name.
|
|
||||||
by_name = session.query(Environment).filter(Environment.name == raw_value).first()
|
|
||||||
if by_name:
|
|
||||||
return str(by_name.id)
|
|
||||||
|
|
||||||
# 3) Slug-like match (e.g. "ss-dev" -> "SS DEV").
|
|
||||||
def normalize_token(value: str) -> str:
|
|
||||||
lowered = str(value or "").strip().lower()
|
|
||||||
return re.sub(r"[^a-z0-9]+", "-", lowered).strip("-")
|
|
||||||
|
|
||||||
target_token = normalize_token(raw_value)
|
|
||||||
if not target_token:
|
|
||||||
return None
|
|
||||||
|
|
||||||
for env in session.query(Environment).all():
|
|
||||||
if normalize_token(env.id) == target_token or normalize_token(env.name) == target_token:
|
|
||||||
return str(env.id)
|
|
||||||
|
|
||||||
return None
|
|
||||||
# [/DEF:_resolve_environment_id:Function]
|
# [/DEF:_resolve_environment_id:Function]
|
||||||
|
|
||||||
# [DEF:__init__:Function]
|
# [DEF:__init__:Function]
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
# [DEF:src.core.utils:Package]
|
|
||||||
# @PURPOSE: Shared utility package root.
|
|
||||||
# [/DEF:src.core.utils:Package]
|
|
||||||
@@ -1,237 +0,0 @@
|
|||||||
# [DEF:backend.src.core.utils.async_network:Module]
|
|
||||||
#
|
|
||||||
# @TIER: CRITICAL
|
|
||||||
# @SEMANTICS: network, httpx, async, superset, authentication, cache
|
|
||||||
# @PURPOSE: Provides async Superset API client with shared auth-token cache to avoid per-request re-login.
|
|
||||||
# @LAYER: Infra
|
|
||||||
# @RELATION: DEPENDS_ON -> backend.src.core.utils.network.SupersetAuthCache
|
|
||||||
# @INVARIANT: Async client reuses cached auth tokens per environment credentials and invalidates on 401.
|
|
||||||
|
|
||||||
# [SECTION: IMPORTS]
|
|
||||||
from typing import Optional, Dict, Any, Union
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
from ..logger import logger as app_logger, belief_scope
|
|
||||||
from .network import (
|
|
||||||
AuthenticationError,
|
|
||||||
DashboardNotFoundError,
|
|
||||||
NetworkError,
|
|
||||||
PermissionDeniedError,
|
|
||||||
SupersetAPIError,
|
|
||||||
SupersetAuthCache,
|
|
||||||
)
|
|
||||||
# [/SECTION]
|
|
||||||
|
|
||||||
|
|
||||||
# [DEF:AsyncAPIClient:Class]
|
|
||||||
# @PURPOSE: Async Superset API client backed by httpx.AsyncClient with shared auth cache.
|
|
||||||
class AsyncAPIClient:
|
|
||||||
DEFAULT_TIMEOUT = 30
|
|
||||||
_auth_locks: Dict[tuple[str, str, bool], asyncio.Lock] = {}
|
|
||||||
|
|
||||||
# [DEF:__init__:Function]
|
|
||||||
# @PURPOSE: Initialize async API client for one environment.
|
|
||||||
# @PRE: config contains base_url and auth payload.
|
|
||||||
# @POST: Client is ready for async request/authentication flow.
|
|
||||||
def __init__(self, config: Dict[str, Any], verify_ssl: bool = True, timeout: int = DEFAULT_TIMEOUT):
|
|
||||||
self.base_url: str = self._normalize_base_url(config.get("base_url", ""))
|
|
||||||
self.api_base_url: str = f"{self.base_url}/api/v1"
|
|
||||||
self.auth = config.get("auth")
|
|
||||||
self.request_settings = {"verify_ssl": verify_ssl, "timeout": timeout}
|
|
||||||
self._client = httpx.AsyncClient(
|
|
||||||
verify=verify_ssl,
|
|
||||||
timeout=httpx.Timeout(timeout),
|
|
||||||
follow_redirects=True,
|
|
||||||
)
|
|
||||||
self._tokens: Dict[str, str] = {}
|
|
||||||
self._authenticated = False
|
|
||||||
self._auth_cache_key = SupersetAuthCache.build_key(
|
|
||||||
self.base_url,
|
|
||||||
self.auth,
|
|
||||||
verify_ssl,
|
|
||||||
)
|
|
||||||
|
|
||||||
# [/DEF:__init__:Function]
|
|
||||||
|
|
||||||
# [DEF:_normalize_base_url:Function]
|
|
||||||
# @PURPOSE: Normalize base URL for Superset API root construction.
|
|
||||||
# @POST: Returns canonical base URL without trailing slash and duplicate /api/v1 suffix.
|
|
||||||
def _normalize_base_url(self, raw_url: str) -> str:
|
|
||||||
normalized = str(raw_url or "").strip().rstrip("/")
|
|
||||||
if normalized.lower().endswith("/api/v1"):
|
|
||||||
normalized = normalized[:-len("/api/v1")]
|
|
||||||
return normalized.rstrip("/")
|
|
||||||
# [/DEF:_normalize_base_url:Function]
|
|
||||||
|
|
||||||
# [DEF:_build_api_url:Function]
|
|
||||||
# @PURPOSE: Build full API URL from relative Superset endpoint.
|
|
||||||
# @POST: Returns absolute URL for upstream request.
|
|
||||||
def _build_api_url(self, endpoint: str) -> str:
|
|
||||||
normalized_endpoint = str(endpoint or "").strip()
|
|
||||||
if normalized_endpoint.startswith("http://") or normalized_endpoint.startswith("https://"):
|
|
||||||
return normalized_endpoint
|
|
||||||
if not normalized_endpoint.startswith("/"):
|
|
||||||
normalized_endpoint = f"/{normalized_endpoint}"
|
|
||||||
if normalized_endpoint.startswith("/api/v1/") or normalized_endpoint == "/api/v1":
|
|
||||||
return f"{self.base_url}{normalized_endpoint}"
|
|
||||||
return f"{self.api_base_url}{normalized_endpoint}"
|
|
||||||
# [/DEF:_build_api_url:Function]
|
|
||||||
|
|
||||||
# [DEF:_get_auth_lock:Function]
|
|
||||||
# @PURPOSE: Return per-cache-key async lock to serialize fresh login attempts.
|
|
||||||
# @POST: Returns stable asyncio.Lock instance.
|
|
||||||
@classmethod
|
|
||||||
def _get_auth_lock(cls, cache_key: tuple[str, str, bool]) -> asyncio.Lock:
|
|
||||||
existing_lock = cls._auth_locks.get(cache_key)
|
|
||||||
if existing_lock is not None:
|
|
||||||
return existing_lock
|
|
||||||
created_lock = asyncio.Lock()
|
|
||||||
cls._auth_locks[cache_key] = created_lock
|
|
||||||
return created_lock
|
|
||||||
# [/DEF:_get_auth_lock:Function]
|
|
||||||
|
|
||||||
# [DEF:authenticate:Function]
|
|
||||||
# @PURPOSE: Authenticate against Superset and cache access/csrf tokens.
|
|
||||||
# @POST: Client tokens are populated and reusable across requests.
|
|
||||||
async def authenticate(self) -> Dict[str, str]:
|
|
||||||
cached_tokens = SupersetAuthCache.get(self._auth_cache_key)
|
|
||||||
if cached_tokens and cached_tokens.get("access_token") and cached_tokens.get("csrf_token"):
|
|
||||||
self._tokens = cached_tokens
|
|
||||||
self._authenticated = True
|
|
||||||
app_logger.info("[async_authenticate][CacheHit] Reusing cached Superset auth tokens for %s", self.base_url)
|
|
||||||
return self._tokens
|
|
||||||
|
|
||||||
auth_lock = self._get_auth_lock(self._auth_cache_key)
|
|
||||||
async with auth_lock:
|
|
||||||
cached_tokens = SupersetAuthCache.get(self._auth_cache_key)
|
|
||||||
if cached_tokens and cached_tokens.get("access_token") and cached_tokens.get("csrf_token"):
|
|
||||||
self._tokens = cached_tokens
|
|
||||||
self._authenticated = True
|
|
||||||
app_logger.info("[async_authenticate][CacheHitAfterWait] Reusing cached Superset auth tokens for %s", self.base_url)
|
|
||||||
return self._tokens
|
|
||||||
|
|
||||||
with belief_scope("AsyncAPIClient.authenticate"):
|
|
||||||
app_logger.info("[async_authenticate][Enter] Authenticating to %s", self.base_url)
|
|
||||||
try:
|
|
||||||
login_url = f"{self.api_base_url}/security/login"
|
|
||||||
response = await self._client.post(login_url, json=self.auth)
|
|
||||||
response.raise_for_status()
|
|
||||||
access_token = response.json()["access_token"]
|
|
||||||
|
|
||||||
csrf_url = f"{self.api_base_url}/security/csrf_token/"
|
|
||||||
csrf_response = await self._client.get(
|
|
||||||
csrf_url,
|
|
||||||
headers={"Authorization": f"Bearer {access_token}"},
|
|
||||||
)
|
|
||||||
csrf_response.raise_for_status()
|
|
||||||
|
|
||||||
self._tokens = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"csrf_token": csrf_response.json()["result"],
|
|
||||||
}
|
|
||||||
self._authenticated = True
|
|
||||||
SupersetAuthCache.set(self._auth_cache_key, self._tokens)
|
|
||||||
app_logger.info("[async_authenticate][Exit] Authenticated successfully.")
|
|
||||||
return self._tokens
|
|
||||||
except httpx.HTTPStatusError as exc:
|
|
||||||
SupersetAuthCache.invalidate(self._auth_cache_key)
|
|
||||||
status_code = exc.response.status_code if exc.response is not None else None
|
|
||||||
if status_code in [502, 503, 504]:
|
|
||||||
raise NetworkError(
|
|
||||||
f"Environment unavailable during authentication (Status {status_code})",
|
|
||||||
status_code=status_code,
|
|
||||||
) from exc
|
|
||||||
raise AuthenticationError(f"Authentication failed: {exc}") from exc
|
|
||||||
except (httpx.HTTPError, KeyError) as exc:
|
|
||||||
SupersetAuthCache.invalidate(self._auth_cache_key)
|
|
||||||
raise NetworkError(f"Network or parsing error during authentication: {exc}") from exc
|
|
||||||
# [/DEF:authenticate:Function]
|
|
||||||
|
|
||||||
# [DEF:get_headers:Function]
|
|
||||||
# @PURPOSE: Return authenticated Superset headers for async requests.
|
|
||||||
# @POST: Headers include Authorization and CSRF tokens.
|
|
||||||
async def get_headers(self) -> Dict[str, str]:
|
|
||||||
if not self._authenticated:
|
|
||||||
await self.authenticate()
|
|
||||||
return {
|
|
||||||
"Authorization": f"Bearer {self._tokens['access_token']}",
|
|
||||||
"X-CSRFToken": self._tokens.get("csrf_token", ""),
|
|
||||||
"Referer": self.base_url,
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
}
|
|
||||||
# [/DEF:get_headers:Function]
|
|
||||||
|
|
||||||
# [DEF:request:Function]
|
|
||||||
# @PURPOSE: Perform one authenticated async Superset API request.
|
|
||||||
# @POST: Returns JSON payload or raw httpx.Response when raw_response=true.
|
|
||||||
async def request(
|
|
||||||
self,
|
|
||||||
method: str,
|
|
||||||
endpoint: str,
|
|
||||||
headers: Optional[Dict[str, str]] = None,
|
|
||||||
raw_response: bool = False,
|
|
||||||
**kwargs,
|
|
||||||
) -> Union[httpx.Response, Dict[str, Any]]:
|
|
||||||
full_url = self._build_api_url(endpoint)
|
|
||||||
request_headers = await self.get_headers()
|
|
||||||
if headers:
|
|
||||||
request_headers.update(headers)
|
|
||||||
if "allow_redirects" in kwargs and "follow_redirects" not in kwargs:
|
|
||||||
kwargs["follow_redirects"] = bool(kwargs.pop("allow_redirects"))
|
|
||||||
|
|
||||||
try:
|
|
||||||
response = await self._client.request(method, full_url, headers=request_headers, **kwargs)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response if raw_response else response.json()
|
|
||||||
except httpx.HTTPStatusError as exc:
|
|
||||||
if exc.response is not None and exc.response.status_code == 401:
|
|
||||||
self._authenticated = False
|
|
||||||
self._tokens = {}
|
|
||||||
SupersetAuthCache.invalidate(self._auth_cache_key)
|
|
||||||
self._handle_http_error(exc, endpoint)
|
|
||||||
except httpx.HTTPError as exc:
|
|
||||||
self._handle_network_error(exc, full_url)
|
|
||||||
# [/DEF:request:Function]
|
|
||||||
|
|
||||||
# [DEF:_handle_http_error:Function]
|
|
||||||
# @PURPOSE: Translate upstream HTTP errors into stable domain exceptions.
|
|
||||||
# @POST: Raises domain-specific exception for caller flow control.
|
|
||||||
def _handle_http_error(self, exc: httpx.HTTPStatusError, endpoint: str) -> None:
|
|
||||||
with belief_scope("AsyncAPIClient._handle_http_error"):
|
|
||||||
status_code = exc.response.status_code
|
|
||||||
if status_code in [502, 503, 504]:
|
|
||||||
raise NetworkError(f"Environment unavailable (Status {status_code})", status_code=status_code) from exc
|
|
||||||
if status_code == 404:
|
|
||||||
raise DashboardNotFoundError(endpoint) from exc
|
|
||||||
if status_code == 403:
|
|
||||||
raise PermissionDeniedError() from exc
|
|
||||||
if status_code == 401:
|
|
||||||
raise AuthenticationError() from exc
|
|
||||||
raise SupersetAPIError(f"API Error {status_code}: {exc.response.text}") from exc
|
|
||||||
# [/DEF:_handle_http_error:Function]
|
|
||||||
|
|
||||||
# [DEF:_handle_network_error:Function]
|
|
||||||
# @PURPOSE: Translate generic httpx errors into NetworkError.
|
|
||||||
# @POST: Raises NetworkError with URL context.
|
|
||||||
def _handle_network_error(self, exc: httpx.HTTPError, url: str) -> None:
|
|
||||||
with belief_scope("AsyncAPIClient._handle_network_error"):
|
|
||||||
if isinstance(exc, httpx.TimeoutException):
|
|
||||||
message = "Request timeout"
|
|
||||||
elif isinstance(exc, httpx.ConnectError):
|
|
||||||
message = "Connection error"
|
|
||||||
else:
|
|
||||||
message = f"Unknown network error: {exc}"
|
|
||||||
raise NetworkError(message, url=url) from exc
|
|
||||||
# [/DEF:_handle_network_error:Function]
|
|
||||||
|
|
||||||
# [DEF:aclose:Function]
|
|
||||||
# @PURPOSE: Close underlying httpx client.
|
|
||||||
# @POST: Client resources are released.
|
|
||||||
async def aclose(self) -> None:
|
|
||||||
await self._client.aclose()
|
|
||||||
# [/DEF:aclose:Function]
|
|
||||||
# [/DEF:AsyncAPIClient:Class]
|
|
||||||
|
|
||||||
# [/DEF:backend.src.core.utils.async_network:Module]
|
|
||||||
@@ -8,12 +8,10 @@
|
|||||||
# @PUBLIC_API: APIClient
|
# @PUBLIC_API: APIClient
|
||||||
|
|
||||||
# [SECTION: IMPORTS]
|
# [SECTION: IMPORTS]
|
||||||
from typing import Optional, Dict, Any, List, Union, cast, Tuple
|
from typing import Optional, Dict, Any, List, Union, cast
|
||||||
import json
|
import json
|
||||||
import io
|
import io
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
import requests
|
import requests
|
||||||
from requests.adapters import HTTPAdapter
|
from requests.adapters import HTTPAdapter
|
||||||
import urllib3
|
import urllib3
|
||||||
@@ -88,62 +86,6 @@ class NetworkError(Exception):
|
|||||||
# [/DEF:__init__:Function]
|
# [/DEF:__init__:Function]
|
||||||
# [/DEF:NetworkError:Class]
|
# [/DEF:NetworkError:Class]
|
||||||
|
|
||||||
|
|
||||||
# [DEF:SupersetAuthCache:Class]
|
|
||||||
# @PURPOSE: Process-local cache for Superset access/csrf tokens keyed by environment credentials.
|
|
||||||
# @PRE: base_url and username are stable strings.
|
|
||||||
# @POST: Cached entries expire automatically by TTL and can be reused across requests.
|
|
||||||
class SupersetAuthCache:
|
|
||||||
TTL_SECONDS = 300
|
|
||||||
|
|
||||||
_lock = threading.Lock()
|
|
||||||
_entries: Dict[Tuple[str, str, bool], Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def build_key(cls, base_url: str, auth: Optional[Dict[str, Any]], verify_ssl: bool) -> Tuple[str, str, bool]:
|
|
||||||
username = ""
|
|
||||||
if isinstance(auth, dict):
|
|
||||||
username = str(auth.get("username") or "").strip()
|
|
||||||
return (str(base_url or "").strip(), username, bool(verify_ssl))
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get(cls, key: Tuple[str, str, bool]) -> Optional[Dict[str, str]]:
|
|
||||||
now = time.time()
|
|
||||||
with cls._lock:
|
|
||||||
payload = cls._entries.get(key)
|
|
||||||
if not payload:
|
|
||||||
return None
|
|
||||||
expires_at = float(payload.get("expires_at") or 0)
|
|
||||||
if expires_at <= now:
|
|
||||||
cls._entries.pop(key, None)
|
|
||||||
return None
|
|
||||||
tokens = payload.get("tokens")
|
|
||||||
if not isinstance(tokens, dict):
|
|
||||||
cls._entries.pop(key, None)
|
|
||||||
return None
|
|
||||||
return {
|
|
||||||
"access_token": str(tokens.get("access_token") or ""),
|
|
||||||
"csrf_token": str(tokens.get("csrf_token") or ""),
|
|
||||||
}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def set(cls, key: Tuple[str, str, bool], tokens: Dict[str, str], ttl_seconds: Optional[int] = None) -> None:
|
|
||||||
normalized_ttl = max(int(ttl_seconds or cls.TTL_SECONDS), 1)
|
|
||||||
with cls._lock:
|
|
||||||
cls._entries[key] = {
|
|
||||||
"tokens": {
|
|
||||||
"access_token": str(tokens.get("access_token") or ""),
|
|
||||||
"csrf_token": str(tokens.get("csrf_token") or ""),
|
|
||||||
},
|
|
||||||
"expires_at": time.time() + normalized_ttl,
|
|
||||||
}
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def invalidate(cls, key: Tuple[str, str, bool]) -> None:
|
|
||||||
with cls._lock:
|
|
||||||
cls._entries.pop(key, None)
|
|
||||||
# [/DEF:SupersetAuthCache:Class]
|
|
||||||
|
|
||||||
 # [DEF:APIClient:Class]
 # @PURPOSE: Encapsulates the HTTP logic for working with the API, including sessions, authentication, and request handling.
 class APIClient:
@@ -165,11 +107,6 @@ class APIClient:
         self.request_settings = {"verify_ssl": verify_ssl, "timeout": timeout}
         self.session = self._init_session()
         self._tokens: Dict[str, str] = {}
-        self._auth_cache_key = SupersetAuthCache.build_key(
-            self.base_url,
-            self.auth,
-            verify_ssl,
-        )
         self._authenticated = False
         app_logger.info("[APIClient.__init__][Exit] APIClient initialized.")
     # [/DEF:__init__:Function]
@@ -257,12 +194,6 @@ class APIClient:
     def authenticate(self) -> Dict[str, str]:
         with belief_scope("authenticate"):
             app_logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
-            cached_tokens = SupersetAuthCache.get(self._auth_cache_key)
-            if cached_tokens and cached_tokens.get("access_token") and cached_tokens.get("csrf_token"):
-                self._tokens = cached_tokens
-                self._authenticated = True
-                app_logger.info("[authenticate][CacheHit] Reusing cached Superset auth tokens for %s", self.base_url)
-                return self._tokens
             try:
                 login_url = f"{self.api_base_url}/security/login"
                 # Log the payload keys and values (masking password)
@@ -284,17 +215,14 @@ class APIClient:

                 self._tokens = {"access_token": access_token, "csrf_token": csrf_response.json()["result"]}
                 self._authenticated = True
-                SupersetAuthCache.set(self._auth_cache_key, self._tokens)
                 app_logger.info("[authenticate][Exit] Authenticated successfully.")
                 return self._tokens
             except requests.exceptions.HTTPError as e:
-                SupersetAuthCache.invalidate(self._auth_cache_key)
                 status_code = e.response.status_code if e.response is not None else None
                 if status_code in [502, 503, 504]:
                     raise NetworkError(f"Environment unavailable during authentication (Status {status_code})", status_code=status_code) from e
                 raise AuthenticationError(f"Authentication failed: {e}") from e
             except (requests.exceptions.RequestException, KeyError) as e:
-                SupersetAuthCache.invalidate(self._auth_cache_key)
                 raise NetworkError(f"Network or parsing error during authentication: {e}") from e
     # [/DEF:authenticate:Function]

@@ -335,10 +263,6 @@ class APIClient:
             response.raise_for_status()
             return response if raw_response else response.json()
         except requests.exceptions.HTTPError as e:
-            if e.response is not None and e.response.status_code == 401:
-                self._authenticated = False
-                self._tokens = {}
-                SupersetAuthCache.invalidate(self._auth_cache_key)
             self._handle_http_error(e, endpoint)
         except requests.exceptions.RequestException as e:
             self._handle_network_error(e, full_url)
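Taken together, these hunks remove a classic cache-aside flow around authentication: consult the cache, fall back to a real login, populate the cache on success, and invalidate on errors. A hedged sketch of that control flow in isolation (names mirror the diff; `login()` stands in for the real HTTP calls):

```python
from typing import Callable, Dict, Optional

def authenticate_with_cache(
    cache_get: Callable[[], Optional[Dict[str, str]]],
    cache_set: Callable[[Dict[str, str]], None],
    cache_invalidate: Callable[[], None],
    login: Callable[[], Dict[str, str]],
) -> Dict[str, str]:
    """Cache-aside authentication: reuse cached tokens when complete, else log in."""
    cached = cache_get()
    if cached and cached.get("access_token") and cached.get("csrf_token"):
        return cached  # cache hit: skip the network round-trip
    try:
        tokens = login()    # real credential exchange
        cache_set(tokens)   # populate for subsequent callers
        return tokens
    except Exception:
        cache_invalidate()  # never leave half-written tokens behind
        raise
```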
@@ -14,16 +14,8 @@ from .core.config_manager import ConfigManager
 from .core.scheduler import SchedulerService
 from .services.resource_service import ResourceService
 from .services.mapping_service import MappingService
-from .services.clean_release.repositories import (
-    CandidateRepository, ArtifactRepository, ManifestRepository,
-    PolicyRepository, ComplianceRepository, ReportRepository,
-    ApprovalRepository, PublicationRepository, AuditRepository,
-    CleanReleaseAuditLog
-)
 from .services.clean_release.repository import CleanReleaseRepository
-from .services.clean_release.facade import CleanReleaseFacade
-from .services.reports.report_service import ReportsService
-from .core.database import init_db, get_auth_db, get_db
+from .core.database import init_db, get_auth_db
 from .core.logger import logger
 from .core.auth.jwt import decode_token
 from .core.auth.repository import AuthRepository
@@ -63,10 +55,8 @@ logger.info("SchedulerService initialized")
 resource_service = ResourceService()
 logger.info("ResourceService initialized")

-# Clean Release Redesign Singletons
-# Note: These use get_db() which is a generator, so we need a way to provide a session.
-# For singletons in dependencies.py, we might need a different approach or
-# initialize them inside the dependency functions.
+clean_release_repository = CleanReleaseRepository()
+logger.info("CleanReleaseRepository initialized")

 # [DEF:get_plugin_loader:Function]
 # @PURPOSE: Dependency injector for PluginLoader.
@@ -119,45 +109,15 @@ def get_mapping_service() -> MappingService:
 # [/DEF:get_mapping_service:Function]


-_clean_release_repository = CleanReleaseRepository()
-
 # [DEF:get_clean_release_repository:Function]
-# @PURPOSE: Legacy compatibility shim for CleanReleaseRepository.
-# @POST: Returns a shared CleanReleaseRepository instance.
+# @PURPOSE: Dependency injector for CleanReleaseRepository.
+# @PRE: Global clean_release_repository must be initialized.
+# @POST: Returns shared CleanReleaseRepository instance.
+# @RETURN: CleanReleaseRepository - Shared clean release repository instance.
 def get_clean_release_repository() -> CleanReleaseRepository:
-    """Legacy compatibility shim for CleanReleaseRepository."""
-    return _clean_release_repository
+    return clean_release_repository
 # [/DEF:get_clean_release_repository:Function]


-# [DEF:get_clean_release_facade:Function]
-# @PURPOSE: Dependency injector for CleanReleaseFacade.
-# @POST: Returns a facade instance with a fresh DB session.
-def get_clean_release_facade(db = Depends(get_db)) -> CleanReleaseFacade:
-    candidate_repo = CandidateRepository(db)
-    artifact_repo = ArtifactRepository(db)
-    manifest_repo = ManifestRepository(db)
-    policy_repo = PolicyRepository(db)
-    compliance_repo = ComplianceRepository(db)
-    report_repo = ReportRepository(db)
-    approval_repo = ApprovalRepository(db)
-    publication_repo = PublicationRepository(db)
-    audit_repo = AuditRepository(db)
-
-    return CleanReleaseFacade(
-        candidate_repo=candidate_repo,
-        artifact_repo=artifact_repo,
-        manifest_repo=manifest_repo,
-        policy_repo=policy_repo,
-        compliance_repo=compliance_repo,
-        report_repo=report_repo,
-        approval_repo=approval_repo,
-        publication_repo=publication_repo,
-        audit_repo=audit_repo,
-        config_manager=config_manager
-    )
-# [/DEF:get_clean_release_facade:Function]
-
 # [DEF:oauth2_scheme:Variable]
 # @PURPOSE: OAuth2 password bearer scheme for token extraction.
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login")
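For readers unfamiliar with the pattern, the surviving injector is consumed through FastAPI's `Depends`. A minimal hedged sketch — the route path and the `list_candidates` method are illustrative, not taken from this repository:

```python
from fastapi import APIRouter, Depends

router = APIRouter()

# `get_clean_release_repository` is the injector shown in the diff above.
@router.get("/api/clean-release/candidates")  # hypothetical route
def list_candidates(repo=Depends(get_clean_release_repository)):
    # FastAPI calls the injector per request and hands over the shared singleton.
    return {"candidates": repo.list_candidates()}  # assumed repository method
```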
@@ -1,3 +0,0 @@
-# [DEF:src.models:Package]
-# @PURPOSE: Domain model package root.
-# [/DEF:src.models:Package]
@@ -1,149 +0,0 @@
-# [DEF:__tests__/test_clean_release:Module]
-# @RELATION: VERIFIES -> ../clean_release.py
-# @PURPOSE: Contract testing for Clean Release models
-# [/DEF:__tests__/test_clean_release:Module]
-
-import pytest
-from datetime import datetime
-from pydantic import ValidationError
-from src.models.clean_release import (
-    ReleaseCandidate,
-    ReleaseCandidateStatus,
-    ProfileType,
-    CleanProfilePolicy,
-    DistributionManifest,
-    ManifestItem,
-    ManifestSummary,
-    ClassificationType,
-    ComplianceCheckRun,
-    CheckFinalStatus,
-    CheckStageResult,
-    CheckStageName,
-    CheckStageStatus,
-    ComplianceReport,
-    ExecutionMode
-)
-
-# @TEST_FIXTURE: valid_enterprise_candidate
-@pytest.fixture
-def valid_candidate_data():
-    return {
-        "candidate_id": "RC-001",
-        "version": "1.0.0",
-        "profile": ProfileType.ENTERPRISE_CLEAN,
-        "created_at": datetime.now(),
-        "created_by": "admin",
-        "source_snapshot_ref": "v1.0.0-snapshot"
-    }
-
-def test_release_candidate_valid(valid_candidate_data):
-    rc = ReleaseCandidate(**valid_candidate_data)
-    assert rc.candidate_id == "RC-001"
-    assert rc.status == ReleaseCandidateStatus.DRAFT
-
-def test_release_candidate_empty_id(valid_candidate_data):
-    valid_candidate_data["candidate_id"] = " "
-    with pytest.raises(ValueError, match="candidate_id must be non-empty"):
-        ReleaseCandidate(**valid_candidate_data)
-
-# @TEST_FIXTURE: valid_enterprise_policy
-@pytest.fixture
-def valid_policy_data():
-    return {
-        "policy_id": "POL-001",
-        "policy_version": "1",
-        "active": True,
-        "prohibited_artifact_categories": ["test-data"],
-        "required_system_categories": ["core"],
-        "internal_source_registry_ref": "REG-1",
-        "effective_from": datetime.now(),
-        "profile": ProfileType.ENTERPRISE_CLEAN
-    }
-
-# @TEST_INVARIANT: policy_purity
-def test_enterprise_policy_valid(valid_policy_data):
-    policy = CleanProfilePolicy(**valid_policy_data)
-    assert policy.external_source_forbidden is True
-
-# @TEST_EDGE: enterprise_policy_missing_prohibited
-def test_enterprise_policy_missing_prohibited(valid_policy_data):
-    valid_policy_data["prohibited_artifact_categories"] = []
-    with pytest.raises(ValueError, match="enterprise-clean policy requires prohibited_artifact_categories"):
-        CleanProfilePolicy(**valid_policy_data)
-
-# @TEST_EDGE: enterprise_policy_external_allowed
-def test_enterprise_policy_external_allowed(valid_policy_data):
-    valid_policy_data["external_source_forbidden"] = False
-    with pytest.raises(ValueError, match="enterprise-clean policy requires external_source_forbidden=true"):
-        CleanProfilePolicy(**valid_policy_data)
-
-# @TEST_INVARIANT: manifest_consistency
-# @TEST_EDGE: manifest_count_mismatch
-def test_manifest_count_mismatch():
-    summary = ManifestSummary(included_count=1, excluded_count=0, prohibited_detected_count=0)
-    item = ManifestItem(path="p", category="c", classification=ClassificationType.ALLOWED, reason="r")
-
-    # Valid
-    DistributionManifest(
-        manifest_id="m1", candidate_id="rc1", policy_id="p1",
-        generated_at=datetime.now(), generated_by="u", items=[item],
-        summary=summary, deterministic_hash="h"
-    )
-
-    # Invalid count
-    summary.included_count = 2
-    with pytest.raises(ValueError, match="manifest summary counts must match items size"):
-        DistributionManifest(
-            manifest_id="m1", candidate_id="rc1", policy_id="p1",
-            generated_at=datetime.now(), generated_by="u", items=[item],
-            summary=summary, deterministic_hash="h"
-        )
-
-# @TEST_INVARIANT: run_integrity
-# @TEST_EDGE: compliant_run_stage_fail
-def test_compliant_run_validation():
-    base_run = {
-        "check_run_id": "run1",
-        "candidate_id": "rc1",
-        "policy_id": "p1",
-        "started_at": datetime.now(),
-        "triggered_by": "u",
-        "execution_mode": ExecutionMode.TUI,
-        "final_status": CheckFinalStatus.COMPLIANT,
-        "checks": [
-            CheckStageResult(stage=CheckStageName.DATA_PURITY, status=CheckStageStatus.PASS),
-            CheckStageResult(stage=CheckStageName.INTERNAL_SOURCES_ONLY, status=CheckStageStatus.PASS),
-            CheckStageResult(stage=CheckStageName.NO_EXTERNAL_ENDPOINTS, status=CheckStageStatus.PASS),
-            CheckStageResult(stage=CheckStageName.MANIFEST_CONSISTENCY, status=CheckStageStatus.PASS),
-        ]
-    }
-    # Valid
-    ComplianceCheckRun(**base_run)
-
-    # One stage fails -> cannot be COMPLIANT
-    base_run["checks"][0].status = CheckStageStatus.FAIL
-    with pytest.raises(ValueError, match="compliant run requires PASS on all mandatory stages"):
-        ComplianceCheckRun(**base_run)
-
-    # Missing stage -> cannot be COMPLIANT
-    base_run["checks"] = base_run["checks"][1:]
-    with pytest.raises(ValueError, match="compliant run requires all mandatory stages"):
-        ComplianceCheckRun(**base_run)
-
-def test_report_validation():
-    # Valid blocked report
-    ComplianceReport(
-        report_id="rep1", check_run_id="run1", candidate_id="rc1",
-        generated_at=datetime.now(), final_status=CheckFinalStatus.BLOCKED,
-        operator_summary="Blocked", structured_payload_ref="ref",
-        violations_count=2, blocking_violations_count=2
-    )
-
-    # BLOCKED with 0 blocking violations
-    with pytest.raises(ValueError, match="blocked report requires blocking violations"):
-        ComplianceReport(
-            report_id="rep1", check_run_id="run1", candidate_id="rc1",
-            generated_at=datetime.now(), final_status=CheckFinalStatus.BLOCKED,
-            operator_summary="Blocked", structured_payload_ref="ref",
-            violations_count=2, blocking_violations_count=0
-        )
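The removed suite leans on `pytest.raises(..., match=...)`, which applies `re.search` against the string form of the raised error; with Pydantic v2, `ValidationError` subclasses `ValueError`, so these assertions still catch validator failures. A minimal self-contained sketch of that pattern (model and message are illustrative):

```python
import pytest
from pydantic import BaseModel, model_validator

class Candidate(BaseModel):
    candidate_id: str

    @model_validator(mode="after")
    def _non_empty(self):
        if not self.candidate_id.strip():
            raise ValueError("candidate_id must be non-empty")
        return self

def test_match_is_a_regex_search():
    # match= is a regex search, so a substring of the message suffices.
    with pytest.raises(ValueError, match="must be non-empty"):
        Candidate(candidate_id="  ")
```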
@@ -1,217 +1,199 @@
 # [DEF:backend.src.models.clean_release:Module]
 # @TIER: CRITICAL
-# @SEMANTICS: clean-release, models, lifecycle, compliance, evidence, immutability
-# @PURPOSE: Define canonical clean release domain entities and lifecycle guards.
+# @SEMANTICS: clean-release, models, lifecycle, policy, manifest, compliance
+# @PURPOSE: Define clean release domain entities and validation contracts for enterprise compliance flow.
 # @LAYER: Domain
-# @INVARIANT: Immutable snapshots are never mutated; forbidden lifecycle transitions are rejected.
+# @RELATION: BINDS_TO -> specs/023-clean-repo-enterprise/data-model.md
+# @INVARIANT: Enterprise-clean policy always forbids external sources.

+from __future__ import annotations
+
 from datetime import datetime
-from dataclasses import dataclass
 from enum import Enum
-from typing import List, Optional, Dict, Any
-from sqlalchemy import Column, String, DateTime, JSON, ForeignKey, Integer, Boolean
-from sqlalchemy.orm import relationship
-from .mapping import Base
-from ..services.clean_release.enums import (
-    CandidateStatus, RunStatus, ComplianceDecision,
-    ApprovalDecisionType, PublicationStatus, ClassificationType
-)
-from ..services.clean_release.exceptions import IllegalTransitionError
+from typing import List, Optional

-# [DEF:CheckFinalStatus:Class]
-# @PURPOSE: Backward-compatible final status enum for legacy TUI/orchestrator tests.
-class CheckFinalStatus(str, Enum):
-    COMPLIANT = "COMPLIANT"
-    BLOCKED = "BLOCKED"
-    FAILED = "FAILED"
-# [/DEF:CheckFinalStatus:Class]
+from pydantic import BaseModel, Field, model_validator

-# [DEF:CheckStageName:Class]
-# @PURPOSE: Backward-compatible stage name enum for legacy TUI/orchestrator tests.
-class CheckStageName(str, Enum):
-    DATA_PURITY = "DATA_PURITY"
-    INTERNAL_SOURCES_ONLY = "INTERNAL_SOURCES_ONLY"
-    NO_EXTERNAL_ENDPOINTS = "NO_EXTERNAL_ENDPOINTS"
-    MANIFEST_CONSISTENCY = "MANIFEST_CONSISTENCY"
-# [/DEF:CheckStageName:Class]
-
-# [DEF:CheckStageStatus:Class]
-# @PURPOSE: Backward-compatible stage status enum for legacy TUI/orchestrator tests.
-class CheckStageStatus(str, Enum):
-    PASS = "PASS"
-    FAIL = "FAIL"
-    SKIPPED = "SKIPPED"
-    RUNNING = "RUNNING"
-# [/DEF:CheckStageStatus:Class]
-
-# [DEF:CheckStageResult:Class]
-# @PURPOSE: Backward-compatible stage result container for legacy TUI/orchestrator tests.
-@dataclass
-class CheckStageResult:
-    stage: CheckStageName
-    status: CheckStageStatus
-    details: str = ""
-# [/DEF:CheckStageResult:Class]
-
-# [DEF:ProfileType:Class]
-# @PURPOSE: Backward-compatible profile enum for legacy TUI bootstrap logic.
-class ProfileType(str, Enum):
-    ENTERPRISE_CLEAN = "enterprise-clean"
-# [/DEF:ProfileType:Class]
-
-# [DEF:RegistryStatus:Class]
-# @PURPOSE: Backward-compatible registry status enum for legacy TUI bootstrap logic.
-class RegistryStatus(str, Enum):
-    ACTIVE = "ACTIVE"
-    INACTIVE = "INACTIVE"
-# [/DEF:RegistryStatus:Class]

 # [DEF:ReleaseCandidateStatus:Class]
-# @PURPOSE: Backward-compatible release candidate status enum for legacy TUI.
+# @PURPOSE: Lifecycle states for release candidate.
 class ReleaseCandidateStatus(str, Enum):
-    DRAFT = CandidateStatus.DRAFT.value
-    PREPARED = CandidateStatus.PREPARED.value
-    MANIFEST_BUILT = CandidateStatus.MANIFEST_BUILT.value
-    CHECK_PENDING = CandidateStatus.CHECK_PENDING.value
-    CHECK_RUNNING = CandidateStatus.CHECK_RUNNING.value
-    CHECK_PASSED = CandidateStatus.CHECK_PASSED.value
-    CHECK_BLOCKED = CandidateStatus.CHECK_BLOCKED.value
-    CHECK_ERROR = CandidateStatus.CHECK_ERROR.value
-    APPROVED = CandidateStatus.APPROVED.value
-    PUBLISHED = CandidateStatus.PUBLISHED.value
-    REVOKED = CandidateStatus.REVOKED.value
+    DRAFT = "draft"
+    PREPARED = "prepared"
+    COMPLIANT = "compliant"
+    BLOCKED = "blocked"
+    RELEASED = "released"
 # [/DEF:ReleaseCandidateStatus:Class]

+# [DEF:ProfileType:Class]
+# @PURPOSE: Supported profile identifiers.
+class ProfileType(str, Enum):
+    ENTERPRISE_CLEAN = "enterprise-clean"
+    DEVELOPMENT = "development"
+# [/DEF:ProfileType:Class]
+
+# [DEF:ClassificationType:Class]
+# @PURPOSE: Manifest classification outcomes for artifacts.
+class ClassificationType(str, Enum):
+    REQUIRED_SYSTEM = "required-system"
+    ALLOWED = "allowed"
+    EXCLUDED_PROHIBITED = "excluded-prohibited"
+# [/DEF:ClassificationType:Class]
+
+# [DEF:RegistryStatus:Class]
+# @PURPOSE: Registry lifecycle status.
+class RegistryStatus(str, Enum):
+    ACTIVE = "active"
+    INACTIVE = "inactive"
+# [/DEF:RegistryStatus:Class]
+
+# [DEF:CheckFinalStatus:Class]
+# @PURPOSE: Final status for compliance check run.
+class CheckFinalStatus(str, Enum):
+    RUNNING = "running"
+    COMPLIANT = "compliant"
+    BLOCKED = "blocked"
+    FAILED = "failed"
+# [/DEF:CheckFinalStatus:Class]
+
+# [DEF:ExecutionMode:Class]
+# @PURPOSE: Execution channel for compliance checks.
+class ExecutionMode(str, Enum):
+    TUI = "tui"
+    CI = "ci"
+# [/DEF:ExecutionMode:Class]
+
+# [DEF:CheckStageName:Class]
+# @PURPOSE: Mandatory check stages.
+class CheckStageName(str, Enum):
+    DATA_PURITY = "data_purity"
+    INTERNAL_SOURCES_ONLY = "internal_sources_only"
+    NO_EXTERNAL_ENDPOINTS = "no_external_endpoints"
+    MANIFEST_CONSISTENCY = "manifest_consistency"
+# [/DEF:CheckStageName:Class]
+
+# [DEF:CheckStageStatus:Class]
+# @PURPOSE: Stage-level execution status.
+class CheckStageStatus(str, Enum):
+    PASS = "pass"
+    FAIL = "fail"
+    SKIPPED = "skipped"
+# [/DEF:CheckStageStatus:Class]
+
+# [DEF:ViolationCategory:Class]
+# @PURPOSE: Normalized compliance violation categories.
+class ViolationCategory(str, Enum):
+    DATA_PURITY = "data-purity"
+    EXTERNAL_SOURCE = "external-source"
+    MANIFEST_INTEGRITY = "manifest-integrity"
+    POLICY_CONFLICT = "policy-conflict"
+    OPERATIONAL_RISK = "operational-risk"
+# [/DEF:ViolationCategory:Class]
+
+# [DEF:ViolationSeverity:Class]
+# @PURPOSE: Severity levels for violation triage.
+class ViolationSeverity(str, Enum):
+    CRITICAL = "critical"
+    HIGH = "high"
+    MEDIUM = "medium"
+    LOW = "low"
+# [/DEF:ViolationSeverity:Class]
+
+# [DEF:ReleaseCandidate:Class]
+# @PURPOSE: Candidate metadata for clean-release workflow.
+# @PRE: candidate_id, source_snapshot_ref are non-empty.
+# @POST: Model instance is valid for lifecycle transitions.
+class ReleaseCandidate(BaseModel):
+    candidate_id: str
+    version: str
+    profile: ProfileType
+    created_at: datetime
+    created_by: str
+    source_snapshot_ref: str
+    status: ReleaseCandidateStatus = ReleaseCandidateStatus.DRAFT
+
+    @model_validator(mode="after")
+    def _validate_non_empty(self):
+        if not self.candidate_id.strip():
+            raise ValueError("candidate_id must be non-empty")
+        if not self.source_snapshot_ref.strip():
+            raise ValueError("source_snapshot_ref must be non-empty")
+        return self
+# [/DEF:ReleaseCandidate:Class]
+
+# [DEF:CleanProfilePolicy:Class]
+# @PURPOSE: Policy contract for artifact/source decisions.
+class CleanProfilePolicy(BaseModel):
+    policy_id: str
+    policy_version: str
+    active: bool
+    prohibited_artifact_categories: List[str] = Field(default_factory=list)
+    required_system_categories: List[str] = Field(default_factory=list)
+    external_source_forbidden: bool = True
+    internal_source_registry_ref: str
+    effective_from: datetime
+    effective_to: Optional[datetime] = None
+    profile: ProfileType = ProfileType.ENTERPRISE_CLEAN
+
+    @model_validator(mode="after")
+    def _validate_policy(self):
+        if self.profile == ProfileType.ENTERPRISE_CLEAN:
+            if not self.external_source_forbidden:
+                raise ValueError("enterprise-clean policy requires external_source_forbidden=true")
+            if not self.prohibited_artifact_categories:
+                raise ValueError("enterprise-clean policy requires prohibited_artifact_categories")
+            if not self.internal_source_registry_ref.strip():
+                raise ValueError("internal_source_registry_ref must be non-empty")
+        return self
+# [/DEF:CleanProfilePolicy:Class]

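To make the new policy invariant concrete, a short usage sketch against the Pydantic `CleanProfilePolicy` added above (IDs are illustrative):

```python
from datetime import datetime, timezone

# Passing case: enterprise-clean policy with the mandatory fields populated.
policy = CleanProfilePolicy(
    policy_id="POL-042", policy_version="1", active=True,  # illustrative IDs
    prohibited_artifact_categories=["test-data"],
    internal_source_registry_ref="REG-1",
    effective_from=datetime.now(timezone.utc),
)
assert policy.external_source_forbidden is True  # default, and enforced

# Failing case: relaxing external_source_forbidden violates the module invariant.
try:
    CleanProfilePolicy(
        policy_id="POL-043", policy_version="1", active=True,
        prohibited_artifact_categories=["test-data"],
        internal_source_registry_ref="REG-1",
        effective_from=datetime.now(timezone.utc),
        external_source_forbidden=False,
    )
except ValueError as exc:
    print(exc)  # enterprise-clean policy requires external_source_forbidden=true
```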
 # [DEF:ResourceSourceEntry:Class]
-# @PURPOSE: Backward-compatible source entry model for legacy TUI bootstrap logic.
-@dataclass
-class ResourceSourceEntry:
+# @PURPOSE: One internal source definition.
+class ResourceSourceEntry(BaseModel):
     source_id: str
     host: str
     protocol: str
     purpose: str
+    allowed_paths: List[str] = Field(default_factory=list)
     enabled: bool = True
 # [/DEF:ResourceSourceEntry:Class]

 # [DEF:ResourceSourceRegistry:Class]
-# @PURPOSE: Backward-compatible source registry model for legacy TUI bootstrap logic.
-@dataclass
-class ResourceSourceRegistry:
+# @PURPOSE: Allowlist of internal sources.
+class ResourceSourceRegistry(BaseModel):
     registry_id: str
     name: str
     entries: List[ResourceSourceEntry]
     updated_at: datetime
     updated_by: str
-    status: str = "ACTIVE"
+    status: RegistryStatus = RegistryStatus.ACTIVE

-    @property
-    def id(self) -> str:
-        return self.registry_id
+    @model_validator(mode="after")
+    def _validate_registry(self):
+        if not self.entries:
+            raise ValueError("registry entries cannot be empty")
+        if self.status == RegistryStatus.ACTIVE and not any(e.enabled for e in self.entries):
+            raise ValueError("active registry must include at least one enabled entry")
+        return self
 # [/DEF:ResourceSourceRegistry:Class]

-# [DEF:CleanProfilePolicy:Class]
-# @PURPOSE: Backward-compatible policy model for legacy TUI bootstrap logic.
-@dataclass
-class CleanProfilePolicy:
-    policy_id: str
-    policy_version: str
-    profile: str
-    active: bool
-    internal_source_registry_ref: str
-    prohibited_artifact_categories: List[str]
-    effective_from: datetime
-    required_system_categories: Optional[List[str]] = None
-
-    @property
-    def id(self) -> str:
-        return self.policy_id
-
-    @property
-    def registry_snapshot_id(self) -> str:
-        return self.internal_source_registry_ref
-# [/DEF:CleanProfilePolicy:Class]

-# [DEF:ComplianceCheckRun:Class]
-# @PURPOSE: Backward-compatible run model for legacy TUI typing/import compatibility.
-@dataclass
-class ComplianceCheckRun:
-    check_run_id: str
-    candidate_id: str
-    policy_id: str
-    requested_by: str
-    execution_mode: str
-    checks: List[CheckStageResult]
-    final_status: CheckFinalStatus
-# [/DEF:ComplianceCheckRun:Class]
-
-# [DEF:ReleaseCandidate:Class]
-# @PURPOSE: Represents the release unit being prepared and governed.
-# @PRE: id, version, source_snapshot_ref are non-empty.
-# @POST: status advances only through legal transitions.
-class ReleaseCandidate(Base):
-    __tablename__ = "clean_release_candidates"
-
-    id = Column(String, primary_key=True)
-    name = Column(String, nullable=True)  # Added back for backward compatibility with some legacy DTOs
-    version = Column(String, nullable=False)
-    source_snapshot_ref = Column(String, nullable=False)
-    build_id = Column(String, nullable=True)
-    created_at = Column(DateTime, default=datetime.utcnow)
-    created_by = Column(String, nullable=False)
-    status = Column(String, default=CandidateStatus.DRAFT)
-
-    @property
-    def candidate_id(self) -> str:
-        return self.id
-
-    def transition_to(self, new_status: CandidateStatus):
-        """
-        @PURPOSE: Enforce legal state transitions.
-        @PRE: Transition must be allowed by lifecycle rules.
-        """
-        allowed = {
-            CandidateStatus.DRAFT: [CandidateStatus.PREPARED],
-            CandidateStatus.PREPARED: [CandidateStatus.MANIFEST_BUILT],
-            CandidateStatus.MANIFEST_BUILT: [CandidateStatus.CHECK_PENDING],
-            CandidateStatus.CHECK_PENDING: [CandidateStatus.CHECK_RUNNING],
-            CandidateStatus.CHECK_RUNNING: [
-                CandidateStatus.CHECK_PASSED,
-                CandidateStatus.CHECK_BLOCKED,
-                CandidateStatus.CHECK_ERROR
-            ],
-            CandidateStatus.CHECK_PASSED: [CandidateStatus.APPROVED, CandidateStatus.CHECK_PENDING],
-            CandidateStatus.CHECK_BLOCKED: [CandidateStatus.CHECK_PENDING],
-            CandidateStatus.CHECK_ERROR: [CandidateStatus.CHECK_PENDING],
-            CandidateStatus.APPROVED: [CandidateStatus.PUBLISHED],
-            CandidateStatus.PUBLISHED: [CandidateStatus.REVOKED],
-            CandidateStatus.REVOKED: []
-        }
-        current_status = CandidateStatus(self.status)
-        if new_status not in allowed.get(current_status, []):
-            raise IllegalTransitionError(f"Forbidden transition from {current_status} to {new_status}")
-        self.status = new_status.value
-# [/DEF:ReleaseCandidate:Class]
-
-# [DEF:CandidateArtifact:Class]
-# @PURPOSE: Represents one artifact associated with a release candidate.
-class CandidateArtifact(Base):
-    __tablename__ = "clean_release_artifacts"
-
-    id = Column(String, primary_key=True)
-    candidate_id = Column(String, ForeignKey("clean_release_candidates.id"), nullable=False)
-    path = Column(String, nullable=False)
-    sha256 = Column(String, nullable=False)
-    size = Column(Integer, nullable=False)
-    detected_category = Column(String, nullable=True)
-    declared_category = Column(String, nullable=True)
-    source_uri = Column(String, nullable=True)
-    source_host = Column(String, nullable=True)
-    metadata_json = Column(JSON, default=dict)
-# [/DEF:CandidateArtifact:Class]

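The removed `transition_to` guard is a small finite-state machine. A self-contained sketch of the same guard pattern, using a trimmed status set (the full `CandidateStatus` enum lives in `..services.clean_release.enums`, outside this diff):

```python
from enum import Enum

class Status(str, Enum):          # trimmed stand-in for CandidateStatus
    DRAFT = "draft"
    PREPARED = "prepared"
    PUBLISHED = "published"

ALLOWED = {
    Status.DRAFT: {Status.PREPARED},
    Status.PREPARED: {Status.PUBLISHED},
    Status.PUBLISHED: set(),      # terminal state
}

class IllegalTransition(Exception):
    pass

def transition(current: Status, new: Status) -> Status:
    """Reject any edge not present in the lifecycle graph."""
    if new not in ALLOWED[current]:
        raise IllegalTransition(f"Forbidden transition from {current} to {new}")
    return new

state = transition(Status.DRAFT, Status.PREPARED)   # ok
try:
    transition(Status.PUBLISHED, Status.DRAFT)      # terminal state: rejected
except IllegalTransition as exc:
    print(exc)
```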
 # [DEF:ManifestItem:Class]
-@dataclass
-class ManifestItem:
+# @PURPOSE: One artifact entry in manifest.
+class ManifestItem(BaseModel):
     path: str
     category: str
     classification: ClassificationType
@@ -219,218 +201,119 @@ class ManifestItem:
     checksum: Optional[str] = None
 # [/DEF:ManifestItem:Class]

 # [DEF:ManifestSummary:Class]
-@dataclass
-class ManifestSummary:
-    included_count: int
-    excluded_count: int
-    prohibited_detected_count: int
+# @PURPOSE: Aggregate counters for manifest decisions.
+class ManifestSummary(BaseModel):
+    included_count: int = Field(ge=0)
+    excluded_count: int = Field(ge=0)
+    prohibited_detected_count: int = Field(ge=0)
 # [/DEF:ManifestSummary:Class]

 # [DEF:DistributionManifest:Class]
-# @PURPOSE: Immutable snapshot of the candidate payload.
-# @INVARIANT: Immutable after creation.
-class DistributionManifest(Base):
-    __tablename__ = "clean_release_manifests"
-
-    id = Column(String, primary_key=True)
-    candidate_id = Column(String, ForeignKey("clean_release_candidates.id"), nullable=False)
-    manifest_version = Column(Integer, nullable=False)
-    manifest_digest = Column(String, nullable=False)
-    artifacts_digest = Column(String, nullable=False)
-    created_at = Column(DateTime, default=datetime.utcnow)
-    created_by = Column(String, nullable=False)
-    source_snapshot_ref = Column(String, nullable=False)
-    content_json = Column(JSON, nullable=False)
-    immutable = Column(Boolean, default=True)
-
-    # Redesign compatibility fields (not persisted directly but used by builder/facade)
-    def __init__(self, **kwargs):
-        # Handle fields from manifest_builder.py
-        if "manifest_id" in kwargs:
-            kwargs["id"] = kwargs.pop("manifest_id")
-        if "generated_at" in kwargs:
-            kwargs["created_at"] = kwargs.pop("generated_at")
-        if "generated_by" in kwargs:
-            kwargs["created_by"] = kwargs.pop("generated_by")
-        if "deterministic_hash" in kwargs:
-            kwargs["manifest_digest"] = kwargs.pop("deterministic_hash")
-
-        # Ensure required DB fields have defaults if missing
-        if "manifest_version" not in kwargs:
-            kwargs["manifest_version"] = 1
-        if "artifacts_digest" not in kwargs:
-            kwargs["artifacts_digest"] = kwargs.get("manifest_digest", "pending")
-        if "source_snapshot_ref" not in kwargs:
-            kwargs["source_snapshot_ref"] = "pending"
-
-        # Pack items and summary into content_json if provided
-        if "items" in kwargs or "summary" in kwargs:
-            content = kwargs.get("content_json", {})
-            if "items" in kwargs:
-                items = kwargs.pop("items")
-                content["items"] = [
-                    {
-                        "path": i.path,
-                        "category": i.category,
-                        "classification": i.classification.value,
-                        "reason": i.reason,
-                        "checksum": i.checksum
-                    } for i in items
-                ]
-            if "summary" in kwargs:
-                summary = kwargs.pop("summary")
-                content["summary"] = {
-                    "included_count": summary.included_count,
-                    "excluded_count": summary.excluded_count,
-                    "prohibited_detected_count": summary.prohibited_detected_count
-                }
-            kwargs["content_json"] = content
-
-        super().__init__(**kwargs)
+# @PURPOSE: Deterministic release composition for audit.
+class DistributionManifest(BaseModel):
+    manifest_id: str
+    candidate_id: str
+    policy_id: str
+    generated_at: datetime
+    generated_by: str
+    items: List[ManifestItem]
+    summary: ManifestSummary
+    deterministic_hash: str
+
+    @model_validator(mode="after")
+    def _validate_counts(self):
+        if self.summary.included_count + self.summary.excluded_count != len(self.items):
+            raise ValueError("manifest summary counts must match items size")
+        return self
 # [/DEF:DistributionManifest:Class]

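The `_validate_counts` hook ties the summary to the item list, so a stale counter can never be persisted silently. A quick sketch against the Pydantic models above (paths, IDs, and the hash are placeholders):

```python
from datetime import datetime

item = ManifestItem(
    path="dist/app.tar.gz", category="core",                    # placeholder artifact
    classification=ClassificationType.ALLOWED, reason="required",
)
good = ManifestSummary(included_count=1, excluded_count=0, prohibited_detected_count=0)
bad = ManifestSummary(included_count=2, excluded_count=0, prohibited_detected_count=0)

kwargs = dict(
    manifest_id="m-1", candidate_id="rc-1", policy_id="pol-1",  # placeholder IDs
    generated_at=datetime.now(), generated_by="ci",
    items=[item], deterministic_hash="h",
)
DistributionManifest(summary=good, **kwargs)      # accepted: 1 + 0 == len(items)
try:
    DistributionManifest(summary=bad, **kwargs)   # rejected: 2 + 0 != 1
except ValueError as exc:
    print(exc)  # manifest summary counts must match items size
```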
-# [DEF:SourceRegistrySnapshot:Class]
-# @PURPOSE: Immutable registry snapshot for allowed sources.
-class SourceRegistrySnapshot(Base):
-    __tablename__ = "clean_release_registry_snapshots"
-
-    id = Column(String, primary_key=True)
-    registry_id = Column(String, nullable=False)
-    registry_version = Column(String, nullable=False)
-    created_at = Column(DateTime, default=datetime.utcnow)
-    allowed_hosts = Column(JSON, nullable=False)  # List[str]
-    allowed_schemes = Column(JSON, nullable=False)  # List[str]
-    allowed_source_types = Column(JSON, nullable=False)  # List[str]
-    immutable = Column(Boolean, default=True)
-# [/DEF:SourceRegistrySnapshot:Class]
-
-# [DEF:CleanPolicySnapshot:Class]
-# @PURPOSE: Immutable policy snapshot used to evaluate a run.
-class CleanPolicySnapshot(Base):
-    __tablename__ = "clean_release_policy_snapshots"
-
-    id = Column(String, primary_key=True)
-    policy_id = Column(String, nullable=False)
-    policy_version = Column(String, nullable=False)
-    created_at = Column(DateTime, default=datetime.utcnow)
-    content_json = Column(JSON, nullable=False)
-    registry_snapshot_id = Column(String, ForeignKey("clean_release_registry_snapshots.id"), nullable=False)
-    immutable = Column(Boolean, default=True)
-# [/DEF:CleanPolicySnapshot:Class]
+# [DEF:CheckStageResult:Class]
+# @PURPOSE: Per-stage compliance result.
+class CheckStageResult(BaseModel):
+    stage: CheckStageName
+    status: CheckStageStatus
+    details: Optional[str] = None
+    duration_ms: Optional[int] = Field(default=None, ge=0)
+# [/DEF:CheckStageResult:Class]

-# [DEF:ComplianceRun:Class]
-# @PURPOSE: Operational record for one compliance execution.
-class ComplianceRun(Base):
-    __tablename__ = "clean_release_compliance_runs"
-
-    id = Column(String, primary_key=True)
-    candidate_id = Column(String, ForeignKey("clean_release_candidates.id"), nullable=False)
-    manifest_id = Column(String, ForeignKey("clean_release_manifests.id"), nullable=False)
-    manifest_digest = Column(String, nullable=False)
-    policy_snapshot_id = Column(String, ForeignKey("clean_release_policy_snapshots.id"), nullable=False)
-    registry_snapshot_id = Column(String, ForeignKey("clean_release_registry_snapshots.id"), nullable=False)
-    requested_by = Column(String, nullable=False)
-    requested_at = Column(DateTime, default=datetime.utcnow)
-    started_at = Column(DateTime, nullable=True)
-    finished_at = Column(DateTime, nullable=True)
-    status = Column(String, default=RunStatus.PENDING)
-    final_status = Column(String, nullable=True)  # ComplianceDecision
-    failure_reason = Column(String, nullable=True)
-    task_id = Column(String, nullable=True)
-
-    @property
-    def check_run_id(self) -> str:
-        return self.id
-# [/DEF:ComplianceRun:Class]
+# [DEF:ComplianceCheckRun:Class]
+# @PURPOSE: One execution run of compliance pipeline.
+class ComplianceCheckRun(BaseModel):
+    check_run_id: str
+    candidate_id: str
+    policy_id: str
+    started_at: datetime
+    finished_at: Optional[datetime] = None
+    final_status: CheckFinalStatus = CheckFinalStatus.RUNNING
+    triggered_by: str
+    execution_mode: ExecutionMode
+    checks: List[CheckStageResult] = Field(default_factory=list)
+
+    @model_validator(mode="after")
+    def _validate_terminal_integrity(self):
+        if self.final_status == CheckFinalStatus.COMPLIANT:
+            mandatory = {c.stage: c.status for c in self.checks}
+            required = {
+                CheckStageName.DATA_PURITY,
+                CheckStageName.INTERNAL_SOURCES_ONLY,
+                CheckStageName.NO_EXTERNAL_ENDPOINTS,
+                CheckStageName.MANIFEST_CONSISTENCY,
+            }
+            if not required.issubset(mandatory.keys()):
+                raise ValueError("compliant run requires all mandatory stages")
+            if any(mandatory[s] != CheckStageStatus.PASS for s in required):
+                raise ValueError("compliant run requires PASS on all mandatory stages")
+        return self
+# [/DEF:ComplianceCheckRun:Class]

-# [DEF:ComplianceStageRun:Class]
-# @PURPOSE: Stage-level execution record inside a run.
-class ComplianceStageRun(Base):
-    __tablename__ = "clean_release_compliance_stage_runs"
-
-    id = Column(String, primary_key=True)
-    run_id = Column(String, ForeignKey("clean_release_compliance_runs.id"), nullable=False)
-    stage_name = Column(String, nullable=False)
-    status = Column(String, nullable=False)
-    started_at = Column(DateTime, nullable=True)
-    finished_at = Column(DateTime, nullable=True)
-    decision = Column(String, nullable=True)  # ComplianceDecision
-    details_json = Column(JSON, default=dict)
-# [/DEF:ComplianceStageRun:Class]

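The terminal-integrity validator encodes "COMPLIANT implies every mandatory stage ran and passed". A short sketch of both sides of that invariant, using the models added above (IDs are placeholders):

```python
from datetime import datetime

passing = [CheckStageResult(stage=s, status=CheckStageStatus.PASS) for s in CheckStageName]

run = ComplianceCheckRun(
    check_run_id="run-1", candidate_id="rc-1", policy_id="pol-1",  # placeholder IDs
    started_at=datetime.now(), triggered_by="ci",
    execution_mode=ExecutionMode.CI,
    final_status=CheckFinalStatus.COMPLIANT,
    checks=passing,                      # all four mandatory stages PASS -> accepted
)

try:
    ComplianceCheckRun(
        check_run_id="run-2", candidate_id="rc-1", policy_id="pol-1",
        started_at=datetime.now(), triggered_by="ci",
        execution_mode=ExecutionMode.CI,
        final_status=CheckFinalStatus.COMPLIANT,
        checks=passing[1:],              # DATA_PURITY missing -> rejected
    )
except ValueError as exc:
    print(exc)  # compliant run requires all mandatory stages
```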
 # [DEF:ComplianceViolation:Class]
-# @PURPOSE: Violation produced by a stage.
-class ComplianceViolation(Base):
-    __tablename__ = "clean_release_compliance_violations"
-
-    id = Column(String, primary_key=True)
-    run_id = Column(String, ForeignKey("clean_release_compliance_runs.id"), nullable=False)
-    stage_name = Column(String, nullable=False)
-    code = Column(String, nullable=False)
-    severity = Column(String, nullable=False)
-    artifact_path = Column(String, nullable=True)
-    artifact_sha256 = Column(String, nullable=True)
-    message = Column(String, nullable=False)
-    evidence_json = Column(JSON, default=dict)
+# @PURPOSE: Normalized violation row for triage and blocking decisions.
+class ComplianceViolation(BaseModel):
+    violation_id: str
+    check_run_id: str
+    category: ViolationCategory
+    severity: ViolationSeverity
+    location: str
+    evidence: Optional[str] = None
+    remediation: str
+    blocked_release: bool
+    detected_at: datetime
+
+    @model_validator(mode="after")
+    def _validate_violation(self):
+        if self.category == ViolationCategory.EXTERNAL_SOURCE and not self.blocked_release:
+            raise ValueError("external-source violation must block release")
+        if self.severity == ViolationSeverity.CRITICAL and not self.remediation.strip():
+            raise ValueError("critical violation requires remediation")
+        return self
 # [/DEF:ComplianceViolation:Class]

 # [DEF:ComplianceReport:Class]
-# @PURPOSE: Immutable result derived from a completed run.
-# @INVARIANT: Immutable after creation.
-class ComplianceReport(Base):
-    __tablename__ = "clean_release_compliance_reports"
-
-    id = Column(String, primary_key=True)
-    run_id = Column(String, ForeignKey("clean_release_compliance_runs.id"), nullable=False)
-    candidate_id = Column(String, ForeignKey("clean_release_candidates.id"), nullable=False)
-    final_status = Column(String, nullable=False)  # ComplianceDecision
-    summary_json = Column(JSON, nullable=False)
-    generated_at = Column(DateTime, default=datetime.utcnow)
-    immutable = Column(Boolean, default=True)
+# @PURPOSE: Final report payload for operator and audit systems.
+class ComplianceReport(BaseModel):
+    report_id: str
+    check_run_id: str
+    candidate_id: str
+    generated_at: datetime
+    final_status: CheckFinalStatus
+    operator_summary: str
+    structured_payload_ref: str
+    violations_count: int = Field(ge=0)
+    blocking_violations_count: int = Field(ge=0)
+
+    @model_validator(mode="after")
+    def _validate_report_counts(self):
+        if self.blocking_violations_count > self.violations_count:
+            raise ValueError("blocking_violations_count cannot exceed violations_count")
+        if self.final_status == CheckFinalStatus.BLOCKED and self.blocking_violations_count <= 0:
+            raise ValueError("blocked report requires blocking violations")
+        return self
 # [/DEF:ComplianceReport:Class]

-# [DEF:ApprovalDecision:Class]
-# @PURPOSE: Approval or rejection bound to a candidate and report.
-class ApprovalDecision(Base):
-    __tablename__ = "clean_release_approval_decisions"
-
-    id = Column(String, primary_key=True)
-    candidate_id = Column(String, ForeignKey("clean_release_candidates.id"), nullable=False)
-    report_id = Column(String, ForeignKey("clean_release_compliance_reports.id"), nullable=False)
-    decision = Column(String, nullable=False)  # ApprovalDecisionType
-    decided_by = Column(String, nullable=False)
-    decided_at = Column(DateTime, default=datetime.utcnow)
-    comment = Column(String, nullable=True)
-# [/DEF:ApprovalDecision:Class]
-
-# [DEF:PublicationRecord:Class]
-# @PURPOSE: Publication or revocation record.
-class PublicationRecord(Base):
-    __tablename__ = "clean_release_publication_records"
-
-    id = Column(String, primary_key=True)
-    candidate_id = Column(String, ForeignKey("clean_release_candidates.id"), nullable=False)
-    report_id = Column(String, ForeignKey("clean_release_compliance_reports.id"), nullable=False)
-    published_by = Column(String, nullable=False)
-    published_at = Column(DateTime, default=datetime.utcnow)
-    target_channel = Column(String, nullable=False)
-    publication_ref = Column(String, nullable=True)
-    status = Column(String, default=PublicationStatus.ACTIVE)
-# [/DEF:PublicationRecord:Class]
-
-# [DEF:CleanReleaseAuditLog:Class]
-# @PURPOSE: Represents a persistent audit log entry for clean release actions.
-import uuid
-class CleanReleaseAuditLog(Base):
-    __tablename__ = "clean_release_audit_logs"
-
-    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
-    candidate_id = Column(String, index=True, nullable=True)
-    action = Column(String, nullable=False)  # e.g. "TRANSITION", "APPROVE", "PUBLISH"
-    actor = Column(String, nullable=False)
-    timestamp = Column(DateTime, default=datetime.utcnow)
-    details_json = Column(JSON, default=dict)
-# [/DEF:CleanReleaseAuditLog:Class]

 # [/DEF:backend.src.models.clean_release:Module]
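One more invariant worth seeing in action: a BLOCKED report must carry at least one blocking violation, and blocking violations can never outnumber total violations. Sketch with placeholder identifiers:

```python
from datetime import datetime

try:
    ComplianceReport(
        report_id="rep-1", check_run_id="run-1", candidate_id="rc-1",  # placeholders
        generated_at=datetime.now(), final_status=CheckFinalStatus.BLOCKED,
        operator_summary="Blocked by data-purity findings",
        structured_payload_ref="reports/rep-1.json",
        violations_count=3, blocking_violations_count=0,  # inconsistent pair
    )
except ValueError as exc:
    print(exc)  # blocked report requires blocking violations
```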
@@ -1,25 +1,19 @@
 # [DEF:backend.src.models.config:Module]
 #
-# @TIER: CRITICAL
-# @SEMANTICS: database, config, settings, sqlalchemy, notification
-# @PURPOSE: Defines SQLAlchemy persistence models for application and notification configuration records.
+# @TIER: STANDARD
+# @SEMANTICS: database, config, settings, sqlalchemy
+# @PURPOSE: Defines database schema for persisted application configuration.
 # @LAYER: Domain
-# @RELATION: [DEPENDS_ON] ->[sqlalchemy]
-# @RELATION: [DEPENDS_ON] ->[backend.src.models.mapping:Base]
-# @INVARIANT: Configuration payload and notification credentials must remain persisted as non-null JSON documents.
+# @RELATION: DEPENDS_ON -> sqlalchemy

-from sqlalchemy import Column, String, DateTime, JSON, Boolean
+from sqlalchemy import Column, String, DateTime, JSON
 from sqlalchemy.sql import func

 from .mapping import Base


 # [DEF:AppConfigRecord:Class]
-# @PURPOSE: Stores persisted application configuration as a single authoritative record model.
-# @PRE: SQLAlchemy declarative Base is initialized and table metadata registration is active.
-# @POST: ORM table 'app_configurations' exposes id, payload, and updated_at fields with declared nullability/default semantics.
-# @SIDE_EFFECT: Registers ORM mapping metadata during module import.
-# @DATA_CONTRACT: Input -> persistence row {id:str, payload:json, updated_at:datetime}; Output -> AppConfigRecord ORM entity.
+# @PURPOSE: Stores the single source of truth for application configuration.
 class AppConfigRecord(Base):
     __tablename__ = "app_configurations"

@@ -29,25 +23,4 @@ class AppConfigRecord(Base):


 # [/DEF:AppConfigRecord:Class]

-# [DEF:NotificationConfig:Class]
-# @PURPOSE: Stores persisted provider-level notification configuration and encrypted credentials metadata.
-# @PRE: SQLAlchemy declarative Base is initialized and uuid generation is available at instance creation time.
-# @POST: ORM table 'notification_configs' exposes id, type, name, credentials, is_active, created_at, updated_at fields with declared constraints/defaults.
-# @SIDE_EFFECT: Registers ORM mapping metadata during module import; may generate UUID values for new entity instances.
-# @DATA_CONTRACT: Input -> persistence row {id:str, type:str, name:str, credentials:json, is_active:bool, created_at:datetime, updated_at:datetime}; Output -> NotificationConfig ORM entity.
-class NotificationConfig(Base):
-    __tablename__ = "notification_configs"
-
-    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
-    type = Column(String, nullable=False)  # SMTP, SLACK, TELEGRAM
-    name = Column(String, nullable=False)
-    credentials = Column(JSON, nullable=False)  # Encrypted connection details
-    is_active = Column(Boolean, default=True)
-    created_at = Column(DateTime(timezone=True), server_default=func.now())
-    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
-# [/DEF:NotificationConfig:Class]
-
-import uuid
-
 # [/DEF:backend.src.models.config:Module]
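The removed `NotificationConfig` mixes two timestamp strategies that are easy to confuse. A brief hedged illustration of the difference (table name is illustrative):

```python
from datetime import datetime
from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import declarative_base
from sqlalchemy.sql import func

Base = declarative_base()

class Example(Base):
    __tablename__ = "example_rows"  # illustrative
    id = Column(Integer, primary_key=True)
    # Computed by the database engine at INSERT time (emitted into the DDL).
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    # Computed in the Python process when the ORM flushes the object.
    touched_at = Column(DateTime, default=datetime.utcnow)
```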
@@ -38,7 +38,6 @@ class GitServerConfig(Base):
     url = Column(String(255), nullable=False)
     pat = Column(String(255), nullable=False)  # PERSONAL ACCESS TOKEN
     default_repository = Column(String(255), nullable=True)
-    default_branch = Column(String(255), default="main")
     status = Column(Enum(GitStatus), default=GitStatus.UNKNOWN)
     last_validated = Column(DateTime, default=datetime.utcnow)
 # [/DEF:GitServerConfig:Class]
@@ -54,7 +53,7 @@ class GitRepository(Base):
     config_id = Column(String(36), ForeignKey("git_server_configs.id"), nullable=False)
     remote_url = Column(String(255), nullable=False)
     local_path = Column(String(255), nullable=False)
-    current_branch = Column(String(255), default="dev")
+    current_branch = Column(String(255), default="main")
     sync_status = Column(Enum(SyncStatus), default=SyncStatus.CLEAN)
 # [/DEF:GitRepository:Class]

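Both Git models persist Python enums through SQLAlchemy's `Enum` type. A tiny hedged sketch of that mapping (enum values and table name are illustrative; the real enums live elsewhere in the repo):

```python
import enum
from sqlalchemy import Column, Enum, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class SyncStatus(enum.Enum):  # illustrative stand-in
    CLEAN = "clean"
    DIRTY = "dirty"

class Repo(Base):
    __tablename__ = "repos_demo"  # illustrative
    id = Column(Integer, primary_key=True)
    # Stored as the enum *names* by default; default= takes the enum member itself.
    sync_status = Column(Enum(SyncStatus), default=SyncStatus.CLEAN)
```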
@@ -5,7 +5,7 @@
 # @LAYER: Domain
 # @RELATION: INHERITS_FROM -> backend.src.models.mapping.Base

-from sqlalchemy import Column, String, Boolean, DateTime, JSON, Text, Time, ForeignKey
+from sqlalchemy import Column, String, Boolean, DateTime, JSON, Text
 from datetime import datetime
 import uuid
 from .mapping import Base
@@ -13,26 +13,6 @@ from .mapping import Base
 def generate_uuid():
     return str(uuid.uuid4())

-# [DEF:ValidationPolicy:Class]
-# @PURPOSE: Defines a scheduled rule for validating a group of dashboards within an execution window.
-class ValidationPolicy(Base):
-    __tablename__ = "validation_policies"
-
-    id = Column(String, primary_key=True, default=generate_uuid)
-    name = Column(String, nullable=False)
-    environment_id = Column(String, nullable=False)
-    is_active = Column(Boolean, default=True)
-    dashboard_ids = Column(JSON, nullable=False)  # Array of dashboard IDs
-    schedule_days = Column(JSON, nullable=False)  # Array of integers (0-6)
-    window_start = Column(Time, nullable=False)
-    window_end = Column(Time, nullable=False)
-    notify_owners = Column(Boolean, default=True)
-    custom_channels = Column(JSON, nullable=True)  # List of external channels
-    alert_condition = Column(String, default="FAIL_ONLY")  # FAIL_ONLY, WARN_AND_FAIL, ALWAYS
-    created_at = Column(DateTime, default=datetime.utcnow)
-    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
-# [/DEF:ValidationPolicy:Class]

 # [DEF:LLMProvider:Class]
 # @PURPOSE: SQLAlchemy model for LLM provider configuration.
 class LLMProvider(Base):
@@ -54,11 +34,9 @@ class ValidationRecord(Base):
     __tablename__ = "llm_validation_results"

     id = Column(String, primary_key=True, default=generate_uuid)
-    task_id = Column(String, nullable=True, index=True)  # Reference to TaskRecord
     dashboard_id = Column(String, nullable=False, index=True)
-    environment_id = Column(String, nullable=True, index=True)
     timestamp = Column(DateTime, default=datetime.utcnow)
-    status = Column(String, nullable=False)  # PASS, WARN, FAIL, UNKNOWN
+    status = Column(String, nullable=False)  # PASS, WARN, FAIL
     screenshot_path = Column(String, nullable=True)
     issues = Column(JSON, nullable=False)
     summary = Column(Text, nullable=False)
@@ -80,8 +80,6 @@ class MigrationJob(Base):
     status = Column(SQLEnum(MigrationStatus), default=MigrationStatus.PENDING)
     replace_db = Column(Boolean, default=False)
     created_at = Column(DateTime(timezone=True), server_default=func.now())
-# [/DEF:MigrationJob:Class]
-
 # [DEF:ResourceMapping:Class]
 # @TIER: STANDARD
 # @PURPOSE: Maps a universal UUID for a resource to its actual ID on a specific environment.
@@ -1,60 +0,0 @@
-# [DEF:backend.src.models.profile:Module]
-#
-# @TIER: STANDARD
-# @SEMANTICS: profile, preferences, persistence, user, dashboard-filter, git, ui-preferences, sqlalchemy
-# @PURPOSE: Defines persistent per-user profile settings for dashboard filter, Git identity/token, and UX preferences.
-# @LAYER: Domain
-# @RELATION: DEPENDS_ON -> backend.src.models.auth
-# @RELATION: INHERITS_FROM -> backend.src.models.mapping.Base
-#
-# @INVARIANT: Exactly one preference row exists per user_id.
-# @INVARIANT: Sensitive Git token is stored encrypted and never returned in plaintext.
-
-# [SECTION: IMPORTS]
-import uuid
-from datetime import datetime
-from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey
-from sqlalchemy.orm import relationship
-from .mapping import Base
-# [/SECTION]
-
-
-# [DEF:UserDashboardPreference:Class]
-# @TIER: STANDARD
-# @PURPOSE: Stores Superset username binding and default "my dashboards" toggle for one authenticated user.
-class UserDashboardPreference(Base):
-    __tablename__ = "user_dashboard_preferences"
-
-    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
-    user_id = Column(String, ForeignKey("users.id"), nullable=False, unique=True, index=True)
-
-    superset_username = Column(String, nullable=True)
-    superset_username_normalized = Column(String, nullable=True, index=True)
-
-    show_only_my_dashboards = Column(Boolean, nullable=False, default=False)
-    show_only_slug_dashboards = Column(Boolean, nullable=False, default=True)
-
-    git_username = Column(String, nullable=True)
-    git_email = Column(String, nullable=True)
-    git_personal_access_token_encrypted = Column(String, nullable=True)
-
-    start_page = Column(String, nullable=False, default="dashboards")
-    auto_open_task_drawer = Column(Boolean, nullable=False, default=True)
-    dashboards_table_density = Column(String, nullable=False, default="comfortable")
-
-    telegram_id = Column(String, nullable=True)
-    email_address = Column(String, nullable=True)
-    notify_on_fail = Column(Boolean, nullable=False, default=True)
-
-    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
-    updated_at = Column(
-        DateTime,
-        nullable=False,
-        default=datetime.utcnow,
-        onupdate=datetime.utcnow,
-    )
-
-    user = relationship("User")
-# [/DEF:UserDashboardPreference:Class]
-
-# [/DEF:backend.src.models.profile:Module]
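The second @INVARIANT above implies an encrypt-on-write path for git_personal_access_token_encrypted, but the module never showed that helper. The following is a minimal sketch of one way to honor it, assuming the `cryptography` package; the key handling and mask format are assumptions, not code from this repository:

from cryptography.fernet import Fernet

fernet = Fernet(Fernet.generate_key())  # in production the key would come from app config (assumption)

def encrypt_token(plaintext: str) -> str:
    # Stored value is ciphertext; plaintext never reaches the database column.
    return fernet.encrypt(plaintext.encode("utf-8")).decode("utf-8")

def mask_token(plaintext: str) -> str:
    # API responses expose at most a masked suffix, never the full token.
    return "****" + plaintext[-4:] if len(plaintext) > 4 else "****"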
@@ -25,7 +25,6 @@ class TaskType(str, Enum):
     BACKUP = "backup"
     MIGRATION = "migration"
     DOCUMENTATION = "documentation"
-    CLEAN_RELEASE = "clean_release"
     UNKNOWN = "unknown"
 # [/DEF:TaskType:Class]

@@ -112,7 +111,6 @@ class TaskReport(BaseModel):
     updated_at: datetime
     summary: str
     details: Optional[Dict[str, Any]] = None
-    validation_record: Optional[Dict[str, Any]] = None  # Extended for US2
     error_context: Optional[ErrorContext] = None
     source_ref: Optional[Dict[str, Any]] = None

@@ -1,3 +0,0 @@
-# [DEF:src.plugins:Package]
-# @PURPOSE: Plugin package root for dynamic discovery and runtime imports.
-# [/DEF:src.plugins:Package]
@@ -1,3 +0,0 @@
-# [DEF:src.plugins.git:Package]
-# @PURPOSE: Git plugin extension package root.
-# [/DEF:src.plugins.git:Package]
@@ -21,9 +21,8 @@ from ...services.llm_provider import LLMProviderService
 from ...core.superset_client import SupersetClient
 from .service import ScreenshotService, LLMClient
 from .models import LLMProviderType, ValidationStatus, ValidationResult, DetectedIssue
-from ...models.llm import ValidationRecord, ValidationPolicy
+from ...models.llm import ValidationRecord
 from ...core.task_manager.context import TaskContext
-from ...services.notifications.service import NotificationService
 from ...services.llm_prompt_templates import (
     DEFAULT_LLM_PROMPTS,
     is_multimodal_model,
@@ -284,9 +283,7 @@ class DashboardValidationPlugin(PluginBase):
         }

         db_record = ValidationRecord(
-            task_id=context.task_id if context else None,
             dashboard_id=validation_result.dashboard_id,
-            environment_id=env_id,
             status=validation_result.status.value,
             summary=validation_result.summary,
             issues=[issue.dict() for issue in validation_result.issues],
@@ -297,20 +294,11 @@ class DashboardValidationPlugin(PluginBase):
         db.commit()

         # 7. Notification on failure (US1 / FR-015)
-        try:
-            policy_id = params.get("policy_id")
-            policy = None
-            if policy_id:
-                policy = db.query(ValidationPolicy).filter(ValidationPolicy.id == policy_id).first()
-
-            notification_service = NotificationService(db, config_mgr)
-            await notification_service.dispatch_report(
-                record=db_record,
-                policy=policy,
-                background_tasks=context.background_tasks if context else None
-            )
-        except Exception as e:
-            log.error(f"Failed to dispatch notifications: {e}")
+        if validation_result.status == ValidationStatus.FAIL:
+            log.warning(f"Dashboard {dashboard_id} validation FAILED. Summary: {validation_result.summary}")
+            # Placeholder for Email/Pulse notification dispatch
+            # In a real implementation, we would call a NotificationService here
+            # with a payload containing the summary and a link to the report.

         # Final log to ensure all analysis is visible in task logs
         log.info(f"Validation completed for dashboard {dashboard_id}. Status: {validation_result.status.value}")
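The alert_condition values referenced in the removed policy model (FAIL_ONLY, WARN_AND_FAIL, ALWAYS) suggest a small gate in front of any future dispatcher. A minimal sketch of that gate, assuming status strings match the ValidationStatus values; this helper is hypothetical, not part of the diff:

def should_alert(status: str, condition: str) -> bool:
    # Maps a policy's alert_condition to the validation outcome.
    if condition == "ALWAYS":
        return True
    if condition == "WARN_AND_FAIL":
        return status in ("WARN", "FAIL")
    return status == "FAIL"  # FAIL_ONLY is the documented default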
@@ -228,25 +228,6 @@ class StoragePlugin(PluginBase):
             f"[StoragePlugin][Action] Listing files in root: {root}, category: {category}, subpath: {subpath}, recursive: {recursive}"
         )
         files = []

-        # Root view contract: show category directories only.
-        if category is None and not subpath:
-            for cat in FileCategory:
-                base_dir = root / cat.value
-                if not base_dir.exists():
-                    continue
-                stat = base_dir.stat()
-                files.append(
-                    StoredFile(
-                        name=cat.value,
-                        path=cat.value,
-                        size=0,
-                        created_at=datetime.fromtimestamp(stat.st_ctime),
-                        category=cat,
-                        mime_type="directory",
-                    )
-                )
-            return sorted(files, key=lambda x: x.name)
-
         categories = [category] if category else list(FileCategory)

@@ -1,3 +0,0 @@
-# [DEF:src.schemas:Package]
-# @PURPOSE: API schema package root.
-# [/DEF:src.schemas:Package]
@@ -1,84 +0,0 @@
-# [DEF:backend.src.schemas.__tests__.test_settings_and_health_schemas:Module]
-# @TIER: STANDARD
-# @PURPOSE: Regression tests for settings and health schema contracts updated in 026 fix batch.
-
-import pytest
-from pydantic import ValidationError
-
-from src.schemas.health import DashboardHealthItem
-from src.schemas.settings import ValidationPolicyCreate
-
-
-# [DEF:test_validation_policy_create_accepts_structured_custom_channels:Function]
-# @PURPOSE: Ensure policy schema accepts structured custom channel objects with type/target fields.
-def test_validation_policy_create_accepts_structured_custom_channels():
-    payload = {
-        "name": "Daily Health",
-        "environment_id": "env-1",
-        "dashboard_ids": ["10", "11"],
-        "schedule_days": [0, 1, 2],
-        "window_start": "01:00:00",
-        "window_end": "03:00:00",
-        "notify_owners": True,
-        "custom_channels": [{"type": "SLACK", "target": "#alerts"}],
-        "alert_condition": "FAIL_ONLY",
-    }
-
-    policy = ValidationPolicyCreate(**payload)
-
-    assert policy.custom_channels is not None
-    assert len(policy.custom_channels) == 1
-    assert policy.custom_channels[0].type == "SLACK"
-    assert policy.custom_channels[0].target == "#alerts"
-# [/DEF:test_validation_policy_create_accepts_structured_custom_channels:Function]
-
-
-# [DEF:test_validation_policy_create_rejects_legacy_string_custom_channels:Function]
-# @PURPOSE: Ensure legacy list[str] custom channel payload is rejected by typed channel contract.
-def test_validation_policy_create_rejects_legacy_string_custom_channels():
-    payload = {
-        "name": "Daily Health",
-        "environment_id": "env-1",
-        "dashboard_ids": ["10"],
-        "schedule_days": [0],
-        "window_start": "01:00:00",
-        "window_end": "02:00:00",
-        "notify_owners": False,
-        "custom_channels": ["SLACK:#alerts"],
-    }
-
-    with pytest.raises(ValidationError):
-        ValidationPolicyCreate(**payload)
-# [/DEF:test_validation_policy_create_rejects_legacy_string_custom_channels:Function]
-
-
-# [DEF:test_dashboard_health_item_status_accepts_only_whitelisted_values:Function]
-# @PURPOSE: Verify strict grouped regex only accepts PASS/WARN/FAIL/UNKNOWN exact statuses.
-def test_dashboard_health_item_status_accepts_only_whitelisted_values():
-    valid = DashboardHealthItem(
-        dashboard_id="dash-1",
-        environment_id="env-1",
-        status="PASS",
-        last_check="2026-03-10T10:00:00",
-    )
-    assert valid.status == "PASS"
-
-    with pytest.raises(ValidationError):
-        DashboardHealthItem(
-            dashboard_id="dash-1",
-            environment_id="env-1",
-            status="PASSING",
-            last_check="2026-03-10T10:00:00",
-        )
-
-    with pytest.raises(ValidationError):
-        DashboardHealthItem(
-            dashboard_id="dash-1",
-            environment_id="env-1",
-            status="FAIL ",
-            last_check="2026-03-10T10:00:00",
-        )
-# [/DEF:test_dashboard_health_item_status_accepts_only_whitelisted_values:Function]
-
-
-# [/DEF:backend.src.schemas.__tests__.test_settings_and_health_schemas:Module]
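These regression tests were self-contained; if resurrected, they could run in isolation. A sketch of doing so programmatically (the file path is inferred from the module name and may differ on disk):

import pytest

# Run only this module's schema regression tests, quietly.
raise SystemExit(pytest.main(["-q", "backend/src/schemas/__tests__/test_settings_and_health_schemas.py"]))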
@@ -1,33 +0,0 @@
-# [DEF:backend.src.schemas.health:Module]
-# @TIER: STANDARD
-# @SEMANTICS: health, schemas, pydantic
-# @PURPOSE: Pydantic schemas for dashboard health summary.
-# @LAYER: Domain
-
-from pydantic import BaseModel, Field
-from typing import List, Optional
-from datetime import datetime
-
-# [DEF:DashboardHealthItem:Class]
-# @PURPOSE: Represents the latest health status of a single dashboard.
-class DashboardHealthItem(BaseModel):
-    dashboard_id: str
-    dashboard_title: Optional[str] = None
-    environment_id: str
-    status: str = Field(..., pattern="^(PASS|WARN|FAIL|UNKNOWN)$")
-    last_check: datetime
-    task_id: Optional[str] = None
-    summary: Optional[str] = None
-# [/DEF:DashboardHealthItem:Class]
-
-# [DEF:HealthSummaryResponse:Class]
-# @PURPOSE: Aggregated health summary for all dashboards.
-class HealthSummaryResponse(BaseModel):
-    items: List[DashboardHealthItem]
-    pass_count: int
-    warn_count: int
-    fail_count: int
-    unknown_count: int
-# [/DEF:HealthSummaryResponse:Class]
-
-# [/DEF:backend.src.schemas.health:Module]
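Aggregating items into HealthSummaryResponse is mechanical; a minimal sketch of the counting (the helper is an illustration, not code from the deleted module):

from collections import Counter

def build_summary(items: list[DashboardHealthItem]) -> HealthSummaryResponse:
    # One pass over the statuses already validated by the PASS|WARN|FAIL|UNKNOWN pattern.
    counts = Counter(item.status for item in items)
    return HealthSummaryResponse(
        items=items,
        pass_count=counts.get("PASS", 0),
        warn_count=counts.get("WARN", 0),
        fail_count=counts.get("FAIL", 0),
        unknown_count=counts.get("UNKNOWN", 0),
    )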
@@ -1,180 +0,0 @@
-# [DEF:backend.src.schemas.profile:Module]
-#
-# @TIER: STANDARD
-# @SEMANTICS: profile, schemas, pydantic, preferences, superset, lookup, security, git, ux
-# @PURPOSE: Defines API schemas for profile preference persistence, security read-only snapshot, and Superset account lookup.
-# @LAYER: API
-# @RELATION: DEPENDS_ON -> pydantic
-#
-# @INVARIANT: Schema shapes stay stable for profile UI states and backend preference contracts.
-
-# [SECTION: IMPORTS]
-from datetime import datetime
-from typing import List, Literal, Optional
-from pydantic import BaseModel, Field
-# [/SECTION]
-
-
-# [DEF:ProfilePermissionState:Class]
-# @TIER: STANDARD
-# @PURPOSE: Represents one permission badge state for profile read-only security view.
-class ProfilePermissionState(BaseModel):
-    key: str
-    allowed: bool
-# [/DEF:ProfilePermissionState:Class]
-
-
-# [DEF:ProfileSecuritySummary:Class]
-# @TIER: STANDARD
-# @PURPOSE: Read-only security and access snapshot for current user.
-class ProfileSecuritySummary(BaseModel):
-    read_only: bool = True
-    auth_source: Optional[str] = None
-    current_role: Optional[str] = None
-    role_source: Optional[str] = None
-    roles: List[str] = Field(default_factory=list)
-    permissions: List[ProfilePermissionState] = Field(default_factory=list)
-# [/DEF:ProfileSecuritySummary:Class]
-
-
-# [DEF:ProfilePreference:Class]
-# @TIER: STANDARD
-# @PURPOSE: Represents persisted profile preference for a single authenticated user.
-class ProfilePreference(BaseModel):
-    user_id: str
-    superset_username: Optional[str] = None
-    superset_username_normalized: Optional[str] = None
-    show_only_my_dashboards: bool = False
-    show_only_slug_dashboards: bool = True
-
-    git_username: Optional[str] = None
-    git_email: Optional[str] = None
-    has_git_personal_access_token: bool = False
-    git_personal_access_token_masked: Optional[str] = None
-
-    start_page: Literal["dashboards", "datasets", "reports"] = "dashboards"
-    auto_open_task_drawer: bool = True
-    dashboards_table_density: Literal["compact", "comfortable"] = "comfortable"
-
-    telegram_id: Optional[str] = None
-    email_address: Optional[str] = None
-    notify_on_fail: bool = True
-
-    created_at: datetime
-    updated_at: datetime
-
-    class Config:
-        from_attributes = True
-# [/DEF:ProfilePreference:Class]
-
-
-# [DEF:ProfilePreferenceUpdateRequest:Class]
-# @TIER: STANDARD
-# @PURPOSE: Request payload for updating current user's profile settings.
-class ProfilePreferenceUpdateRequest(BaseModel):
-    superset_username: Optional[str] = Field(
-        default=None,
-        description="Apache Superset username bound to current user profile.",
-    )
-    show_only_my_dashboards: Optional[bool] = Field(
-        default=None,
-        description='When true, "/dashboards" can auto-apply profile filter in main context.',
-    )
-    show_only_slug_dashboards: Optional[bool] = Field(
-        default=None,
-        description='When true, "/dashboards" hides dashboards without slug by default.',
-    )
-    git_username: Optional[str] = Field(
-        default=None,
-        description="Git author username used for commit signature.",
-    )
-    git_email: Optional[str] = Field(
-        default=None,
-        description="Git author email used for commit signature.",
-    )
-    git_personal_access_token: Optional[str] = Field(
-        default=None,
-        description="Personal Access Token value. Empty string clears existing token.",
-    )
-    start_page: Optional[
-        Literal["dashboards", "datasets", "reports", "reports-logs"]
-    ] = Field(
-        default=None,
-        description="Preferred start page after login.",
-    )
-    auto_open_task_drawer: Optional[bool] = Field(
-        default=None,
-        description="Auto-open task drawer when long-running tasks start.",
-    )
-    dashboards_table_density: Optional[
-        Literal["compact", "comfortable", "free"]
-    ] = Field(
-        default=None,
-        description="Preferred table density for dashboard listings.",
-    )
-    telegram_id: Optional[str] = Field(
-        default=None,
-        description="Telegram ID for notifications.",
-    )
-    email_address: Optional[str] = Field(
-        default=None,
-        description="Email address for notifications (overrides system email).",
-    )
-    notify_on_fail: Optional[bool] = Field(
-        default=None,
-        description="Whether to send notifications on validation failure.",
-    )
-# [/DEF:ProfilePreferenceUpdateRequest:Class]
-
-
-# [DEF:ProfilePreferenceResponse:Class]
-# @TIER: STANDARD
-# @PURPOSE: Response envelope for profile preference read/update endpoints.
-class ProfilePreferenceResponse(BaseModel):
-    status: Literal["success", "error"] = "success"
-    message: Optional[str] = None
-    validation_errors: List[str] = Field(default_factory=list)
-    preference: ProfilePreference
-    security: ProfileSecuritySummary = Field(default_factory=ProfileSecuritySummary)
-# [/DEF:ProfilePreferenceResponse:Class]
-
-
-# [DEF:SupersetAccountLookupRequest:Class]
-# @TIER: STANDARD
-# @PURPOSE: Query contract for Superset account lookup by selected environment.
-class SupersetAccountLookupRequest(BaseModel):
-    environment_id: str
-    search: Optional[str] = None
-    page_index: int = Field(default=0, ge=0)
-    page_size: int = Field(default=20, ge=1, le=100)
-    sort_column: str = Field(default="username")
-    sort_order: str = Field(default="desc")
-# [/DEF:SupersetAccountLookupRequest:Class]
-
-
-# [DEF:SupersetAccountCandidate:Class]
-# @TIER: STANDARD
-# @PURPOSE: Canonical account candidate projected from Superset users payload.
-class SupersetAccountCandidate(BaseModel):
-    environment_id: str
-    username: str
-    display_name: Optional[str] = None
-    email: Optional[str] = None
-    is_active: Optional[bool] = None
-# [/DEF:SupersetAccountCandidate:Class]
-
-
-# [DEF:SupersetAccountLookupResponse:Class]
-# @TIER: STANDARD
-# @PURPOSE: Response envelope for Superset account lookup (success or degraded mode).
-class SupersetAccountLookupResponse(BaseModel):
-    status: Literal["success", "degraded"]
-    environment_id: str
-    page_index: int = Field(ge=0)
-    page_size: int = Field(ge=1, le=100)
-    total: int = Field(ge=0)
-    warning: Optional[str] = None
-    items: List[SupersetAccountCandidate] = Field(default_factory=list)
-# [/DEF:SupersetAccountLookupResponse:Class]
-
-# [/DEF:backend.src.schemas.profile:Module]
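With Config.from_attributes enabled, ProfilePreference could be hydrated straight from the ORM row, with the masked-token fields derived rather than stored. A sketch of how that projection might have looked while these schemas were in place (pydantic v2 assumed; the mask format is an assumption):

def to_profile_preference(row) -> ProfilePreference:
    # row: an assumed UserDashboardPreference ORM instance from models.profile
    pref = ProfilePreference.model_validate(row, from_attributes=True)
    token = row.git_personal_access_token_encrypted
    return pref.model_copy(update={
        "has_git_personal_access_token": bool(token),
        "git_personal_access_token_masked": "****" if token else None,
    })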
@@ -1,68 +0,0 @@
-# [DEF:backend.src.schemas.settings:Module]
-# @TIER: STANDARD
-# @SEMANTICS: settings, schemas, pydantic, validation
-# @PURPOSE: Pydantic schemas for application settings and automation policies.
-# @LAYER: Domain
-
-from pydantic import BaseModel, Field
-from typing import List, Optional
-from datetime import datetime, time
-
-# [DEF:NotificationChannel:Class]
-# @PURPOSE: Structured notification channel definition for policy-level custom routing.
-class NotificationChannel(BaseModel):
-    type: str = Field(..., description="Notification channel type (e.g., SLACK, SMTP, TELEGRAM)")
-    target: str = Field(..., description="Notification destination (e.g., #alerts, chat id, email)")
-# [/DEF:NotificationChannel:Class]
-
-# [DEF:ValidationPolicyBase:Class]
-# @PURPOSE: Base schema for validation policy data.
-class ValidationPolicyBase(BaseModel):
-    name: str = Field(..., description="Name of the policy")
-    environment_id: str = Field(..., description="Target Superset environment ID")
-    is_active: bool = Field(True, description="Whether the policy is currently active")
-    dashboard_ids: List[str] = Field(..., description="List of dashboard IDs to validate")
-    schedule_days: List[int] = Field(..., description="Days of the week (0-6, 0=Sunday) to run")
-    window_start: time = Field(..., description="Start of the execution window")
-    window_end: time = Field(..., description="End of the execution window")
-    notify_owners: bool = Field(True, description="Whether to notify dashboard owners on failure")
-    custom_channels: Optional[List[NotificationChannel]] = Field(
-        None,
-        description="List of additional structured notification channels",
-    )
-    alert_condition: str = Field("FAIL_ONLY", description="Condition to trigger alerts: FAIL_ONLY, WARN_AND_FAIL, ALWAYS")
-# [/DEF:ValidationPolicyBase:Class]
-
-# [DEF:ValidationPolicyCreate:Class]
-# @PURPOSE: Schema for creating a new validation policy.
-class ValidationPolicyCreate(ValidationPolicyBase):
-    pass
-# [/DEF:ValidationPolicyCreate:Class]
-
-# [DEF:ValidationPolicyUpdate:Class]
-# @PURPOSE: Schema for updating an existing validation policy.
-class ValidationPolicyUpdate(BaseModel):
-    name: Optional[str] = None
-    environment_id: Optional[str] = None
-    is_active: Optional[bool] = None
-    dashboard_ids: Optional[List[str]] = None
-    schedule_days: Optional[List[int]] = None
-    window_start: Optional[time] = None
-    window_end: Optional[time] = None
-    notify_owners: Optional[bool] = None
-    custom_channels: Optional[List[NotificationChannel]] = None
-    alert_condition: Optional[str] = None
-# [/DEF:ValidationPolicyUpdate:Class]
-
-# [DEF:ValidationPolicyResponse:Class]
-# @PURPOSE: Schema for validation policy response data.
-class ValidationPolicyResponse(ValidationPolicyBase):
-    id: str
-    created_at: datetime
-    updated_at: datetime
-
-    class Config:
-        from_attributes = True
-# [/DEF:ValidationPolicyResponse:Class]
-
-# [/DEF:backend.src.schemas.settings:Module]
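ValidationPolicyUpdate is an all-optional PATCH contract: only fields the caller actually sent should be written back. A minimal sketch of applying it to a stored row, assuming pydantic v2 (`model_dump(exclude_unset=True)`); the helper itself is hypothetical:

def apply_policy_update(db_policy, update: ValidationPolicyUpdate):
    # exclude_unset skips fields the request never mentioned,
    # so absent fields are not clobbered with None.
    for field, value in update.model_dump(exclude_unset=True).items():
        setattr(db_policy, field, value)
    return db_policy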
@@ -1,3 +0,0 @@
-# [DEF:src.scripts:Package]
-# @PURPOSE: Script entrypoint package root.
-# [/DEF:src.scripts:Package]
@@ -1,444 +0,0 @@
-# [DEF:backend.src.scripts.clean_release_cli:Module]
-# @TIER: STANDARD
-# @SEMANTICS: cli, clean-release, candidate, artifacts, manifest
-# @PURPOSE: Provide headless CLI commands for candidate registration, artifact import and manifest build.
-# @LAYER: Scripts
-
-from __future__ import annotations
-
-import argparse
-import json
-from datetime import date, datetime, timezone
-from typing import Any, Dict, List, Optional
-
-from ..models.clean_release import CandidateArtifact, ReleaseCandidate
-from ..services.clean_release.approval_service import approve_candidate, reject_candidate
-from ..services.clean_release.compliance_execution_service import ComplianceExecutionService
-from ..services.clean_release.enums import CandidateStatus
-from ..services.clean_release.publication_service import publish_candidate, revoke_publication
-
-
-# [DEF:build_parser:Function]
-# @PURPOSE: Build argparse parser for clean release CLI.
-def build_parser() -> argparse.ArgumentParser:
-    parser = argparse.ArgumentParser(prog="clean-release-cli")
-    subparsers = parser.add_subparsers(dest="command", required=True)
-
-    register = subparsers.add_parser("candidate-register")
-    register.add_argument("--candidate-id", required=True)
-    register.add_argument("--version", required=True)
-    register.add_argument("--source-snapshot-ref", required=True)
-    register.add_argument("--created-by", default="cli-operator")
-
-    artifact_import = subparsers.add_parser("artifact-import")
-    artifact_import.add_argument("--candidate-id", required=True)
-    artifact_import.add_argument("--artifact-id", required=True)
-    artifact_import.add_argument("--path", required=True)
-    artifact_import.add_argument("--sha256", required=True)
-    artifact_import.add_argument("--size", type=int, required=True)
-
-    manifest_build = subparsers.add_parser("manifest-build")
-    manifest_build.add_argument("--candidate-id", required=True)
-    manifest_build.add_argument("--created-by", default="cli-operator")
-
-    compliance_run = subparsers.add_parser("compliance-run")
-    compliance_run.add_argument("--candidate-id", required=True)
-    compliance_run.add_argument("--manifest-id", required=False, default=None)
-    compliance_run.add_argument("--actor", default="cli-operator")
-    compliance_run.add_argument("--json", action="store_true")
-
-    compliance_status = subparsers.add_parser("compliance-status")
-    compliance_status.add_argument("--run-id", required=True)
-    compliance_status.add_argument("--json", action="store_true")
-
-    compliance_report = subparsers.add_parser("compliance-report")
-    compliance_report.add_argument("--run-id", required=True)
-    compliance_report.add_argument("--json", action="store_true")
-
-    compliance_violations = subparsers.add_parser("compliance-violations")
-    compliance_violations.add_argument("--run-id", required=True)
-    compliance_violations.add_argument("--json", action="store_true")
-
-    approve = subparsers.add_parser("approve")
-    approve.add_argument("--candidate-id", required=True)
-    approve.add_argument("--report-id", required=True)
-    approve.add_argument("--actor", default="cli-operator")
-    approve.add_argument("--comment", required=False, default=None)
-    approve.add_argument("--json", action="store_true")
-
-    reject = subparsers.add_parser("reject")
-    reject.add_argument("--candidate-id", required=True)
-    reject.add_argument("--report-id", required=True)
-    reject.add_argument("--actor", default="cli-operator")
-    reject.add_argument("--comment", required=False, default=None)
-    reject.add_argument("--json", action="store_true")
-
-    publish = subparsers.add_parser("publish")
-    publish.add_argument("--candidate-id", required=True)
-    publish.add_argument("--report-id", required=True)
-    publish.add_argument("--actor", default="cli-operator")
-    publish.add_argument("--target-channel", required=True)
-    publish.add_argument("--publication-ref", required=False, default=None)
-    publish.add_argument("--json", action="store_true")
-
-    revoke = subparsers.add_parser("revoke")
-    revoke.add_argument("--publication-id", required=True)
-    revoke.add_argument("--actor", default="cli-operator")
-    revoke.add_argument("--comment", required=False, default=None)
-    revoke.add_argument("--json", action="store_true")
-
-    return parser
-# [/DEF:build_parser:Function]
-
-
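For orientation, the parser can be exercised without touching any repository state; a minimal sketch assuming the module above is importable (the candidate id and version are illustrative):

args = build_parser().parse_args(
    ["candidate-register", "--candidate-id", "cand-1", "--version", "1.2.0",
     "--source-snapshot-ref", "v1.2.0-rc1"]
)
assert args.command == "candidate-register"
assert args.created_by == "cli-operator"  # the default applies when the flag is omitted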
-# [DEF:run_candidate_register:Function]
-# @PURPOSE: Register candidate in repository via CLI command.
-# @PRE: Candidate ID must be unique.
-# @POST: Candidate is persisted in DRAFT status.
-def run_candidate_register(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-    repository = get_clean_release_repository()
-    existing = repository.get_candidate(args.candidate_id)
-    if existing is not None:
-        print(json.dumps({"status": "error", "message": "candidate already exists"}))
-        return 1
-
-    candidate = ReleaseCandidate(
-        id=args.candidate_id,
-        version=args.version,
-        source_snapshot_ref=args.source_snapshot_ref,
-        created_by=args.created_by,
-        created_at=datetime.now(timezone.utc),
-        status=CandidateStatus.DRAFT.value,
-    )
-    repository.save_candidate(candidate)
-    print(json.dumps({"status": "ok", "candidate_id": candidate.id}))
-    return 0
-# [/DEF:run_candidate_register:Function]
-
-
-# [DEF:run_artifact_import:Function]
-# @PURPOSE: Import single artifact for existing candidate.
-# @PRE: Candidate must exist.
-# @POST: Artifact is persisted for candidate.
-def run_artifact_import(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-    repository = get_clean_release_repository()
-    candidate = repository.get_candidate(args.candidate_id)
-    if candidate is None:
-        print(json.dumps({"status": "error", "message": "candidate not found"}))
-        return 1
-
-    artifact = CandidateArtifact(
-        id=args.artifact_id,
-        candidate_id=args.candidate_id,
-        path=args.path,
-        sha256=args.sha256,
-        size=args.size,
-    )
-    repository.save_artifact(artifact)
-
-    if candidate.status == CandidateStatus.DRAFT.value:
-        candidate.transition_to(CandidateStatus.PREPARED)
-        repository.save_candidate(candidate)
-
-    print(json.dumps({"status": "ok", "artifact_id": artifact.id}))
-    return 0
-# [/DEF:run_artifact_import:Function]
-
-
-# [DEF:run_manifest_build:Function]
-# @PURPOSE: Build immutable manifest snapshot for candidate.
-# @PRE: Candidate must exist.
-# @POST: New manifest version is persisted.
-def run_manifest_build(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-    from ..services.clean_release.manifest_service import build_manifest_snapshot
-
-    repository = get_clean_release_repository()
-    try:
-        manifest = build_manifest_snapshot(
-            repository=repository,
-            candidate_id=args.candidate_id,
-            created_by=args.created_by,
-        )
-    except ValueError as exc:
-        print(json.dumps({"status": "error", "message": str(exc)}))
-        return 1
-
-    print(json.dumps({"status": "ok", "manifest_id": manifest.id, "version": manifest.manifest_version}))
-    return 0
-# [/DEF:run_manifest_build:Function]
-
-
-# [DEF:run_compliance_run:Function]
-# @PURPOSE: Execute compliance run for candidate with optional manifest fallback.
-# @PRE: Candidate exists and trusted snapshots are configured.
-# @POST: Returns run payload and exit code 0 on success.
-def run_compliance_run(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository, get_config_manager
-
-    repository = get_clean_release_repository()
-    config_manager = get_config_manager()
-    service = ComplianceExecutionService(repository=repository, config_manager=config_manager)
-
-    try:
-        result = service.execute_run(
-            candidate_id=args.candidate_id,
-            requested_by=args.actor,
-            manifest_id=args.manifest_id,
-        )
-    except Exception as exc:  # noqa: BLE001
-        print(json.dumps({"status": "error", "message": str(exc)}))
-        return 2
-
-    payload = {
-        "status": "ok",
-        "run_id": result.run.id,
-        "candidate_id": result.run.candidate_id,
-        "run_status": result.run.status,
-        "final_status": result.run.final_status,
-        "task_id": getattr(result.run, "task_id", None),
-        "report_id": getattr(result.run, "report_id", None),
-    }
-    print(json.dumps(payload))
-    return 0
-# [/DEF:run_compliance_run:Function]
-
-
-# [DEF:run_compliance_status:Function]
-# @PURPOSE: Read run status by run id.
-# @PRE: Run exists.
-# @POST: Returns run status payload.
-def run_compliance_status(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    run = repository.get_check_run(args.run_id)
-    if run is None:
-        print(json.dumps({"status": "error", "message": "run not found"}))
-        return 2
-
-    report = next((item for item in repository.reports.values() if item.run_id == run.id), None)
-    payload = {
-        "status": "ok",
-        "run_id": run.id,
-        "candidate_id": run.candidate_id,
-        "run_status": run.status,
-        "final_status": run.final_status,
-        "task_id": getattr(run, "task_id", None),
-        "report_id": getattr(run, "report_id", None) or (report.id if report else None),
-    }
-    print(json.dumps(payload))
-    return 0
-# [/DEF:run_compliance_status:Function]
-
-
-# [DEF:_to_payload:Function]
-# @PURPOSE: Serialize domain models for CLI JSON output across SQLAlchemy/Pydantic variants.
-# @PRE: value is serializable model or primitive object.
-# @POST: Returns dictionary payload without mutating value.
-def _to_payload(value: Any) -> Dict[str, Any]:
-    def _normalize(raw: Any) -> Any:
-        if isinstance(raw, datetime):
-            return raw.isoformat()
-        if isinstance(raw, date):
-            return raw.isoformat()
-        if isinstance(raw, dict):
-            return {str(key): _normalize(item) for key, item in raw.items()}
-        if isinstance(raw, list):
-            return [_normalize(item) for item in raw]
-        if isinstance(raw, tuple):
-            return [_normalize(item) for item in raw]
-        return raw
-
-    if hasattr(value, "model_dump"):
-        return _normalize(value.model_dump())
-    table = getattr(value, "__table__", None)
-    if table is not None:
-        row = {column.name: getattr(value, column.name) for column in table.columns}
-        return _normalize(row)
-    raise TypeError(f"unsupported payload type: {type(value)!r}")
-# [/DEF:_to_payload:Function]
-
-
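_to_payload accepts either a pydantic model (via model_dump) or a SQLAlchemy row (via __table__ introspection). A quick illustrative check with a pydantic model, assuming _to_payload is imported from the module above (the Decision model here is hypothetical):

from datetime import datetime, timezone
from pydantic import BaseModel

class Decision(BaseModel):
    id: str
    decided_at: datetime

# Nested datetimes come back as ISO-8601 strings, ready for json.dumps.
payload = _to_payload(Decision(id="d-1", decided_at=datetime(2026, 3, 3, tzinfo=timezone.utc)))
assert payload == {"id": "d-1", "decided_at": "2026-03-03T00:00:00+00:00"}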
-# [DEF:run_compliance_report:Function]
-# @PURPOSE: Read immutable report by run id.
-# @PRE: Run and report exist.
-# @POST: Returns report payload.
-def run_compliance_report(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    run = repository.get_check_run(args.run_id)
-    if run is None:
-        print(json.dumps({"status": "error", "message": "run not found"}))
-        return 2
-
-    report = next((item for item in repository.reports.values() if item.run_id == run.id), None)
-    if report is None:
-        print(json.dumps({"status": "error", "message": "report not found"}))
-        return 2
-
-    print(json.dumps({"status": "ok", "report": _to_payload(report)}))
-    return 0
-# [/DEF:run_compliance_report:Function]
-
-
-# [DEF:run_compliance_violations:Function]
-# @PURPOSE: Read run violations by run id.
-# @PRE: Run exists.
-# @POST: Returns violations payload.
-def run_compliance_violations(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    run = repository.get_check_run(args.run_id)
-    if run is None:
-        print(json.dumps({"status": "error", "message": "run not found"}))
-        return 2
-
-    violations = repository.get_violations_by_run(args.run_id)
-    print(json.dumps({"status": "ok", "items": [_to_payload(item) for item in violations]}))
-    return 0
-# [/DEF:run_compliance_violations:Function]
-
-
-# [DEF:run_approve:Function]
-# @PURPOSE: Approve candidate based on immutable PASSED report.
-# @PRE: Candidate and report exist; report is PASSED.
-# @POST: Persists APPROVED decision and returns success payload.
-def run_approve(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    try:
-        decision = approve_candidate(
-            repository=repository,
-            candidate_id=args.candidate_id,
-            report_id=args.report_id,
-            decided_by=args.actor,
-            comment=args.comment,
-        )
-    except Exception as exc:  # noqa: BLE001
-        print(json.dumps({"status": "error", "message": str(exc)}))
-        return 2
-
-    print(json.dumps({"status": "ok", "decision": decision.decision, "decision_id": decision.id}))
-    return 0
-# [/DEF:run_approve:Function]
-
-
-# [DEF:run_reject:Function]
-# @PURPOSE: Reject candidate without mutating compliance evidence.
-# @PRE: Candidate and report exist.
-# @POST: Persists REJECTED decision and returns success payload.
-def run_reject(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    try:
-        decision = reject_candidate(
-            repository=repository,
-            candidate_id=args.candidate_id,
-            report_id=args.report_id,
-            decided_by=args.actor,
-            comment=args.comment,
-        )
-    except Exception as exc:  # noqa: BLE001
-        print(json.dumps({"status": "error", "message": str(exc)}))
-        return 2
-
-    print(json.dumps({"status": "ok", "decision": decision.decision, "decision_id": decision.id}))
-    return 0
-# [/DEF:run_reject:Function]
-
-
-# [DEF:run_publish:Function]
-# @PURPOSE: Publish approved candidate to target channel.
-# @PRE: Candidate is approved and report belongs to candidate.
-# @POST: Appends ACTIVE publication record and returns payload.
-def run_publish(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    try:
-        publication = publish_candidate(
-            repository=repository,
-            candidate_id=args.candidate_id,
-            report_id=args.report_id,
-            published_by=args.actor,
-            target_channel=args.target_channel,
-            publication_ref=args.publication_ref,
-        )
-    except Exception as exc:  # noqa: BLE001
-        print(json.dumps({"status": "error", "message": str(exc)}))
-        return 2
-
-    print(json.dumps({"status": "ok", "publication": _to_payload(publication)}))
-    return 0
-# [/DEF:run_publish:Function]
-
-
-# [DEF:run_revoke:Function]
-# @PURPOSE: Revoke active publication record.
-# @PRE: Publication id exists and is ACTIVE.
-# @POST: Publication record status becomes REVOKED.
-def run_revoke(args: argparse.Namespace) -> int:
-    from ..dependencies import get_clean_release_repository
-
-    repository = get_clean_release_repository()
-    try:
-        publication = revoke_publication(
-            repository=repository,
-            publication_id=args.publication_id,
-            revoked_by=args.actor,
-            comment=args.comment,
-        )
-    except Exception as exc:  # noqa: BLE001
-        print(json.dumps({"status": "error", "message": str(exc)}))
-        return 2
-
-    print(json.dumps({"status": "ok", "publication": _to_payload(publication)}))
-    return 0
-# [/DEF:run_revoke:Function]
-
-
-# [DEF:main:Function]
-# @PURPOSE: CLI entrypoint for clean release commands.
-def main(argv: Optional[List[str]] = None) -> int:
-    parser = build_parser()
-    args = parser.parse_args(argv)
-
-    if args.command == "candidate-register":
-        return run_candidate_register(args)
-    if args.command == "artifact-import":
-        return run_artifact_import(args)
-    if args.command == "manifest-build":
-        return run_manifest_build(args)
-    if args.command == "compliance-run":
-        return run_compliance_run(args)
-    if args.command == "compliance-status":
-        return run_compliance_status(args)
-    if args.command == "compliance-report":
-        return run_compliance_report(args)
-    if args.command == "compliance-violations":
-        return run_compliance_violations(args)
-    if args.command == "approve":
-        return run_approve(args)
-    if args.command == "reject":
-        return run_reject(args)
-    if args.command == "publish":
-        return run_publish(args)
-    if args.command == "revoke":
-        return run_revoke(args)
-
-    print(json.dumps({"status": "error", "message": "unknown command"}))
-    return 2
-# [/DEF:main:Function]
-
-
-if __name__ == "__main__":
-    raise SystemExit(main())
-
-# [/DEF:backend.src.scripts.clean_release_cli:Module]
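Because main() takes an explicit argv and returns an exit code (0 success, 1 precondition failure, 2 service error or unknown command), it was easy to drive from other Python code. A minimal sketch (the run id is illustrative):

exit_code = main([
    "compliance-status",
    "--run-id", "run-42",
    "--json",
])
assert exit_code in (0, 2)  # 2 when the run does not exist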
@@ -1,590 +1,38 @@
 # [DEF:backend.src.scripts.clean_release_tui:Module]
-# @TIER: STANDARD
-# @SEMANTICS: clean-release, tui, ncurses, interactive-validator
-# @PURPOSE: Interactive terminal interface for Enterprise Clean Release compliance validation.
+# @TIER: CRITICAL
+# @SEMANTICS: tui, clean-release, ncurses, operator-flow, placeholder
+# @PURPOSE: Provide clean release TUI entrypoint placeholder for phased implementation.
 # @LAYER: UI
-# @RELATION: DEPENDS_ON -> backend.src.services.clean_release.compliance_orchestrator
-# @RELATION: DEPENDS_ON -> backend.src.services.clean_release.repository
-# @INVARIANT: TUI refuses startup in non-TTY environments; headless flow is CLI/API only.
+# @RELATION: BINDS_TO -> specs/023-clean-repo-enterprise/ux_reference.md
+# @INVARIANT: Entry point is executable and does not mutate release data in placeholder mode.

-import curses
-import json
-import os
-import sys
-from datetime import datetime, timezone
-from types import SimpleNamespace
-from typing import List, Optional, Any, Dict
-
-# Standardize sys.path for direct execution from project root or scripts dir.
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-BACKEND_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, "..", ".."))
-if BACKEND_ROOT not in sys.path:
-    sys.path.insert(0, BACKEND_ROOT)
-
+# @PRE: Python runtime is available.
+# @POST: Placeholder message is emitted and process exits with success.
+# @UX_STATE: READY -> Displays profile hints and allowed internal sources
+# @UX_STATE: RUNNING -> Triggered by operator action (F5), check in progress
+# @UX_STATE: BLOCKED -> Violations are displayed with remediation hints
+# @UX_FEEDBACK: Console lines provide immediate operator guidance
+# @UX_RECOVERY: Operator re-runs check after remediation from the same screen
+# @TEST_CONTRACT: TuiEntrypointInput -> ExitCodeInt
+# @TEST_SCENARIO: startup_ready_state -> main prints READY and returns 0
+# @TEST_FIXTURE: tui_placeholder -> INLINE_JSON
+# @TEST_EDGE: stdout_unavailable -> process returns non-zero via runtime exception propagation
+# @TEST_EDGE: interrupted_execution -> user interruption terminates process
+# @TEST_EDGE: invalid_terminal -> fallback text output remains deterministic
+# @TEST_INVARIANT: placeholder_no_mutation -> VERIFIED_BY: [startup_ready_state]
-from src.models.clean_release import (
-    CandidateArtifact,
-    CheckFinalStatus,
-    CheckStageName,
-    CheckStageStatus,
-    CleanProfilePolicy,
-    ComplianceViolation,
-    ProfileType,
-    ReleaseCandidate,
-    ResourceSourceEntry,
-    ResourceSourceRegistry,
-    RegistryStatus,
-    ReleaseCandidateStatus,
-)
-from src.services.clean_release.approval_service import approve_candidate
-from src.services.clean_release.compliance_execution_service import ComplianceExecutionService
-from src.services.clean_release.enums import CandidateStatus
-from src.services.clean_release.manifest_service import build_manifest_snapshot
-from src.services.clean_release.publication_service import publish_candidate
-from src.services.clean_release.repository import CleanReleaseRepository
-
-
-# [DEF:TuiFacadeAdapter:Class]
-# @PURPOSE: Thin TUI adapter that routes business mutations through application services.
-# @PRE: repository contains candidate and trusted policy/registry snapshots for execution.
-# @POST: Business actions return service results/errors without direct TUI-owned mutations.
-class TuiFacadeAdapter:
-    def __init__(self, repository: CleanReleaseRepository):
-        self.repository = repository
-
-    def _build_config_manager(self):
-        policy = self.repository.get_active_policy()
-        if policy is None:
-            raise ValueError("Active policy not found")
-        clean_release = SimpleNamespace(
-            active_policy_id=policy.id,
-            active_registry_id=policy.registry_snapshot_id,
-        )
-        settings = SimpleNamespace(clean_release=clean_release)
-        config = SimpleNamespace(settings=settings)
-        return SimpleNamespace(get_config=lambda: config)
-
-    def run_compliance(self, *, candidate_id: str, actor: str):
-        manifests = self.repository.get_manifests_by_candidate(candidate_id)
-        if not manifests:
-            raise ValueError("Manifest required before compliance run")
-        latest_manifest = sorted(manifests, key=lambda item: item.manifest_version, reverse=True)[0]
-        service = ComplianceExecutionService(
-            repository=self.repository,
-            config_manager=self._build_config_manager(),
-        )
-        return service.execute_run(candidate_id=candidate_id, requested_by=actor, manifest_id=latest_manifest.id)
-
-    def approve_latest(self, *, candidate_id: str, actor: str):
-        reports = [item for item in self.repository.reports.values() if item.candidate_id == candidate_id]
-        if not reports:
-            raise ValueError("No compliance report available for approval")
-        report = sorted(reports, key=lambda item: item.generated_at, reverse=True)[0]
-        return approve_candidate(
-            repository=self.repository,
-            candidate_id=candidate_id,
-            report_id=report.id,
-            decided_by=actor,
-            comment="Approved from TUI",
-        )
-
-    def publish_latest(self, *, candidate_id: str, actor: str):
-        reports = [item for item in self.repository.reports.values() if item.candidate_id == candidate_id]
-        if not reports:
-            raise ValueError("No compliance report available for publication")
-        report = sorted(reports, key=lambda item: item.generated_at, reverse=True)[0]
-        return publish_candidate(
-            repository=self.repository,
-            candidate_id=candidate_id,
-            report_id=report.id,
-            published_by=actor,
-            target_channel="stable",
-            publication_ref=None,
-        )
-
-    def build_manifest(self, *, candidate_id: str, actor: str):
-        return build_manifest_snapshot(
-            repository=self.repository,
-            candidate_id=candidate_id,
-            created_by=actor,
-        )
-
-    def get_overview(self, *, candidate_id: str) -> Dict[str, Any]:
-        candidate = self.repository.get_candidate(candidate_id)
-        manifests = self.repository.get_manifests_by_candidate(candidate_id)
-        latest_manifest = sorted(manifests, key=lambda item: item.manifest_version, reverse=True)[0] if manifests else None
-        runs = [item for item in self.repository.check_runs.values() if item.candidate_id == candidate_id]
-        latest_run = sorted(runs, key=lambda item: item.requested_at, reverse=True)[0] if runs else None
-        latest_report = next((item for item in self.repository.reports.values() if latest_run and item.run_id == latest_run.id), None)
-        approvals = getattr(self.repository, "approval_decisions", [])
-        latest_approval = sorted(
-            [item for item in approvals if item.candidate_id == candidate_id],
-            key=lambda item: item.decided_at,
-            reverse=True,
-        )[0] if any(item.candidate_id == candidate_id for item in approvals) else None
-        publications = getattr(self.repository, "publication_records", [])
-        latest_publication = sorted(
-            [item for item in publications if item.candidate_id == candidate_id],
-            key=lambda item: item.published_at,
-            reverse=True,
-        )[0] if any(item.candidate_id == candidate_id for item in publications) else None
-        policy = self.repository.get_active_policy()
-        registry = self.repository.get_registry(policy.internal_source_registry_ref) if policy else None
-        return {
-            "candidate": candidate,
-            "manifest": latest_manifest,
-            "run": latest_run,
-            "report": latest_report,
-            "approval": latest_approval,
-            "publication": latest_publication,
-            "policy": policy,
-            "registry": registry,
-        }
-# [/DEF:TuiFacadeAdapter:Class]
-
-
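The removed adapter kept the curses layer free of business writes: the TUI only called facade methods and rendered what came back. A minimal sketch of that interaction (repository setup elided; the candidate id matches the demo bootstrap below):

facade = TuiFacadeAdapter(repository)
overview = facade.get_overview(candidate_id="2026.03.03-rc1")
if overview["candidate"] is not None:
    result = facade.run_compliance(candidate_id="2026.03.03-rc1", actor="operator")
    # render result.run.status / result.run.final_status in the status pane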
-# [DEF:CleanReleaseTUI:Class]
-# @PURPOSE: Curses-based application for compliance monitoring.
-# @UX_STATE: READY -> Waiting for operator to start checks (F5).
-# @UX_STATE: RUNNING -> Executing compliance stages with progress feedback.
-# @UX_STATE: COMPLIANT -> Release candidate passed all checks.
-# @UX_STATE: BLOCKED -> Violations detected, release forbidden.
-# @UX_FEEDBACK: Red alerts for BLOCKED status, Green for COMPLIANT.
-class CleanReleaseTUI:
-    def __init__(self, stdscr: curses.window):
-        self.stdscr = stdscr
-        self.mode = os.getenv("CLEAN_TUI_MODE", "demo").strip().lower()
-        self.repo = self._build_repository(self.mode)
-        self.facade = TuiFacadeAdapter(self.repo)
-        self.candidate_id = self._resolve_candidate_id()
-        self.status: Any = "READY"
-        self.checks_progress: List[Dict[str, Any]] = []
-        self.violations_list: List[ComplianceViolation] = []
-        self.report_id: Optional[str] = None
-        self.last_error: Optional[str] = None
-        self.overview: Dict[str, Any] = {}
-        self.refresh_overview()
-
-        curses.start_color()
-        curses.use_default_colors()
-        curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLUE)  # Header/Footer
-        curses.init_pair(2, curses.COLOR_GREEN, -1)   # PASS
-        curses.init_pair(3, curses.COLOR_RED, -1)     # FAIL/BLOCKED
-        curses.init_pair(4, curses.COLOR_YELLOW, -1)  # RUNNING
-        curses.init_pair(5, curses.COLOR_CYAN, -1)    # Text
-
-    def _build_repository(self, mode: str) -> CleanReleaseRepository:
-        repo = CleanReleaseRepository()
-        if mode == "demo":
-            self._bootstrap_demo_repository(repo)
-        else:
-            self._bootstrap_real_repository(repo)
-        return repo
-
-    def _bootstrap_demo_repository(self, repository: CleanReleaseRepository) -> None:
-        now = datetime.now(timezone.utc)
-        policy = CleanProfilePolicy(
-            policy_id="POL-ENT-CLEAN",
-            policy_version="1",
-            profile=ProfileType.ENTERPRISE_CLEAN,
-            active=True,
-            internal_source_registry_ref="REG-1",
-            prohibited_artifact_categories=["test-data"],
-            effective_from=now,
-        )
-        setattr(policy, "immutable", True)
-        repository.save_policy(policy)
-
-        registry = ResourceSourceRegistry(
-            registry_id="REG-1",
-            name="Default Internal Registry",
-            entries=[
-                ResourceSourceEntry(
-                    source_id="S1",
-                    host="internal-repo.company.com",
-                    protocol="https",
-                    purpose="artifactory",
-                )
-            ],
-            updated_at=now,
-            updated_by="system",
-        )
-        setattr(registry, "immutable", True)
-        setattr(registry, "allowed_hosts", ["internal-repo.company.com"])
-        setattr(registry, "allowed_schemes", ["https"])
-        setattr(registry, "allowed_source_types", ["artifactory"])
-        repository.save_registry(registry)
-        candidate = ReleaseCandidate(
-            id="2026.03.03-rc1",
-            version="1.0.0",
-            source_snapshot_ref="v1.0.0-rc1",
-            created_at=now,
-            created_by="system",
-            status=CandidateStatus.DRAFT.value,
-        )
-        candidate.transition_to(CandidateStatus.PREPARED)
-        repository.save_candidate(candidate)
-        repository.save_artifact(
-            CandidateArtifact(
-                id="demo-art-1",
-                candidate_id=candidate.id,
-                path="src/main.py",
-                sha256="sha256-demo-core",
-                size=128,
-                detected_category="core",
-            )
-        )
-        repository.save_artifact(
-            CandidateArtifact(
-                id="demo-art-2",
-                candidate_id=candidate.id,
-                path="test/data.csv",
-                sha256="sha256-demo-test",
-                size=64,
-                detected_category="test-data",
-            )
-        )
-        manifest = build_manifest_snapshot(
-            repository=repository,
-            candidate_id=candidate.id,
-            created_by="system",
-            policy_id="POL-ENT-CLEAN",
-        )
-        summary = dict(manifest.content_json.get("summary", {}))
-        summary["prohibited_detected_count"] = 1
-        manifest.content_json["summary"] = summary
-
-    def _bootstrap_real_repository(self, repository: CleanReleaseRepository) -> None:
-        bootstrap_path = os.getenv("CLEAN_TUI_BOOTSTRAP_JSON", "").strip()
-        if not bootstrap_path:
-            return
-
-        with open(bootstrap_path, "r", encoding="utf-8") as bootstrap_file:
-            payload = json.load(bootstrap_file)
-
-        now = datetime.now(timezone.utc)
-        candidate = ReleaseCandidate(
-            id=payload.get("candidate_id", "candidate-1"),
-            version=payload.get("version", "1.0.0"),
-            source_snapshot_ref=payload.get("source_snapshot_ref", "snapshot-ref"),
-            created_at=now,
-            created_by=payload.get("created_by", "operator"),
-            status=ReleaseCandidateStatus.DRAFT,
-        )
-        repository.save_candidate(candidate)
-
-        registry_id = payload.get("registry_id", "REG-1")
-        entries = [
-            ResourceSourceEntry(
-                source_id=f"S-{index + 1}",
-                host=host,
-                protocol="https",
-                purpose="bootstrap",
-                enabled=True,
-            )
-            for index, host in enumerate(payload.get("allowed_hosts", []))
-            if str(host).strip()
-        ]
-        if entries:
-            repository.save_registry(
-                ResourceSourceRegistry(
-                    registry_id=registry_id,
-                    name=payload.get("registry_name", "Bootstrap Internal Registry"),
-                    entries=entries,
-                    updated_at=now,
-                    updated_by=payload.get("created_by", "operator"),
-                    status=RegistryStatus.ACTIVE,
-                )
)
|
|
||||||
|
|
||||||
if entries:
|
|
||||||
repository.save_policy(
|
|
||||||
CleanProfilePolicy(
|
|
||||||
policy_id=payload.get("policy_id", "POL-ENT-CLEAN"),
|
|
||||||
policy_version=payload.get("policy_version", "1"),
|
|
||||||
profile=ProfileType.ENTERPRISE_CLEAN,
|
|
||||||
active=True,
|
|
||||||
internal_source_registry_ref=registry_id,
|
|
||||||
prohibited_artifact_categories=payload.get(
|
|
||||||
"prohibited_artifact_categories",
|
|
||||||
["test-data", "demo", "load-test"],
|
|
||||||
),
|
|
||||||
required_system_categories=payload.get("required_system_categories", ["core"]),
|
|
||||||
effective_from=now,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def _resolve_candidate_id(self) -> str:
|
|
||||||
env_candidate = os.getenv("CLEAN_TUI_CANDIDATE_ID", "").strip()
|
|
||||||
if env_candidate:
|
|
||||||
return env_candidate
|
|
||||||
|
|
||||||
candidate_ids = list(self.repo.candidates.keys())
|
|
||||||
if candidate_ids:
|
|
||||||
return candidate_ids[0]
|
|
||||||
return ""
|
|
||||||
|
|
||||||
def draw_header(self, max_y: int, max_x: int):
|
|
||||||
header_text = " Enterprise Clean Release Validator (TUI) "
|
|
||||||
self.stdscr.attron(curses.color_pair(1) | curses.A_BOLD)
|
|
||||||
# Avoid slicing if possible to satisfy Pyre, or use explicit int
|
|
||||||
centered = header_text.center(max_x)
|
|
||||||
self.stdscr.addstr(0, 0, centered[:max_x])
|
|
||||||
self.stdscr.attroff(curses.color_pair(1) | curses.A_BOLD)
|
|
||||||
|
|
||||||
candidate = self.overview.get("candidate")
|
|
||||||
candidate_text = self.candidate_id or "not-set"
|
|
||||||
profile_text = "enterprise-clean"
|
|
||||||
lifecycle = getattr(candidate, "status", "UNKNOWN")
|
|
||||||
info_line_text = (
|
|
||||||
f" │ Candidate: [{candidate_text}] Profile: [{profile_text}] "
|
|
||||||
f"Lifecycle: [{lifecycle}] Mode: [{self.mode}]"
|
|
||||||
).ljust(max_x)
|
|
||||||
self.stdscr.addstr(2, 0, info_line_text[:max_x])
|
|
||||||
|
|
||||||
def draw_checks(self):
|
|
||||||
self.stdscr.addstr(4, 3, "Checks:")
|
|
||||||
check_defs = [
|
|
||||||
(CheckStageName.DATA_PURITY, "Data Purity (no test/demo payloads)"),
|
|
||||||
(CheckStageName.INTERNAL_SOURCES_ONLY, "Internal Sources Only (company servers)"),
|
|
||||||
(CheckStageName.NO_EXTERNAL_ENDPOINTS, "No External Internet Endpoints"),
|
|
||||||
(CheckStageName.MANIFEST_CONSISTENCY, "Release Manifest Consistency"),
|
|
||||||
]
|
|
||||||
|
|
||||||
row = 5
|
|
||||||
drawn_checks = {c["stage"]: c for c in self.checks_progress}
|
|
||||||
|
|
||||||
for stage, desc in check_defs:
|
|
||||||
status_text = " "
|
|
||||||
color = curses.color_pair(5)
|
|
||||||
|
|
||||||
if stage in drawn_checks:
|
|
||||||
c = drawn_checks[stage]
|
|
||||||
if c["status"] == "RUNNING":
|
|
||||||
status_text = "..."
|
|
||||||
color = curses.color_pair(4)
|
|
||||||
elif c["status"] == CheckStageStatus.PASS:
|
|
||||||
status_text = "PASS"
|
|
||||||
color = curses.color_pair(2)
|
|
||||||
elif c["status"] == CheckStageStatus.FAIL:
|
|
||||||
status_text = "FAIL"
|
|
||||||
color = curses.color_pair(3)
|
|
||||||
|
|
||||||
self.stdscr.addstr(row, 4, f"[{status_text:^4}] {desc}")
|
|
||||||
if status_text != " ":
|
|
||||||
self.stdscr.addstr(row, 50, f"{status_text:>10}", color | curses.A_BOLD)
|
|
||||||
row += 1
|
|
||||||
|
|
||||||
def draw_sources(self):
|
|
||||||
self.stdscr.addstr(12, 3, "Allowed Internal Sources:", curses.A_BOLD)
|
|
||||||
reg = self.overview.get("registry")
|
|
||||||
row = 13
|
|
||||||
if reg:
|
|
||||||
for entry in reg.entries:
|
|
||||||
self.stdscr.addstr(row, 3, f" - {entry.host}")
|
|
||||||
row += 1
|
|
||||||
else:
|
|
||||||
self.stdscr.addstr(row, 3, " - (none)")
|
|
||||||
|
|
||||||
def draw_status(self):
|
|
||||||
color = curses.color_pair(5)
|
|
||||||
if self.status == CheckFinalStatus.COMPLIANT: color = curses.color_pair(2)
|
|
||||||
elif self.status == CheckFinalStatus.BLOCKED: color = curses.color_pair(3)
|
|
||||||
|
|
||||||
stat_str = str(self.status.value if hasattr(self.status, "value") else self.status)
|
|
||||||
self.stdscr.addstr(18, 3, f"FINAL STATUS: {stat_str.upper()}", color | curses.A_BOLD)
|
|
||||||
|
|
||||||
if self.report_id:
|
|
||||||
self.stdscr.addstr(19, 3, f"Report ID: {self.report_id}")
|
|
||||||
|
|
||||||
approval = self.overview.get("approval")
|
|
||||||
publication = self.overview.get("publication")
|
|
||||||
if approval:
|
|
||||||
self.stdscr.addstr(20, 3, f"Approval: {approval.decision}")
|
|
||||||
if publication:
|
|
||||||
self.stdscr.addstr(20, 32, f"Publication: {publication.status}")
|
|
||||||
|
|
||||||
if self.violations_list:
|
|
||||||
self.stdscr.addstr(21, 3, f"Violations Details ({len(self.violations_list)} total):", curses.color_pair(3) | curses.A_BOLD)
|
|
||||||
row = 22
|
|
||||||
for i, v in enumerate(self.violations_list[:5]):
|
|
||||||
v_cat = str(getattr(v, "code", "VIOLATION"))
|
|
||||||
msg = str(getattr(v, "message", "Violation detected"))
|
|
||||||
location = str(
|
|
||||||
getattr(v, "artifact_path", "")
|
|
||||||
or getattr(getattr(v, "evidence_json", {}), "get", lambda *_: "")("location", "")
|
|
||||||
)
|
|
||||||
msg_text = f"[{v_cat}] {msg} (Loc: {location})"
|
|
||||||
self.stdscr.addstr(row + i, 5, msg_text[:70], curses.color_pair(3))
|
|
||||||
if self.last_error:
|
|
||||||
self.stdscr.addstr(27, 3, f"Error: {self.last_error}"[:100], curses.color_pair(3) | curses.A_BOLD)
|
|
||||||
|
|
||||||
def draw_footer(self, max_y: int, max_x: int):
|
|
||||||
footer_text = " F5 Run F6 Manifest F7 Refresh F8 Approve F9 Publish F10 Exit ".center(max_x)
|
|
||||||
self.stdscr.attron(curses.color_pair(1))
|
|
||||||
self.stdscr.addstr(max_y - 1, 0, footer_text[:max_x])
|
|
||||||
self.stdscr.attroff(curses.color_pair(1))
|
|
||||||
|
|
||||||
# [DEF:run_checks:Function]
|
|
||||||
# @PURPOSE: Execute compliance run via facade adapter and update UI state.
|
|
||||||
# @PRE: Candidate and policy snapshots are present in repository.
|
|
||||||
# @POST: UI reflects final run/report/violation state from service result.
|
|
||||||
def run_checks(self):
|
|
||||||
self.status = "RUNNING"
|
|
||||||
self.report_id = None
|
|
||||||
self.violations_list = []
|
|
||||||
self.checks_progress = []
|
|
||||||
self.last_error = None
|
|
||||||
self.refresh_screen()
|
|
||||||
|
|
||||||
try:
|
|
||||||
result = self.facade.run_compliance(candidate_id=self.candidate_id, actor="operator")
|
|
||||||
except Exception as exc: # noqa: BLE001
|
|
||||||
self.status = CheckFinalStatus.FAILED
|
|
||||||
self.last_error = str(exc)
|
|
||||||
self.refresh_screen()
|
|
||||||
return
|
|
||||||
|
|
||||||
self.checks_progress = [
|
|
||||||
{
|
|
||||||
"stage": stage.stage_name,
|
|
||||||
"status": CheckStageStatus.PASS if str(stage.decision).upper() == "PASSED" else CheckStageStatus.FAIL,
|
|
||||||
}
|
|
||||||
for stage in result.stage_runs
|
|
||||||
]
|
|
||||||
self.violations_list = result.violations
|
|
||||||
self.report_id = result.report.id if result.report is not None else None
|
|
||||||
|
|
||||||
final_status = str(result.run.final_status or "").upper()
|
|
||||||
if final_status in {"BLOCKED", CheckFinalStatus.BLOCKED.value}:
|
|
||||||
self.status = CheckFinalStatus.BLOCKED
|
|
||||||
elif final_status in {"COMPLIANT", "PASSED", CheckFinalStatus.COMPLIANT.value}:
|
|
||||||
self.status = CheckFinalStatus.COMPLIANT
|
|
||||||
else:
|
|
||||||
self.status = CheckFinalStatus.FAILED
|
|
||||||
self.refresh_overview()
|
|
||||||
self.refresh_screen()
|
|
||||||
# [/DEF:run_checks:Function]
|
|
||||||
|
|
||||||
def build_manifest(self):
|
|
||||||
try:
|
|
||||||
manifest = self.facade.build_manifest(candidate_id=self.candidate_id, actor="operator")
|
|
||||||
self.status = "READY"
|
|
||||||
self.report_id = None
|
|
||||||
self.violations_list = []
|
|
||||||
self.checks_progress = []
|
|
||||||
self.last_error = f"Manifest built: {manifest.id}"
|
|
||||||
except Exception as exc: # noqa: BLE001
|
|
||||||
self.last_error = str(exc)
|
|
||||||
self.refresh_overview()
|
|
||||||
self.refresh_screen()
|
|
||||||
|
|
||||||
def clear_history(self):
|
|
||||||
self.status = "READY"
|
|
||||||
self.report_id = None
|
|
||||||
self.violations_list = []
|
|
||||||
self.checks_progress = []
|
|
||||||
self.last_error = None
|
|
||||||
self.refresh_overview()
|
|
||||||
self.refresh_screen()
|
|
||||||
|
|
||||||
def approve_latest(self):
|
|
||||||
if not self.report_id:
|
|
||||||
self.last_error = "F8 disabled: no compliance report available"
|
|
||||||
self.refresh_screen()
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
self.facade.approve_latest(candidate_id=self.candidate_id, actor="operator")
|
|
||||||
self.last_error = None
|
|
||||||
except Exception as exc: # noqa: BLE001
|
|
||||||
self.last_error = str(exc)
|
|
||||||
self.refresh_overview()
|
|
||||||
self.refresh_screen()
|
|
||||||
|
|
||||||
def publish_latest(self):
|
|
||||||
if not self.report_id:
|
|
||||||
self.last_error = "F9 disabled: no compliance report available"
|
|
||||||
self.refresh_screen()
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
self.facade.publish_latest(candidate_id=self.candidate_id, actor="operator")
|
|
||||||
self.last_error = None
|
|
||||||
except Exception as exc: # noqa: BLE001
|
|
||||||
self.last_error = str(exc)
|
|
||||||
self.refresh_overview()
|
|
||||||
self.refresh_screen()
|
|
||||||
|
|
||||||
    def refresh_overview(self):
        if not self.candidate_id:
            self.overview = {}
            return
        self.overview = self.facade.get_overview(candidate_id=self.candidate_id)

    def refresh_screen(self):
        max_y, max_x = self.stdscr.getmaxyx()
        self.stdscr.clear()
        try:
            self.draw_header(max_y, max_x)
            self.draw_checks()
            self.draw_sources()
            self.draw_status()
            self.draw_footer(max_y, max_x)
        except Exception:
            pass
        self.stdscr.refresh()

    def loop(self):
        self.refresh_screen()
        while True:
            char = self.stdscr.getch()
            if char == curses.KEY_F10:
                break
            elif char == curses.KEY_F5:
                self.run_checks()
            elif char == curses.KEY_F6:
                self.build_manifest()
            elif char == curses.KEY_F7:
                self.clear_history()
            elif char == curses.KEY_F8:
                self.approve_latest()
            elif char == curses.KEY_F9:
                self.publish_latest()
# [/DEF:CleanReleaseTUI:Class]


def tui_main(stdscr: curses.window):
    curses.curs_set(0)  # Hide cursor
    app = CleanReleaseTUI(stdscr)
    app.loop()

 def main() -> int:
-    # TUI requires interactive terminal; headless mode must use CLI/API flow.
-    if not sys.stdout.isatty():
-        print(
-            "TTY is required for TUI mode. Use CLI/API workflow instead.",
-            file=sys.stderr,
-        )
-        return 2
-    try:
-        curses.wrapper(tui_main)
-        return 0
-    except Exception as e:
-        print(f"Error starting TUI: {e}", file=sys.stderr)
-        return 1
+    print("Enterprise Clean Release Validator (TUI placeholder)")
+    print("Allowed Internal Sources:")
+    print(" - repo.intra.company.local")
+    print(" - artifacts.intra.company.local")
+    print(" - pypi.intra.company.local")
+    print("Status: READY")
+    print("Use F5 to run check; BLOCKED state will show external-source violation details.")
+    return 0


 if __name__ == "__main__":
-    sys.exit(main())
+    raise SystemExit(main())
 # [/DEF:backend.src.scripts.clean_release_tui:Module]
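A minimal sketch of driving the script above in "real" mode. The environment variables (CLEAN_TUI_MODE, CLEAN_TUI_BOOTSTRAP_JSON, CLEAN_TUI_CANDIDATE_ID) and every payload key come straight from _bootstrap_real_repository and _resolve_candidate_id; the concrete host name, candidate id, and module invocation path are illustrative assumptions:

import json
import os
import subprocess
import tempfile

# Payload keys mirror the payload.get(...) calls in _bootstrap_real_repository.
payload = {
    "candidate_id": "2026.03.03-rc1",
    "version": "1.0.0",
    "source_snapshot_ref": "v1.0.0-rc1",
    "created_by": "operator",
    "registry_id": "REG-1",
    "registry_name": "Bootstrap Internal Registry",
    "allowed_hosts": ["internal-repo.company.com"],  # each host becomes a ResourceSourceEntry
    "policy_id": "POL-ENT-CLEAN",
    "policy_version": "1",
    "prohibited_artifact_categories": ["test-data", "demo", "load-test"],
    "required_system_categories": ["core"],
}

with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as handle:
    json.dump(payload, handle)
    bootstrap_path = handle.name

env = dict(
    os.environ,
    CLEAN_TUI_MODE="real",
    CLEAN_TUI_BOOTSTRAP_JSON=bootstrap_path,
    CLEAN_TUI_CANDIDATE_ID="2026.03.03-rc1",
)
# Assumes the script is importable under this module path; adjust to your checkout.
subprocess.run(["python", "-m", "backend.src.scripts.clean_release_tui"], env=env, check=False)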
@@ -9,8 +9,8 @@
     "last_name": "Admin"
   },
   "changed_by_name": "Superset Admin",
-  "changed_on": "2026-02-24T19:24:01.850617",
-  "changed_on_delta_humanized": "7 days ago",
+  "changed_on": "2026-02-10T13:39:35.945662",
+  "changed_on_delta_humanized": "16 days ago",
   "charts": [
     "TA-0001-001 test_chart"
   ],
@@ -19,12 +19,12 @@
     "id": 1,
     "last_name": "Admin"
   },
-  "created_on_delta_humanized": "13 days ago",
+  "created_on_delta_humanized": "16 days ago",
   "css": null,
   "dashboard_title": "TA-0001 Test dashboard",
   "id": 13,
   "is_managed_externally": false,
-  "json_metadata": "{\"color_scheme_domain\": [], \"shared_label_colors\": [], \"map_label_colors\": {}, \"label_colors\": {}, \"native_filter_configuration\": []}",
+  "json_metadata": "{\"color_scheme_domain\": [], \"shared_label_colors\": [], \"map_label_colors\": {}, \"label_colors\": {}}",
   "owners": [
     {
       "first_name": "Superset",
@@ -32,13 +32,13 @@
       "last_name": "Admin"
     }
   ],
-  "position_json": "{\"DASHBOARD_VERSION_KEY\": \"v2\", \"ROOT_ID\": {\"children\": [\"GRID_ID\"], \"id\": \"ROOT_ID\", \"type\": \"ROOT\"}, \"GRID_ID\": {\"children\": [\"ROW-N-LH8TG1XX\"], \"id\": \"GRID_ID\", \"parents\": [\"ROOT_ID\"], \"type\": \"GRID\"}, \"HEADER_ID\": {\"id\": \"HEADER_ID\", \"meta\": {\"text\": \"TA-0001 Test dashboard\"}, \"type\": \"HEADER\"}, \"ROW-N-LH8TG1XX\": {\"children\": [\"CHART-1EKC8H7C\"], \"id\": \"ROW-N-LH8TG1XX\", \"meta\": {\"0\": \"ROOT_ID\", \"background\": \"BACKGROUND_TRANSPARENT\"}, \"type\": \"ROW\", \"parents\": [\"ROOT_ID\", \"GRID_ID\"]}, \"CHART-1EKC8H7C\": {\"children\": [], \"id\": \"CHART-1EKC8H7C\", \"meta\": {\"chartId\": 162, \"height\": 50, \"sliceName\": \"TA-0001-001 test_chart\", \"uuid\": \"008cdaa7-21b3-4042-9f55-f15653609ebd\", \"width\": 4}, \"type\": \"CHART\", \"parents\": [\"ROOT_ID\", \"GRID_ID\", \"ROW-N-LH8TG1XX\"]}}",
+  "position_json": null,
   "published": true,
   "roles": [],
   "slug": null,
   "tags": [],
   "theme": null,
-  "thumbnail_url": "/api/v1/dashboard/13/thumbnail/97dfd5d8d24f7cf01de45671c9a0699d/",
+  "thumbnail_url": "/api/v1/dashboard/13/thumbnail/3cfc57e6aea7188b139f94fb437a1426/",
   "url": "/superset/dashboard/13/",
   "uuid": "124b28d4-d54a-4ade-ade7-2d0473b90686"
 }
@@ -53,15 +53,15 @@
     "first_name": "Superset",
     "last_name": "Admin"
   },
-  "changed_on": "2026-02-18T14:56:04.863722",
-  "changed_on_humanized": "13 days ago",
+  "changed_on": "2026-02-10T13:38:26.175551",
+  "changed_on_humanized": "16 days ago",
   "column_formats": {},
   "columns": [
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.382289",
-      "column_name": "has_2fa",
-      "created_on": "2026-02-18T14:56:05.382138",
+      "changed_on": "2026-02-10T13:38:26.158196",
+      "column_name": "color",
+      "created_on": "2026-02-10T13:38:26.158189",
       "description": null,
       "expression": null,
       "extra": null,
@@ -71,16 +71,16 @@
       "is_active": true,
       "is_dttm": false,
       "python_date_format": null,
-      "type": "BOOLEAN",
-      "type_generic": 3,
-      "uuid": "fe374f2a-9e06-4708-89fd-c3926e3e5faa",
+      "type": "STRING",
+      "type_generic": 1,
+      "uuid": "4fa810ee-99cc-4d1f-8c0d-0f289c3b01f4",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.545701",
-      "column_name": "is_ultra_restricted",
-      "created_on": "2026-02-18T14:56:05.545465",
+      "changed_on": "2026-02-10T13:38:26.158249",
+      "column_name": "deleted",
+      "created_on": "2026-02-10T13:38:26.158245",
       "description": null,
       "expression": null,
       "extra": null,
@@ -92,14 +92,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "eac7ecce-d472-4933-9652-d4f2811074fd",
+      "uuid": "ebc07e82-7250-4eef-8d13-ea61561fa52c",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.683578",
-      "column_name": "is_primary_owner",
-      "created_on": "2026-02-18T14:56:05.683257",
+      "changed_on": "2026-02-10T13:38:26.158289",
+      "column_name": "has_2fa",
+      "created_on": "2026-02-10T13:38:26.158285",
       "description": null,
       "expression": null,
       "extra": null,
@@ -111,14 +111,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "94a15acd-ef98-425b-8f0d-1ce038ca95c5",
+      "uuid": "08e72f4d-3ced-4d9a-9f7d-2f85291ce88b",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.758231",
-      "column_name": "is_app_user",
-      "created_on": "2026-02-18T14:56:05.758142",
+      "changed_on": "2026-02-10T13:38:26.158328",
+      "column_name": "id",
+      "created_on": "2026-02-10T13:38:26.158324",
       "description": null,
       "expression": null,
       "extra": null,
@@ -128,16 +128,16 @@
       "is_active": true,
       "is_dttm": false,
       "python_date_format": null,
-      "type": "BOOLEAN",
-      "type_generic": 3,
-      "uuid": "d3fcd712-dc96-4bba-a026-aa82022eccf5",
+      "type": "STRING",
+      "type_generic": 1,
+      "uuid": "fd11955c-0130-4ea1-b3c0-d8b159971789",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.799597",
+      "changed_on": "2026-02-10T13:38:26.158366",
       "column_name": "is_admin",
-      "created_on": "2026-02-18T14:56:05.799519",
+      "created_on": "2026-02-10T13:38:26.158362",
       "description": null,
       "expression": null,
       "extra": null,
@@ -149,14 +149,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "5a1c9de5-80f1-4fe8-a91b-e6e530688aae",
+      "uuid": "13a6c8e1-c9f8-4f08-aa62-05bca7be547b",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.819443",
-      "column_name": "is_bot",
-      "created_on": "2026-02-18T14:56:05.819382",
+      "changed_on": "2026-02-10T13:38:26.158404",
+      "column_name": "is_app_user",
+      "created_on": "2026-02-10T13:38:26.158400",
       "description": null,
       "expression": null,
       "extra": null,
@@ -168,14 +168,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "6c93e5de-e0d7-430c-88d7-87158905d60a",
+      "uuid": "6321ba8a-28d7-4d68-a6b3-5cef6cd681a2",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.827568",
-      "column_name": "is_restricted",
-      "created_on": "2026-02-18T14:56:05.827556",
+      "changed_on": "2026-02-10T13:38:26.158442",
+      "column_name": "is_bot",
+      "created_on": "2026-02-10T13:38:26.158438",
       "description": null,
       "expression": null,
       "extra": null,
@@ -187,14 +187,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "2e8e6d32-0124-4e3a-a53f-6f200f852439",
+      "uuid": "f3ded50e-b1a2-4a88-b805-781d5923e062",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.835380",
+      "changed_on": "2026-02-10T13:38:26.158480",
       "column_name": "is_owner",
-      "created_on": "2026-02-18T14:56:05.835366",
+      "created_on": "2026-02-10T13:38:26.158477",
       "description": null,
       "expression": null,
       "extra": null,
@@ -206,14 +206,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "510d651b-a595-4261-98e4-278af0a06594",
+      "uuid": "8a1408eb-050d-4455-878c-22342df5da3d",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.843802",
-      "column_name": "deleted",
-      "created_on": "2026-02-18T14:56:05.843784",
+      "changed_on": "2026-02-10T13:38:26.158532",
+      "column_name": "is_primary_owner",
+      "created_on": "2026-02-10T13:38:26.158528",
       "description": null,
       "expression": null,
       "extra": null,
@@ -225,14 +225,14 @@
       "python_date_format": null,
       "type": "BOOLEAN",
       "type_generic": 3,
-      "uuid": "2653fd2f-c0ce-484e-a5df-d2515b1e822d",
+      "uuid": "054b8c16-82fd-480c-82e0-a0975229673a",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.851074",
-      "column_name": "updated",
-      "created_on": "2026-02-18T14:56:05.851063",
+      "changed_on": "2026-02-10T13:38:26.158583",
+      "column_name": "is_restricted",
+      "created_on": "2026-02-10T13:38:26.158579",
       "description": null,
       "expression": null,
       "extra": null,
@@ -240,18 +240,18 @@
       "groupby": true,
       "id": 781,
       "is_active": true,
-      "is_dttm": true,
+      "is_dttm": false,
       "python_date_format": null,
-      "type": "DATETIME",
-      "type_generic": 2,
-      "uuid": "1b1f90c8-2567-49b8-9398-e7246396461e",
+      "type": "BOOLEAN",
+      "type_generic": 3,
+      "uuid": "6932c25f-0273-4595-85c1-29422a801ded",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.857578",
-      "column_name": "tz_offset",
-      "created_on": "2026-02-18T14:56:05.857571",
+      "changed_on": "2026-02-10T13:38:26.158621",
+      "column_name": "is_ultra_restricted",
+      "created_on": "2026-02-10T13:38:26.158618",
       "description": null,
       "expression": null,
       "extra": null,
@@ -261,16 +261,16 @@
       "is_active": true,
       "is_dttm": false,
       "python_date_format": null,
-      "type": "LONGINTEGER",
-      "type_generic": 0,
-      "uuid": "e6d19b74-7f5d-447b-8071-951961dc2295",
+      "type": "BOOLEAN",
+      "type_generic": 3,
+      "uuid": "9b14e5f9-3ab4-498e-b1e3-bbf49e9d61fe",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.863101",
-      "column_name": "channel_name",
-      "created_on": "2026-02-18T14:56:05.863094",
+      "changed_on": "2026-02-10T13:38:26.158660",
+      "column_name": "name",
+      "created_on": "2026-02-10T13:38:26.158656",
       "description": null,
       "expression": null,
       "extra": null,
@@ -282,14 +282,14 @@
       "python_date_format": null,
       "type": "STRING",
       "type_generic": 1,
-      "uuid": "e1f34628-ebc1-4e0c-8eea-54c3c9efba1b",
+      "uuid": "ebee8249-0e10-4157-8a8e-96ae107887a3",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.877136",
+      "changed_on": "2026-02-10T13:38:26.158697",
       "column_name": "real_name",
-      "created_on": "2026-02-18T14:56:05.877083",
+      "created_on": "2026-02-10T13:38:26.158694",
       "description": null,
       "expression": null,
       "extra": null,
@@ -301,14 +301,14 @@
       "python_date_format": null,
       "type": "STRING",
       "type_generic": 1,
-      "uuid": "6cc5ab57-9431-428a-a331-0a5b10e4b074",
+      "uuid": "553517a0-fe05-4ff5-a4eb-e9d2165d6f64",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.893859",
-      "column_name": "tz_label",
-      "created_on": "2026-02-18T14:56:05.893834",
+      "changed_on": "2026-02-10T13:38:26.158735",
+      "column_name": "team_id",
+      "created_on": "2026-02-10T13:38:26.158731",
       "description": null,
       "expression": null,
       "extra": null,
@@ -320,14 +320,14 @@
       "python_date_format": null,
       "type": "STRING",
       "type_generic": 1,
-      "uuid": "8e6dbd8e-b880-4517-a5f6-64e429bd1bea",
+      "uuid": "6c207fac-424d-465c-b80a-306b42b55ce8",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.902363",
-      "column_name": "team_id",
-      "created_on": "2026-02-18T14:56:05.902352",
+      "changed_on": "2026-02-10T13:38:26.158773",
+      "column_name": "tz",
+      "created_on": "2026-02-10T13:38:26.158769",
       "description": null,
       "expression": null,
       "extra": null,
@@ -339,14 +339,14 @@
       "python_date_format": null,
       "type": "STRING",
       "type_generic": 1,
-      "uuid": "ba8e225d-221b-4275-aadb-e79557756f89",
+      "uuid": "6efcc042-0b78-4362-9373-2f684077d574",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.910169",
-      "column_name": "name",
-      "created_on": "2026-02-18T14:56:05.910151",
+      "changed_on": "2026-02-10T13:38:26.158824",
+      "column_name": "tz_label",
+      "created_on": "2026-02-10T13:38:26.158820",
       "description": null,
       "expression": null,
       "extra": null,
@@ -358,14 +358,14 @@
       "python_date_format": null,
       "type": "STRING",
       "type_generic": 1,
-      "uuid": "02a7a026-d9f3-49e9-9586-534ebccdd867",
+      "uuid": "c6a6ac40-5c60-472d-a878-4b65b8460ccc",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.915366",
-      "column_name": "color",
-      "created_on": "2026-02-18T14:56:05.915357",
+      "changed_on": "2026-02-10T13:38:26.158861",
+      "column_name": "tz_offset",
+      "created_on": "2026-02-10T13:38:26.158857",
       "description": null,
       "expression": null,
       "extra": null,
@@ -375,16 +375,16 @@
       "is_active": true,
       "is_dttm": false,
       "python_date_format": null,
-      "type": "STRING",
-      "type_generic": 1,
-      "uuid": "0702fcdf-2d03-45db-8496-697d47b300d6",
+      "type": "LONGINTEGER",
+      "type_generic": 0,
+      "uuid": "cf6da93a-bba9-47df-9154-6cfd0c9922fc",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.919466",
-      "column_name": "id",
-      "created_on": "2026-02-18T14:56:05.919460",
+      "changed_on": "2026-02-10T13:38:26.158913",
+      "column_name": "updated",
+      "created_on": "2026-02-10T13:38:26.158909",
       "description": null,
       "expression": null,
       "extra": null,
@@ -392,18 +392,18 @@
       "groupby": true,
       "id": 789,
       "is_active": true,
-      "is_dttm": false,
+      "is_dttm": true,
       "python_date_format": null,
-      "type": "STRING",
-      "type_generic": 1,
-      "uuid": "a4b58528-fcbf-45e9-af39-fe9d737ba380",
+      "type": "DATETIME",
+      "type_generic": 2,
+      "uuid": "2aa0a72a-5602-4799-b5ab-f22000108d62",
       "verbose_name": null
     },
     {
       "advanced_data_type": null,
-      "changed_on": "2026-02-18T14:56:05.932553",
-      "column_name": "tz",
-      "created_on": "2026-02-18T14:56:05.932530",
+      "changed_on": "2026-02-10T13:38:26.158967",
+      "column_name": "channel_name",
+      "created_on": "2026-02-10T13:38:26.158963",
       "description": null,
       "expression": null,
       "extra": null,
@@ -415,7 +415,7 @@
       "python_date_format": null,
       "type": "STRING",
       "type_generic": 1,
-      "uuid": "bc872357-1920-42f3-aeda-b596122bcdb8",
+      "uuid": "a84bd658-c83c-4e7f-9e1b-192595092d9b",
       "verbose_name": null
     }
   ],
@@ -423,8 +423,8 @@
     "first_name": "Superset",
     "last_name": "Admin"
   },
-  "created_on": "2026-02-18T14:56:04.317950",
-  "created_on_humanized": "13 days ago",
+  "created_on": "2026-02-10T13:38:26.050436",
+  "created_on_humanized": "16 days ago",
   "database": {
     "allow_multi_catalog": false,
     "backend": "postgresql",
@@ -452,8 +452,8 @@
   "main_dttm_col": "updated",
   "metrics": [
     {
-      "changed_on": "2026-02-18T14:56:05.085244",
-      "created_on": "2026-02-18T14:56:05.085166",
+      "changed_on": "2026-02-10T13:38:26.182269",
+      "created_on": "2026-02-10T13:38:26.182264",
       "currency": null,
       "d3format": null,
       "description": null,
@@ -462,7 +462,7 @@
       "id": 33,
       "metric_name": "count",
       "metric_type": "count",
-      "uuid": "10c8b8cf-b697-4512-9e9e-2996721f829e",
+      "uuid": "7510f8ca-05ee-4a37-bec1-4a5d7bf2ac50",
       "verbose_name": "COUNT(*)",
       "warning_text": null
     }
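A short decoding aid for the fixture churn above: Superset's GenericDataType enum maps type_generic 0/1/2/3 to NUMERIC/STRING/TEMPORAL/BOOLEAN, which is why every "type": "BOOLEAN" row pairs with "type_generic": 3 and the updated column's DATETIME pairs with 2. A hedged helper (the mapping reflects Superset's enum as of recent releases; verify against your version):

GENERIC_TYPE = {0: "NUMERIC", 1: "STRING", 2: "TEMPORAL", 3: "BOOLEAN"}

def describe_column(column: dict) -> str:
    """Summarize one fixture column, e.g. 'updated: DATETIME (TEMPORAL)'."""
    return f"{column['column_name']}: {column['type']} ({GENERIC_TYPE[column['type_generic']]})"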
@@ -45,7 +45,6 @@ INITIAL_PERMISSIONS = [
     {"resource": "plugin:storage", "action": "READ"},
     {"resource": "plugin:storage", "action": "WRITE"},
     {"resource": "plugin:debug", "action": "EXECUTE"},
-    {"resource": "git_config", "action": "READ"},
 ]
 # [/DEF:INITIAL_PERMISSIONS:Constant]

@@ -94,7 +93,6 @@ def seed_permissions():
         ("plugins", "READ"),
         ("tasks", "READ"),
         ("tasks", "WRITE"),
-        ("git_config", "READ"),
     ]

     for res, act in user_permissions:
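Both hunks above shrink the seeded lists; a seeder like seed_permissions stays safe to re-run only if it checks what already exists. A minimal sketch of that guard, assuming a Permission model with resource/action columns (the model name and session API are assumptions, not the project's actual code):

def seed_pairs(db, pairs):
    """Insert only the (resource, action) pairs that are not already present."""
    existing = {(p.resource, p.action) for p in db.query(Permission).all()}
    missing = [pair for pair in pairs if pair not in existing]
    for resource, action in missing:
        db.add(Permission(resource=resource, action=action))
    if missing:
        db.commit()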
@@ -291,9 +291,6 @@ def main() -> None:
     logger.info(f"[COHERENCE:OK] Result summary: {json.dumps(result, ensure_ascii=True)}")

-
-
-# [/DEF:main:Function]

 if __name__ == "__main__":
     main()
@@ -100,10 +100,7 @@ def test_dashboard_dataset_relations():
         logger.info(f"    Found {len(dashboards)} dashboards using this dataset:")

         for dash in dashboards:
-            if isinstance(dash, dict):
-                logger.info(f"      - Dashboard ID {dash.get('id')}: {dash.get('dashboard_title', dash.get('title', 'Unknown'))}")
-            else:
-                logger.info(f"      - Dashboard: {dash}")
+            logger.info(f"      - Dashboard ID {dash.get('id')}: {dash.get('dashboard_title', dash.get('title', 'Unknown'))}")
     elif 'result' in related_objects:
         # Some Superset versions use 'result' wrapper
         result = related_objects['result']
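The removed else branch tolerated non-dict entries; after this change the test assumes Superset returns each dashboard as a dict. If that guard is ever needed again, a one-line hypothetical helper (not part of the codebase):

def dash_label(dash) -> str:
    return f"Dashboard ID {dash.get('id')}" if isinstance(dash, dict) else f"Dashboard: {dash}"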
@@ -27,7 +27,7 @@ class TestEncryptionManager:
         # Re-implement the same logic as EncryptionManager to avoid import issues
         # with the llm_provider module's relative imports
         import os
-        key = os.getenv("ENCRYPTION_KEY", "REMOVED_HISTORICAL_SECRET_DO_NOT_USE").encode()
+        key = os.getenv("ENCRYPTION_KEY", "ZcytYzi0iHIl4Ttr-GdAEk117aGRogkGvN3wiTxrPpE=").encode()
         fernet = Fernet(key)

 class EncryptionManager:
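For readers unfamiliar with the Fernet usage above: the key must be a 32-byte url-safe base64 value, and encrypt/decrypt operate on bytes. A self-contained round trip using only the public cryptography API:

from cryptography.fernet import Fernet

key = Fernet.generate_key()  # stand-in for a real ENCRYPTION_KEY value
fernet = Fernet(key)
token = fernet.encrypt(b"secret_api_key_123")
assert fernet.decrypt(token) == b"secret_api_key_123"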
@@ -1,87 +0,0 @@
import pytest
from datetime import datetime, timedelta
from unittest.mock import MagicMock
from src.services.health_service import HealthService
from src.models.llm import ValidationRecord

# [DEF:test_health_service:Module]
# @TIER: STANDARD
# @PURPOSE: Unit tests for HealthService aggregation logic.

@pytest.mark.asyncio
async def test_get_health_summary_aggregation():
    """
    @TEST_SCENARIO: Verify that HealthService correctly aggregates the latest record per dashboard.
    """
    # Setup: Mock DB session
    db = MagicMock()

    now = datetime.utcnow()

    # Dashboard 1: Old FAIL, New PASS
    rec1_old = ValidationRecord(
        dashboard_id="dash_1",
        environment_id="env_1",
        status="FAIL",
        timestamp=now - timedelta(hours=1),
        summary="Old failure",
        issues=[]
    )
    rec1_new = ValidationRecord(
        dashboard_id="dash_1",
        environment_id="env_1",
        status="PASS",
        timestamp=now,
        summary="New pass",
        issues=[]
    )

    # Dashboard 2: Single WARN
    rec2 = ValidationRecord(
        dashboard_id="dash_2",
        environment_id="env_1",
        status="WARN",
        timestamp=now,
        summary="Warning",
        issues=[]
    )

    # Mock the query chain
    # subquery = self.db.query(...).filter(...).group_by(...).subquery()
    # query = self.db.query(ValidationRecord).join(subquery, ...).all()

    mock_query = db.query.return_value
    mock_query.filter.return_value = mock_query
    mock_query.group_by.return_value = mock_query
    mock_query.subquery.return_value = MagicMock()

    db.query.return_value.join.return_value.all.return_value = [rec1_new, rec2]

    service = HealthService(db)
    summary = await service.get_health_summary(environment_id="env_1")

    assert summary.pass_count == 1
    assert summary.warn_count == 1
    assert summary.fail_count == 0
    assert len(summary.items) == 2

    # Verify dash_1 has the latest status (PASS)
    dash_1_item = next(item for item in summary.items if item.dashboard_id == "dash_1")
    assert dash_1_item.status == "PASS"
    assert dash_1_item.summary == "New pass"

@pytest.mark.asyncio
async def test_get_health_summary_empty():
    """
    @TEST_SCENARIO: Verify behavior with no records.
    """
    db = MagicMock()
    db.query.return_value.join.return_value.all.return_value = []

    service = HealthService(db)
    summary = await service.get_health_summary(environment_id="env_none")

    assert summary.pass_count == 0
    assert len(summary.items) == 0

# [/DEF:test_health_service:Module]
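The deleted test mocks a query/filter/group_by/subquery/join chain without pinning the real SQL. A sketch of the latest-record-per-dashboard query that chain implies, in SQLAlchemy ORM style (the service's actual query may differ):

from sqlalchemy import func

def latest_records(db, environment_id):
    latest = (
        db.query(
            ValidationRecord.dashboard_id,
            func.max(ValidationRecord.timestamp).label("latest_ts"),
        )
        .filter(ValidationRecord.environment_id == environment_id)
        .group_by(ValidationRecord.dashboard_id)
        .subquery()
    )
    return (
        db.query(ValidationRecord)
        .join(
            latest,
            (ValidationRecord.dashboard_id == latest.c.dashboard_id)
            & (ValidationRecord.timestamp == latest.c.latest_ts),
        )
        .all()
    )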
@@ -1,150 +0,0 @@
# [DEF:backend.src.services.__tests__.test_llm_plugin_persistence:Module]
# @TIER: STANDARD
# @PURPOSE: Regression test for ValidationRecord persistence fields populated from task context.

import types
import pytest

from src.plugins.llm_analysis import plugin as plugin_module


# [DEF:_DummyLogger:Class]
# @PURPOSE: Minimal logger shim for TaskContext-like objects used in tests.
class _DummyLogger:
    def with_source(self, _source: str):
        return self

    def info(self, *_args, **_kwargs):
        return None

    def debug(self, *_args, **_kwargs):
        return None

    def warning(self, *_args, **_kwargs):
        return None

    def error(self, *_args, **_kwargs):
        return None
# [/DEF:_DummyLogger:Class]


# [DEF:_FakeDBSession:Class]
# @PURPOSE: Captures persisted records for assertion and mimics SQLAlchemy session methods used by plugin.
class _FakeDBSession:
    def __init__(self):
        self.added = None
        self.committed = False
        self.closed = False

    def add(self, obj):
        self.added = obj

    def commit(self):
        self.committed = True

    def close(self):
        self.closed = True
# [/DEF:_FakeDBSession:Class]


# [DEF:test_dashboard_validation_plugin_persists_task_and_environment_ids:Function]
# @PURPOSE: Ensure db ValidationRecord includes context.task_id and params.environment_id.
@pytest.mark.asyncio
async def test_dashboard_validation_plugin_persists_task_and_environment_ids(tmp_path, monkeypatch):
    fake_db = _FakeDBSession()

    env = types.SimpleNamespace(id="env-42")
    provider = types.SimpleNamespace(
        id="provider-1",
        name="Main LLM",
        provider_type="openai",
        base_url="https://example.invalid/v1",
        default_model="gpt-4o",
        is_active=True,
    )

    class _FakeProviderService:
        def __init__(self, _db):
            return None

        def get_provider(self, _provider_id):
            return provider

        def get_decrypted_api_key(self, _provider_id):
            return "a" * 32

    class _FakeScreenshotService:
        def __init__(self, _env):
            return None

        async def capture_dashboard(self, _dashboard_id, _screenshot_path):
            return None

    class _FakeLLMClient:
        def __init__(self, **_kwargs):
            return None

        async def analyze_dashboard(self, *_args, **_kwargs):
            return {
                "status": "PASS",
                "summary": "Dashboard healthy",
                "issues": [],
            }

    class _FakeNotificationService:
        def __init__(self, *_args, **_kwargs):
            return None

        async def dispatch_report(self, **_kwargs):
            return None

    class _FakeConfigManager:
        def get_environment(self, _env_id):
            return env

        def get_config(self):
            return types.SimpleNamespace(
                settings=types.SimpleNamespace(
                    storage=types.SimpleNamespace(root_path=str(tmp_path)),
                    llm={},
                )
            )

    class _FakeSupersetClient:
        def __init__(self, _env):
            self.network = types.SimpleNamespace(request=lambda **_kwargs: {"result": []})

    monkeypatch.setattr(plugin_module, "SessionLocal", lambda: fake_db)
    monkeypatch.setattr(plugin_module, "LLMProviderService", _FakeProviderService)
    monkeypatch.setattr(plugin_module, "ScreenshotService", _FakeScreenshotService)
    monkeypatch.setattr(plugin_module, "LLMClient", _FakeLLMClient)
    monkeypatch.setattr(plugin_module, "NotificationService", _FakeNotificationService)
    monkeypatch.setattr(plugin_module, "SupersetClient", _FakeSupersetClient)
    monkeypatch.setattr("src.dependencies.get_config_manager", lambda: _FakeConfigManager())

    context = types.SimpleNamespace(
        task_id="task-999",
        logger=_DummyLogger(),
        background_tasks=None,
    )

    plugin = plugin_module.DashboardValidationPlugin()
    result = await plugin.execute(
        {
            "dashboard_id": "11",
            "environment_id": "env-42",
            "provider_id": "provider-1",
        },
        context=context,
    )

    assert result["environment_id"] == "env-42"
    assert fake_db.committed is True
    assert fake_db.closed is True
    assert fake_db.added is not None
    assert fake_db.added.task_id == "task-999"
    assert fake_db.added.environment_id == "env-42"
# [/DEF:test_dashboard_validation_plugin_persists_task_and_environment_ids:Function]


# [/DEF:backend.src.services.__tests__.test_llm_plugin_persistence:Module]
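The value of _FakeDBSession above is that it records exactly what the plugin persisted. Its contract, distilled to a few lines you can run standalone against the class as defined in the deleted file:

fake = _FakeDBSession()
fake.add({"task_id": "task-999"})  # stands in for the ValidationRecord
fake.commit()
fake.close()
assert fake.added == {"task_id": "task-999"}
assert fake.committed and fake.closed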
@@ -1,81 +0,0 @@
# [DEF:__tests__/test_llm_provider:Module]
# @RELATION: VERIFIES -> ../llm_provider.py
# @PURPOSE: Contract testing for LLMProviderService and EncryptionManager
# [/DEF:__tests__/test_llm_provider:Module]

import pytest
import os
from unittest.mock import MagicMock
from sqlalchemy.orm import Session
from src.services.llm_provider import EncryptionManager, LLMProviderService
from src.models.llm import LLMProvider
from src.plugins.llm_analysis.models import LLMProviderConfig, ProviderType

# @TEST_CONTRACT: EncryptionManagerModel -> Invariants
# @TEST_INVARIANT: symmetric_encryption
def test_encryption_cycle():
    """Verify encrypted data can be decrypted back to original string."""
    manager = EncryptionManager()
    original = "secret_api_key_123"
    encrypted = manager.encrypt(original)
    assert encrypted != original
    assert manager.decrypt(encrypted) == original

# @TEST_EDGE: empty_string_encryption
def test_empty_string_encryption():
    manager = EncryptionManager()
    original = ""
    encrypted = manager.encrypt(original)
    assert manager.decrypt(encrypted) == ""

# @TEST_EDGE: decrypt_invalid_data
def test_decrypt_invalid_data():
    manager = EncryptionManager()
    with pytest.raises(Exception):
        manager.decrypt("not-encrypted-string")

# @TEST_FIXTURE: mock_db_session
@pytest.fixture
def mock_db():
    return MagicMock(spec=Session)

@pytest.fixture
def service(mock_db):
    return LLMProviderService(db=mock_db)

def test_get_all_providers(service, mock_db):
    service.get_all_providers()
    mock_db.query.assert_called()
    mock_db.query().all.assert_called()

def test_create_provider(service, mock_db):
    config = LLMProviderConfig(
        provider_type=ProviderType.OPENAI,
        name="Test OpenAI",
        base_url="https://api.openai.com",
        api_key="sk-test",
        default_model="gpt-4",
        is_active=True
    )

    provider = service.create_provider(config)

    mock_db.add.assert_called()
    mock_db.commit.assert_called()
    # Verify API key was encrypted
    assert provider.api_key != "sk-test"
    # Decrypt to verify it matches
    assert EncryptionManager().decrypt(provider.api_key) == "sk-test"

def test_get_decrypted_api_key(service, mock_db):
    # Setup mock provider
    encrypted_key = EncryptionManager().encrypt("secret-value")
    mock_provider = LLMProvider(id="p1", api_key=encrypted_key)
    mock_db.query().filter().first.return_value = mock_provider

    key = service.get_decrypted_api_key("p1")
    assert key == "secret-value"

def test_get_decrypted_api_key_not_found(service, mock_db):
    mock_db.query().filter().first.return_value = None
    assert service.get_decrypted_api_key("missing") is None
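The deleted contract tests pin EncryptionManager's observable behavior (string round trip, empty-string round trip, failure on garbage input) without showing the class itself. A hedged reconstruction that satisfies those contracts, not necessarily the project's real implementation:

import os
from cryptography.fernet import Fernet

class EncryptionManagerSketch:
    """Round-trips str values via Fernet; the key comes from ENCRYPTION_KEY."""

    def __init__(self) -> None:
        self._fernet = Fernet(os.environ["ENCRYPTION_KEY"].encode())

    def encrypt(self, value: str) -> str:
        return self._fernet.encrypt(value.encode()).decode()

    def decrypt(self, token: str) -> str:
        # Raises cryptography.fernet.InvalidToken on non-encrypted input,
        # which satisfies the decrypt_invalid_data edge case above.
        return self._fernet.decrypt(token.encode()).decode()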
@@ -1,140 +0,0 @@
|
|||||||
# [DEF:backend.src.services.__tests__.test_rbac_permission_catalog:Module]
|
|
||||||
# @TIER: STANDARD
|
|
||||||
# @SEMANTICS: tests, rbac, permissions, catalog, discovery, sync
|
|
||||||
# @PURPOSE: Verifies RBAC permission catalog discovery and idempotent synchronization behavior.
|
|
||||||
# @LAYER: Service Tests
|
|
||||||
# @RELATION: TESTS -> backend.src.services.rbac_permission_catalog
|
|
||||||
# @INVARIANT: Synchronization adds only missing normalized permission pairs.
|
|
||||||
|
|
||||||
# [SECTION: IMPORTS]
|
|
||||||
from types import SimpleNamespace
|
|
||||||
from unittest.mock import MagicMock
|
|
||||||
|
|
||||||
import src.services.rbac_permission_catalog as catalog
|
|
||||||
# [/SECTION: IMPORTS]
|
|
||||||
|
|
||||||
|
|
||||||
# [DEF:test_discover_route_permissions_extracts_declared_pairs_and_ignores_tests:Function]
|
|
||||||
# @PURPOSE: Ensures route-scanner extracts has_permission pairs from route files and skips __tests__.
|
|
||||||
# @PRE: Temporary route directory contains route and test files.
|
|
||||||
# @POST: Returned set includes production route permissions and excludes test-only declarations.
|
|
||||||
def test_discover_route_permissions_extracts_declared_pairs_and_ignores_tests(tmp_path, monkeypatch):
|
|
||||||
routes_dir = tmp_path / "routes"
|
|
||||||
routes_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
(routes_dir / "dashboards.py").write_text(
|
|
||||||
'\n'.join(
|
|
||||||
[
|
|
||||||
'_ = Depends(has_permission("plugin:migration", "READ"))',
|
|
||||||
'_ = Depends(has_permission("plugin:migration", "EXECUTE"))',
|
|
||||||
'_ = Depends(has_permission("tasks", "WRITE"))',
|
|
||||||
]
|
|
||||||
),
|
|
||||||
encoding="utf-8",
|
|
||||||
)
|
|
||||||
|
|
||||||
tests_dir = routes_dir / "__tests__"
|
|
||||||
tests_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
(tests_dir / "test_fake.py").write_text(
|
|
||||||
'_ = Depends(has_permission("plugin:ignored", "READ"))',
|
|
||||||
encoding="utf-8",
|
|
||||||
)
|
|
||||||
|
|
||||||
monkeypatch.setattr(catalog, "ROUTES_DIR", routes_dir)
|
|
||||||
|
|
||||||
discovered = catalog._discover_route_permissions()
|
|
||||||
|
|
||||||
assert ("plugin:migration", "READ") in discovered
|
|
||||||
assert ("plugin:migration", "EXECUTE") in discovered
|
|
||||||
assert ("tasks", "WRITE") in discovered
|
|
||||||
assert ("plugin:ignored", "READ") not in discovered
|
|
||||||
# [/DEF:test_discover_route_permissions_extracts_declared_pairs_and_ignores_tests:Function]
|
|
||||||
|
|
||||||
|
|
||||||


# [DEF:test_discover_declared_permissions_unions_route_and_plugin_permissions:Function]
# @PURPOSE: Ensures the full catalog includes route-level permissions plus dynamic plugin EXECUTE rights.
# @PRE: Route discovery and the plugin loader both return permission sources.
# @POST: Result set contains the union of both sources.
def test_discover_declared_permissions_unions_route_and_plugin_permissions(monkeypatch):
    monkeypatch.setattr(
        catalog,
        "_discover_route_permissions",
        lambda: {("tasks", "READ"), ("plugin:migration", "READ")},
    )

    plugin_loader = MagicMock()
    plugin_loader.get_all_plugin_configs.return_value = [
        SimpleNamespace(id="superset-backup"),
        SimpleNamespace(id="llm_dashboard_validation"),
    ]

    discovered = catalog.discover_declared_permissions(plugin_loader=plugin_loader)

    assert ("tasks", "READ") in discovered
    assert ("plugin:migration", "READ") in discovered
    assert ("plugin:superset-backup", "EXECUTE") in discovered
    assert ("plugin:llm_dashboard_validation", "EXECUTE") in discovered
# [/DEF:test_discover_declared_permissions_unions_route_and_plugin_permissions:Function]
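

# A minimal sketch of the union step exercised above, building on the scanner
# sketch earlier; the plugin_loader interface is inferred from the MagicMock
# configuration, not from the service's actual code.
def discover_declared_permissions(plugin_loader) -> set[tuple[str, str]]:
    declared = set(_discover_route_permissions())
    # Every installed plugin implicitly declares an EXECUTE right on itself.
    for config in plugin_loader.get_all_plugin_configs():
        declared.add((f"plugin:{config.id}", "EXECUTE"))
    return declared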


# [DEF:test_sync_permission_catalog_inserts_only_missing_normalized_pairs:Function]
# @PURPOSE: Ensures synchronization inserts only missing pairs and normalizes action/resource tokens.
# @PRE: DB already contains a subset of the declared permissions.
# @POST: Only the missing normalized pairs are inserted and commit is executed once.
def test_sync_permission_catalog_inserts_only_missing_normalized_pairs():
    db = MagicMock()
    db.query.return_value.all.return_value = [
        SimpleNamespace(resource="tasks", action="READ"),
        SimpleNamespace(resource="plugin:migration", action="EXECUTE"),
    ]

    declared_permissions = {
        ("tasks", "read"),
        ("plugin:migration", "execute"),
        ("plugin:migration", "READ"),
        ("", "WRITE"),
        ("plugin:migration", ""),
    }

    inserted_count = catalog.sync_permission_catalog(
        db=db,
        declared_permissions=declared_permissions,
    )

    assert inserted_count == 1
    assert db.add.call_count == 1
    inserted_permission = db.add.call_args[0][0]
    assert inserted_permission.resource == "plugin:migration"
    assert inserted_permission.action == "READ"
    db.commit.assert_called_once()
# [/DEF:test_sync_permission_catalog_inserts_only_missing_normalized_pairs:Function]
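

# A minimal sketch consistent with the assertions above and with the no-op
# test below: actions are upper-cased, blank tokens are dropped, pairs already
# in the DB are skipped, and commit fires only when something was inserted.
# The Permission model import path is hypothetical.
from src.models.rbac import Permission  # hypothetical model location


def sync_permission_catalog(db, declared_permissions) -> int:
    existing = {(p.resource, p.action) for p in db.query(Permission).all()}
    inserted = 0
    for resource, action in declared_permissions:
        resource, action = resource.strip(), action.strip().upper()
        if not resource or not action:
            continue  # Malformed declarations never reach the catalog.
        if (resource, action) in existing:
            continue  # Idempotency: only missing pairs are added.
        db.add(Permission(resource=resource, action=action))
        existing.add((resource, action))
        inserted += 1
    if inserted:
        db.commit()
    return inserted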


# [DEF:test_sync_permission_catalog_is_noop_when_all_permissions_exist:Function]
# @PURPOSE: Ensures synchronization is idempotent when all declared pairs already exist.
# @PRE: DB contains the full declared permission set.
# @POST: No rows are added and commit is not called.
def test_sync_permission_catalog_is_noop_when_all_permissions_exist():
    db = MagicMock()
    db.query.return_value.all.return_value = [
        SimpleNamespace(resource="tasks", action="READ"),
        SimpleNamespace(resource="plugin:migration", action="READ"),
    ]

    declared_permissions = {
        ("tasks", "READ"),
        ("plugin:migration", "READ"),
    }

    inserted_count = catalog.sync_permission_catalog(
        db=db,
        declared_permissions=declared_permissions,
    )

    assert inserted_count == 0
    db.add.assert_not_called()
    db.commit.assert_not_called()
# [/DEF:test_sync_permission_catalog_is_noop_when_all_permissions_exist:Function]


# [/DEF:backend.src.services.__tests__.test_rbac_permission_catalog:Module]
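

# A hypothetical call site tying the two halves together: discovery feeds
# synchronization, and because existing pairs are skipped the hook is safe to
# run on every application startup.
def ensure_permission_catalog(db, plugin_loader) -> int:
    declared = discover_declared_permissions(plugin_loader=plugin_loader)
    # Returns the number of newly inserted pairs; 0 on every repeat run.
    return sync_permission_catalog(db=db, declared_permissions=declared)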


@@ -9,7 +9,7 @@
 import pytest
 from unittest.mock import MagicMock, patch, AsyncMock
-from datetime import datetime, timezone
+from datetime import datetime


 # [DEF:test_get_dashboards_with_status:Function]
@@ -269,71 +269,4 @@ def test_get_last_task_for_resource_no_match():
 # [/DEF:test_get_last_task_for_resource_no_match:Function]
-
-
-# [DEF:test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_datetimes:Function]
-# @TEST: get_dashboards_with_status handles mixed naive/aware datetimes without comparison errors.
-# @PRE: Task list includes both timezone-aware and timezone-naive timestamps.
-# @POST: Latest task is selected deterministically and no exception is raised.
-@pytest.mark.asyncio
-async def test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_datetimes():
-    with patch("src.services.resource_service.SupersetClient") as mock_client, \
-         patch("src.services.resource_service.GitService"):
-
-        from src.services.resource_service import ResourceService
-
-        service = ResourceService()
-        mock_client.return_value.get_dashboards_summary.return_value = [
-            {"id": 1, "title": "Dashboard 1", "slug": "dash-1"}
-        ]
-
-        task_naive = MagicMock()
-        task_naive.id = "task-naive"
-        task_naive.plugin_id = "llm_dashboard_validation"
-        task_naive.status = "SUCCESS"
-        task_naive.params = {"dashboard_id": "1", "environment_id": "prod"}
-        task_naive.started_at = datetime(2024, 1, 1, 10, 0, 0)
-
-        task_aware = MagicMock()
-        task_aware.id = "task-aware"
-        task_aware.plugin_id = "llm_dashboard_validation"
-        task_aware.status = "SUCCESS"
-        task_aware.params = {"dashboard_id": "1", "environment_id": "prod"}
-        task_aware.started_at = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc)
-
-        env = MagicMock()
-        env.id = "prod"
-
-        result = await service.get_dashboards_with_status(env, [task_naive, task_aware])
-
-        assert result[0]["last_task"]["task_id"] == "task-aware"
-# [/DEF:test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_datetimes:Function]
-
-
-# [DEF:test_get_last_task_for_resource_handles_mixed_naive_and_aware_created_at:Function]
-# @TEST: _get_last_task_for_resource handles mixed naive/aware created_at values.
-# @PRE: Matching tasks include naive and aware created_at timestamps.
-# @POST: Latest task is returned without raising datetime comparison errors.
-def test_get_last_task_for_resource_handles_mixed_naive_and_aware_created_at():
-    from src.services.resource_service import ResourceService
-
-    service = ResourceService()
-
-    task_naive = MagicMock()
-    task_naive.id = "task-old"
-    task_naive.status = "SUCCESS"
-    task_naive.params = {"resource_id": "dashboard-1"}
-    task_naive.created_at = datetime(2024, 1, 1, 10, 0, 0)
-
-    task_aware = MagicMock()
-    task_aware.id = "task-new"
-    task_aware.status = "RUNNING"
-    task_aware.params = {"resource_id": "dashboard-1"}
-    task_aware.created_at = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc)
-
-    result = service._get_last_task_for_resource("dashboard-1", [task_naive, task_aware])
-
-    assert result is not None
-    assert result["task_id"] == "task-new"
-# [/DEF:test_get_last_task_for_resource_handles_mixed_naive_and_aware_created_at:Function]
 
 
 # [/DEF:backend.src.services.__tests__.test_resource_service:Module]
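

# The removed tests above pinned one underlying technique: naive and aware
# datetimes raise TypeError when compared, so values must be coerced to a
# common timezone before any max()/sort over mixed inputs. A minimal sketch of
# such a helper follows; treating naive timestamps as UTC is an assumption,
# not the service's documented policy.
from datetime import datetime, timezone


def _as_utc(value: datetime) -> datetime:
    if value.tzinfo is None:
        # Attach UTC to naive values so mixed inputs become comparable.
        return value.replace(tzinfo=timezone.utc)
    return value.astimezone(timezone.utc)


# Usage: pick the latest task without tripping over mixed tzinfo, e.g.
# latest = max(tasks, key=lambda t: _as_utc(t.created_at))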
Some files were not shown because too many files have changed in this diff.