fix: commit verified semantic repair changes
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
# [DEF:backend.src.api.routes.__tests__.test_git_api:Module]
|
||||
# @RELATION: VERIFIES -> src.api.routes.git
|
||||
# [DEF:TestGitApi:Module]
|
||||
# @COMPLEXITY: 3
|
||||
# @RELATION: VERIFIES ->[src.api.routes.git]
|
||||
# @PURPOSE: API tests for Git configurations and repository operations.
|
||||
|
||||
import pytest
|
||||
@@ -9,32 +10,52 @@ from fastapi import HTTPException
|
||||
from src.api.routes import git as git_routes
|
||||
from src.models.git import GitServerConfig, GitProvider, GitStatus, GitRepository
|
||||
|
||||
|
||||
class DbMock:
|
||||
def __init__(self, data=None):
|
||||
self._data = data or []
|
||||
self._deleted = []
|
||||
self._added = []
|
||||
self._filtered = None
|
||||
|
||||
def query(self, model):
|
||||
self._model = model
|
||||
self._filtered = None
|
||||
return self
|
||||
|
||||
def filter(self, condition):
|
||||
# Simplistic mocking for tests, assuming equality checks
|
||||
for item in self._data:
|
||||
# We assume condition is an equality expression like GitServerConfig.id == "123"
|
||||
# It's hard to eval the condition exactly in a mock without complex parsing,
|
||||
# so we'll just return items where type matches.
|
||||
pass
|
||||
# Honor simple SQLAlchemy equality expressions used by these route tests.
|
||||
candidates = [
|
||||
item
|
||||
for item in self._data
|
||||
if not hasattr(self, "_model") or isinstance(item, self._model)
|
||||
]
|
||||
try:
|
||||
left_key = getattr(getattr(condition, "left", None), "key", None)
|
||||
right_value = getattr(getattr(condition, "right", None), "value", None)
|
||||
if left_key is not None and right_value is not None:
|
||||
self._filtered = [
|
||||
item
|
||||
for item in candidates
|
||||
if getattr(item, left_key, None) == right_value
|
||||
]
|
||||
else:
|
||||
self._filtered = candidates
|
||||
except Exception:
|
||||
self._filtered = candidates
|
||||
return self
|
||||
|
||||
def first(self):
|
||||
if self._filtered is not None:
|
||||
return self._filtered[0] if self._filtered else None
|
||||
for item in self._data:
|
||||
if hasattr(self, "_model") and isinstance(item, self._model):
|
||||
return item
|
||||
return None
|
||||
|
||||
def all(self):
|
||||
if self._filtered is not None:
|
||||
return list(self._filtered)
|
||||
return self._data
|
||||
|
||||
def add(self, item):
|
||||
@@ -57,254 +78,410 @@ class DbMock:
|
||||
if not hasattr(item, "last_validated"):
|
||||
item.last_validated = "2026-03-08T00:00:00Z"
|
||||
|
||||
|
||||
# [DEF:test_get_git_configs_masks_pat:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_get_git_configs_masks_pat():
|
||||
"""
|
||||
@PRE: Database session `db` is available.
|
||||
@POST: Returns a list of all GitServerConfig objects from the database with PAT masked.
|
||||
"""
|
||||
db = DbMock([GitServerConfig(
|
||||
id="config-1", name="Test Server", provider=GitProvider.GITHUB,
|
||||
url="https://github.com", pat="secret-token",
|
||||
status=GitStatus.CONNECTED, last_validated="2026-03-08T00:00:00Z"
|
||||
)])
|
||||
|
||||
db = DbMock(
|
||||
[
|
||||
GitServerConfig(
|
||||
id="config-1",
|
||||
name="Test Server",
|
||||
provider=GitProvider.GITHUB,
|
||||
url="https://github.com",
|
||||
pat="secret-token",
|
||||
status=GitStatus.CONNECTED,
|
||||
last_validated="2026-03-08T00:00:00Z",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
result = asyncio.run(git_routes.get_git_configs(db=db))
|
||||
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].pat == "********"
|
||||
assert result[0].name == "Test Server"
|
||||
|
||||
|
||||
# [/DEF:test_get_git_configs_masks_pat:Function]
|
||||
|
||||
|
||||
# [DEF:test_create_git_config_persists_config:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_create_git_config_persists_config():
|
||||
"""
|
||||
@PRE: `config` contains valid GitServerConfigCreate data.
|
||||
@POST: A new GitServerConfig record is created in the database.
|
||||
"""
|
||||
from src.api.routes.git_schemas import GitServerConfigCreate
|
||||
|
||||
db = DbMock()
|
||||
config = GitServerConfigCreate(
|
||||
name="New Server", provider=GitProvider.GITLAB,
|
||||
url="https://gitlab.com", pat="new-token",
|
||||
default_branch="master"
|
||||
name="New Server",
|
||||
provider=GitProvider.GITLAB,
|
||||
url="https://gitlab.com",
|
||||
pat="new-token",
|
||||
default_branch="master",
|
||||
)
|
||||
|
||||
|
||||
result = asyncio.run(git_routes.create_git_config(config=config, db=db))
|
||||
|
||||
|
||||
assert len(db._added) == 1
|
||||
assert db._added[0].name == "New Server"
|
||||
assert db._added[0].pat == "new-token"
|
||||
assert result.name == "New Server"
|
||||
assert result.pat == "new-token" # Note: route returns unmasked until serialized by FastAPI usually, but in tests schema might catch it or not.
|
||||
assert (
|
||||
result.pat == "new-token"
|
||||
) # Note: route returns unmasked until serialized by FastAPI usually, but in tests schema might catch it or not.
|
||||
|
||||
|
||||
# [/DEF:test_create_git_config_persists_config:Function]
|
||||
|
||||
from src.api.routes.git_schemas import GitServerConfigUpdate
|
||||
|
||||
|
||||
# [DEF:test_update_git_config_modifies_record:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_update_git_config_modifies_record():
|
||||
"""
|
||||
@PRE: `config_id` corresponds to an existing configuration.
|
||||
@POST: The configuration record is updated in the database, preserving PAT if masked is sent.
|
||||
"""
|
||||
existing_config = GitServerConfig(
|
||||
id="config-1", name="Old Server", provider=GitProvider.GITHUB,
|
||||
url="https://github.com", pat="old-token",
|
||||
status=GitStatus.CONNECTED, last_validated="2026-03-08T00:00:00Z"
|
||||
id="config-1",
|
||||
name="Old Server",
|
||||
provider=GitProvider.GITHUB,
|
||||
url="https://github.com",
|
||||
pat="old-token",
|
||||
status=GitStatus.CONNECTED,
|
||||
last_validated="2026-03-08T00:00:00Z",
|
||||
)
|
||||
|
||||
# The monkeypatched query will return existing_config as it's the only one in the list
|
||||
class SingleConfigDbMock:
|
||||
def query(self, *args): return self
|
||||
def filter(self, *args): return self
|
||||
def first(self): return existing_config
|
||||
def commit(self): pass
|
||||
def refresh(self, config): pass
|
||||
def query(self, *args):
|
||||
return self
|
||||
|
||||
def filter(self, *args):
|
||||
return self
|
||||
|
||||
def first(self):
|
||||
return existing_config
|
||||
|
||||
def commit(self):
|
||||
pass
|
||||
|
||||
def refresh(self, config):
|
||||
pass
|
||||
|
||||
db = SingleConfigDbMock()
|
||||
update_data = GitServerConfigUpdate(name="Updated Server", pat="********")
|
||||
|
||||
result = asyncio.run(git_routes.update_git_config(config_id="config-1", config_update=update_data, db=db))
|
||||
|
||||
|
||||
result = asyncio.run(
|
||||
git_routes.update_git_config(
|
||||
config_id="config-1", config_update=update_data, db=db
|
||||
)
|
||||
)
|
||||
|
||||
assert existing_config.name == "Updated Server"
|
||||
assert existing_config.pat == "old-token" # Ensure PAT is not overwritten with asterisks
|
||||
assert (
|
||||
existing_config.pat == "old-token"
|
||||
) # Ensure PAT is not overwritten with asterisks
|
||||
assert result.pat == "********"
|
||||
|
||||
|
||||
# [/DEF:test_update_git_config_modifies_record:Function]
|
||||
|
||||
|
||||
# [DEF:test_update_git_config_raises_404_if_not_found:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_update_git_config_raises_404_if_not_found():
|
||||
"""
|
||||
@PRE: `config_id` corresponds to a missing configuration.
|
||||
@THROW: HTTPException 404
|
||||
"""
|
||||
db = DbMock([]) # Empty db
|
||||
db = DbMock([]) # Empty db
|
||||
update_data = GitServerConfigUpdate(name="Updated Server", pat="new-token")
|
||||
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
asyncio.run(git_routes.update_git_config(config_id="config-1", config_update=update_data, db=db))
|
||||
|
||||
asyncio.run(
|
||||
git_routes.update_git_config(
|
||||
config_id="config-1", config_update=update_data, db=db
|
||||
)
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 404
|
||||
assert exc_info.value.detail == "Configuration not found"
|
||||
|
||||
|
||||
# [/DEF:test_update_git_config_raises_404_if_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_delete_git_config_removes_record:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_delete_git_config_removes_record():
|
||||
"""
|
||||
@PRE: `config_id` corresponds to an existing configuration.
|
||||
@POST: The configuration record is removed from the database.
|
||||
"""
|
||||
existing_config = GitServerConfig(id="config-1")
|
||||
|
||||
class SingleConfigDbMock:
|
||||
def query(self, *args): return self
|
||||
def filter(self, *args): return self
|
||||
def first(self): return existing_config
|
||||
def delete(self, config): self.deleted = config
|
||||
def commit(self): pass
|
||||
def query(self, *args):
|
||||
return self
|
||||
|
||||
def filter(self, *args):
|
||||
return self
|
||||
|
||||
def first(self):
|
||||
return existing_config
|
||||
|
||||
def delete(self, config):
|
||||
self.deleted = config
|
||||
|
||||
def commit(self):
|
||||
pass
|
||||
|
||||
db = SingleConfigDbMock()
|
||||
|
||||
|
||||
result = asyncio.run(git_routes.delete_git_config(config_id="config-1", db=db))
|
||||
|
||||
|
||||
assert db.deleted == existing_config
|
||||
assert result["status"] == "success"
|
||||
|
||||
|
||||
# [/DEF:test_delete_git_config_removes_record:Function]
|
||||
|
||||
|
||||
# [DEF:test_test_git_config_validates_connection_successfully:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_test_git_config_validates_connection_successfully(monkeypatch):
|
||||
"""
|
||||
@PRE: `config` contains provider, url, and pat.
|
||||
@POST: Returns success if the connection is validated via GitService.
|
||||
"""
|
||||
|
||||
class MockGitService:
|
||||
async def test_connection(self, provider, url, pat):
|
||||
return True
|
||||
|
||||
monkeypatch.setattr(git_routes, "git_service", MockGitService())
|
||||
from src.api.routes.git_schemas import GitServerConfigCreate
|
||||
|
||||
|
||||
config = GitServerConfigCreate(
|
||||
name="Test Server", provider=GitProvider.GITHUB,
|
||||
url="https://github.com", pat="test-pat"
|
||||
name="Test Server",
|
||||
provider=GitProvider.GITHUB,
|
||||
url="https://github.com",
|
||||
pat="test-pat",
|
||||
)
|
||||
db = DbMock([])
|
||||
|
||||
|
||||
result = asyncio.run(git_routes.test_git_config(config=config, db=db))
|
||||
|
||||
|
||||
assert result["status"] == "success"
|
||||
|
||||
|
||||
# [/DEF:test_test_git_config_validates_connection_successfully:Function]
|
||||
|
||||
|
||||
# [DEF:test_test_git_config_fails_validation:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_test_git_config_fails_validation(monkeypatch):
|
||||
"""
|
||||
@PRE: `config` contains provider, url, and pat BUT connection fails.
|
||||
@THROW: HTTPException 400
|
||||
"""
|
||||
|
||||
class MockGitService:
|
||||
async def test_connection(self, provider, url, pat):
|
||||
return False
|
||||
|
||||
monkeypatch.setattr(git_routes, "git_service", MockGitService())
|
||||
from src.api.routes.git_schemas import GitServerConfigCreate
|
||||
|
||||
|
||||
config = GitServerConfigCreate(
|
||||
name="Test Server", provider=GitProvider.GITHUB,
|
||||
url="https://github.com", pat="bad-pat"
|
||||
name="Test Server",
|
||||
provider=GitProvider.GITHUB,
|
||||
url="https://github.com",
|
||||
pat="bad-pat",
|
||||
)
|
||||
db = DbMock([])
|
||||
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
asyncio.run(git_routes.test_git_config(config=config, db=db))
|
||||
|
||||
|
||||
assert exc_info.value.status_code == 400
|
||||
assert exc_info.value.detail == "Connection failed"
|
||||
|
||||
|
||||
# [/DEF:test_test_git_config_fails_validation:Function]
|
||||
|
||||
|
||||
# [DEF:test_list_gitea_repositories_returns_payload:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_list_gitea_repositories_returns_payload(monkeypatch):
|
||||
"""
|
||||
@PRE: config_id exists and provider is GITEA.
|
||||
@POST: Returns repositories visible to PAT user.
|
||||
"""
|
||||
|
||||
class MockGitService:
|
||||
async def list_gitea_repositories(self, url, pat):
|
||||
return [{"name": "test-repo", "full_name": "owner/test-repo", "private": True}]
|
||||
return [
|
||||
{"name": "test-repo", "full_name": "owner/test-repo", "private": True}
|
||||
]
|
||||
|
||||
monkeypatch.setattr(git_routes, "git_service", MockGitService())
|
||||
existing_config = GitServerConfig(
|
||||
id="config-1", name="Gitea Server", provider=GitProvider.GITEA,
|
||||
url="https://gitea.local", pat="gitea-token"
|
||||
id="config-1",
|
||||
name="Gitea Server",
|
||||
provider=GitProvider.GITEA,
|
||||
url="https://gitea.local",
|
||||
pat="gitea-token",
|
||||
)
|
||||
db = DbMock([existing_config])
|
||||
|
||||
result = asyncio.run(git_routes.list_gitea_repositories(config_id="config-1", db=db))
|
||||
|
||||
|
||||
result = asyncio.run(
|
||||
git_routes.list_gitea_repositories(config_id="config-1", db=db)
|
||||
)
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].name == "test-repo"
|
||||
assert result[0].private is True
|
||||
|
||||
|
||||
# [/DEF:test_list_gitea_repositories_returns_payload:Function]
|
||||
|
||||
|
||||
# [DEF:test_list_gitea_repositories_rejects_non_gitea:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_list_gitea_repositories_rejects_non_gitea(monkeypatch):
|
||||
"""
|
||||
@PRE: config_id exists and provider is NOT GITEA.
|
||||
@THROW: HTTPException 400
|
||||
"""
|
||||
existing_config = GitServerConfig(
|
||||
id="config-1", name="GitHub Server", provider=GitProvider.GITHUB,
|
||||
url="https://github.com", pat="token"
|
||||
id="config-1",
|
||||
name="GitHub Server",
|
||||
provider=GitProvider.GITHUB,
|
||||
url="https://github.com",
|
||||
pat="token",
|
||||
)
|
||||
db = DbMock([existing_config])
|
||||
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
asyncio.run(git_routes.list_gitea_repositories(config_id="config-1", db=db))
|
||||
|
||||
|
||||
assert exc_info.value.status_code == 400
|
||||
assert "GITEA provider only" in exc_info.value.detail
|
||||
|
||||
|
||||
# [/DEF:test_list_gitea_repositories_rejects_non_gitea:Function]
|
||||
|
||||
|
||||
# [DEF:test_create_remote_repository_creates_provider_repo:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_create_remote_repository_creates_provider_repo(monkeypatch):
|
||||
"""
|
||||
@PRE: config_id exists and PAT has creation permissions.
|
||||
@POST: Returns normalized remote repository payload.
|
||||
"""
|
||||
|
||||
class MockGitService:
|
||||
async def create_gitlab_repository(self, server_url, pat, name, private, description, auto_init, default_branch):
|
||||
async def create_gitlab_repository(
|
||||
self, server_url, pat, name, private, description, auto_init, default_branch
|
||||
):
|
||||
return {
|
||||
"name": name,
|
||||
"full_name": f"user/{name}",
|
||||
"private": private,
|
||||
"clone_url": f"{server_url}/user/{name}.git"
|
||||
"clone_url": f"{server_url}/user/{name}.git",
|
||||
}
|
||||
|
||||
monkeypatch.setattr(git_routes, "git_service", MockGitService())
|
||||
from src.api.routes.git_schemas import RemoteRepoCreateRequest
|
||||
|
||||
|
||||
existing_config = GitServerConfig(
|
||||
id="config-1", name="GitLab Server", provider=GitProvider.GITLAB,
|
||||
url="https://gitlab.com", pat="token"
|
||||
id="config-1",
|
||||
name="GitLab Server",
|
||||
provider=GitProvider.GITLAB,
|
||||
url="https://gitlab.com",
|
||||
pat="token",
|
||||
)
|
||||
db = DbMock([existing_config])
|
||||
|
||||
|
||||
request = RemoteRepoCreateRequest(name="new-repo", private=True, description="desc")
|
||||
result = asyncio.run(git_routes.create_remote_repository(config_id="config-1", request=request, db=db))
|
||||
|
||||
result = asyncio.run(
|
||||
git_routes.create_remote_repository(
|
||||
config_id="config-1", request=request, db=db
|
||||
)
|
||||
)
|
||||
|
||||
assert result.provider == GitProvider.GITLAB
|
||||
assert result.name == "new-repo"
|
||||
assert result.full_name == "user/new-repo"
|
||||
|
||||
|
||||
# [/DEF:test_create_remote_repository_creates_provider_repo:Function]
|
||||
|
||||
|
||||
# [DEF:test_init_repository_initializes_and_saves_binding:Function]
|
||||
# @RELATION: BINDS_TO ->[TestGitApi]
|
||||
def test_init_repository_initializes_and_saves_binding(monkeypatch):
|
||||
"""
|
||||
@PRE: `dashboard_ref` exists and `init_data` contains valid config_id and remote_url.
|
||||
@POST: Repository is initialized on disk and a GitRepository record is saved in DB.
|
||||
"""
|
||||
from src.api.routes.git_schemas import RepoInitRequest
|
||||
|
||||
|
||||
class MockGitService:
|
||||
def init_repo(self, dashboard_id, remote_url, pat, repo_key, default_branch):
|
||||
self.init_called = True
|
||||
|
||||
def _get_repo_path(self, dashboard_id, repo_key):
|
||||
return f"/tmp/repos/{repo_key}"
|
||||
|
||||
git_service_mock = MockGitService()
|
||||
monkeypatch.setattr(git_routes, "git_service", git_service_mock)
|
||||
monkeypatch.setattr(git_routes, "_resolve_dashboard_id_from_ref", lambda *args, **kwargs: 123)
|
||||
monkeypatch.setattr(git_routes, "_resolve_repo_key_from_ref", lambda *args, **kwargs: "dashboard-123")
|
||||
|
||||
monkeypatch.setattr(
|
||||
git_routes, "_resolve_dashboard_id_from_ref", lambda *args, **kwargs: 123
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
git_routes,
|
||||
"_resolve_repo_key_from_ref",
|
||||
lambda *args, **kwargs: "dashboard-123",
|
||||
)
|
||||
|
||||
existing_config = GitServerConfig(
|
||||
id="config-1", name="GitLab Server", provider=GitProvider.GITLAB,
|
||||
url="https://gitlab.com", pat="token", default_branch="main"
|
||||
id="config-1",
|
||||
name="GitLab Server",
|
||||
provider=GitProvider.GITLAB,
|
||||
url="https://gitlab.com",
|
||||
pat="token",
|
||||
default_branch="main",
|
||||
)
|
||||
db = DbMock([existing_config])
|
||||
|
||||
init_data = RepoInitRequest(config_id="config-1", remote_url="https://git.local/repo.git")
|
||||
|
||||
result = asyncio.run(git_routes.init_repository(dashboard_ref="123", init_data=init_data, config_manager=MagicMock(), db=db))
|
||||
|
||||
|
||||
init_data = RepoInitRequest(
|
||||
config_id="config-1", remote_url="https://git.local/repo.git"
|
||||
)
|
||||
|
||||
result = asyncio.run(
|
||||
git_routes.init_repository(
|
||||
dashboard_ref="123", init_data=init_data, config_manager=MagicMock(), db=db
|
||||
)
|
||||
)
|
||||
|
||||
assert result["status"] == "success"
|
||||
assert git_service_mock.init_called is True
|
||||
assert len(db._added) == 1
|
||||
assert isinstance(db._added[0], GitRepository)
|
||||
assert db._added[0].dashboard_id == 123
|
||||
|
||||
# [/DEF:backend.src.api.routes.__tests__.test_git_api:Module]
|
||||
|
||||
# [/DEF:test_init_repository_initializes_and_saves_binding:Function]
|
||||
# [/DEF:TestGitApi:Module]
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,5 @@
|
||||
# [DEF:backend.src.plugins.llm_analysis.__tests__.test_screenshot_service:Module]
|
||||
# [DEF:TestScreenshotService:Module]
|
||||
# @RELATION: VERIFIES ->[src.plugins.llm_analysis.service.ScreenshotService]
|
||||
# @COMPLEXITY: 3
|
||||
# @SEMANTICS: tests, screenshot-service, navigation, timeout-regression
|
||||
# @PURPOSE: Protect dashboard screenshot navigation from brittle networkidle waits.
|
||||
@@ -9,6 +10,7 @@ from src.plugins.llm_analysis.service import ScreenshotService
|
||||
|
||||
|
||||
# [DEF:test_iter_login_roots_includes_child_frames:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Login discovery must search embedded auth frames, not only the main page.
|
||||
# @PRE: Page exposes child frames list.
|
||||
# @POST: Returned roots include page plus child frames in order.
|
||||
@@ -21,10 +23,13 @@ def test_iter_login_roots_includes_child_frames():
|
||||
roots = service._iter_login_roots(fake_page)
|
||||
|
||||
assert roots == [fake_page, frame_a, frame_b]
|
||||
|
||||
|
||||
# [/DEF:test_iter_login_roots_includes_child_frames:Function]
|
||||
|
||||
|
||||
# [DEF:test_response_looks_like_login_page_detects_login_markup:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Direct login fallback must reject responses that render the login screen again.
|
||||
# @PRE: Response body contains stable login-page markers.
|
||||
# @POST: Helper returns True so caller treats fallback as failed authentication.
|
||||
@@ -45,10 +50,13 @@ def test_response_looks_like_login_page_detects_login_markup():
|
||||
)
|
||||
|
||||
assert result is True
|
||||
|
||||
|
||||
# [/DEF:test_response_looks_like_login_page_detects_login_markup:Function]
|
||||
|
||||
|
||||
# [DEF:test_find_first_visible_locator_skips_hidden_first_match:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Locator helper must not reject a selector collection just because its first element is hidden.
|
||||
# @PRE: First matched element is hidden and second matched element is visible.
|
||||
# @POST: Helper returns the second visible candidate.
|
||||
@@ -73,18 +81,23 @@ async def test_find_first_visible_locator_skips_hidden_first_match():
|
||||
return self._elements[index]
|
||||
|
||||
service = ScreenshotService(env=type("Env", (), {})())
|
||||
hidden_then_visible = _FakeLocator([
|
||||
_FakeElement(False, "hidden"),
|
||||
_FakeElement(True, "visible"),
|
||||
])
|
||||
hidden_then_visible = _FakeLocator(
|
||||
[
|
||||
_FakeElement(False, "hidden"),
|
||||
_FakeElement(True, "visible"),
|
||||
]
|
||||
)
|
||||
|
||||
result = await service._find_first_visible_locator([hidden_then_visible])
|
||||
|
||||
assert result.label == "visible"
|
||||
|
||||
|
||||
# [/DEF:test_find_first_visible_locator_skips_hidden_first_match:Function]
|
||||
|
||||
|
||||
# [DEF:test_submit_login_via_form_post_uses_browser_context_request:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Fallback login must submit hidden fields and credentials through the context request cookie jar.
|
||||
# @PRE: Login DOM exposes csrf hidden field and request context returns authenticated HTML.
|
||||
# @POST: Helper returns True and request payload contains csrf_token plus credentials plus request options.
|
||||
@@ -122,15 +135,25 @@ async def test_submit_login_via_form_post_uses_browser_context_request():
|
||||
def __init__(self):
|
||||
self.calls = []
|
||||
|
||||
async def post(self, url, form=None, headers=None, timeout=None, fail_on_status_code=None, max_redirects=None):
|
||||
self.calls.append({
|
||||
"url": url,
|
||||
"form": dict(form or {}),
|
||||
"headers": dict(headers or {}),
|
||||
"timeout": timeout,
|
||||
"fail_on_status_code": fail_on_status_code,
|
||||
"max_redirects": max_redirects,
|
||||
})
|
||||
async def post(
|
||||
self,
|
||||
url,
|
||||
form=None,
|
||||
headers=None,
|
||||
timeout=None,
|
||||
fail_on_status_code=None,
|
||||
max_redirects=None,
|
||||
):
|
||||
self.calls.append(
|
||||
{
|
||||
"url": url,
|
||||
"form": dict(form or {}),
|
||||
"headers": dict(headers or {}),
|
||||
"timeout": timeout,
|
||||
"fail_on_status_code": fail_on_status_code,
|
||||
"max_redirects": max_redirects,
|
||||
}
|
||||
)
|
||||
return _FakeResponse()
|
||||
|
||||
class _FakeContext:
|
||||
@@ -144,39 +167,48 @@ async def test_submit_login_via_form_post_uses_browser_context_request():
|
||||
|
||||
def locator(self, selector):
|
||||
if selector == "input[type='hidden'][name]":
|
||||
return _FakeLocator([
|
||||
_FakeInput("csrf_token", "csrf-123"),
|
||||
_FakeInput("next", "/superset/welcome/"),
|
||||
])
|
||||
return _FakeLocator(
|
||||
[
|
||||
_FakeInput("csrf_token", "csrf-123"),
|
||||
_FakeInput("next", "/superset/welcome/"),
|
||||
]
|
||||
)
|
||||
return _FakeLocator([])
|
||||
|
||||
env = type("Env", (), {"username": "admin", "password": "secret"})()
|
||||
service = ScreenshotService(env=env)
|
||||
page = _FakePage()
|
||||
|
||||
result = await service._submit_login_via_form_post(page, "https://example.test/login/")
|
||||
result = await service._submit_login_via_form_post(
|
||||
page, "https://example.test/login/"
|
||||
)
|
||||
|
||||
assert result is True
|
||||
assert page.context.request.calls == [{
|
||||
"url": "https://example.test/login/",
|
||||
"form": {
|
||||
"csrf_token": "csrf-123",
|
||||
"next": "/superset/welcome/",
|
||||
"username": "admin",
|
||||
"password": "secret",
|
||||
},
|
||||
"headers": {
|
||||
"Origin": "https://example.test",
|
||||
"Referer": "https://example.test/login/",
|
||||
},
|
||||
"timeout": 10000,
|
||||
"fail_on_status_code": False,
|
||||
"max_redirects": 0,
|
||||
}]
|
||||
assert page.context.request.calls == [
|
||||
{
|
||||
"url": "https://example.test/login/",
|
||||
"form": {
|
||||
"csrf_token": "csrf-123",
|
||||
"next": "/superset/welcome/",
|
||||
"username": "admin",
|
||||
"password": "secret",
|
||||
},
|
||||
"headers": {
|
||||
"Origin": "https://example.test",
|
||||
"Referer": "https://example.test/login/",
|
||||
},
|
||||
"timeout": 10000,
|
||||
"fail_on_status_code": False,
|
||||
"max_redirects": 0,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
# [/DEF:test_submit_login_via_form_post_uses_browser_context_request:Function]
|
||||
|
||||
|
||||
# [DEF:test_submit_login_via_form_post_accepts_authenticated_redirect:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Fallback login must treat non-login 302 redirect as success without waiting for redirect target.
|
||||
# @PRE: Request response is 302 with Location outside login path.
|
||||
# @POST: Helper returns True.
|
||||
@@ -212,7 +244,15 @@ async def test_submit_login_via_form_post_accepts_authenticated_redirect():
|
||||
return ""
|
||||
|
||||
class _FakeRequest:
|
||||
async def post(self, url, form=None, headers=None, timeout=None, fail_on_status_code=None, max_redirects=None):
|
||||
async def post(
|
||||
self,
|
||||
url,
|
||||
form=None,
|
||||
headers=None,
|
||||
timeout=None,
|
||||
fail_on_status_code=None,
|
||||
max_redirects=None,
|
||||
):
|
||||
return _FakeResponse()
|
||||
|
||||
class _FakeContext:
|
||||
@@ -232,13 +272,18 @@ async def test_submit_login_via_form_post_accepts_authenticated_redirect():
|
||||
env = type("Env", (), {"username": "admin", "password": "secret"})()
|
||||
service = ScreenshotService(env=env)
|
||||
|
||||
result = await service._submit_login_via_form_post(_FakePage(), "https://example.test/login/")
|
||||
result = await service._submit_login_via_form_post(
|
||||
_FakePage(), "https://example.test/login/"
|
||||
)
|
||||
|
||||
assert result is True
|
||||
|
||||
|
||||
# [/DEF:test_submit_login_via_form_post_accepts_authenticated_redirect:Function]
|
||||
|
||||
|
||||
# [DEF:test_submit_login_via_form_post_rejects_login_markup_response:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Fallback login must fail when POST response still contains login form content.
|
||||
# @PRE: Login DOM exposes csrf hidden field and request response renders login markup.
|
||||
# @POST: Helper returns False.
|
||||
@@ -282,7 +327,15 @@ async def test_submit_login_via_form_post_rejects_login_markup_response():
|
||||
"""
|
||||
|
||||
class _FakeRequest:
|
||||
async def post(self, url, form=None, headers=None, timeout=None, fail_on_status_code=None, max_redirects=None):
|
||||
async def post(
|
||||
self,
|
||||
url,
|
||||
form=None,
|
||||
headers=None,
|
||||
timeout=None,
|
||||
fail_on_status_code=None,
|
||||
max_redirects=None,
|
||||
):
|
||||
return _FakeResponse()
|
||||
|
||||
class _FakeContext:
|
||||
@@ -302,13 +355,18 @@ async def test_submit_login_via_form_post_rejects_login_markup_response():
|
||||
env = type("Env", (), {"username": "admin", "password": "secret"})()
|
||||
service = ScreenshotService(env=env)
|
||||
|
||||
result = await service._submit_login_via_form_post(_FakePage(), "https://example.test/login/")
|
||||
result = await service._submit_login_via_form_post(
|
||||
_FakePage(), "https://example.test/login/"
|
||||
)
|
||||
|
||||
assert result is False
|
||||
|
||||
|
||||
# [/DEF:test_submit_login_via_form_post_rejects_login_markup_response:Function]
|
||||
|
||||
|
||||
# [DEF:test_goto_resilient_falls_back_from_domcontentloaded_to_load:Function]
|
||||
# @RELATION: BINDS_TO ->[TestScreenshotService]
|
||||
# @PURPOSE: Pages with unstable primary wait must retry with fallback wait strategy.
|
||||
# @PRE: First page.goto call raises; second succeeds.
|
||||
# @POST: Helper returns second response and attempts both wait modes in order.
|
||||
@@ -340,5 +398,7 @@ async def test_goto_resilient_falls_back_from_domcontentloaded_to_load():
|
||||
("https://example.test/dashboard", "domcontentloaded", 1234),
|
||||
("https://example.test/dashboard", "load", 1234),
|
||||
]
|
||||
|
||||
|
||||
# [/DEF:test_goto_resilient_falls_back_from_domcontentloaded_to_load:Function]
|
||||
# [/DEF:backend.src.plugins.llm_analysis.__tests__.test_screenshot_service:Module]
|
||||
# [/DEF:TestScreenshotService:Module]
|
||||
|
||||
@@ -7,6 +7,8 @@ from src.models.llm import ValidationRecord
|
||||
# [DEF:test_health_service:Module]
|
||||
# @COMPLEXITY: 3
|
||||
# @PURPOSE: Unit tests for HealthService aggregation logic.
|
||||
# @RELATION: VERIFIES ->[src.services.health_service.HealthService]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_health_summary_aggregation():
|
||||
@@ -15,9 +17,9 @@ async def test_get_health_summary_aggregation():
|
||||
"""
|
||||
# Setup: Mock DB session
|
||||
db = MagicMock()
|
||||
|
||||
|
||||
now = datetime.utcnow()
|
||||
|
||||
|
||||
# Dashboard 1: Old FAIL, New PASS
|
||||
rec1_old = ValidationRecord(
|
||||
id="rec-old",
|
||||
@@ -26,7 +28,7 @@ async def test_get_health_summary_aggregation():
|
||||
status="FAIL",
|
||||
timestamp=now - timedelta(hours=1),
|
||||
summary="Old failure",
|
||||
issues=[]
|
||||
issues=[],
|
||||
)
|
||||
rec1_new = ValidationRecord(
|
||||
id="rec-new",
|
||||
@@ -35,9 +37,9 @@ async def test_get_health_summary_aggregation():
|
||||
status="PASS",
|
||||
timestamp=now,
|
||||
summary="New pass",
|
||||
issues=[]
|
||||
issues=[],
|
||||
)
|
||||
|
||||
|
||||
# Dashboard 2: Single WARN
|
||||
rec2 = ValidationRecord(
|
||||
id="rec-warn",
|
||||
@@ -46,28 +48,28 @@ async def test_get_health_summary_aggregation():
|
||||
status="WARN",
|
||||
timestamp=now,
|
||||
summary="Warning",
|
||||
issues=[]
|
||||
issues=[],
|
||||
)
|
||||
|
||||
|
||||
# Mock the query chain
|
||||
# subquery = self.db.query(...).filter(...).group_by(...).subquery()
|
||||
# query = self.db.query(ValidationRecord).join(subquery, ...).all()
|
||||
|
||||
|
||||
mock_query = db.query.return_value
|
||||
mock_query.filter.return_value = mock_query
|
||||
mock_query.group_by.return_value = mock_query
|
||||
mock_query.subquery.return_value = MagicMock()
|
||||
|
||||
|
||||
db.query.return_value.join.return_value.all.return_value = [rec1_new, rec2]
|
||||
|
||||
|
||||
service = HealthService(db)
|
||||
summary = await service.get_health_summary(environment_id="env_1")
|
||||
|
||||
|
||||
assert summary.pass_count == 1
|
||||
assert summary.warn_count == 1
|
||||
assert summary.fail_count == 0
|
||||
assert len(summary.items) == 2
|
||||
|
||||
|
||||
# Verify dash_1 has the latest status (PASS)
|
||||
dash_1_item = next(item for item in summary.items if item.dashboard_id == "dash_1")
|
||||
assert dash_1_item.status == "PASS"
|
||||
@@ -75,6 +77,7 @@ async def test_get_health_summary_aggregation():
|
||||
assert dash_1_item.record_id == rec1_new.id
|
||||
assert dash_1_item.dashboard_slug == "dash_1"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_health_summary_empty():
|
||||
"""
|
||||
@@ -82,10 +85,10 @@ async def test_get_health_summary_empty():
|
||||
"""
|
||||
db = MagicMock()
|
||||
db.query.return_value.join.return_value.all.return_value = []
|
||||
|
||||
|
||||
service = HealthService(db)
|
||||
summary = await service.get_health_summary(environment_id="env_none")
|
||||
|
||||
|
||||
assert summary.pass_count == 0
|
||||
assert len(summary.items) == 0
|
||||
|
||||
@@ -159,6 +162,8 @@ async def test_get_health_summary_reuses_dashboard_metadata_cache_across_service
|
||||
HealthService._dashboard_summary_cache.clear()
|
||||
|
||||
|
||||
# [DEF:test_delete_validation_report_deletes_dashboard_scope_and_linked_tasks:Function]
|
||||
# @RELATION: BINDS_TO ->[test_health_service]
|
||||
def test_delete_validation_report_deletes_dashboard_scope_and_linked_tasks():
|
||||
db = MagicMock()
|
||||
config_manager = MagicMock()
|
||||
@@ -222,12 +227,17 @@ def test_delete_validation_report_deletes_dashboard_scope_and_linked_tasks():
|
||||
db.commit.assert_called_once()
|
||||
cleanup_instance.delete_task_with_logs.assert_any_call("task-1")
|
||||
cleanup_instance.delete_task_with_logs.assert_any_call("task-2")
|
||||
cleanup_instance.delete_task_with_logs.call_count == 2
|
||||
assert cleanup_instance.delete_task_with_logs.call_count == 2
|
||||
assert "task-1" not in task_manager.tasks
|
||||
assert "task-2" not in task_manager.tasks
|
||||
assert "task-3" in task_manager.tasks
|
||||
|
||||
|
||||
# [/DEF:test_delete_validation_report_deletes_dashboard_scope_and_linked_tasks:Function]
|
||||
|
||||
|
||||
# [DEF:test_delete_validation_report_returns_false_for_unknown_record:Function]
|
||||
# @RELATION: BINDS_TO ->[test_health_service]
|
||||
def test_delete_validation_report_returns_false_for_unknown_record():
|
||||
db = MagicMock()
|
||||
db.query.return_value.filter.return_value.first.return_value = None
|
||||
@@ -237,6 +247,11 @@ def test_delete_validation_report_returns_false_for_unknown_record():
|
||||
assert service.delete_validation_report("missing") is False
|
||||
|
||||
|
||||
# [/DEF:test_delete_validation_report_returns_false_for_unknown_record:Function]
|
||||
|
||||
|
||||
# [DEF:test_delete_validation_report_swallows_linked_task_cleanup_failure:Function]
|
||||
# @RELATION: BINDS_TO ->[test_health_service]
|
||||
def test_delete_validation_report_swallows_linked_task_cleanup_failure():
|
||||
db = MagicMock()
|
||||
config_manager = MagicMock()
|
||||
@@ -264,11 +279,14 @@ def test_delete_validation_report_swallows_linked_task_cleanup_failure():
|
||||
|
||||
db.query.side_effect = [first_query, peer_query]
|
||||
|
||||
with patch("src.services.health_service.TaskCleanupService") as cleanup_cls, patch(
|
||||
"src.services.health_service.logger"
|
||||
) as mock_logger:
|
||||
with (
|
||||
patch("src.services.health_service.TaskCleanupService") as cleanup_cls,
|
||||
patch("src.services.health_service.logger") as mock_logger,
|
||||
):
|
||||
cleanup_instance = MagicMock()
|
||||
cleanup_instance.delete_task_with_logs.side_effect = RuntimeError("cleanup exploded")
|
||||
cleanup_instance.delete_task_with_logs.side_effect = RuntimeError(
|
||||
"cleanup exploded"
|
||||
)
|
||||
cleanup_cls.return_value = cleanup_instance
|
||||
|
||||
service = HealthService(db, config_manager=config_manager)
|
||||
@@ -282,4 +300,5 @@ def test_delete_validation_report_swallows_linked_task_cleanup_failure():
|
||||
assert "task-1" not in task_manager.tasks
|
||||
|
||||
|
||||
# [/DEF:test_delete_validation_report_swallows_linked_task_cleanup_failure:Function]
|
||||
# [/DEF:test_health_service:Module]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
# [DEF:backend.src.services.__tests__.test_llm_plugin_persistence:Module]
|
||||
# [DEF:test_llm_plugin_persistence:Module]
|
||||
# @RELATION: VERIFIES ->[src.plugins.llm_analysis.plugin.DashboardValidationPlugin]
|
||||
# @COMPLEXITY: 3
|
||||
# @PURPOSE: Regression test for ValidationRecord persistence fields populated from task context.
|
||||
|
||||
@@ -9,6 +10,7 @@ from src.plugins.llm_analysis import plugin as plugin_module
|
||||
|
||||
|
||||
# [DEF:_DummyLogger:Class]
|
||||
# @RELATION: BINDS_TO ->[test_llm_plugin_persistence]
|
||||
# @PURPOSE: Minimal logger shim for TaskContext-like objects used in tests.
|
||||
class _DummyLogger:
|
||||
def with_source(self, _source: str):
|
||||
@@ -25,10 +27,13 @@ class _DummyLogger:
|
||||
|
||||
def error(self, *_args, **_kwargs):
|
||||
return None
|
||||
|
||||
|
||||
# [/DEF:_DummyLogger:Class]
|
||||
|
||||
|
||||
# [DEF:_FakeDBSession:Class]
|
||||
# @RELATION: BINDS_TO ->[test_llm_plugin_persistence]
|
||||
# @PURPOSE: Captures persisted records for assertion and mimics SQLAlchemy session methods used by plugin.
|
||||
class _FakeDBSession:
|
||||
def __init__(self):
|
||||
@@ -44,13 +49,18 @@ class _FakeDBSession:
|
||||
|
||||
def close(self):
|
||||
self.closed = True
|
||||
|
||||
|
||||
# [/DEF:_FakeDBSession:Class]
|
||||
|
||||
|
||||
# [DEF:test_dashboard_validation_plugin_persists_task_and_environment_ids:Function]
|
||||
# @RELATION: BINDS_TO ->[test_llm_plugin_persistence]
|
||||
# @PURPOSE: Ensure db ValidationRecord includes context.task_id and params.environment_id.
|
||||
@pytest.mark.asyncio
|
||||
async def test_dashboard_validation_plugin_persists_task_and_environment_ids(tmp_path, monkeypatch):
|
||||
async def test_dashboard_validation_plugin_persists_task_and_environment_ids(
|
||||
tmp_path, monkeypatch
|
||||
):
|
||||
fake_db = _FakeDBSession()
|
||||
|
||||
env = types.SimpleNamespace(id="env-42")
|
||||
@@ -112,7 +122,9 @@ async def test_dashboard_validation_plugin_persists_task_and_environment_ids(tmp
|
||||
|
||||
class _FakeSupersetClient:
|
||||
def __init__(self, _env):
|
||||
self.network = types.SimpleNamespace(request=lambda **_kwargs: {"result": []})
|
||||
self.network = types.SimpleNamespace(
|
||||
request=lambda **_kwargs: {"result": []}
|
||||
)
|
||||
|
||||
monkeypatch.setattr(plugin_module, "SessionLocal", lambda: fake_db)
|
||||
monkeypatch.setattr(plugin_module, "LLMProviderService", _FakeProviderService)
|
||||
@@ -120,7 +132,9 @@ async def test_dashboard_validation_plugin_persists_task_and_environment_ids(tmp
|
||||
monkeypatch.setattr(plugin_module, "LLMClient", _FakeLLMClient)
|
||||
monkeypatch.setattr(plugin_module, "NotificationService", _FakeNotificationService)
|
||||
monkeypatch.setattr(plugin_module, "SupersetClient", _FakeSupersetClient)
|
||||
monkeypatch.setattr("src.dependencies.get_config_manager", lambda: _FakeConfigManager())
|
||||
monkeypatch.setattr(
|
||||
"src.dependencies.get_config_manager", lambda: _FakeConfigManager()
|
||||
)
|
||||
|
||||
context = types.SimpleNamespace(
|
||||
task_id="task-999",
|
||||
@@ -144,7 +158,9 @@ async def test_dashboard_validation_plugin_persists_task_and_environment_ids(tmp
|
||||
assert fake_db.added is not None
|
||||
assert fake_db.added.task_id == "task-999"
|
||||
assert fake_db.added.environment_id == "env-42"
|
||||
|
||||
|
||||
# [/DEF:test_dashboard_validation_plugin_persists_task_and_environment_ids:Function]
|
||||
|
||||
|
||||
# [/DEF:backend.src.services.__tests__.test_llm_plugin_persistence:Module]
|
||||
# [/DEF:test_llm_plugin_persistence:Module]
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
# [DEF:backend.src.services.__tests__.test_resource_service:Module]
|
||||
# [DEF:TestResourceService:Module]
|
||||
# @COMPLEXITY: 3
|
||||
# @SEMANTICS: resource-service, tests, dashboards, datasets, activity
|
||||
# @PURPOSE: Unit tests for ResourceService
|
||||
# @LAYER: Service
|
||||
# @RELATION: TESTS -> backend.src.services.resource_service
|
||||
# @RELATION: VERIFIES -> ResourceService
|
||||
# @RELATION: VERIFIES ->[src.services.resource_service.ResourceService]
|
||||
# @INVARIANT: Resource summaries preserve task linkage and status projection behavior.
|
||||
|
||||
import pytest
|
||||
@@ -13,25 +12,27 @@ from datetime import datetime, timezone
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_with_status:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @PURPOSE: Validate dashboard enrichment includes git/task status projections.
|
||||
# @TEST: get_dashboards_with_status returns dashboards with git and task status
|
||||
# @PRE: SupersetClient returns dashboard list
|
||||
# @POST: Each dashboard has git_status and last_task fields
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_dashboards_with_status():
|
||||
with patch("src.services.resource_service.SupersetClient") as mock_client, \
|
||||
patch("src.services.resource_service.GitService"):
|
||||
|
||||
with (
|
||||
patch("src.services.resource_service.SupersetClient") as mock_client,
|
||||
patch("src.services.resource_service.GitService"),
|
||||
):
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
# Mock Superset response
|
||||
mock_client.return_value.get_dashboards_summary.return_value = [
|
||||
{"id": 1, "title": "Dashboard 1", "slug": "dash-1"},
|
||||
{"id": 2, "title": "Dashboard 2", "slug": "dash-2"}
|
||||
{"id": 2, "title": "Dashboard 2", "slug": "dash-2"},
|
||||
]
|
||||
|
||||
|
||||
# Mock tasks
|
||||
task_prod_old = MagicMock()
|
||||
task_prod_old.id = "task-123"
|
||||
@@ -62,7 +63,7 @@ async def test_get_dashboards_with_status():
|
||||
env,
|
||||
[task_prod_old, task_prod_new, task_other_env],
|
||||
)
|
||||
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == 1
|
||||
assert "git_status" in result[0]
|
||||
@@ -76,35 +77,35 @@ async def test_get_dashboards_with_status():
|
||||
|
||||
|
||||
# [DEF:test_get_datasets_with_status:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: get_datasets_with_status returns datasets with task status
|
||||
# @PRE: SupersetClient returns dataset list
|
||||
# @POST: Each dataset has last_task field
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_datasets_with_status():
|
||||
with patch("src.services.resource_service.SupersetClient") as mock_client:
|
||||
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
# Mock Superset response
|
||||
mock_client.return_value.get_datasets_summary.return_value = [
|
||||
{"id": 1, "table_name": "users", "schema": "public", "database": "app"},
|
||||
{"id": 2, "table_name": "orders", "schema": "public", "database": "app"}
|
||||
{"id": 2, "table_name": "orders", "schema": "public", "database": "app"},
|
||||
]
|
||||
|
||||
|
||||
# Mock tasks
|
||||
mock_task = MagicMock()
|
||||
mock_task.id = "task-456"
|
||||
mock_task.status = "RUNNING"
|
||||
mock_task.params = {"resource_id": "dataset-1"}
|
||||
mock_task.created_at = datetime.now()
|
||||
|
||||
|
||||
env = MagicMock()
|
||||
env.id = "prod"
|
||||
|
||||
|
||||
result = await service.get_datasets_with_status(env, [mock_task])
|
||||
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["table_name"] == "users"
|
||||
assert "last_task" in result[0]
|
||||
@@ -116,35 +117,36 @@ async def test_get_datasets_with_status():
|
||||
|
||||
|
||||
# [DEF:test_get_activity_summary:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: get_activity_summary returns active count and recent tasks
|
||||
# @PRE: tasks list provided
|
||||
# @POST: Returns dict with active_count and recent_tasks
|
||||
def test_get_activity_summary():
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
# Create mock tasks
|
||||
task1 = MagicMock()
|
||||
task1.id = "task-1"
|
||||
task1.status = "RUNNING"
|
||||
task1.params = {"resource_name": "Dashboard 1", "resource_type": "dashboard"}
|
||||
task1.created_at = datetime(2024, 1, 1, 10, 0, 0)
|
||||
|
||||
|
||||
task2 = MagicMock()
|
||||
task2.id = "task-2"
|
||||
task2.status = "SUCCESS"
|
||||
task2.params = {"resource_name": "Dataset 1", "resource_type": "dataset"}
|
||||
task2.created_at = datetime(2024, 1, 1, 9, 0, 0)
|
||||
|
||||
|
||||
task3 = MagicMock()
|
||||
task3.id = "task-3"
|
||||
task3.status = "WAITING_INPUT"
|
||||
task3.params = {"resource_name": "Dashboard 2", "resource_type": "dashboard"}
|
||||
task3.created_at = datetime(2024, 1, 1, 8, 0, 0)
|
||||
|
||||
|
||||
result = service.get_activity_summary([task1, task2, task3])
|
||||
|
||||
|
||||
assert result["active_count"] == 2 # RUNNING + WAITING_INPUT
|
||||
assert len(result["recent_tasks"]) == 3
|
||||
|
||||
@@ -153,51 +155,52 @@ def test_get_activity_summary():
|
||||
|
||||
|
||||
# [DEF:test_get_git_status_for_dashboard_no_repo:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: _get_git_status_for_dashboard returns None when no repo exists
|
||||
# @PRE: GitService returns None for repo
|
||||
# @POST: Returns None
|
||||
def test_get_git_status_for_dashboard_no_repo():
|
||||
with patch("src.services.resource_service.GitService") as mock_git:
|
||||
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
mock_git.return_value.get_repo.return_value = None
|
||||
|
||||
|
||||
result = service._get_git_status_for_dashboard(123)
|
||||
|
||||
|
||||
assert result is not None
|
||||
assert result['sync_status'] == 'NO_REPO'
|
||||
assert result['has_repo'] is False
|
||||
assert result["sync_status"] == "NO_REPO"
|
||||
assert result["has_repo"] is False
|
||||
|
||||
|
||||
# [/DEF:test_get_git_status_for_dashboard_no_repo:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_last_task_for_resource:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: _get_last_task_for_resource returns most recent task for resource
|
||||
# @PRE: tasks list with matching resource_id
|
||||
# @POST: Returns task summary with task_id and status
|
||||
def test_get_last_task_for_resource():
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
# Create mock tasks
|
||||
task1 = MagicMock()
|
||||
task1.id = "task-old"
|
||||
task1.status = "SUCCESS"
|
||||
task1.params = {"resource_id": "dashboard-1"}
|
||||
task1.created_at = datetime(2024, 1, 1, 10, 0, 0)
|
||||
|
||||
|
||||
task2 = MagicMock()
|
||||
task2.id = "task-new"
|
||||
task2.status = "RUNNING"
|
||||
task2.params = {"resource_id": "dashboard-1"}
|
||||
task2.created_at = datetime(2024, 1, 1, 12, 0, 0)
|
||||
|
||||
|
||||
result = service._get_last_task_for_resource("dashboard-1", [task1, task2])
|
||||
|
||||
|
||||
assert result is not None
|
||||
assert result["task_id"] == "task-new" # Most recent
|
||||
assert result["status"] == "RUNNING"
|
||||
@@ -207,27 +210,28 @@ def test_get_last_task_for_resource():
|
||||
|
||||
|
||||
# [DEF:test_extract_resource_name_from_task:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: _extract_resource_name_from_task extracts name from params
|
||||
# @PRE: task has resource_name in params
|
||||
# @POST: Returns resource name or fallback
|
||||
def test_extract_resource_name_from_task():
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
# Task with resource_name
|
||||
task = MagicMock()
|
||||
task.id = "task-123"
|
||||
task.params = {"resource_name": "My Dashboard"}
|
||||
|
||||
|
||||
result = service._extract_resource_name_from_task(task)
|
||||
assert result == "My Dashboard"
|
||||
|
||||
|
||||
# Task without resource_name
|
||||
task2 = MagicMock()
|
||||
task2.id = "task-456"
|
||||
task2.params = {}
|
||||
|
||||
|
||||
result2 = service._extract_resource_name_from_task(task2)
|
||||
assert "task-456" in result2
|
||||
|
||||
@@ -236,48 +240,56 @@ def test_extract_resource_name_from_task():
|
||||
|
||||
|
||||
# [DEF:test_get_last_task_for_resource_empty_tasks:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: _get_last_task_for_resource returns None for empty tasks list
|
||||
# @PRE: tasks is empty list
|
||||
# @POST: Returns None
|
||||
def test_get_last_task_for_resource_empty_tasks():
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
result = service._get_last_task_for_resource("dashboard-1", [])
|
||||
assert result is None
|
||||
|
||||
|
||||
# [/DEF:test_get_last_task_for_resource_empty_tasks:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_last_task_for_resource_no_match:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: _get_last_task_for_resource returns None when no tasks match resource_id
|
||||
# @PRE: tasks list has no matching resource_id
|
||||
# @POST: Returns None
|
||||
def test_get_last_task_for_resource_no_match():
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
|
||||
service = ResourceService()
|
||||
|
||||
|
||||
task = MagicMock()
|
||||
task.id = "task-999"
|
||||
task.status = "SUCCESS"
|
||||
task.params = {"resource_id": "dashboard-99"}
|
||||
task.created_at = datetime(2024, 1, 1, 10, 0, 0)
|
||||
|
||||
|
||||
result = service._get_last_task_for_resource("dashboard-1", [task])
|
||||
assert result is None
|
||||
|
||||
|
||||
# [/DEF:test_get_last_task_for_resource_no_match:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_datetimes:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: get_dashboards_with_status handles mixed naive/aware datetimes without comparison errors.
|
||||
# @PRE: Task list includes both timezone-aware and timezone-naive timestamps.
|
||||
# @POST: Latest task is selected deterministically and no exception is raised.
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_datetimes():
|
||||
with patch("src.services.resource_service.SupersetClient") as mock_client, \
|
||||
patch("src.services.resource_service.GitService"):
|
||||
|
||||
with (
|
||||
patch("src.services.resource_service.SupersetClient") as mock_client,
|
||||
patch("src.services.resource_service.GitService"),
|
||||
):
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
service = ResourceService()
|
||||
@@ -305,18 +317,22 @@ async def test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_dat
|
||||
result = await service.get_dashboards_with_status(env, [task_naive, task_aware])
|
||||
|
||||
assert result[0]["last_task"]["task_id"] == "task-aware"
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboards_with_status_handles_mixed_naive_and_aware_task_datetimes:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_with_status_prefers_latest_decisive_validation_status_over_newer_unknown:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: get_dashboards_with_status keeps latest task identity while falling back to older decisive validation status.
|
||||
# @PRE: Same dashboard has older WARN and newer UNKNOWN validation tasks.
|
||||
# @POST: Returned last_task points to newest task but preserves WARN as last meaningful validation state.
|
||||
@pytest.mark.anyio
|
||||
async def test_get_dashboards_with_status_prefers_latest_decisive_validation_status_over_newer_unknown():
|
||||
with patch("src.services.resource_service.SupersetClient") as mock_client, \
|
||||
patch("src.services.resource_service.GitService"):
|
||||
|
||||
with (
|
||||
patch("src.services.resource_service.SupersetClient") as mock_client,
|
||||
patch("src.services.resource_service.GitService"),
|
||||
):
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
service = ResourceService()
|
||||
@@ -343,23 +359,29 @@ async def test_get_dashboards_with_status_prefers_latest_decisive_validation_sta
|
||||
env = MagicMock()
|
||||
env.id = "prod"
|
||||
|
||||
result = await service.get_dashboards_with_status(env, [task_warn, task_unknown])
|
||||
result = await service.get_dashboards_with_status(
|
||||
env, [task_warn, task_unknown]
|
||||
)
|
||||
|
||||
assert result[0]["last_task"]["task_id"] == "task-unknown"
|
||||
assert result[0]["last_task"]["status"] == "RUNNING"
|
||||
assert result[0]["last_task"]["validation_status"] == "WARN"
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboards_with_status_prefers_latest_decisive_validation_status_over_newer_unknown:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_with_status_falls_back_to_latest_unknown_without_decisive_history:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: get_dashboards_with_status still returns newest UNKNOWN when no decisive validation exists.
|
||||
# @PRE: Same dashboard has only UNKNOWN validation tasks.
|
||||
# @POST: Returned last_task keeps newest UNKNOWN task.
|
||||
@pytest.mark.anyio
|
||||
async def test_get_dashboards_with_status_falls_back_to_latest_unknown_without_decisive_history():
|
||||
with patch("src.services.resource_service.SupersetClient") as mock_client, \
|
||||
patch("src.services.resource_service.GitService"):
|
||||
|
||||
with (
|
||||
patch("src.services.resource_service.SupersetClient") as mock_client,
|
||||
patch("src.services.resource_service.GitService"),
|
||||
):
|
||||
from src.services.resource_service import ResourceService
|
||||
|
||||
service = ResourceService()
|
||||
@@ -386,14 +408,19 @@ async def test_get_dashboards_with_status_falls_back_to_latest_unknown_without_d
|
||||
env = MagicMock()
|
||||
env.id = "prod"
|
||||
|
||||
result = await service.get_dashboards_with_status(env, [task_unknown_old, task_unknown_new])
|
||||
result = await service.get_dashboards_with_status(
|
||||
env, [task_unknown_old, task_unknown_new]
|
||||
)
|
||||
|
||||
assert result[0]["last_task"]["task_id"] == "task-unknown-new"
|
||||
assert result[0]["last_task"]["validation_status"] == "UNKNOWN"
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboards_with_status_falls_back_to_latest_unknown_without_decisive_history:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_last_task_for_resource_handles_mixed_naive_and_aware_created_at:Function]
|
||||
# @RELATION: BINDS_TO ->[TestResourceService]
|
||||
# @TEST: _get_last_task_for_resource handles mixed naive/aware created_at values.
|
||||
# @PRE: Matching tasks include naive and aware created_at timestamps.
|
||||
# @POST: Latest task is returned without raising datetime comparison errors.
|
||||
@@ -414,11 +441,15 @@ def test_get_last_task_for_resource_handles_mixed_naive_and_aware_created_at():
|
||||
task_aware.params = {"resource_id": "dashboard-1"}
|
||||
task_aware.created_at = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc)
|
||||
|
||||
result = service._get_last_task_for_resource("dashboard-1", [task_naive, task_aware])
|
||||
result = service._get_last_task_for_resource(
|
||||
"dashboard-1", [task_naive, task_aware]
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result["task_id"] == "task-new"
|
||||
|
||||
|
||||
# [/DEF:test_get_last_task_for_resource_handles_mixed_naive_and_aware_created_at:Function]
|
||||
|
||||
|
||||
# [/DEF:backend.src.services.__tests__.test_resource_service:Module]
|
||||
# [/DEF:TestResourceService:Module]
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# [DEF:backend.tests.core.test_mapping_service:Module]
|
||||
# [DEF:TestMappingService:Module]
|
||||
#
|
||||
# @COMPLEXITY: 3
|
||||
# @PURPOSE: Unit tests for the IdMappingService matching UUIDs to integer IDs.
|
||||
# @LAYER: Domain
|
||||
# @RELATION: VERIFIES -> backend.src.core.mapping_service
|
||||
# @RELATION: VERIFIES ->[src.core.mapping_service.IdMappingService]
|
||||
#
|
||||
import pytest
|
||||
from datetime import datetime, timezone
|
||||
@@ -21,16 +21,18 @@ if backend_dir not in sys.path:
|
||||
from src.models.mapping import Base, ResourceMapping, ResourceType
|
||||
from src.core.mapping_service import IdMappingService
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def db_session():
|
||||
# In-memory SQLite for testing
|
||||
engine = create_engine('sqlite:///:memory:')
|
||||
engine = create_engine("sqlite:///:memory:")
|
||||
Base.metadata.create_all(engine)
|
||||
Session = sessionmaker(bind=engine)
|
||||
session = Session()
|
||||
yield session
|
||||
session.close()
|
||||
|
||||
|
||||
class MockSupersetClient:
|
||||
def __init__(self, resources):
|
||||
self.resources = resources
|
||||
@@ -38,16 +40,25 @@ class MockSupersetClient:
|
||||
def get_all_resources(self, endpoint, since_dttm=None):
|
||||
return self.resources.get(endpoint, [])
|
||||
|
||||
|
||||
# [DEF:test_sync_environment_upserts_correctly:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_sync_environment_upserts_correctly(db_session):
|
||||
service = IdMappingService(db_session)
|
||||
mock_client = MockSupersetClient({
|
||||
"chart": [
|
||||
{"id": 42, "uuid": "123e4567-e89b-12d3-a456-426614174000", "slice_name": "Test Chart"}
|
||||
]
|
||||
})
|
||||
mock_client = MockSupersetClient(
|
||||
{
|
||||
"chart": [
|
||||
{
|
||||
"id": 42,
|
||||
"uuid": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"slice_name": "Test Chart",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
service.sync_environment("test-env", mock_client)
|
||||
|
||||
|
||||
mapping = db_session.query(ResourceMapping).first()
|
||||
assert mapping is not None
|
||||
assert mapping.environment_id == "test-env"
|
||||
@@ -56,6 +67,12 @@ def test_sync_environment_upserts_correctly(db_session):
|
||||
assert mapping.remote_integer_id == "42"
|
||||
assert mapping.resource_name == "Test Chart"
|
||||
|
||||
|
||||
# [/DEF:test_sync_environment_upserts_correctly:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_remote_id_returns_integer:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_get_remote_id_returns_integer(db_session):
|
||||
service = IdMappingService(db_session)
|
||||
mapping = ResourceMapping(
|
||||
@@ -64,7 +81,7 @@ def test_get_remote_id_returns_integer(db_session):
|
||||
uuid="uuid-1",
|
||||
remote_integer_id="99",
|
||||
resource_name="Test DS",
|
||||
last_synced_at=datetime.now(timezone.utc)
|
||||
last_synced_at=datetime.now(timezone.utc),
|
||||
)
|
||||
db_session.add(mapping)
|
||||
db_session.commit()
|
||||
@@ -72,80 +89,126 @@ def test_get_remote_id_returns_integer(db_session):
|
||||
result = service.get_remote_id("test-env", ResourceType.DATASET, "uuid-1")
|
||||
assert result == 99
|
||||
|
||||
|
||||
# [/DEF:test_get_remote_id_returns_integer:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_remote_ids_batch_returns_dict:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_get_remote_ids_batch_returns_dict(db_session):
|
||||
service = IdMappingService(db_session)
|
||||
m1 = ResourceMapping(
|
||||
environment_id="test-env",
|
||||
resource_type=ResourceType.DASHBOARD,
|
||||
uuid="uuid-1",
|
||||
remote_integer_id="11"
|
||||
remote_integer_id="11",
|
||||
)
|
||||
m2 = ResourceMapping(
|
||||
environment_id="test-env",
|
||||
resource_type=ResourceType.DASHBOARD,
|
||||
uuid="uuid-2",
|
||||
remote_integer_id="22"
|
||||
remote_integer_id="22",
|
||||
)
|
||||
db_session.add_all([m1, m2])
|
||||
db_session.commit()
|
||||
|
||||
result = service.get_remote_ids_batch("test-env", ResourceType.DASHBOARD, ["uuid-1", "uuid-2", "uuid-missing"])
|
||||
|
||||
result = service.get_remote_ids_batch(
|
||||
"test-env", ResourceType.DASHBOARD, ["uuid-1", "uuid-2", "uuid-missing"]
|
||||
)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result["uuid-1"] == 11
|
||||
assert result["uuid-2"] == 22
|
||||
assert "uuid-missing" not in result
|
||||
|
||||
|
||||
# [/DEF:test_get_remote_ids_batch_returns_dict:Function]
|
||||
|
||||
|
||||
# [DEF:test_sync_environment_updates_existing_mapping:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_sync_environment_updates_existing_mapping(db_session):
|
||||
"""Verify that sync_environment updates an existing mapping (upsert UPDATE path)."""
|
||||
from src.models.mapping import ResourceMapping
|
||||
|
||||
# Pre-populate a mapping
|
||||
existing = ResourceMapping(
|
||||
environment_id="test-env",
|
||||
resource_type=ResourceType.CHART,
|
||||
uuid="123e4567-e89b-12d3-a456-426614174000",
|
||||
remote_integer_id="10",
|
||||
resource_name="Old Name"
|
||||
resource_name="Old Name",
|
||||
)
|
||||
db_session.add(existing)
|
||||
db_session.commit()
|
||||
|
||||
service = IdMappingService(db_session)
|
||||
mock_client = MockSupersetClient({
|
||||
"chart": [
|
||||
{"id": 42, "uuid": "123e4567-e89b-12d3-a456-426614174000", "slice_name": "Updated Name"}
|
||||
]
|
||||
})
|
||||
mock_client = MockSupersetClient(
|
||||
{
|
||||
"chart": [
|
||||
{
|
||||
"id": 42,
|
||||
"uuid": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"slice_name": "Updated Name",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
service.sync_environment("test-env", mock_client)
|
||||
|
||||
mapping = db_session.query(ResourceMapping).filter_by(
|
||||
uuid="123e4567-e89b-12d3-a456-426614174000"
|
||||
).first()
|
||||
mapping = (
|
||||
db_session.query(ResourceMapping)
|
||||
.filter_by(uuid="123e4567-e89b-12d3-a456-426614174000")
|
||||
.first()
|
||||
)
|
||||
assert mapping.remote_integer_id == "42"
|
||||
assert mapping.resource_name == "Updated Name"
|
||||
# Should still be only one record (updated, not duplicated)
|
||||
count = db_session.query(ResourceMapping).count()
|
||||
assert count == 1
|
||||
|
||||
|
||||
# [/DEF:test_sync_environment_updates_existing_mapping:Function]
|
||||
|
||||
|
||||
# [DEF:test_sync_environment_skips_resources_without_uuid:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_sync_environment_skips_resources_without_uuid(db_session):
|
||||
"""Resources missing uuid or having id=None should be silently skipped."""
|
||||
service = IdMappingService(db_session)
|
||||
mock_client = MockSupersetClient({
|
||||
"chart": [
|
||||
{"id": 42, "slice_name": "No UUID"}, # Missing 'uuid' -> skipped
|
||||
{"id": None, "uuid": "valid-uuid", "slice_name": "ID is None"}, # id=None -> skipped
|
||||
{"id": None, "uuid": None, "slice_name": "Both None"}, # both None -> skipped
|
||||
]
|
||||
})
|
||||
mock_client = MockSupersetClient(
|
||||
{
|
||||
"chart": [
|
||||
{"id": 42, "slice_name": "No UUID"}, # Missing 'uuid' -> skipped
|
||||
{
|
||||
"id": None,
|
||||
"uuid": "valid-uuid",
|
||||
"slice_name": "ID is None",
|
||||
}, # id=None -> skipped
|
||||
{
|
||||
"id": None,
|
||||
"uuid": None,
|
||||
"slice_name": "Both None",
|
||||
}, # both None -> skipped
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
service.sync_environment("test-env", mock_client)
|
||||
|
||||
count = db_session.query(ResourceMapping).count()
|
||||
assert count == 0
|
||||
|
||||
|
||||
# [/DEF:test_sync_environment_skips_resources_without_uuid:Function]
|
||||
|
||||
|
||||
# [DEF:test_sync_environment_handles_api_error_gracefully:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_sync_environment_handles_api_error_gracefully(db_session):
|
||||
"""If one resource type fails, others should still sync."""
|
||||
|
||||
class FailingClient:
|
||||
def get_all_resources(self, endpoint, since_dttm=None):
|
||||
if endpoint == "chart":
|
||||
@@ -162,12 +225,24 @@ def test_sync_environment_handles_api_error_gracefully(db_session):
|
||||
mapping = db_session.query(ResourceMapping).first()
|
||||
assert mapping.resource_type == ResourceType.DATASET
|
||||
|
||||
|
||||
# [/DEF:test_sync_environment_handles_api_error_gracefully:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_remote_id_returns_none_for_missing:Function]
# @RELATION: BINDS_TO ->[TestMappingService]
def test_get_remote_id_returns_none_for_missing(db_session):
    """get_remote_id should return None when no mapping exists."""
    lookup = IdMappingService(db_session)
    missing = lookup.get_remote_id("test-env", ResourceType.CHART, "nonexistent-uuid")
    assert missing is None


# [/DEF:test_get_remote_id_returns_none_for_missing:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_remote_ids_batch_returns_empty_for_empty_input:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_get_remote_ids_batch_returns_empty_for_empty_input(db_session):
|
||||
"""get_remote_ids_batch should return {} for an empty list of UUIDs."""
|
||||
service = IdMappingService(db_session)
|
||||
@@ -175,70 +250,90 @@ def test_get_remote_ids_batch_returns_empty_for_empty_input(db_session):
|
||||
assert result == {}
|
||||
|
||||
|
||||
# [/DEF:test_get_remote_ids_batch_returns_empty_for_empty_input:Function]
|
||||
|
||||
|
||||
# [DEF:test_mapping_service_alignment_with_test_data:Function]
# @RELATION: BINDS_TO ->[TestMappingService]
def test_mapping_service_alignment_with_test_data(db_session):
    """**@TEST_DATA**: Verifies that the service aligns with the resource_mapping_record contract."""
    # Contract: {'environment_id': 'prod-env-1', 'resource_type': 'chart', 'uuid': '123e4567-e89b-12d3-a456-426614174000', 'remote_integer_id': '42'}
    # Diff residue removed: the dict carried every key twice (old single-quoted
    # and new double-quoted sides) and get_remote_id was called with six
    # positional arguments; one clean version of each is kept.
    contract_data = {
        "environment_id": "prod-env-1",
        "resource_type": ResourceType.CHART,
        "uuid": "123e4567-e89b-12d3-a456-426614174000",
        "remote_integer_id": "42",
    }

    mapping = ResourceMapping(**contract_data)
    db_session.add(mapping)
    db_session.commit()

    service = IdMappingService(db_session)
    result = service.get_remote_id(
        contract_data["environment_id"],
        contract_data["resource_type"],
        contract_data["uuid"],
    )

    # The stored string id is expected back as the integer 42.
    assert result == 42


# [/DEF:test_mapping_service_alignment_with_test_data:Function]
|
||||
|
||||
|
||||
# [DEF:test_sync_environment_requires_existing_env:Function]
# @RELATION: BINDS_TO ->[TestMappingService]
def test_sync_environment_requires_existing_env(db_session):
    """**@PRE**: Verify behavior when environment_id is invalid/missing in DB.

    Note: The current implementation doesn't strictly check for environment existence in the DB
    before polling, but it should handle it gracefully or follow the contract.
    """
    service = IdMappingService(db_session)
    mock_client = MockSupersetClient({"chart": []})

    # Even if environment doesn't exist in a hypothetical 'environments' table,
    # the service should still complete or fail according to defined error handling.
    # In GRACE-Poly, @PRE is a hard requirement. If we don't have an Env model check,
    # we simulate the intent.
    service.sync_environment("non-existent-env", mock_client)
    # If no error raised, at least verify no mappings were created for other envs
    assert db_session.query(ResourceMapping).count() == 0


# [/DEF:test_sync_environment_requires_existing_env:Function]
|
||||
|
||||
|
||||
# [DEF:test_sync_environment_deletes_stale_mappings:Function]
|
||||
# @RELATION: BINDS_TO ->[TestMappingService]
|
||||
def test_sync_environment_deletes_stale_mappings(db_session):
|
||||
"""Verify that mappings for resources deleted from the remote environment
|
||||
are removed from the local DB on the next sync cycle."""
|
||||
service = IdMappingService(db_session)
|
||||
|
||||
# First sync: 2 charts exist
|
||||
client_v1 = MockSupersetClient({
|
||||
"chart": [
|
||||
{"id": 1, "uuid": "aaa", "slice_name": "Chart A"},
|
||||
{"id": 2, "uuid": "bbb", "slice_name": "Chart B"},
|
||||
]
|
||||
})
|
||||
client_v1 = MockSupersetClient(
|
||||
{
|
||||
"chart": [
|
||||
{"id": 1, "uuid": "aaa", "slice_name": "Chart A"},
|
||||
{"id": 2, "uuid": "bbb", "slice_name": "Chart B"},
|
||||
]
|
||||
}
|
||||
)
|
||||
service.sync_environment("env1", client_v1)
|
||||
assert db_session.query(ResourceMapping).filter_by(environment_id="env1").count() == 2
|
||||
assert (
|
||||
db_session.query(ResourceMapping).filter_by(environment_id="env1").count() == 2
|
||||
)
|
||||
|
||||
# Second sync: user deleted Chart B from superset
|
||||
client_v2 = MockSupersetClient({
|
||||
"chart": [
|
||||
{"id": 1, "uuid": "aaa", "slice_name": "Chart A"},
|
||||
]
|
||||
})
|
||||
client_v2 = MockSupersetClient(
|
||||
{
|
||||
"chart": [
|
||||
{"id": 1, "uuid": "aaa", "slice_name": "Chart A"},
|
||||
]
|
||||
}
|
||||
)
|
||||
service.sync_environment("env1", client_v2)
|
||||
|
||||
remaining = db_session.query(ResourceMapping).filter_by(environment_id="env1").all()
|
||||
@@ -246,4 +341,5 @@ def test_sync_environment_deletes_stale_mappings(db_session):
|
||||
assert remaining[0].uuid == "aaa"
|
||||
|
||||
|
||||
# [/DEF:backend.tests.core.test_mapping_service:Module]
|
||||
# [/DEF:test_sync_environment_deletes_stale_mappings:Function]
|
||||
# [/DEF:TestMappingService:Module]
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
# [DEF:backend.tests.test_dashboards_api:Module]
|
||||
# [DEF:TestDashboardsApi:Module]
|
||||
# @RELATION: VERIFIES ->[src.api.routes.dashboards]
|
||||
# @COMPLEXITY: 3
|
||||
# @PURPOSE: Comprehensive contract-driven tests for Dashboard Hub API
|
||||
# @LAYER: Domain (Tests)
|
||||
@@ -8,8 +9,20 @@ from fastapi.testclient import TestClient
|
||||
from unittest.mock import MagicMock, patch, AsyncMock
|
||||
from datetime import datetime, timezone
|
||||
from src.app import app
|
||||
from src.api.routes.dashboards import DashboardsResponse, DashboardDetailResponse, DashboardTaskHistoryResponse, DatabaseMappingsResponse
|
||||
from src.dependencies import get_current_user, has_permission, get_config_manager, get_task_manager, get_resource_service, get_mapping_service
|
||||
from src.api.routes.dashboards import (
|
||||
DashboardsResponse,
|
||||
DashboardDetailResponse,
|
||||
DashboardTaskHistoryResponse,
|
||||
DatabaseMappingsResponse,
|
||||
)
|
||||
from src.dependencies import (
|
||||
get_current_user,
|
||||
has_permission,
|
||||
get_config_manager,
|
||||
get_task_manager,
|
||||
get_resource_service,
|
||||
get_mapping_service,
|
||||
)
|
||||
|
||||
# Global mock user
|
||||
mock_user = MagicMock()
|
||||
@@ -19,55 +32,73 @@ admin_role = MagicMock()
|
||||
admin_role.name = "Admin"
|
||||
mock_user.roles.append(admin_role)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def mock_deps():
    """Override all FastAPI dependencies with MagicMocks for every test.

    Yields a dict of the four service mocks so individual tests can configure
    return values, then restores the app's dependency overrides on teardown.
    Diff residue removed: the three plugin-permission overrides and the
    "mapping" dict key appeared twice (old and new diff sides).
    """
    config_manager = MagicMock()
    task_manager = MagicMock()
    resource_service = MagicMock()
    mapping_service = MagicMock()

    app.dependency_overrides[get_config_manager] = lambda: config_manager
    app.dependency_overrides[get_task_manager] = lambda: task_manager
    app.dependency_overrides[get_resource_service] = lambda: resource_service
    app.dependency_overrides[get_mapping_service] = lambda: mapping_service
    app.dependency_overrides[get_current_user] = lambda: mock_user

    # Overrides for specific permission checks
    app.dependency_overrides[has_permission("plugin:migration", "READ")] = (
        lambda: mock_user
    )
    app.dependency_overrides[has_permission("plugin:migration", "EXECUTE")] = (
        lambda: mock_user
    )
    app.dependency_overrides[has_permission("plugin:backup", "EXECUTE")] = (
        lambda: mock_user
    )
    app.dependency_overrides[has_permission("tasks", "READ")] = lambda: mock_user
    app.dependency_overrides[has_permission("dashboards", "READ")] = lambda: mock_user

    yield {
        "config": config_manager,
        "task": task_manager,
        "resource": resource_service,
        "mapping": mapping_service,
    }
    app.dependency_overrides.clear()
||||
|
||||
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
# --- 1. get_dashboards tests ---
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_success:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboards_success(mock_deps):
|
||||
"""Uses @TEST_FIXTURE: dashboard_list_happy data."""
|
||||
mock_env = MagicMock()
|
||||
mock_env.id = "prod"
|
||||
mock_deps["config"].get_environments.return_value = [mock_env]
|
||||
mock_deps["task"].get_all_tasks.return_value = []
|
||||
|
||||
|
||||
# @TEST_FIXTURE: dashboard_list_happy -> {"id": 1, "title": "Main Revenue"}
|
||||
mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[
|
||||
{"id": 1, "title": "Main Revenue", "slug": "main-revenue", "git_status": {"branch": "main", "sync_status": "OK"}}
|
||||
])
|
||||
mock_deps["resource"].get_dashboards_with_status = AsyncMock(
|
||||
return_value=[
|
||||
{
|
||||
"id": 1,
|
||||
"title": "Main Revenue",
|
||||
"slug": "main-revenue",
|
||||
"git_status": {"branch": "main", "sync_status": "OK"},
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
response = client.get("/api/dashboards?env_id=prod&page=1&page_size=10")
|
||||
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
|
||||
# exhaustive @POST assertions
|
||||
assert "dashboards" in data
|
||||
assert len(data["dashboards"]) == 1 # @TEST_FIXTURE: expected_count: 1
|
||||
@@ -76,26 +107,40 @@ def test_get_dashboards_success(mock_deps):
|
||||
assert data["page"] == 1
|
||||
assert data["page_size"] == 10
|
||||
assert data["total_pages"] == 1
|
||||
|
||||
|
||||
# schema validation
|
||||
DashboardsResponse(**data)
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboards_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_with_search:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_dashboards_with_search(mock_deps):
    """The search query parameter should narrow results to matching titles."""
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].get_all_tasks.return_value = []
    # Diff residue removed: get_dashboards_with_status was assigned twice.
    mock_deps["resource"].get_dashboards_with_status = AsyncMock(
        return_value=[
            {"id": 1, "title": "Sales Report", "slug": "sales"},
            {"id": 2, "title": "Marketing", "slug": "marketing"},
        ]
    )

    response = client.get("/api/dashboards?env_id=prod&search=sales")
    assert response.status_code == 200
    data = response.json()
    assert len(data["dashboards"]) == 1
    assert data["dashboards"][0]["title"] == "Sales Report"


# [/DEF:test_get_dashboards_with_search:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_empty:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboards_empty(mock_deps):
|
||||
"""@TEST_EDGE: empty_dashboards -> {env_id: 'empty_env', expected_total: 0}"""
|
||||
mock_env = MagicMock()
|
||||
@@ -103,7 +148,7 @@ def test_get_dashboards_empty(mock_deps):
|
||||
mock_deps["config"].get_environments.return_value = [mock_env]
|
||||
mock_deps["task"].get_all_tasks.return_value = []
|
||||
mock_deps["resource"].get_dashboards_with_status = AsyncMock(return_value=[])
|
||||
|
||||
|
||||
response = client.get("/api/dashboards?env_id=empty_env")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
@@ -112,6 +157,12 @@ def test_get_dashboards_empty(mock_deps):
|
||||
assert data["total_pages"] == 1
|
||||
DashboardsResponse(**data)
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboards_empty:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_superset_failure:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboards_superset_failure(mock_deps):
|
||||
"""@TEST_EDGE: external_superset_failure -> {env_id: 'bad_conn', status: 503}"""
|
||||
mock_env = MagicMock()
|
||||
@@ -121,42 +172,62 @@ def test_get_dashboards_superset_failure(mock_deps):
|
||||
mock_deps["resource"].get_dashboards_with_status = AsyncMock(
|
||||
side_effect=Exception("Connection refused")
|
||||
)
|
||||
|
||||
|
||||
response = client.get("/api/dashboards?env_id=bad_conn")
|
||||
assert response.status_code == 503
|
||||
assert "Failed to fetch dashboards" in response.json()["detail"]
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboards_superset_failure:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_env_not_found:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_dashboards_env_not_found(mock_deps):
    """An unknown env_id must yield a 404 with a descriptive detail message."""
    mock_deps["config"].get_environments.return_value = []
    resp = client.get("/api/dashboards?env_id=nonexistent")
    assert resp.status_code == 404
    body = resp.json()
    assert "Environment not found" in body["detail"]


# [/DEF:test_get_dashboards_env_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboards_invalid_pagination:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_dashboards_invalid_pagination(mock_deps):
    """Out-of-range page / page_size values are rejected with HTTP 400."""
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]

    bad_queries = [
        "page=0",  # page < 1
        "page=-1",  # page < 1
        "page_size=0",  # page_size < 1
        "page_size=101",  # page_size > 100
    ]
    for query in bad_queries:
        assert client.get(f"/api/dashboards?env_id=prod&{query}").status_code == 400


# --- 2. get_database_mappings tests ---

# [/DEF:test_get_dashboards_invalid_pagination:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_database_mappings_success:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_database_mappings_success(mock_deps):
|
||||
mock_s = MagicMock(); mock_s.id = "s"
|
||||
mock_t = MagicMock(); mock_t.id = "t"
|
||||
mock_s = MagicMock()
|
||||
mock_s.id = "s"
|
||||
mock_t = MagicMock()
|
||||
mock_t.id = "t"
|
||||
mock_deps["config"].get_environments.return_value = [mock_s, mock_t]
|
||||
|
||||
mock_deps["mapping"].get_suggestions = AsyncMock(return_value=[
|
||||
{"source_db": "src", "target_db": "dst", "confidence": 0.9}
|
||||
])
|
||||
|
||||
mock_deps["mapping"].get_suggestions = AsyncMock(
|
||||
return_value=[{"source_db": "src", "target_db": "dst", "confidence": 0.9}]
|
||||
)
|
||||
response = client.get("/api/dashboards/db-mappings?source_env_id=s&target_env_id=t")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
@@ -164,23 +235,41 @@ def test_get_database_mappings_success(mock_deps):
|
||||
assert data["mappings"][0]["confidence"] == 0.9
|
||||
DatabaseMappingsResponse(**data)
|
||||
|
||||
|
||||
# [/DEF:test_get_database_mappings_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_database_mappings_env_not_found:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_database_mappings_env_not_found(mock_deps):
    """An unknown source environment must yield a 404.

    Diff residue removed: the same GET request was issued twice (old
    single-line and new wrapped form); a single request is kept.
    """
    mock_deps["config"].get_environments.return_value = []
    response = client.get(
        "/api/dashboards/db-mappings?source_env_id=ghost&target_env_id=t"
    )
    assert response.status_code == 404


# --- 3. get_dashboard_detail tests ---

# [/DEF:test_get_database_mappings_env_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_detail_success:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboard_detail_success(mock_deps):
|
||||
with patch("src.api.routes.dashboards.SupersetClient") as mock_client_cls:
|
||||
mock_env = MagicMock()
|
||||
mock_env.id = "prod"
|
||||
mock_deps["config"].get_environments.return_value = [mock_env]
|
||||
|
||||
|
||||
mock_client = MagicMock()
|
||||
detail_payload = {
|
||||
"id": 42, "title": "Detail", "charts": [], "datasets": [],
|
||||
"chart_count": 0, "dataset_count": 0
|
||||
"id": 42,
|
||||
"title": "Detail",
|
||||
"charts": [],
|
||||
"datasets": [],
|
||||
"chart_count": 0,
|
||||
"dataset_count": 0,
|
||||
}
|
||||
mock_client.get_dashboard_detail.return_value = detail_payload
|
||||
mock_client_cls.return_value = mock_client
|
||||
@@ -191,16 +280,36 @@ def test_get_dashboard_detail_success(mock_deps):
|
||||
assert data["id"] == 42
|
||||
DashboardDetailResponse(**data)
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboard_detail_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_detail_env_not_found:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_dashboard_detail_env_not_found(mock_deps):
    """Dashboard detail for a missing environment returns 404."""
    mock_deps["config"].get_environments.return_value = []
    detail_response = client.get("/api/dashboards/42?env_id=missing")
    assert detail_response.status_code == 404


# --- 4. get_dashboard_tasks_history tests ---

# [/DEF:test_get_dashboard_detail_env_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_tasks_history_success:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboard_tasks_history_success(mock_deps):
|
||||
now = datetime.now(timezone.utc)
|
||||
task1 = MagicMock(id="t1", plugin_id="superset-backup", status="SUCCESS", started_at=now, finished_at=None, params={"env": "prod", "dashboards": [42]}, result={})
|
||||
task1 = MagicMock(
|
||||
id="t1",
|
||||
plugin_id="superset-backup",
|
||||
status="SUCCESS",
|
||||
started_at=now,
|
||||
finished_at=None,
|
||||
params={"env": "prod", "dashboards": [42]},
|
||||
result={},
|
||||
)
|
||||
mock_deps["task"].get_all_tasks.return_value = [task1]
|
||||
|
||||
response = client.get("/api/dashboards/42/tasks?env_id=prod")
|
||||
@@ -210,20 +319,39 @@ def test_get_dashboard_tasks_history_success(mock_deps):
|
||||
assert len(data["items"]) == 1
|
||||
DashboardTaskHistoryResponse(**data)
|
||||
|
||||
|
||||
# [/DEF:test_get_dashboard_tasks_history_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_tasks_history_sorting:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboard_tasks_history_sorting(mock_deps):
|
||||
"""@POST: Response contains sorted task history (newest first)."""
|
||||
from datetime import timedelta
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
older = now - timedelta(hours=2)
|
||||
newest = now
|
||||
|
||||
task_old = MagicMock(id="t-old", plugin_id="superset-backup", status="SUCCESS",
|
||||
started_at=older, finished_at=None,
|
||||
params={"env": "prod", "dashboards": [42]}, result={})
|
||||
task_new = MagicMock(id="t-new", plugin_id="superset-backup", status="RUNNING",
|
||||
started_at=newest, finished_at=None,
|
||||
params={"env": "prod", "dashboards": [42]}, result={})
|
||||
|
||||
|
||||
task_old = MagicMock(
|
||||
id="t-old",
|
||||
plugin_id="superset-backup",
|
||||
status="SUCCESS",
|
||||
started_at=older,
|
||||
finished_at=None,
|
||||
params={"env": "prod", "dashboards": [42]},
|
||||
result={},
|
||||
)
|
||||
task_new = MagicMock(
|
||||
id="t-new",
|
||||
plugin_id="superset-backup",
|
||||
status="RUNNING",
|
||||
started_at=newest,
|
||||
finished_at=None,
|
||||
params={"env": "prod", "dashboards": [42]},
|
||||
result={},
|
||||
)
|
||||
|
||||
# Provide in wrong order to verify the endpoint sorts
|
||||
mock_deps["task"].get_all_tasks.return_value = [task_old, task_new]
|
||||
|
||||
@@ -235,38 +363,67 @@ def test_get_dashboard_tasks_history_sorting(mock_deps):
|
||||
assert data["items"][0]["id"] == "t-new"
|
||||
assert data["items"][1]["id"] == "t-old"
|
||||
|
||||
|
||||
# --- 5. get_dashboard_thumbnail tests ---
|
||||
|
||||
# [/DEF:test_get_dashboard_tasks_history_sorting:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_thumbnail_success:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_dashboard_thumbnail_success(mock_deps):
    """The thumbnail endpoint proxies the Superset image bytes on success.

    Diff residue removed: mock_env, mock_response and the side_effect lambda
    were each defined twice (old and new diff sides).
    """
    with patch("src.api.routes.dashboards.SupersetClient") as mock_client_cls:
        mock_env = MagicMock()
        mock_env.id = "prod"
        mock_deps["config"].get_environments.return_value = [mock_env]
        mock_client = MagicMock()
        mock_response = MagicMock(
            status_code=200, content=b"img", headers={"Content-Type": "image/png"}
        )
        # POST (cache screenshot) returns the image URL; GET returns the image.
        mock_client.network.request.side_effect = (
            lambda method, endpoint, **kw: {"image_url": "url"}
            if method == "POST"
            else mock_response
        )
        mock_client_cls.return_value = mock_client

        response = client.get("/api/dashboards/42/thumbnail?env_id=prod")
        assert response.status_code == 200
        assert response.content == b"img"


# [/DEF:test_get_dashboard_thumbnail_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_thumbnail_env_not_found:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_get_dashboard_thumbnail_env_not_found(mock_deps):
    """A thumbnail request against an unknown environment returns 404."""
    mock_deps["config"].get_environments.return_value = []
    thumb_response = client.get("/api/dashboards/42/thumbnail?env_id=missing")
    assert thumb_response.status_code == 404


# [/DEF:test_get_dashboard_thumbnail_env_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_get_dashboard_thumbnail_202:Function]
|
||||
# @RELATION: BINDS_TO ->[TestDashboardsApi]
|
||||
def test_get_dashboard_thumbnail_202(mock_deps):
|
||||
"""@POST: Returns 202 when thumbnail is being prepared by Superset."""
|
||||
with patch("src.api.routes.dashboards.SupersetClient") as mock_client_cls:
|
||||
mock_env = MagicMock(); mock_env.id = "prod"
|
||||
mock_env = MagicMock()
|
||||
mock_env.id = "prod"
|
||||
mock_deps["config"].get_environments.return_value = [mock_env]
|
||||
mock_client = MagicMock()
|
||||
|
||||
|
||||
# POST cache_dashboard_screenshot returns image_url
|
||||
mock_client.network.request.side_effect = [
|
||||
{"image_url": "/api/v1/dashboard/42/thumbnail/abc123/"}, # POST
|
||||
MagicMock(status_code=202, json=lambda: {"message": "Thumbnail is being generated"},
|
||||
headers={"Content-Type": "application/json"}) # GET thumbnail -> 202
|
||||
MagicMock(
|
||||
status_code=202,
|
||||
json=lambda: {"message": "Thumbnail is being generated"},
|
||||
headers={"Content-Type": "application/json"},
|
||||
), # GET thumbnail -> 202
|
||||
]
|
||||
mock_client_cls.return_value = mock_client
|
||||
|
||||
@@ -274,93 +431,156 @@ def test_get_dashboard_thumbnail_202(mock_deps):
|
||||
assert response.status_code == 202
|
||||
assert "Thumbnail is being generated" in response.json()["message"]
|
||||
|
||||
|
||||
# --- 6. migrate_dashboards tests ---
|
||||
|
||||
# [/DEF:test_get_dashboard_thumbnail_202:Function]
|
||||
|
||||
|
||||
# [DEF:test_migrate_dashboards_success:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_migrate_dashboards_success(mock_deps):
    """A valid migrate request creates a task and returns its id.

    Diff residue removed: env mocks were built twice (semicolon one-liners
    plus expanded form) and the POST was issued twice.
    """
    mock_s = MagicMock()
    mock_s.id = "s"
    mock_t = MagicMock()
    mock_t.id = "t"
    mock_deps["config"].get_environments.return_value = [mock_s, mock_t]
    mock_deps["task"].create_task = AsyncMock(return_value=MagicMock(id="task-123"))

    response = client.post(
        "/api/dashboards/migrate",
        json={"source_env_id": "s", "target_env_id": "t", "dashboard_ids": [1]},
    )
    assert response.status_code == 200
    assert response.json()["task_id"] == "task-123"


# [/DEF:test_migrate_dashboards_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_migrate_dashboards_pre_checks:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_migrate_dashboards_pre_checks(mock_deps):
    """An empty dashboard_ids list is rejected before any work starts.

    Diff residue removed: the same POST was issued twice.
    """
    # Missing IDs
    response = client.post(
        "/api/dashboards/migrate",
        json={"source_env_id": "s", "target_env_id": "t", "dashboard_ids": []},
    )
    assert response.status_code == 400
    assert "At least one dashboard ID must be provided" in response.json()["detail"]


# [/DEF:test_migrate_dashboards_pre_checks:Function]
|
||||
|
||||
|
||||
# [DEF:test_migrate_dashboards_env_not_found:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_migrate_dashboards_env_not_found(mock_deps):
    """@PRE: source_env_id and target_env_id are valid environment IDs.

    Diff residue removed: the same POST was issued twice.
    """
    mock_deps["config"].get_environments.return_value = []
    response = client.post(
        "/api/dashboards/migrate",
        json={"source_env_id": "ghost", "target_env_id": "t", "dashboard_ids": [1]},
    )
    assert response.status_code == 404
    assert "Source environment not found" in response.json()["detail"]


# --- 7. backup_dashboards tests ---

# [/DEF:test_migrate_dashboards_env_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_backup_dashboards_success:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_backup_dashboards_success(mock_deps):
    """A valid backup request creates a task and returns its id.

    Diff residue removed: mock_env was built twice and the POST was issued twice.
    """
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].create_task = AsyncMock(return_value=MagicMock(id="backup-123"))

    response = client.post(
        "/api/dashboards/backup", json={"env_id": "prod", "dashboard_ids": [1]}
    )
    assert response.status_code == 200
    assert response.json()["task_id"] == "backup-123"


# [/DEF:test_backup_dashboards_success:Function]
|
||||
|
||||
|
||||
# [DEF:test_backup_dashboards_pre_checks:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_backup_dashboards_pre_checks(mock_deps):
    """An empty dashboard_ids list is rejected with HTTP 400.

    Diff residue removed: the same POST was issued twice.
    """
    response = client.post(
        "/api/dashboards/backup", json={"env_id": "prod", "dashboard_ids": []}
    )
    assert response.status_code == 400


# [/DEF:test_backup_dashboards_pre_checks:Function]
|
||||
|
||||
|
||||
# [DEF:test_backup_dashboards_env_not_found:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_backup_dashboards_env_not_found(mock_deps):
    """@PRE: env_id is a valid environment ID.

    Diff residue removed: the same POST was issued twice.
    """
    mock_deps["config"].get_environments.return_value = []
    response = client.post(
        "/api/dashboards/backup", json={"env_id": "ghost", "dashboard_ids": [1]}
    )
    assert response.status_code == 404
    assert "Environment not found" in response.json()["detail"]


# [/DEF:test_backup_dashboards_env_not_found:Function]
|
||||
|
||||
|
||||
# [DEF:test_backup_dashboards_with_schedule:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_backup_dashboards_with_schedule(mock_deps):
    """@POST: If schedule is provided, a scheduled task is created.

    Diff residue removed: mock_env was built twice and the POST was issued twice.
    """
    mock_env = MagicMock()
    mock_env.id = "prod"
    mock_deps["config"].get_environments.return_value = [mock_env]
    mock_deps["task"].create_task = AsyncMock(return_value=MagicMock(id="sched-456"))

    response = client.post(
        "/api/dashboards/backup",
        json={"env_id": "prod", "dashboard_ids": [1], "schedule": "0 0 * * *"},
    )
    assert response.status_code == 200
    assert response.json()["task_id"] == "sched-456"

    # Verify schedule was propagated to create_task
    call_kwargs = mock_deps["task"].create_task.call_args
    task_params = call_kwargs.kwargs.get("params") or call_kwargs[1].get("params", {})
    assert task_params["schedule"] == "0 0 * * *"


# --- 8. Internal logic: _task_matches_dashboard ---
# [/DEF:test_backup_dashboards_with_schedule:Function]
|
||||
|
||||
from src.api.routes.dashboards import _task_matches_dashboard
|
||||
|
||||
|
||||
# [DEF:test_task_matches_dashboard_logic:Function]
# @RELATION: BINDS_TO ->[TestDashboardsApi]
def test_task_matches_dashboard_logic():
    """Matching honors dashboard id and env for backup and LLM-validation tasks.

    Diff residue removed: both task MagicMocks were assigned twice (old
    one-line and new expanded forms), and a stale module close marker from
    the pre-rename module name was dropped.
    """
    task = MagicMock(
        plugin_id="superset-backup", params={"dashboards": [42], "env": "prod"}
    )
    assert _task_matches_dashboard(task, 42, "prod") is True
    assert _task_matches_dashboard(task, 43, "prod") is False
    assert _task_matches_dashboard(task, 42, "dev") is False

    llm_task = MagicMock(
        plugin_id="llm_dashboard_validation",
        params={"dashboard_id": 42, "environment_id": "prod"},
    )
    assert _task_matches_dashboard(llm_task, 42, "prod") is True
    assert _task_matches_dashboard(llm_task, 42, None) is True


# [/DEF:test_task_matches_dashboard_logic:Function]
# [/DEF:TestDashboardsApi:Module]
|
||||
|
||||
Reference in New Issue
Block a user