Fix LLM validation and dashboard health hot paths
This commit is contained in:
@@ -7,9 +7,15 @@ import pytest
|
||||
import os
|
||||
from unittest.mock import MagicMock
|
||||
from sqlalchemy.orm import Session
|
||||
from cryptography.fernet import Fernet
|
||||
from src.services.llm_provider import EncryptionManager, LLMProviderService
|
||||
from src.models.llm import LLMProvider
|
||||
from src.plugins.llm_analysis.models import LLMProviderConfig, ProviderType
|
||||
from src.plugins.llm_analysis.models import LLMProviderConfig, LLMProviderType
|
||||
|
||||
# [DEF:_test_encryption_key_fixture:Global]
# @PURPOSE: Ensure encryption-dependent provider tests run with a valid Fernet key.
# setdefault keeps a caller-supplied ENCRYPTION_KEY intact; a fresh Fernet key
# is generated only when the variable is unset, so EncryptionManager() can be
# constructed in any test without external setup.
os.environ.setdefault("ENCRYPTION_KEY", Fernet.generate_key().decode())
# [/DEF:_test_encryption_key_fixture:Global]
|
||||
|
||||
# @TEST_CONTRACT: EncryptionManagerModel -> Invariants
|
||||
# @TEST_INVARIANT: symmetric_encryption
|
||||
@@ -50,7 +56,7 @@ def test_get_all_providers(service, mock_db):
|
||||
|
||||
def test_create_provider(service, mock_db):
|
||||
config = LLMProviderConfig(
|
||||
provider_type=ProviderType.OPENAI,
|
||||
provider_type=LLMProviderType.OPENAI,
|
||||
name="Test OpenAI",
|
||||
base_url="https://api.openai.com",
|
||||
api_key="sk-test",
|
||||
@@ -79,3 +85,32 @@ def test_get_decrypted_api_key(service, mock_db):
|
||||
def test_get_decrypted_api_key_not_found(service, mock_db):
    """An unknown provider id yields None from get_decrypted_api_key, not an error."""
    # Simulate an empty query result for the requested provider id.
    mock_db.query().filter().first.return_value = None

    result = service.get_decrypted_api_key("missing")

    assert result is None
|
||||
|
||||
def test_update_provider_ignores_masked_placeholder_api_key(service, mock_db):
    """Updating with the masked placeholder keeps the stored encrypted key.

    The frontend echoes back "********" instead of the real secret; the
    service must not overwrite the previously encrypted API key with that
    placeholder while still applying the other field changes (here the
    provider is deactivated).
    """
    encrypted_secret = EncryptionManager().encrypt("secret-value")
    stored = LLMProvider(
        id="p1",
        provider_type="openai",
        name="Existing",
        base_url="https://api.openai.com/v1",
        api_key=encrypted_secret,
        default_model="gpt-4o",
        is_active=True,
    )
    # The service looks the provider up by id; return the stored record.
    mock_db.query().filter().first.return_value = stored

    payload = LLMProviderConfig(
        id="p1",
        provider_type=LLMProviderType.OPENAI,
        name="Existing",
        base_url="https://api.openai.com/v1",
        api_key="********",
        default_model="gpt-4o",
        is_active=False,
    )

    result = service.update_provider("p1", payload)

    assert result is stored
    # The encrypted key is untouched and still round-trips to the secret.
    assert result.api_key == encrypted_secret
    assert EncryptionManager().decrypt(result.api_key) == "secret-value"
    # Non-key fields from the payload are applied.
    assert result.is_active is False
|
||||
|
||||
Reference in New Issue
Block a user