sync worked

2026-02-25 15:20:26 +03:00
parent 2a5b225800
commit 590ba49ddb
6 changed files with 319 additions and 61 deletions


@@ -3,17 +3,17 @@
# @SEMANTICS: config, models, pydantic
# @PURPOSE: Defines the data models for application configuration using Pydantic.
# @LAYER: Core
# @RELATION: READS_FROM -> app_configurations (database)
# @RELATION: USED_BY -> ConfigManager
from pydantic import BaseModel, Field
from typing import List, Optional
from ..models.storage import StorageConfig
from ..services.llm_prompt_templates import (
DEFAULT_LLM_ASSISTANT_SETTINGS,
DEFAULT_LLM_PROMPTS,
DEFAULT_LLM_PROVIDER_BINDINGS,
)
# [DEF:Schedule:DataClass]
# @PURPOSE: Represents a backup schedule configuration.
@@ -38,10 +38,10 @@ class Environment(BaseModel):
# [DEF:LoggingConfig:DataClass]
# @PURPOSE: Defines the configuration for the application's logging system.
class LoggingConfig(BaseModel):
level: str = "INFO"
task_log_level: str = "INFO" # Minimum level for task-specific logs (DEBUG, INFO, WARNING, ERROR)
file_path: Optional[str] = None
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
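As a usage note (not part of the commit): these fields map naturally onto Python's standard logging module. A minimal sketch, assuming the application feeds LoggingConfig into a RotatingFileHandler; the logger name and helper function are illustrative only.

import logging
from logging.handlers import RotatingFileHandler

def build_logger(cfg: LoggingConfig) -> logging.Logger:
    # Illustrative wiring only; the real application may configure logging differently.
    logger = logging.getLogger("app")
    logger.setLevel(cfg.level)                    # e.g. "INFO"
    if cfg.file_path:
        handler = RotatingFileHandler(
            cfg.file_path,
            maxBytes=cfg.max_bytes,               # 10 MiB by default
            backupCount=cfg.backup_count,         # keep 5 rotated files
        )
        logger.addHandler(handler)
    return logger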
@@ -49,25 +49,28 @@ class LoggingConfig(BaseModel):
# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
class GlobalSettings(BaseModel):
storage: StorageConfig = Field(default_factory=StorageConfig)
default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig)
connections: List[dict] = []
llm: dict = Field(
default_factory=lambda: {
"providers": [],
"default_provider": "",
"prompts": dict(DEFAULT_LLM_PROMPTS),
"provider_bindings": dict(DEFAULT_LLM_PROVIDER_BINDINGS),
**dict(DEFAULT_LLM_ASSISTANT_SETTINGS),
}
)
# Task retention settings
task_retention_days: int = 30
task_retention_limit: int = 100
pagination_limit: int = 10
# Migration sync settings
migration_sync_cron: str = "0 2 * * *"
# [/DEF:GlobalSettings:DataClass]
# [DEF:AppConfig:DataClass]
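Usage sketch (not part of the commit): every GlobalSettings field carries a default or default_factory, so the model can be instantiated empty and overridden selectively; the values below simply restate the defaults visible in this hunk.

settings = GlobalSettings()
assert settings.logging.level == "INFO"
assert settings.task_retention_days == 30
assert settings.pagination_limit == 10
assert settings.migration_sync_cron == "0 2 * * *"    # nightly sync at 02:00

# Overriding the new migration sync cadence, e.g. every 30 minutes:
custom = GlobalSettings(migration_sync_cron="*/30 * * * *")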


@@ -829,6 +829,34 @@ class SupersetClient:
raise SupersetAPIError(f"Archive {zip_path} does not contain 'metadata.yaml'")
# [/DEF:_validate_import_file:Function]
# [DEF:get_all_resources:Function]
# @PURPOSE: Fetches all resources of a given type with id, uuid, and name columns.
# @PARAM: resource_type (str) - One of "chart", "dataset", "dashboard".
# @PRE: Client is authenticated. resource_type is valid.
# @POST: Returns a list of resource dicts with at minimum id, uuid, and name fields.
# @RETURN: List[Dict]
def get_all_resources(self, resource_type: str) -> List[Dict]:
with belief_scope("SupersetClient.get_all_resources", f"type={resource_type}"):
column_map = {
"chart": {"endpoint": "/chart/", "columns": ["id", "uuid", "slice_name"]},
"dataset": {"endpoint": "/dataset/", "columns": ["id", "uuid", "table_name"]},
"dashboard": {"endpoint": "/dashboard/", "columns": ["id", "uuid", "slug", "dashboard_title"]},
}
config = column_map.get(resource_type)
if not config:
app_logger.warning("[get_all_resources][Warning] Unknown resource type: %s", resource_type)
return []
query = {"columns": config["columns"]}
validated = self._validate_query_params(query)
data = self._fetch_all_pages(
endpoint=config["endpoint"],
pagination_options={"base_query": validated, "results_field": "result"},
)
app_logger.info("[get_all_resources][Exit] Fetched %d %s resources.", len(data), resource_type)
return data
# [/DEF:get_all_resources:Function]
# [/SECTION]
# [/DEF:SupersetClient:Class]
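Usage sketch for the new method (not part of the commit; assumes an already authenticated SupersetClient instance named client). The keys on each returned dict follow the column map above.

dashboards = client.get_all_resources("dashboard")   # id, uuid, slug, dashboard_title
charts = client.get_all_resources("chart")           # id, uuid, slice_name
datasets = client.get_all_resources("dataset")       # id, uuid, table_name

for d in dashboards:
    print(d["id"], d["uuid"], d.get("dashboard_title"))

# An unsupported type is not an error: it logs a warning and returns [].
nothing = client.get_all_resources("database")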


@@ -75,10 +75,9 @@ class TaskManager:
# @POST: Logs are batch-written to database every LOG_FLUSH_INTERVAL seconds.
    def _flusher_loop(self):
        """Background thread that flushes log buffer to database."""
-        with belief_scope("_flusher_loop"):
-            while not self._flusher_stop_event.is_set():
-                self._flush_logs()
-                self._flusher_stop_event.wait(self.LOG_FLUSH_INTERVAL)
+        while not self._flusher_stop_event.is_set():
+            self._flush_logs()
+            self._flusher_stop_event.wait(self.LOG_FLUSH_INTERVAL)
# [/DEF:_flusher_loop:Function]
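How the loop is started and stopped lies outside this hunk; the standalone sketch below (names and interval are assumptions, not TaskManager's actual wiring) shows the stop-event pattern the loop relies on: Event.wait() sleeps between flushes but returns immediately once the event is set.

import threading
import time

stop_event = threading.Event()
FLUSH_INTERVAL = 2.0

def flusher_loop():
    while not stop_event.is_set():
        print("flush")                     # stands in for self._flush_logs()
        stop_event.wait(FLUSH_INTERVAL)    # sleeps, but wakes early on set()

t = threading.Thread(target=flusher_loop, daemon=True)
t.start()
time.sleep(5)
stop_event.set()    # wakes the wait() immediately and ends the loop
t.join()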
# [DEF:_flush_logs:Function]
@@ -87,24 +86,24 @@ class TaskManager:
# @POST: All buffered logs are written to task_logs table.
    def _flush_logs(self):
        """Flush all buffered logs to the database."""
-        with belief_scope("_flush_logs"):
-            with self._log_buffer_lock:
-                task_ids = list(self._log_buffer.keys())
-            for task_id in task_ids:
-                with self._log_buffer_lock:
-                    logs = self._log_buffer.pop(task_id, [])
-                if logs:
-                    try:
-                        self.log_persistence_service.add_logs(task_id, logs)
-                    except Exception as e:
-                        logger.error(f"Failed to flush logs for task {task_id}: {e}")
-                        # Re-add logs to buffer on failure
-                        with self._log_buffer_lock:
-                            if task_id not in self._log_buffer:
-                                self._log_buffer[task_id] = []
-                            self._log_buffer[task_id].extend(logs)
+        with self._log_buffer_lock:
+            task_ids = list(self._log_buffer.keys())
+        for task_id in task_ids:
+            with self._log_buffer_lock:
+                logs = self._log_buffer.pop(task_id, [])
+            if logs:
+                try:
+                    self.log_persistence_service.add_logs(task_id, logs)
+                    logger.debug(f"Flushed {len(logs)} logs for task {task_id}")
+                except Exception as e:
+                    logger.error(f"Failed to flush logs for task {task_id}: {e}")
+                    # Re-add logs to buffer on failure
+                    with self._log_buffer_lock:
+                        if task_id not in self._log_buffer:
+                            self._log_buffer[task_id] = []
+                        self._log_buffer[task_id].extend(logs)
# [/DEF:_flush_logs:Function]
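The failure path above re-queues unsent logs so nothing is dropped between flush cycles. A minimal standalone sketch of that buffer/flush/re-add pattern follows; the dict-of-lists buffer, lock, and sink callable are assumptions for illustration, not the persistence service's real types.

import threading
from collections import defaultdict
from typing import Callable, Dict, List

buffer: Dict[str, List[str]] = defaultdict(list)
lock = threading.Lock()

def flush(sink: Callable[[str, List[str]], None]) -> None:
    # Snapshot task ids under the lock, then pop each task's logs separately
    # so producers can keep appending while the flush is in progress.
    with lock:
        task_ids = list(buffer.keys())
    for task_id in task_ids:
        with lock:
            logs = buffer.pop(task_id, [])
        if not logs:
            continue
        try:
            sink(task_id, logs)
        except Exception:
            # Put the logs back so the next flush cycle retries them.
            with lock:
                buffer[task_id].extend(logs)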
# [DEF:_flush_task_logs:Function]