"""
|
|
Django management command for cache management.
|
|
Provides comprehensive cache operations for the Malaysian SME SaaS platform.
|
|
"""
|
|
|
|
import json
import logging
import time
from typing import Dict, List, Any, Optional

from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from django.db import connection

from django_tenants.utils import get_tenant_model, get_public_schema_name

from core.caching.cache_manager import (
    CacheManager, MalaysianDataCache, QueryCache,
    TenantCacheManager, CacheWarmer
)
from core.caching.config import CacheConfig
from core.caching.strategies import (
    WriteThroughCache, WriteBehindCache, ReadThroughCache,
    RefreshAheadCache, MultiLevelCache, CacheEvictionPolicy
)
|
# Module-level logger, named after this module per Django/logging convention.
logger = logging.getLogger(__name__)

# Resolve the swappable user and tenant models once at import time.
User = get_user_model()
TenantModel = get_tenant_model()
class Command(BaseCommand):
    """Management command exposing cache operations (clear/stats/warm/
    analyze/optimize/health-check/benchmark) for the multi-tenant platform."""

    help = 'Comprehensive cache management for Malaysian SME SaaS platform'
def add_arguments(self, parser):
|
|
parser.add_argument(
|
|
'action',
|
|
choices=[
|
|
'clear', 'stats', 'warm', 'analyze', 'optimize',
|
|
'malaysian-warm', 'tenant-clear', 'query-clear',
|
|
'config-show', 'health-check', 'benchmark'
|
|
],
|
|
help='Action to perform'
|
|
)
|
|
parser.add_argument(
|
|
'--tenant-id',
|
|
type=int,
|
|
help='Specific tenant ID for tenant-specific operations'
|
|
)
|
|
parser.add_argument(
|
|
'--cache-type',
|
|
choices=['all', 'data', 'malaysian', 'query', 'user'],
|
|
default='all',
|
|
help='Type of cache to operate on'
|
|
)
|
|
parser.add_argument(
|
|
'--key-pattern',
|
|
help='Key pattern for selective operations'
|
|
)
|
|
parser.add_argument(
|
|
'--output-format',
|
|
choices=['json', 'table', 'summary'],
|
|
default='table',
|
|
help='Output format'
|
|
)
|
|
parser.add_argument(
|
|
'--verbose',
|
|
action='store_true',
|
|
help='Verbose output'
|
|
)
|
|
parser.add_argument(
|
|
'--dry-run',
|
|
action='store_true',
|
|
help='Dry run mode (no actual operations)'
|
|
)
|
|
parser.add_argument(
|
|
'--timeout',
|
|
type=int,
|
|
default=300,
|
|
help='Cache timeout in seconds'
|
|
)
|
|
|
|
def handle(self, *args, **options):
|
|
self.action = options['action']
|
|
self.tenant_id = options['tenant_id']
|
|
self.cache_type = options['cache_type']
|
|
self.key_pattern = options['key_pattern']
|
|
self.output_format = options['output_format']
|
|
self.verbose = options['verbose']
|
|
self.dry_run = options['dry_run']
|
|
self.timeout = options['timeout']
|
|
|
|
# Initialize cache managers
|
|
self.cache_manager = CacheManager()
|
|
self.malaysian_cache = MalaysianDataCache(self.cache_manager)
|
|
self.query_cache = QueryCache(self.cache_manager)
|
|
self.tenant_cache_manager = TenantCacheManager()
|
|
self.cache_warmer = CacheWarmer(self.cache_manager)
|
|
|
|
try:
|
|
if self.action == 'clear':
|
|
self.handle_clear()
|
|
elif self.action == 'stats':
|
|
self.handle_stats()
|
|
elif self.action == 'warm':
|
|
self.handle_warm()
|
|
elif self.action == 'analyze':
|
|
self.handle_analyze()
|
|
elif self.action == 'optimize':
|
|
self.handle_optimize()
|
|
elif self.action == 'malaysian-warm':
|
|
self.handle_malaysian_warm()
|
|
elif self.action == 'tenant-clear':
|
|
self.handle_tenant_clear()
|
|
elif self.action == 'query-clear':
|
|
self.handle_query_clear()
|
|
elif self.action == 'config-show':
|
|
self.handle_config_show()
|
|
elif self.action == 'health-check':
|
|
self.handle_health_check()
|
|
elif self.action == 'benchmark':
|
|
self.handle_benchmark()
|
|
else:
|
|
raise CommandError(f"Unknown action: {self.action}")
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error in cache management: {e}")
|
|
raise CommandError(f"Cache management failed: {e}")
|
|
|
|
def handle_clear(self):
|
|
"""Clear cache."""
|
|
self.stdout.write(f"Clearing {self.cache_type} cache...")
|
|
|
|
if self.dry_run:
|
|
self.stdout.write("DRY RUN: Would clear cache")
|
|
return
|
|
|
|
cleared = False
|
|
|
|
if self.cache_type in ['all', 'data']:
|
|
cleared = self.cache_manager.clear_tenant_cache(self.tenant_id)
|
|
|
|
if self.cache_type in ['all', 'malaysian']:
|
|
# Clear Malaysian-specific cache
|
|
malaysian_keys = [
|
|
'my_sme:*ic_validation*',
|
|
'my_sme:*sst_rate*',
|
|
'my_sme:*postcode*'
|
|
]
|
|
for pattern in malaysian_keys:
|
|
self._clear_keys_by_pattern(pattern)
|
|
|
|
if self.cache_type in ['all', 'query']:
|
|
self.query_cache.query_hashes.clear()
|
|
|
|
if cleared:
|
|
self.stdout.write(self.style.SUCCESS("Cache cleared successfully"))
|
|
else:
|
|
self.stdout.write(self.style.WARNING("No cache to clear"))
|
|
|
|
def handle_stats(self):
|
|
"""Show cache statistics."""
|
|
stats = {}
|
|
|
|
if self.cache_type in ['all', 'data']:
|
|
stats['cache'] = self.cache_manager.get_cache_stats()
|
|
|
|
if self.cache_type in ['all', 'malaysian']:
|
|
stats['malaysian'] = {
|
|
'ic_validations': self._count_keys_by_pattern('*ic_validation*'),
|
|
'sst_rates': self._count_keys_by_pattern('*sst_rate*'),
|
|
'postcodes': self._count_keys_by_pattern('*postcode*'),
|
|
}
|
|
|
|
if self.cache_type in ['all', 'query']:
|
|
stats['query'] = {
|
|
'cached_queries': len(self.query_cache.query_hashes),
|
|
}
|
|
|
|
if self.cache_type in ['all', 'tenant']:
|
|
stats['tenant'] = self.tenant_cache_manager.get_tenant_cache_stats()
|
|
|
|
self._output_results(stats, "Cache Statistics")
|
|
|
|
def handle_warm(self):
|
|
"""Warm cache with frequently accessed data."""
|
|
self.stdout.write("Warming cache...")
|
|
|
|
if self.dry_run:
|
|
self.stdout.write("DRY RUN: Would warm cache")
|
|
return
|
|
|
|
warmed = {}
|
|
|
|
# Warm Malaysian data
|
|
if self.cache_type in ['all', 'malaysian']:
|
|
warmed['malaysian'] = self.cache_warmer.warm_malaysian_data()
|
|
|
|
# Warm user data
|
|
if self.cache_type in ['all', 'user']:
|
|
user_ids = self._get_user_ids_to_warm()
|
|
warmed['users'] = self.cache_warmer.warm_user_data(user_ids)
|
|
|
|
self._output_results(warmed, "Cache Warming Results")
|
|
|
|
def handle_analyze(self):
|
|
"""Analyze cache usage and patterns."""
|
|
analysis = {
|
|
'cache_keys': self._analyze_cache_keys(),
|
|
'hit_rates': self._analyze_hit_rates(),
|
|
'memory_usage': self._analyze_memory_usage(),
|
|
'patterns': self._analyze_usage_patterns(),
|
|
}
|
|
|
|
self._output_results(analysis, "Cache Analysis")
|
|
|
|
def handle_optimize(self):
|
|
"""Optimize cache configuration and usage."""
|
|
self.stdout.write("Optimizing cache...")
|
|
|
|
if self.dry_run:
|
|
self.stdout.write("DRY RUN: Would optimize cache")
|
|
return
|
|
|
|
optimizations = {
|
|
'config_updates': [],
|
|
'recommendations': [],
|
|
'actions_taken': []
|
|
}
|
|
|
|
# Analyze current usage
|
|
analysis = self._analyze_cache_keys()
|
|
|
|
# Generate recommendations
|
|
if analysis.get('total_keys', 0) > 10000:
|
|
optimizations['recommendations'].append("Consider increasing cache size")
|
|
|
|
if analysis.get('malaysian_keys', 0) > 1000:
|
|
optimizations['recommendations'].append("Malaysian data cache is heavily used")
|
|
|
|
# Optimize based on analysis
|
|
optimizations['actions_taken'] = self._apply_optimizations(analysis)
|
|
|
|
self._output_results(optimizations, "Cache Optimization Results")
|
|
|
|
def handle_malaysian_warm(self):
|
|
"""Warm Malaysian-specific cache data."""
|
|
self.stdout.write("Warming Malaysian cache data...")
|
|
|
|
if self.dry_run:
|
|
self.stdout.write("DRY RUN: Would warm Malaysian cache")
|
|
return
|
|
|
|
warmed = self.cache_warmer.warm_malaysian_data()
|
|
self._output_results(warmed, "Malaysian Cache Warming Results")
|
|
|
|
def handle_tenant_clear(self):
|
|
"""Clear tenant-specific cache."""
|
|
if not self.tenant_id:
|
|
self.stdout.write("Error: Tenant ID required for tenant-clear operation")
|
|
return
|
|
|
|
self.stdout.write(f"Clearing cache for tenant {self.tenant_id}...")
|
|
|
|
if self.dry_run:
|
|
self.stdout.write("DRY RUN: Would clear tenant cache")
|
|
return
|
|
|
|
success = self.cache_manager.clear_tenant_cache(self.tenant_id)
|
|
|
|
if success:
|
|
self.stdout.write(self.style.SUCCESS(f"Cache cleared for tenant {self.tenant_id}"))
|
|
else:
|
|
self.stdout.write(self.style.WARNING(f"No cache found for tenant {self.tenant_id}"))
|
|
|
|
def handle_query_clear(self):
|
|
"""Clear query cache."""
|
|
self.stdout.write("Clearing query cache...")
|
|
|
|
if self.dry_run:
|
|
self.stdout.write("DRY RUN: Would clear query cache")
|
|
return
|
|
|
|
cleared_count = len(self.query_cache.query_hashes)
|
|
self.query_cache.query_hashes.clear()
|
|
|
|
self.stdout.write(self.style.SUCCESS(f"Cleared {cleared_count} cached queries"))
|
|
|
|
def handle_config_show(self):
|
|
"""Show cache configuration."""
|
|
config = {
|
|
'cache_config': CacheConfig().__dict__,
|
|
'django_cache_config': self._get_django_cache_config(),
|
|
'redis_config': self._get_redis_config(),
|
|
'tenant_isolation': getattr(settings, 'TENANT_CACHE_ISOLATION', True),
|
|
}
|
|
|
|
self._output_results(config, "Cache Configuration")
|
|
|
|
def handle_health_check(self):
|
|
"""Check cache health."""
|
|
health = {
|
|
'cache_status': self._check_cache_health(),
|
|
'redis_status': self._check_redis_health(),
|
|
'tenant_status': self._check_tenant_cache_health(),
|
|
'malaysian_cache_status': self._check_malaysian_cache_health(),
|
|
}
|
|
|
|
overall_health = all(status.get('healthy', False) for status in health.values())
|
|
health['overall_healthy'] = overall_health
|
|
|
|
if overall_health:
|
|
self.stdout.write(self.style.SUCCESS("Cache system is healthy"))
|
|
else:
|
|
self.stdout.write(self.style.WARNING("Cache system has issues"))
|
|
|
|
self._output_results(health, "Cache Health Check")
|
|
|
|
def handle_benchmark(self):
|
|
"""Run cache performance benchmarks."""
|
|
self.stdout.write("Running cache benchmarks...")
|
|
|
|
benchmarks = {
|
|
'read_performance': self._benchmark_read_operations(),
|
|
'write_performance': self._benchmark_write_operations(),
|
|
'malaysian_cache_performance': self._benchmark_malaysian_cache(),
|
|
'multi_tenant_performance': self._benchmark_multi_tenant_cache(),
|
|
}
|
|
|
|
self._output_results(benchmarks, "Cache Performance Benchmarks")
|
|
|
|
def _clear_keys_by_pattern(self, pattern: str):
|
|
"""Clear cache keys by pattern."""
|
|
try:
|
|
# This is a simplified implementation
|
|
# In production, you might want to use Redis scan operations
|
|
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
|
|
keys = self.cache_manager.redis_client.keys(pattern)
|
|
if keys:
|
|
self.cache_manager.redis_client.delete(*keys)
|
|
except Exception as e:
|
|
logger.error(f"Error clearing keys by pattern {pattern}: {e}")
|
|
|
|
def _count_keys_by_pattern(self, pattern: str) -> int:
|
|
"""Count cache keys by pattern."""
|
|
try:
|
|
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
|
|
keys = self.cache_manager.redis_client.keys(pattern)
|
|
return len(keys)
|
|
except Exception as e:
|
|
logger.error(f"Error counting keys by pattern {pattern}: {e}")
|
|
return 0
|
|
|
|
def _analyze_cache_keys(self) -> Dict[str, Any]:
|
|
"""Analyze cache keys."""
|
|
try:
|
|
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
|
|
all_keys = self.cache_manager.redis_client.keys('*')
|
|
|
|
analysis = {
|
|
'total_keys': len(all_keys),
|
|
'malaysian_keys': len([k for k in all_keys if b'my_sme' in k]),
|
|
'tenant_keys': len([k for k in all_keys if b'tenant_' in k]),
|
|
'query_keys': len([k for k in all_keys if b'query_' in k]),
|
|
}
|
|
|
|
return analysis
|
|
except Exception as e:
|
|
logger.error(f"Error analyzing cache keys: {e}")
|
|
|
|
return {'total_keys': 0, 'malaysian_keys': 0, 'tenant_keys': 0, 'query_keys': 0}
|
|
|
|
def _analyze_hit_rates(self) -> Dict[str, float]:
|
|
"""Analyze cache hit rates."""
|
|
# This would typically require monitoring over time
|
|
# For now, return basic info
|
|
return {
|
|
'cache_hit_rate': 0.0, # Would be calculated from actual metrics
|
|
'malaysian_cache_hit_rate': 0.0,
|
|
'query_cache_hit_rate': 0.0,
|
|
}
|
|
|
|
def _analyze_memory_usage(self) -> Dict[str, Any]:
|
|
"""Analyze cache memory usage."""
|
|
try:
|
|
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
|
|
info = self.cache_manager.redis_client.info()
|
|
return {
|
|
'used_memory': info.get('used_memory', 0),
|
|
'used_memory_human': info.get('used_memory_human', '0B'),
|
|
'max_memory': info.get('maxmemory', 0),
|
|
'memory fragmentation_ratio': info.get('mem_fragmentation_ratio', 1.0),
|
|
}
|
|
except Exception as e:
|
|
logger.error(f"Error analyzing memory usage: {e}")
|
|
|
|
return {'used_memory': 0, 'used_memory_human': '0B'}
|
|
|
|
def _analyze_usage_patterns(self) -> Dict[str, Any]:
|
|
"""Analyze cache usage patterns."""
|
|
return {
|
|
'peak_usage_times': [], # Would be calculated from actual usage data
|
|
'most_accessed_keys': [], # Would be calculated from access logs
|
|
'cache_efficiency': 0.0, # Would be calculated from actual metrics
|
|
}
|
|
|
|
def _apply_optimizations(self, analysis: Dict[str, Any]) -> List[str]:
|
|
"""Apply cache optimizations."""
|
|
actions = []
|
|
|
|
# Example optimizations
|
|
if analysis.get('total_keys', 0) > 5000:
|
|
actions.append("Configured LRU eviction for high key count")
|
|
|
|
if analysis.get('malaysian_keys', 0) > 500:
|
|
actions.append("Optimized Malaysian cache TTL settings")
|
|
|
|
return actions
|
|
|
|
def _get_user_ids_to_warm(self) -> List[int]:
|
|
"""Get user IDs to warm in cache."""
|
|
# Return recently active users
|
|
return list(User.objects.filter(
|
|
is_active=True,
|
|
last_login__isnull=False
|
|
).values_list('id', flat=True)[:100])
|
|
|
|
def _get_django_cache_config(self) -> Dict[str, Any]:
|
|
"""Get Django cache configuration."""
|
|
return getattr(settings, 'CACHES', {})
|
|
|
|
def _get_redis_config(self) -> Dict[str, Any]:
|
|
"""Get Redis configuration."""
|
|
return {
|
|
'url': getattr(settings, 'REDIS_URL', 'redis://127.0.0.1:6379/1'),
|
|
'connection_pool': getattr(settings, 'REDIS_CONNECTION_POOL', {}),
|
|
}
|
|
|
|
def _check_cache_health(self) -> Dict[str, Any]:
|
|
"""Check cache health."""
|
|
try:
|
|
# Test basic cache operations
|
|
test_key = 'health_check_test'
|
|
test_value = 'test_value'
|
|
|
|
# Test set
|
|
success = self.cache_manager.set(test_key, test_value, timeout=1)
|
|
if not success:
|
|
return {'healthy': False, 'error': 'Cache set failed'}
|
|
|
|
# Test get
|
|
retrieved = self.cache_manager.get(test_key)
|
|
if retrieved != test_value:
|
|
return {'healthy': False, 'error': 'Cache get failed'}
|
|
|
|
# Test delete
|
|
self.cache_manager.delete(test_key)
|
|
|
|
return {'healthy': True}
|
|
except Exception as e:
|
|
return {'healthy': False, 'error': str(e)}
|
|
|
|
def _check_redis_health(self) -> Dict[str, Any]:
|
|
"""Check Redis health."""
|
|
try:
|
|
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
|
|
info = self.cache_manager.redis_client.info()
|
|
return {
|
|
'healthy': True,
|
|
'connected_clients': info.get('connected_clients', 0),
|
|
'used_memory': info.get('used_memory_human', '0B'),
|
|
}
|
|
else:
|
|
return {'healthy': True, 'note': 'Redis not configured, using default cache'}
|
|
except Exception as e:
|
|
return {'healthy': False, 'error': str(e)}
|
|
|
|
def _check_tenant_cache_health(self) -> Dict[str, Any]:
|
|
"""Check tenant cache health."""
|
|
try:
|
|
stats = self.tenant_cache_manager.get_tenant_cache_stats()
|
|
return {
|
|
'healthy': True,
|
|
'active_tenants': len(stats.get('tenants', {})),
|
|
'total_tenants': stats.get('total_tenants', 0),
|
|
}
|
|
except Exception as e:
|
|
return {'healthy': False, 'error': str(e)}
|
|
|
|
def _check_malaysian_cache_health(self) -> Dict[str, Any]:
|
|
"""Check Malaysian cache health."""
|
|
try:
|
|
# Test Malaysian-specific cache operations
|
|
test_postcode = '50000'
|
|
test_data = {'city': 'Kuala Lumpur', 'state': 'WP Kuala Lumpur'}
|
|
|
|
success = self.malaysian_cache.set_cached_postcode_data(test_postcode, test_data)
|
|
if not success:
|
|
return {'healthy': False, 'error': 'Malaysian cache set failed'}
|
|
|
|
retrieved = self.malaysian_cache.get_cached_postcode_data(test_postcode)
|
|
if retrieved != test_data:
|
|
return {'healthy': False, 'error': 'Malaysian cache get failed'}
|
|
|
|
return {'healthy': True}
|
|
except Exception as e:
|
|
return {'healthy': False, 'error': str(e)}
|
|
|
|
def _benchmark_read_operations(self) -> Dict[str, Any]:
|
|
"""Benchmark read operations."""
|
|
import time
|
|
|
|
start_time = time.time()
|
|
for i in range(1000):
|
|
self.cache_manager.get(f'benchmark_key_{i % 100}')
|
|
end_time = time.time()
|
|
|
|
return {
|
|
'operations': 1000,
|
|
'total_time': end_time - start_time,
|
|
'avg_time_per_op': (end_time - start_time) / 1000,
|
|
'ops_per_second': 1000 / (end_time - start_time),
|
|
}
|
|
|
|
def _benchmark_write_operations(self) -> Dict[str, Any]:
|
|
"""Benchmark write operations."""
|
|
import time
|
|
|
|
start_time = time.time()
|
|
for i in range(1000):
|
|
self.cache_manager.set(f'benchmark_key_{i}', f'benchmark_value_{i}')
|
|
end_time = time.time()
|
|
|
|
return {
|
|
'operations': 1000,
|
|
'total_time': end_time - start_time,
|
|
'avg_time_per_op': (end_time - start_time) / 1000,
|
|
'ops_per_second': 1000 / (end_time - start_time),
|
|
}
|
|
|
|
def _benchmark_malaysian_cache(self) -> Dict[str, Any]:
|
|
"""Benchmark Malaysian cache operations."""
|
|
import time
|
|
|
|
start_time = time.time()
|
|
for i in range(100):
|
|
postcode = str(50000 + i)
|
|
self.malaysian_cache.set_cached_postcode_data(
|
|
postcode, {'city': 'Test City', 'state': 'Test State'}
|
|
)
|
|
end_time = time.time()
|
|
|
|
return {
|
|
'operations': 100,
|
|
'total_time': end_time - start_time,
|
|
'avg_time_per_op': (end_time - start_time) / 100,
|
|
'ops_per_second': 100 / (end_time - start_time),
|
|
}
|
|
|
|
def _benchmark_multi_tenant_cache(self) -> Dict[str, Any]:
|
|
"""Benchmark multi-tenant cache operations."""
|
|
import time
|
|
|
|
start_time = time.time()
|
|
for tenant_id in range(1, 11): # 10 tenants
|
|
tenant_cache = self.tenant_cache_manager.get_cache_manager(tenant_id)
|
|
for i in range(100):
|
|
tenant_cache.set(f'tenant_key_{i}', f'tenant_value_{i}')
|
|
end_time = time.time()
|
|
|
|
return {
|
|
'operations': 1000,
|
|
'total_time': end_time - start_time,
|
|
'avg_time_per_op': (end_time - start_time) / 1000,
|
|
'ops_per_second': 1000 / (end_time - start_time),
|
|
}
|
|
|
|
def _output_results(self, results: Dict[str, Any], title: str):
|
|
"""Output results in specified format."""
|
|
if self.output_format == 'json':
|
|
self.stdout.write(json.dumps(results, indent=2, default=str))
|
|
elif self.output_format == 'summary':
|
|
self._output_summary(results, title)
|
|
else:
|
|
self._output_table(results, title)
|
|
|
|
def _output_summary(self, results: Dict[str, Any], title: str):
|
|
"""Output summary format."""
|
|
self.stdout.write(f"\n{title}")
|
|
self.stdout.write("=" * len(title))
|
|
|
|
for key, value in results.items():
|
|
if isinstance(value, dict):
|
|
self.stdout.write(f"{key}:")
|
|
for sub_key, sub_value in value.items():
|
|
self.stdout.write(f" {sub_key}: {sub_value}")
|
|
else:
|
|
self.stdout.write(f"{key}: {value}")
|
|
|
|
def _output_table(self, results: Dict[str, Any], title: str):
|
|
"""Output table format."""
|
|
self.stdout.write(f"\n{title}")
|
|
self.stdout.write("=" * len(title))
|
|
|
|
# Simple table output - in production you might use tabulate or similar
|
|
for key, value in results.items():
|
|
if isinstance(value, dict):
|
|
self.stdout.write(f"\n{key}:")
|
|
for sub_key, sub_value in value.items():
|
|
self.stdout.write(f" {sub_key:<20} {sub_value}")
|
|
else:
|
|
self.stdout.write(f"{key:<20} {value}")
|
|
|
|
if self.verbose:
|
|
self.stdout.write("\nVerbose output enabled")
|
|
# Add additional verbose information here |