project initialization

2025-10-05 02:37:33 +08:00
parent 2cbb6d5fa1
commit b3fff546e9
226 changed files with 97805 additions and 35 deletions


@@ -0,0 +1,616 @@
"""
Django management command for cache management.
Provides comprehensive cache operations for the Malaysian SME SaaS platform.
"""
import json
import logging
from typing import Dict, List, Any, Optional
from django.core.management.base import BaseCommand, CommandError
from django.core.cache import cache
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import connection
from django_tenants.utils import get_tenant_model, get_public_schema_name
from django.core.management import call_command
from core.caching.cache_manager import (
CacheManager, MalaysianDataCache, QueryCache,
TenantCacheManager, CacheWarmer
)
from core.caching.strategies import (
WriteThroughCache, WriteBehindCache, ReadThroughCache,
RefreshAheadCache, MultiLevelCache, CacheEvictionPolicy
)
from core.caching.config import CacheConfig
logger = logging.getLogger(__name__)
User = get_user_model()
TenantModel = get_tenant_model()
class Command(BaseCommand):
help = 'Comprehensive cache management for Malaysian SME SaaS platform'
def add_arguments(self, parser):
parser.add_argument(
'action',
choices=[
'clear', 'stats', 'warm', 'analyze', 'optimize',
'malaysian-warm', 'tenant-clear', 'query-clear',
'config-show', 'health-check', 'benchmark'
],
help='Action to perform'
)
parser.add_argument(
'--tenant-id',
type=int,
help='Specific tenant ID for tenant-specific operations'
)
parser.add_argument(
'--cache-type',
choices=['all', 'data', 'malaysian', 'query', 'user', 'tenant'],
default='all',
help='Type of cache to operate on'
)
parser.add_argument(
'--key-pattern',
help='Key pattern for selective operations'
)
parser.add_argument(
'--output-format',
choices=['json', 'table', 'summary'],
default='table',
help='Output format'
)
parser.add_argument(
'--verbose',
action='store_true',
help='Verbose output'
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Dry run mode (no actual operations)'
)
parser.add_argument(
'--timeout',
type=int,
default=300,
help='Cache timeout in seconds'
)
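# Example invocations (illustrative; "manage_cache" is a hypothetical command name that
# would come from this file's name under management/commands/):
#   python manage.py manage_cache stats --cache-type malaysian --output-format json
#   python manage.py manage_cache tenant-clear --tenant-id 42 --dry-run
#   python manage.py manage_cache warm --cache-type user --timeout 600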
def handle(self, *args, **options):
self.action = options['action']
self.tenant_id = options['tenant_id']
self.cache_type = options['cache_type']
self.key_pattern = options['key_pattern']
self.output_format = options['output_format']
self.verbose = options['verbose']
self.dry_run = options['dry_run']
self.timeout = options['timeout']
# Initialize cache managers
self.cache_manager = CacheManager()
self.malaysian_cache = MalaysianDataCache(self.cache_manager)
self.query_cache = QueryCache(self.cache_manager)
self.tenant_cache_manager = TenantCacheManager()
self.cache_warmer = CacheWarmer(self.cache_manager)
try:
if self.action == 'clear':
self.handle_clear()
elif self.action == 'stats':
self.handle_stats()
elif self.action == 'warm':
self.handle_warm()
elif self.action == 'analyze':
self.handle_analyze()
elif self.action == 'optimize':
self.handle_optimize()
elif self.action == 'malaysian-warm':
self.handle_malaysian_warm()
elif self.action == 'tenant-clear':
self.handle_tenant_clear()
elif self.action == 'query-clear':
self.handle_query_clear()
elif self.action == 'config-show':
self.handle_config_show()
elif self.action == 'health-check':
self.handle_health_check()
elif self.action == 'benchmark':
self.handle_benchmark()
else:
raise CommandError(f"Unknown action: {self.action}")
except Exception as e:
logger.error(f"Error in cache management: {e}")
raise CommandError(f"Cache management failed: {e}")
def handle_clear(self):
"""Clear cache."""
self.stdout.write(f"Clearing {self.cache_type} cache...")
if self.dry_run:
self.stdout.write("DRY RUN: Would clear cache")
return
cleared = False
if self.cache_type in ['all', 'data']:
cleared = self.cache_manager.clear_tenant_cache(self.tenant_id)
if self.cache_type in ['all', 'malaysian']:
# Clear Malaysian-specific cache
malaysian_keys = [
'my_sme:*ic_validation*',
'my_sme:*sst_rate*',
'my_sme:*postcode*'
]
for pattern in malaysian_keys:
self._clear_keys_by_pattern(pattern)
if self.cache_type in ['all', 'query']:
self.query_cache.query_hashes.clear()
if cleared:
self.stdout.write(self.style.SUCCESS("Cache cleared successfully"))
else:
self.stdout.write(self.style.WARNING("No cache to clear"))
def handle_stats(self):
"""Show cache statistics."""
stats = {}
if self.cache_type in ['all', 'data']:
stats['cache'] = self.cache_manager.get_cache_stats()
if self.cache_type in ['all', 'malaysian']:
stats['malaysian'] = {
'ic_validations': self._count_keys_by_pattern('*ic_validation*'),
'sst_rates': self._count_keys_by_pattern('*sst_rate*'),
'postcodes': self._count_keys_by_pattern('*postcode*'),
}
if self.cache_type in ['all', 'query']:
stats['query'] = {
'cached_queries': len(self.query_cache.query_hashes),
}
if self.cache_type in ['all', 'tenant']:
stats['tenant'] = self.tenant_cache_manager.get_tenant_cache_stats()
self._output_results(stats, "Cache Statistics")
def handle_warm(self):
"""Warm cache with frequently accessed data."""
self.stdout.write("Warming cache...")
if self.dry_run:
self.stdout.write("DRY RUN: Would warm cache")
return
warmed = {}
# Warm Malaysian data
if self.cache_type in ['all', 'malaysian']:
warmed['malaysian'] = self.cache_warmer.warm_malaysian_data()
# Warm user data
if self.cache_type in ['all', 'user']:
user_ids = self._get_user_ids_to_warm()
warmed['users'] = self.cache_warmer.warm_user_data(user_ids)
self._output_results(warmed, "Cache Warming Results")
def handle_analyze(self):
"""Analyze cache usage and patterns."""
analysis = {
'cache_keys': self._analyze_cache_keys(),
'hit_rates': self._analyze_hit_rates(),
'memory_usage': self._analyze_memory_usage(),
'patterns': self._analyze_usage_patterns(),
}
self._output_results(analysis, "Cache Analysis")
def handle_optimize(self):
"""Optimize cache configuration and usage."""
self.stdout.write("Optimizing cache...")
if self.dry_run:
self.stdout.write("DRY RUN: Would optimize cache")
return
optimizations = {
'config_updates': [],
'recommendations': [],
'actions_taken': []
}
# Analyze current usage
analysis = self._analyze_cache_keys()
# Generate recommendations
if analysis.get('total_keys', 0) > 10000:
optimizations['recommendations'].append("Consider increasing cache size")
if analysis.get('malaysian_keys', 0) > 1000:
optimizations['recommendations'].append("Malaysian data cache is heavily used")
# Optimize based on analysis
optimizations['actions_taken'] = self._apply_optimizations(analysis)
self._output_results(optimizations, "Cache Optimization Results")
def handle_malaysian_warm(self):
"""Warm Malaysian-specific cache data."""
self.stdout.write("Warming Malaysian cache data...")
if self.dry_run:
self.stdout.write("DRY RUN: Would warm Malaysian cache")
return
warmed = self.cache_warmer.warm_malaysian_data()
self._output_results(warmed, "Malaysian Cache Warming Results")
def handle_tenant_clear(self):
"""Clear tenant-specific cache."""
if not self.tenant_id:
self.stdout.write("Error: Tenant ID required for tenant-clear operation")
return
self.stdout.write(f"Clearing cache for tenant {self.tenant_id}...")
if self.dry_run:
self.stdout.write("DRY RUN: Would clear tenant cache")
return
success = self.cache_manager.clear_tenant_cache(self.tenant_id)
if success:
self.stdout.write(self.style.SUCCESS(f"Cache cleared for tenant {self.tenant_id}"))
else:
self.stdout.write(self.style.WARNING(f"No cache found for tenant {self.tenant_id}"))
def handle_query_clear(self):
"""Clear query cache."""
self.stdout.write("Clearing query cache...")
if self.dry_run:
self.stdout.write("DRY RUN: Would clear query cache")
return
cleared_count = len(self.query_cache.query_hashes)
self.query_cache.query_hashes.clear()
self.stdout.write(self.style.SUCCESS(f"Cleared {cleared_count} cached queries"))
def handle_config_show(self):
"""Show cache configuration."""
config = {
'cache_config': CacheConfig().__dict__,
'django_cache_config': self._get_django_cache_config(),
'redis_config': self._get_redis_config(),
'tenant_isolation': getattr(settings, 'TENANT_CACHE_ISOLATION', True),
}
self._output_results(config, "Cache Configuration")
def handle_health_check(self):
"""Check cache health."""
health = {
'cache_status': self._check_cache_health(),
'redis_status': self._check_redis_health(),
'tenant_status': self._check_tenant_cache_health(),
'malaysian_cache_status': self._check_malaysian_cache_health(),
}
overall_health = all(status.get('healthy', False) for status in health.values())
health['overall_healthy'] = overall_health
if overall_health:
self.stdout.write(self.style.SUCCESS("Cache system is healthy"))
else:
self.stdout.write(self.style.WARNING("Cache system has issues"))
self._output_results(health, "Cache Health Check")
def handle_benchmark(self):
"""Run cache performance benchmarks."""
self.stdout.write("Running cache benchmarks...")
benchmarks = {
'read_performance': self._benchmark_read_operations(),
'write_performance': self._benchmark_write_operations(),
'malaysian_cache_performance': self._benchmark_malaysian_cache(),
'multi_tenant_performance': self._benchmark_multi_tenant_cache(),
}
self._output_results(benchmarks, "Cache Performance Benchmarks")
def _clear_keys_by_pattern(self, pattern: str):
"""Clear cache keys by pattern."""
try:
# Simplified implementation: KEYS blocks Redis while it scans the whole keyspace.
# In production, prefer the incremental SCAN command (see the sketch after this method).
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
keys = self.cache_manager.redis_client.keys(pattern)
if keys:
self.cache_manager.redis_client.delete(*keys)
except Exception as e:
logger.error(f"Error clearing keys by pattern {pattern}: {e}")
def _count_keys_by_pattern(self, pattern: str) -> int:
"""Count cache keys by pattern."""
try:
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
keys = self.cache_manager.redis_client.keys(pattern)
return len(keys)
except Exception as e:
logger.error(f"Error counting keys by pattern {pattern}: {e}")
return 0
def _analyze_cache_keys(self) -> Dict[str, Any]:
"""Analyze cache keys."""
try:
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
all_keys = self.cache_manager.redis_client.keys('*')
analysis = {
'total_keys': len(all_keys),
'malaysian_keys': len([k for k in all_keys if b'my_sme' in k]),
'tenant_keys': len([k for k in all_keys if b'tenant_' in k]),
'query_keys': len([k for k in all_keys if b'query_' in k]),
}
return analysis
except Exception as e:
logger.error(f"Error analyzing cache keys: {e}")
return {'total_keys': 0, 'malaysian_keys': 0, 'tenant_keys': 0, 'query_keys': 0}
def _analyze_hit_rates(self) -> Dict[str, float]:
"""Analyze cache hit rates."""
# Accurate hit rates require metrics collected over time; return placeholders for now.
# Redis does expose cumulative keyspace_hits/keyspace_misses counters (see the helper
# sketched after this method).
return {
'cache_hit_rate': 0.0, # Would be calculated from actual metrics
'malaysian_cache_hit_rate': 0.0,
'query_cache_hit_rate': 0.0,
}
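# A minimal sketch of a server-wide hit rate derived from Redis's cumulative counters.
# It assumes the same redis_client attribute used elsewhere in this command; the 'stats'
# section of INFO includes keyspace_hits and keyspace_misses (counters since server start,
# not per-tenant or per-key-type figures).
def _redis_overall_hit_rate(self) -> float:
    """Return keyspace_hits / (hits + misses) from Redis INFO, or 0.0 if unavailable."""
    try:
        if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
            stats = self.cache_manager.redis_client.info('stats')
            hits = stats.get('keyspace_hits', 0)
            misses = stats.get('keyspace_misses', 0)
            if hits + misses > 0:
                return hits / (hits + misses)
    except Exception as e:
        logger.error(f"Error reading Redis hit-rate counters: {e}")
    return 0.0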
def _analyze_memory_usage(self) -> Dict[str, Any]:
"""Analyze cache memory usage."""
try:
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
info = self.cache_manager.redis_client.info()
return {
'used_memory': info.get('used_memory', 0),
'used_memory_human': info.get('used_memory_human', '0B'),
'max_memory': info.get('maxmemory', 0),
'mem_fragmentation_ratio': info.get('mem_fragmentation_ratio', 1.0),
}
except Exception as e:
logger.error(f"Error analyzing memory usage: {e}")
return {'used_memory': 0, 'used_memory_human': '0B'}
def _analyze_usage_patterns(self) -> Dict[str, Any]:
"""Analyze cache usage patterns."""
return {
'peak_usage_times': [], # Would be calculated from actual usage data
'most_accessed_keys': [], # Would be calculated from access logs
'cache_efficiency': 0.0, # Would be calculated from actual metrics
}
def _apply_optimizations(self, analysis: Dict[str, Any]) -> List[str]:
"""Apply cache optimizations."""
actions = []
# Example optimizations
if analysis.get('total_keys', 0) > 5000:
actions.append("Configured LRU eviction for high key count")
if analysis.get('malaysian_keys', 0) > 500:
actions.append("Optimized Malaysian cache TTL settings")
return actions
def _get_user_ids_to_warm(self) -> List[int]:
"""Get user IDs to warm in cache."""
# Return recently active users
return list(User.objects.filter(
is_active=True,
last_login__isnull=False
).values_list('id', flat=True)[:100])
def _get_django_cache_config(self) -> Dict[str, Any]:
"""Get Django cache configuration."""
return getattr(settings, 'CACHES', {})
def _get_redis_config(self) -> Dict[str, Any]:
"""Get Redis configuration."""
return {
'url': getattr(settings, 'REDIS_URL', 'redis://127.0.0.1:6379/1'),
'connection_pool': getattr(settings, 'REDIS_CONNECTION_POOL', {}),
}
def _check_cache_health(self) -> Dict[str, Any]:
"""Check cache health."""
try:
# Test basic cache operations
test_key = 'health_check_test'
test_value = 'test_value'
# Test set
success = self.cache_manager.set(test_key, test_value, timeout=1)
if not success:
return {'healthy': False, 'error': 'Cache set failed'}
# Test get
retrieved = self.cache_manager.get(test_key)
if retrieved != test_value:
return {'healthy': False, 'error': 'Cache get failed'}
# Test delete
self.cache_manager.delete(test_key)
return {'healthy': True}
except Exception as e:
return {'healthy': False, 'error': str(e)}
def _check_redis_health(self) -> Dict[str, Any]:
"""Check Redis health."""
try:
if hasattr(self.cache_manager, 'redis_client') and self.cache_manager.redis_client:
info = self.cache_manager.redis_client.info()
return {
'healthy': True,
'connected_clients': info.get('connected_clients', 0),
'used_memory': info.get('used_memory_human', '0B'),
}
else:
return {'healthy': True, 'note': 'Redis not configured, using default cache'}
except Exception as e:
return {'healthy': False, 'error': str(e)}
def _check_tenant_cache_health(self) -> Dict[str, Any]:
"""Check tenant cache health."""
try:
stats = self.tenant_cache_manager.get_tenant_cache_stats()
return {
'healthy': True,
'active_tenants': len(stats.get('tenants', {})),
'total_tenants': stats.get('total_tenants', 0),
}
except Exception as e:
return {'healthy': False, 'error': str(e)}
def _check_malaysian_cache_health(self) -> Dict[str, Any]:
"""Check Malaysian cache health."""
try:
# Test Malaysian-specific cache operations
test_postcode = '50000'
test_data = {'city': 'Kuala Lumpur', 'state': 'WP Kuala Lumpur'}
success = self.malaysian_cache.set_cached_postcode_data(test_postcode, test_data)
if not success:
return {'healthy': False, 'error': 'Malaysian cache set failed'}
retrieved = self.malaysian_cache.get_cached_postcode_data(test_postcode)
if retrieved != test_data:
return {'healthy': False, 'error': 'Malaysian cache get failed'}
return {'healthy': True}
except Exception as e:
return {'healthy': False, 'error': str(e)}
def _benchmark_read_operations(self) -> Dict[str, Any]:
"""Benchmark read operations."""
import time
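# Note: unless these benchmark keys were populated beforehand (for example by running the
# write benchmark first), every get() below is a cache miss, so this measures the miss
# path rather than hit latency.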
start_time = time.time()
for i in range(1000):
self.cache_manager.get(f'benchmark_key_{i % 100}')
end_time = time.time()
return {
'operations': 1000,
'total_time': end_time - start_time,
'avg_time_per_op': (end_time - start_time) / 1000,
'ops_per_second': 1000 / (end_time - start_time),
}
def _benchmark_write_operations(self) -> Dict[str, Any]:
"""Benchmark write operations."""
import time
start_time = time.time()
for i in range(1000):
self.cache_manager.set(f'benchmark_key_{i}', f'benchmark_value_{i}')
end_time = time.time()
return {
'operations': 1000,
'total_time': end_time - start_time,
'avg_time_per_op': (end_time - start_time) / 1000,
'ops_per_second': 1000 / (end_time - start_time),
}
def _benchmark_malaysian_cache(self) -> Dict[str, Any]:
"""Benchmark Malaysian cache operations."""
import time
start_time = time.time()
for i in range(100):
postcode = str(50000 + i)
self.malaysian_cache.set_cached_postcode_data(
postcode, {'city': 'Test City', 'state': 'Test State'}
)
end_time = time.time()
return {
'operations': 100,
'total_time': end_time - start_time,
'avg_time_per_op': (end_time - start_time) / 100,
'ops_per_second': 100 / (end_time - start_time),
}
def _benchmark_multi_tenant_cache(self) -> Dict[str, Any]:
"""Benchmark multi-tenant cache operations."""
import time
start_time = time.time()
for tenant_id in range(1, 11): # 10 tenants
tenant_cache = self.tenant_cache_manager.get_cache_manager(tenant_id)
for i in range(100):
tenant_cache.set(f'tenant_key_{i}', f'tenant_value_{i}')
end_time = time.time()
return {
'operations': 1000,
'total_time': end_time - start_time,
'avg_time_per_op': (end_time - start_time) / 1000,
'ops_per_second': 1000 / (end_time - start_time),
}
def _output_results(self, results: Dict[str, Any], title: str):
"""Output results in specified format."""
if self.output_format == 'json':
self.stdout.write(json.dumps(results, indent=2, default=str))
elif self.output_format == 'summary':
self._output_summary(results, title)
else:
self._output_table(results, title)
def _output_summary(self, results: Dict[str, Any], title: str):
"""Output summary format."""
self.stdout.write(f"\n{title}")
self.stdout.write("=" * len(title))
for key, value in results.items():
if isinstance(value, dict):
self.stdout.write(f"{key}:")
for sub_key, sub_value in value.items():
self.stdout.write(f" {sub_key}: {sub_value}")
else:
self.stdout.write(f"{key}: {value}")
def _output_table(self, results: Dict[str, Any], title: str):
"""Output table format."""
self.stdout.write(f"\n{title}")
self.stdout.write("=" * len(title))
# Simple table output - in production you might use tabulate or similar
for key, value in results.items():
if isinstance(value, dict):
self.stdout.write(f"\n{key}:")
for sub_key, sub_value in value.items():
self.stdout.write(f" {sub_key:<20} {sub_value}")
else:
self.stdout.write(f"{key:<20} {value}")
if self.verbose:
self.stdout.write("\nVerbose output enabled")
# Add additional verbose information here


@@ -0,0 +1,554 @@
"""
Database Optimization Management Command
This management command provides comprehensive database optimization utilities
for the multi-tenant SaaS platform, including index management, query optimization,
performance analysis, and maintenance operations specifically designed for
Malaysian deployment scenarios.
"""
import argparse
import json
import logging
import sys
from typing import List, Dict, Any, Optional
from django.core.management.base import BaseCommand, CommandError
from django.db import connection
from django.core.cache import cache
from django.conf import settings
from django.utils import timezone
from django_tenants.utils import get_tenant_model, schema_context
from core.optimization.query_optimization import (
DatabaseOptimizer,
QueryOptimizer,
CacheManager,
DatabaseMaintenance
)
from core.optimization.index_manager import (
IndexManager,
IndexType,
IndexStatus
)
from core.optimization.config import (
get_config,
DatabaseConfig,
validate_environment_config
)
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Database optimization management command.
Usage:
python manage.py optimize_database <action> [options]
Actions:
analyze - Analyze database performance
indexes - Manage database indexes
queries - Optimize database queries
cache - Manage database cache
maintenance - Perform database maintenance
config - Show configuration
malaysian - Malaysian-specific optimizations
report - Generate comprehensive report
"""
help = 'Optimize database performance for the multi-tenant SaaS platform'
def add_arguments(self, parser):
"""Add command arguments."""
parser.add_argument(
'action',
choices=[
'analyze', 'indexes', 'queries', 'cache',
'maintenance', 'config', 'malaysian', 'report'
],
help='Optimization action to perform'
)
parser.add_argument(
'--tenant',
help='Specific tenant schema to optimize'
)
parser.add_argument(
'--environment',
choices=['production', 'staging', 'development'],
default='production',
help='Environment configuration to use'
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Show what would be done without executing'
)
parser.add_argument(
'--verbose',
action='store_true',
help='Enable verbose output'
)
parser.add_argument(
'--output',
choices=['json', 'table', 'summary'],
default='table',
help='Output format'
)
parser.add_argument(
'--hours',
type=int,
default=24,
help='Number of hours to analyze (default: 24)'
)
parser.add_argument(
'--index-action',
choices=['create', 'drop', 'rebuild', 'analyze'],
help='Specific index action to perform'
)
parser.add_argument(
'--cache-action',
choices=['clear', 'stats', 'warmup'],
help='Cache management action'
)
def handle(self, *args, **options):
"""Handle the command."""
self.setup_logging(options.get('verbose'))
action = options['action']
tenant_schema = options.get('tenant')
environment = options.get('environment')
dry_run = options.get('dry_run')
output_format = options.get('output')
# Validate configuration
if not validate_environment_config(environment):
raise CommandError(f"Invalid configuration for environment: {environment}")
# Get configuration
config = get_config(environment)
if dry_run:
self.stdout.write(
self.style.WARNING(f"DRY RUN MODE - No changes will be made")
)
try:
if action == 'analyze':
self.analyze_database(config, tenant_schema, options, output_format)
elif action == 'indexes':
self.manage_indexes(config, tenant_schema, options, output_format)
elif action == 'queries':
self.optimize_queries(config, tenant_schema, options, output_format)
elif action == 'cache':
self.manage_cache(config, tenant_schema, options, output_format)
elif action == 'maintenance':
self.perform_maintenance(config, tenant_schema, options, output_format)
elif action == 'config':
self.show_configuration(config, output_format)
elif action == 'malaysian':
self.optimize_malaysian(config, tenant_schema, options, output_format)
elif action == 'report':
self.generate_report(config, tenant_schema, options, output_format)
else:
raise CommandError(f"Unknown action: {action}")
except Exception as e:
logger.error(f"Error during optimization: {e}")
raise CommandError(f"Optimization failed: {e}")
def setup_logging(self, verbose: bool):
"""Setup logging configuration."""
level = logging.DEBUG if verbose else logging.INFO
logging.basicConfig(
level=level,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
def analyze_database(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Analyze database performance."""
self.stdout.write("Analyzing database performance...")
optimizer = DatabaseOptimizer(tenant_schema)
# Analyze query performance
hours = options.get('hours', 24)
performance_analysis = optimizer.analyze_query_performance(hours)
# Analyze indexes
index_manager = IndexManager(tenant_schema)
index_performance = index_manager.analyze_index_performance()
# Get table statistics
table_stats = DatabaseMaintenance.get_table_sizes()
# Combine results
analysis_results = {
'performance_analysis': performance_analysis,
'index_analysis': index_performance,
'table_statistics': table_stats,
'optimization_recommendations': optimizer.get_optimization_report()
}
self.output_results(analysis_results, output_format)
def manage_indexes(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Manage database indexes."""
index_action = options.get('index_action')
dry_run = options.get('dry_run')
index_manager = IndexManager(tenant_schema)
if index_action == 'analyze':
self.stdout.write("Analyzing indexes...")
results = index_manager.analyze_index_performance()
self.output_results(results, output_format)
elif index_action == 'create':
self.stdout.write("Creating Malaysian-specific indexes...")
created = index_manager.create_malaysian_indexes()
created.extend(index_manager.create_multi_tenant_indexes())
if dry_run:
self.stdout.write(f"Would create {len(created)} indexes")
else:
self.stdout.write(
self.style.SUCCESS(f"Created {len(created)} indexes")
)
elif index_action == 'drop':
self.stdout.write("Analyzing unused indexes...")
performance_analysis = index_manager.analyze_index_performance()
unused_recommendations = [
r for r in performance_analysis['recommendations']
if r.action == 'drop'
]
if dry_run:
self.stdout.write(f"Would drop {len(unused_recommendations)} unused indexes")
else:
results = index_manager.execute_recommendations(
unused_recommendations, dry_run
)
self.stdout.write(
self.style.SUCCESS(f"Dropped {results['executed']} indexes")
)
elif index_action == 'rebuild':
self.stdout.write("Rebuilding fragmented indexes...")
performance_analysis = index_manager.analyze_index_performance()
rebuild_recommendations = [
r for r in performance_analysis['recommendations']
if r.action == 'rebuild'
]
if dry_run:
self.stdout.write(f"Would rebuild {len(rebuild_recommendations)} indexes")
else:
results = index_manager.execute_recommendations(
rebuild_recommendations, dry_run
)
self.stdout.write(
self.style.SUCCESS(f"Rebuilt {results['executed']} indexes")
)
else:
# Show index statistics
stats = index_manager.get_index_statistics()
self.output_results(stats, output_format)
def optimize_queries(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Optimize database queries."""
self.stdout.write("Optimizing database queries...")
optimizer = DatabaseOptimizer(tenant_schema)
# Get optimization report
report = optimizer.get_optimization_report()
# Optimize Malaysian queries
malaysian_opts = optimizer.optimize_malaysian_queries()
# Add to report
report['malaysian_optimizations'] = malaysian_opts
self.output_results(report, output_format)
def manage_cache(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Manage database cache."""
cache_action = options.get('cache_action')
cache_manager = CacheManager()
if cache_action == 'clear':
self.stdout.write("Clearing cache...")
if options.get('dry_run'):
self.stdout.write("Would clear all cache")
else:
cache.clear()
self.stdout.write(
self.style.SUCCESS("Cache cleared successfully")
)
elif cache_action == 'stats':
self.stdout.write("Getting cache statistics...")
try:
# Get Redis stats if using Redis
if 'redis' in str(config.cache.backend):
import redis
r = redis.from_url(config.cache.location)
stats = r.info()
self.output_results(stats, output_format)
else:
self.stdout.write("Cache statistics not available for current backend")
except Exception as e:
self.stdout.write(
self.style.ERROR(f"Error getting cache stats: {e}")
)
elif cache_action == 'warmup':
self.stdout.write("Warming up cache...")
# Implement cache warmup logic here
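# One possible approach (a sketch only, not wired in): precompute values the optimizer
# reads repeatedly and push them into the configured cache, for example:
#   cache.set(f"{config.cache.key_prefix}:table_sizes",
#             DatabaseMaintenance.get_table_sizes(),
#             config.cache.timeout)
# cache, config.cache.key_prefix/timeout and DatabaseMaintenance.get_table_sizes() are
# names already used elsewhere in this command; the key name itself is illustrative.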
self.stdout.write("Cache warmup completed")
else:
# Show cache configuration
cache_config = {
'backend': config.cache.backend.value,
'location': config.cache.location,
'timeout': config.cache.timeout,
'key_prefix': config.cache.key_prefix,
'enabled': config.performance.enable_caching
}
self.output_results(cache_config, output_format)
def perform_maintenance(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Perform database maintenance."""
self.stdout.write("Performing database maintenance...")
maintenance = DatabaseMaintenance()
# Run maintenance tasks
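# Note: PostgreSQL rejects VACUUM inside a transaction block, so this assumes Django's
# default autocommit mode (no surrounding atomic() block or ATOMIC_REQUESTS).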
with connection.cursor() as cursor:
# Analyze tables
cursor.execute("ANALYZE VERBOSE")
self.stdout.write("Analyzed database tables")
# Update statistics
cursor.execute("VACUUM ANALYZE")
self.stdout.write("Vacuumed and analyzed database")
# Get maintenance results
results = {
'tables_analyzed': len(DatabaseMaintenance.get_table_sizes()),
'maintenance_completed': timezone.now(),
'next_recommended': timezone.now() + timezone.timedelta(days=7)
}
self.output_results(results, output_format)
def show_configuration(self, config: DatabaseConfig, output_format: str):
"""Show current database configuration."""
self.stdout.write("Database Configuration:")
# Get all configuration settings
db_config = config.get_database_optimization_settings()
# Add Django settings
db_config['django_database'] = config.get_django_database_config()
db_config['django_cache'] = config.get_django_cache_config()
# Add validation warnings
warnings = config.validate_configuration()
if warnings:
db_config['warnings'] = warnings
# Add recommendations
recommendations = config.get_performance_recommendations()
if recommendations:
db_config['recommendations'] = recommendations
self.output_results(db_config, output_format)
def optimize_malaysian(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Perform Malaysian-specific optimizations."""
self.stdout.write("Performing Malaysian-specific optimizations...")
optimizer = DatabaseOptimizer(tenant_schema)
index_manager = IndexManager(tenant_schema)
# Create Malaysian indexes
created_indexes = index_manager.create_malaysian_indexes()
# Optimize Malaysian queries
malaysian_opts = optimizer.optimize_malaysian_queries()
# Get Malaysian-specific configuration
malaysian_config = {
'indexes_created': len(created_indexes),
'index_names': created_indexes,
'sst_queries_optimized': malaysian_opts['sst_queries_optimized'],
'ic_validation_optimized': malaysian_opts['ic_validation_optimized'],
'address_queries_optimized': malaysian_opts['address_queries_optimized'],
'localization_improvements': malaysian_opts['localization_improvements'],
'malaysian_config': {
'timezone': config.malaysian.timezone,
'locale': config.malaysian.locale,
'currency': config.malaysian.currency,
'local_caching_enabled': config.malaysian.enable_local_caching
}
}
self.output_results(malaysian_config, output_format)
def generate_report(self, config: DatabaseConfig, tenant_schema: Optional[str],
options: Dict[str, Any], output_format: str):
"""Generate comprehensive optimization report."""
self.stdout.write("Generating comprehensive optimization report...")
optimizer = DatabaseOptimizer(tenant_schema)
index_manager = IndexManager(tenant_schema)
# Collect all data for report
report_data = {
'report_generated': timezone.now(),
'environment': config.environment,
'tenant_schema': tenant_schema,
'configuration': config.get_database_optimization_settings(),
'performance_analysis': optimizer.analyze_query_performance(),
'index_analysis': index_manager.analyze_index_performance(),
'index_statistics': index_manager.get_index_statistics(),
'optimization_report': optimizer.get_optimization_report(),
'table_statistics': DatabaseMaintenance.get_table_sizes(),
'malaysian_optimizations': optimizer.optimize_malaysian_queries(),
'configuration_validation': config.validate_configuration(),
'recommendations': config.get_performance_recommendations()
}
self.output_results(report_data, output_format)
def output_results(self, results: Dict[str, Any], output_format: str):
"""Output results in specified format."""
if output_format == 'json':
self.output_json(results)
elif output_format == 'table':
self.output_table(results)
elif output_format == 'summary':
self.output_summary(results)
else:
self.output_table(results)
def output_json(self, results: Dict[str, Any]):
"""Output results as JSON."""
# Convert datetime objects to strings
def json_serializer(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'value'):
return obj.value
elif hasattr(obj, '__dict__'):
return obj.__dict__
return str(obj)
json_output = json.dumps(results, indent=2, default=json_serializer)
self.stdout.write(json_output)
def output_table(self, results: Dict[str, Any]):
"""Output results as formatted tables."""
for key, value in results.items():
self.stdout.write(f"\n{self.style.SUCCESS(key.upper()}:}")
if isinstance(value, dict):
for sub_key, sub_value in value.items():
self.stdout.write(f" {sub_key}: {sub_value}")
elif isinstance(value, list):
for i, item in enumerate(value):
self.stdout.write(f" {i+1}. {item}")
else:
self.stdout.write(f" {value}")
def output_summary(self, results: Dict[str, Any]):
"""Output results as summary."""
self.stdout.write(self.style.SUCCESS("OPTIMIZATION SUMMARY:"))
# Extract key metrics
total_queries = results.get('performance_analysis', {}).get('total_queries', 0)
slow_queries = results.get('performance_analysis', {}).get('slow_queries', 0)
total_indexes = results.get('index_analysis', {}).get('total_indexes', 0)
unused_indexes = results.get('index_analysis', {}).get('unused_indexes', 0)
recommendations = results.get('index_analysis', {}).get('recommendations', [])
self.stdout.write(f"• Total queries analyzed: {total_queries}")
self.stdout.write(f"• Slow queries found: {slow_queries}")
self.stdout.write(f"• Total indexes: {total_indexes}")
self.stdout.write(f"• Unused indexes: {unused_indexes}")
self.stdout.write(f"• Recommendations: {len(recommendations)}")
if recommendations:
self.stdout.write("\nTOP RECOMMENDATIONS:")
for i, rec in enumerate(recommendations[:5]):
priority = rec.get('priority', 'medium')
action = rec.get('action', 'unknown')
reason = rec.get('reason', 'No reason provided')
self.stdout.write(f" {i+1}. [{priority.upper()}] {action}: {reason}")
# Malaysian-specific summary
malaysian_opts = results.get('malaysian_optimizations', {})
if malaysian_opts:
self.stdout.write(f"\nMALAYSIAN OPTIMIZATIONS:")
self.stdout.write(f"• SST queries optimized: {malaysian_opts.get('sst_queries_optimized', 0)}")
self.stdout.write(f"• IC validation optimized: {malaysian_opts.get('ic_validation_optimized', False)}")
self.stdout.write(f"• Address queries optimized: {malaysian_opts.get('address_queries_optimized', 0)}")
def create_progress_bar(self, total: int, description: str):
"""Create a simple progress bar."""
return ProgressBar(total, description)
class ProgressBar:
"""Simple progress bar for command line output."""
def __init__(self, total: int, description: str):
self.total = total
self.current = 0
self.description = description
def update(self, increment: int = 1):
"""Update progress."""
self.current += increment
self._draw()
def _draw(self):
"""Draw progress bar."""
if self.total == 0:
return
progress = self.current / self.total
bar_length = 50
filled = int(bar_length * progress)
bar = '#' * filled + '-' * (bar_length - filled)
percent = progress * 100
self.stdout.write(f"\r{self.description}: |{bar}| {percent:.1f}% ({self.current}/{self.total})")
self.stdout.flush()
def finish(self):
"""Finish progress bar."""
self._draw()
self.stdout.write("\n")
self.stdout.flush()
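# Typical usage (illustrative; "items" and process() stand in for whatever the caller loops over):
#   bar = self.create_progress_bar(total=len(items), description="Rebuilding indexes")
#   for item in items:
#       process(item)
#       bar.update()
#   bar.finish()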