"""
|
|
Multi-tenant caching strategies for Malaysian SME SaaS platform.
|
|
Provides advanced caching with Malaysian-specific optimizations.
|
|
"""
|
|
|
|
import json
|
|
import logging
|
|
import hashlib
|
|
import pickle
|
|
from datetime import datetime, timedelta
|
|
from typing import Any, Dict, List, Optional, Union, Tuple
|
|
from django.core.cache import cache
|
|
from django.conf import settings
|
|
from django.db import connection, models
|
|
from django_redis import get_redis_connection
|
|
from django.contrib.auth import get_user_model
|
|
from django.core.serializers.json import DjangoJSONEncoder
|
|
from django.utils import timezone
|
|
from django_tenants.utils import get_tenant_model, get_public_schema_name, get_tenant_schema_name
|
|
from rest_framework.response import Response
|
|
|
|
from .config import DatabaseConfig, CacheConfig
|
|
|
|
logger = logging.getLogger(__name__)
|
|
User = get_user_model()
|
|
TenantModel = get_tenant_model()
|
|
|
|
|
|


class CacheKeyGenerator:
    """Generates cache keys with multi-tenant support and Malaysian context."""

    def __init__(self):
        # The tenant prefix is resolved from the active connection schema at
        # instantiation time, so create a new generator per tenant context.
        self.tenant_prefix = self._get_tenant_prefix()
        self.malaysia_prefix = "my_sme"

    def _get_tenant_prefix(self) -> str:
        """Get the current tenant prefix for cache keys."""
        try:
            tenant = TenantModel.objects.get(schema_name=connection.schema_name)
            return f"tenant_{tenant.id}"
        except Exception:
            return "public"

    def generate_key(
        self,
        key_type: str,
        identifier: str,
        subkey: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ) -> str:
        """Generate a standardized cache key."""
        components = [
            self.malaysia_prefix,
            self.tenant_prefix,
            key_type,
            identifier,
        ]

        if subkey:
            components.append(subkey)

        if context:
            # MD5 is used only to derive a short, stable key suffix, not for security.
            context_hash = hashlib.md5(
                json.dumps(context, sort_keys=True).encode()
            ).hexdigest()[:8]
            components.append(context_hash)

        return ":".join(components)

    def generate_malaysian_key(
        self,
        entity_type: str,
        identifier: Union[str, int],
        malaysian_context: Optional[Dict[str, Any]] = None,
    ) -> str:
        """Generate a Malaysian-specific cache key."""
        return self.generate_key(
            "my",
            f"{entity_type}_{identifier}",
            context=malaysian_context,
        )
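
# Key layout produced by CacheKeyGenerator (illustrative; the tenant id, the
# context hash, and the identifiers below are made-up examples):
#
#   my_sme:tenant_3:user:42                      # generate_key("user", "42")
#   my_sme:tenant_3:my:sst_rate_Johor_standard   # generate_malaysian_key("sst_rate", "Johor_standard")
#   my_sme:tenant_3:report:monthly:a1b2c3d4      # generate_key("report", "monthly", context={...})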


class CacheManager:
    """Advanced cache management with multi-tenant support."""

    def __init__(self, config: Optional[CacheConfig] = None):
        self.config = config or CacheConfig()
        self.key_generator = CacheKeyGenerator()
        self.redis_client = None

        if self.config.use_redis:
            try:
                self.redis_client = get_redis_connection("default")
            except Exception as e:
                logger.warning(f"Redis connection failed: {e}")

    def get(
        self,
        key: str,
        default: Any = None,
        version: Optional[int] = None,
    ) -> Any:
        """Get a value from the cache with error handling."""
        try:
            return cache.get(key, default=default, version=version)
        except Exception as e:
            logger.error(f"Cache get error for key {key}: {e}")
            return default

    def set(
        self,
        key: str,
        value: Any,
        timeout: Optional[int] = None,
        version: Optional[int] = None,
    ) -> bool:
        """Set a value in the cache with error handling."""
        try:
            timeout = timeout or self.config.default_timeout
            cache.set(key, value, timeout=timeout, version=version)
            # cache.set() returns None on several backends, so report success
            # explicitly once the call completes without raising.
            return True
        except Exception as e:
            logger.error(f"Cache set error for key {key}: {e}")
            return False

    def delete(self, key: str, version: Optional[int] = None) -> bool:
        """Delete a key from the cache."""
        try:
            return cache.delete(key, version=version)
        except Exception as e:
            logger.error(f"Cache delete error for key {key}: {e}")
            return False

    def clear_tenant_cache(self, tenant_id: Optional[int] = None) -> bool:
        """Clear all cached entries for a specific tenant."""
        try:
            if tenant_id:
                pattern = f"*:tenant_{tenant_id}:*"
            else:
                pattern = f"*:{self.key_generator.tenant_prefix}:*"

            if self.redis_client:
                # KEYS scans the whole keyspace; acceptable for an occasional
                # administrative clear, but avoid calling it on a hot path.
                keys = self.redis_client.keys(pattern)
                if keys:
                    self.redis_client.delete(*keys)

            return True
        except Exception as e:
            logger.error(f"Error clearing tenant cache: {e}")
            return False

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics."""
        stats = {
            "tenant": self.key_generator.tenant_prefix,
            "redis_available": self.redis_client is not None,
            "default_timeout": self.config.default_timeout,
        }

        if self.redis_client:
            try:
                info = self.redis_client.info()
                stats.update({
                    "used_memory": info.get("used_memory_human", "N/A"),
                    "connected_clients": info.get("connected_clients", 0),
                    "total_commands_processed": info.get("total_commands_processed", 0),
                })
            except Exception as e:
                logger.error(f"Error getting Redis stats: {e}")

        return stats
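
# Example usage (illustrative sketch; assumes Django settings and a cache
# backend are already configured, and compute_report() is a hypothetical helper):
#
#   manager = CacheManager()
#   key = manager.key_generator.generate_key("report", "monthly_sales")
#   report = manager.get(key)
#   if report is None:
#       report = compute_report()
#       manager.set(key, report, timeout=300)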


class MalaysianDataCache:
    """Specialized caching for Malaysian data and validations."""

    def __init__(self, cache_manager: CacheManager):
        self.cache = cache_manager

    def get_cached_ic_validation(self, ic_number: str) -> Optional[Dict[str, Any]]:
        """Get a cached IC validation result."""
        key = self.cache.key_generator.generate_malaysian_key(
            "ic_validation",
            ic_number,
        )
        return self.cache.get(key)

    def set_cached_ic_validation(
        self,
        ic_number: str,
        validation_result: Dict[str, Any],
    ) -> bool:
        """Cache an IC validation result."""
        key = self.cache.key_generator.generate_malaysian_key(
            "ic_validation",
            ic_number,
        )
        return self.cache.set(key, validation_result, timeout=86400)  # 24 hours

    def get_cached_sst_rate(self, state: str, category: str) -> Optional[float]:
        """Get a cached SST rate."""
        key = self.cache.key_generator.generate_malaysian_key(
            "sst_rate",
            f"{state}_{category}",
        )
        return self.cache.get(key)

    def set_cached_sst_rate(
        self,
        state: str,
        category: str,
        rate: float,
    ) -> bool:
        """Cache an SST rate."""
        key = self.cache.key_generator.generate_malaysian_key(
            "sst_rate",
            f"{state}_{category}",
        )
        return self.cache.set(key, rate, timeout=604800)  # 7 days

    def get_cached_postcode_data(self, postcode: str) -> Optional[Dict[str, Any]]:
        """Get cached postcode data."""
        key = self.cache.key_generator.generate_malaysian_key(
            "postcode",
            postcode,
        )
        return self.cache.get(key)

    def set_cached_postcode_data(
        self,
        postcode: str,
        postcode_data: Dict[str, Any],
    ) -> bool:
        """Cache postcode data."""
        key = self.cache.key_generator.generate_malaysian_key(
            "postcode",
            postcode,
        )
        return self.cache.set(key, postcode_data, timeout=2592000)  # 30 days
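
# Example usage (illustrative sketch; lookup_sst_rate() is a hypothetical
# helper that fetches the authoritative rate):
#
#   my_data_cache = MalaysianDataCache(CacheManager())
#   rate = my_data_cache.get_cached_sst_rate("Selangor", "standard")
#   if rate is None:
#       rate = lookup_sst_rate("Selangor", "standard")
#       my_data_cache.set_cached_sst_rate("Selangor", "standard", rate)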


class QueryCache:
    """Query result caching with model-based invalidation."""

    def __init__(self, cache_manager: CacheManager):
        self.cache = cache_manager
        # Maps each query hash to its normalized query text so that
        # invalidate_model_cache() can match cached queries by model/table name.
        self.query_registry: Dict[str, str] = {}

    def generate_query_hash(self, query: str, params: Optional[tuple] = None) -> str:
        """Generate a hash identifying a query and its parameters."""
        query_string = query.strip().lower()
        if params:
            query_string += str(params)
        # MD5 is used only as a compact key, not for security.
        return hashlib.md5(query_string.encode()).hexdigest()

    def cache_query_result(
        self,
        query: str,
        result: Any,
        params: Optional[tuple] = None,
        timeout: Optional[int] = None,
    ) -> bool:
        """Cache a query result."""
        query_hash = self.generate_query_hash(query, params)
        key = self.cache.key_generator.generate_key("query", query_hash)

        success = self.cache.set(key, result, timeout=timeout)
        if success:
            self.query_registry[query_hash] = query.strip().lower()

        return success

    def get_cached_query_result(
        self,
        query: str,
        params: Optional[tuple] = None,
    ) -> Optional[Any]:
        """Get a cached query result."""
        query_hash = self.generate_query_hash(query, params)
        key = self.cache.key_generator.generate_key("query", query_hash)
        return self.cache.get(key)

    def invalidate_model_cache(self, model_name: str) -> int:
        """Invalidate cached queries that reference a specific model or table."""
        invalidated = 0
        for query_hash, query_string in list(self.query_registry.items()):
            if model_name.lower() in query_string:
                key = self.cache.key_generator.generate_key("query", query_hash)
                if self.cache.delete(key):
                    invalidated += 1
                self.query_registry.pop(query_hash, None)

        return invalidated
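
# Example usage (illustrative sketch; run_query() is a hypothetical helper
# that executes raw SQL and returns rows):
#
#   qc = QueryCache(CacheManager())
#   sql = "SELECT id, total FROM invoices WHERE status = %s"
#   rows = qc.get_cached_query_result(sql, params=("paid",))
#   if rows is None:
#       rows = run_query(sql, ("paid",))
#       qc.cache_query_result(sql, rows, params=("paid",), timeout=120)
#
#   # After writing to the invoices table:
#   qc.invalidate_model_cache("invoices")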


class TenantCacheManager:
    """Multi-tenant cache management with isolation."""

    def __init__(self):
        self.cache_managers = {}

    def get_cache_manager(self, tenant_id: Optional[int] = None) -> CacheManager:
        """Get the cache manager for a specific tenant."""
        if not tenant_id:
            tenant_id = self._get_current_tenant_id()

        if tenant_id not in self.cache_managers:
            config = CacheConfig()
            config.tenant_isolation = True
            config.tenant_prefix = f"tenant_{tenant_id}"
            self.cache_managers[tenant_id] = CacheManager(config)

        return self.cache_managers[tenant_id]

    def _get_current_tenant_id(self) -> int:
        """Get the current tenant ID."""
        try:
            tenant = TenantModel.objects.get(schema_name=connection.schema_name)
            return tenant.id
        except Exception:
            return 0  # Public schema

    def clear_all_tenant_cache(self) -> Dict[str, Any]:
        """Clear the cache for all known tenants."""
        results = {"cleared_tenants": 0, "errors": []}

        for tenant_id, cache_manager in self.cache_managers.items():
            try:
                if cache_manager.clear_tenant_cache(tenant_id):
                    results["cleared_tenants"] += 1
            except Exception as e:
                results["errors"].append(f"Tenant {tenant_id}: {e}")

        return results

    def get_tenant_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics for all known tenants."""
        stats = {"tenants": {}, "total_tenants": len(self.cache_managers)}

        for tenant_id, cache_manager in self.cache_managers.items():
            stats["tenants"][str(tenant_id)] = cache_manager.get_cache_stats()

        return stats
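
# Example usage (illustrative sketch, using the module-level
# tenant_cache_manager instance defined at the bottom of this file):
#
#   manager = tenant_cache_manager.get_cache_manager(tenant_id=3)
#   manager.set("dashboard_totals", {"invoices": 120}, timeout=60)
#   stats = tenant_cache_manager.get_tenant_cache_stats()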


class CacheWarmer:
    """Proactive cache warming for critical data."""

    def __init__(self, cache_manager: CacheManager):
        self.cache = cache_manager
        self.malaysian_cache = MalaysianDataCache(cache_manager)

    def warm_malaysian_data(self) -> Dict[str, int]:
        """Warm the cache with Malaysian reference data."""
        # IC validations are cached on demand (see MalaysianDataCache), so only
        # SST rates and postcode data are pre-warmed here.
        warmed = {"ic_validations": 0, "sst_rates": 0, "postcodes": 0}

        # Warm SST rates
        sst_rates = self._get_sst_rates_to_warm()
        for state, category, rate in sst_rates:
            if self.malaysian_cache.set_cached_sst_rate(state, category, rate):
                warmed["sst_rates"] += 1

        # Warm postcode data
        postcodes = self._get_postcodes_to_warm()
        for postcode, data in postcodes:
            if self.malaysian_cache.set_cached_postcode_data(postcode, data):
                warmed["postcodes"] += 1

        return warmed

    def warm_user_data(self, user_ids: List[int]) -> int:
        """Warm the cache with user data."""
        warmed = 0

        for user_id in user_ids:
            try:
                user = User.objects.get(id=user_id)
                key = self.cache.key_generator.generate_key("user", str(user_id))
                user_data = {
                    "id": user.id,
                    "username": user.username,
                    "email": user.email,
                    "is_active": user.is_active,
                    "last_login": user.last_login,
                }
                if self.cache.set(key, user_data):
                    warmed += 1
            except User.DoesNotExist:
                continue

        return warmed

    def _get_sst_rates_to_warm(self) -> List[Tuple[str, str, float]]:
        """Get SST rates to warm in the cache."""
        # Malaysian states and federal territories, plus common SST categories.
        states = [
            "Johor", "Kedah", "Kelantan", "Melaka", "Negeri Sembilan",
            "Pahang", "Perak", "Perlis", "Pulau Pinang", "Sabah",
            "Sarawak", "Selangor", "Terengganu", "WP Kuala Lumpur",
            "WP Labuan", "WP Putrajaya",
        ]
        categories = ["standard", "food", "medical", "education"]

        rates = []
        for state in states:
            for category in categories:
                # Standard-rated supplies default to 6%; other categories to 0%.
                rate = 0.06 if category == "standard" else 0.0
                rates.append((state, category, rate))

        return rates

    def _get_postcodes_to_warm(self) -> List[Tuple[str, Dict[str, Any]]]:
        """Get postcode data to warm in the cache."""
        # Common Malaysian postcodes
        postcodes = [
            ("50000", {"city": "Kuala Lumpur", "state": "WP Kuala Lumpur"}),
            ("50480", {"city": "Kuala Lumpur", "state": "WP Kuala Lumpur"}),
            ("80000", {"city": "Johor Bahru", "state": "Johor"}),
            ("93000", {"city": "Kuching", "state": "Sarawak"}),
            ("88300", {"city": "Kota Kinabalu", "state": "Sabah"}),
        ]

        return postcodes
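
# Example usage (illustrative sketch; typically invoked from a management
# command or a scheduled task rather than at import time):
#
#   warmer = CacheWarmer(CacheManager())
#   counts = warmer.warm_malaysian_data()
#   logger.info("Warmed Malaysian reference data: %s", counts)
#   warmer.warm_user_data([1, 2, 3])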


# Global instances.
# Note: these are created at import time, so their key generators resolve the
# tenant prefix from whichever schema is active when the module is first
# imported (typically the public schema). Use
# tenant_cache_manager.get_cache_manager() for per-tenant access.
tenant_cache_manager = TenantCacheManager()
cache_manager = CacheManager()
malaysian_cache = MalaysianDataCache(cache_manager)
query_cache = QueryCache(cache_manager)
cache_warmer = CacheWarmer(cache_manager)
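
# Example usage of the module-level instances (illustrative sketch):
#
#   malaysian_cache.set_cached_postcode_data(
#       "50000", {"city": "Kuala Lumpur", "state": "WP Kuala Lumpur"}
#   )
#   stats = cache_manager.get_cache_stats()
#   cleared = tenant_cache_manager.clear_all_tenant_cache()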