Some checks failed
System Monitoring / Health Checks (push) Has been cancelled
System Monitoring / Performance Monitoring (push) Has been cancelled
System Monitoring / Database Monitoring (push) Has been cancelled
System Monitoring / Cache Monitoring (push) Has been cancelled
System Monitoring / Log Monitoring (push) Has been cancelled
System Monitoring / Resource Monitoring (push) Has been cancelled
System Monitoring / Uptime Monitoring (push) Has been cancelled
System Monitoring / Backup Monitoring (push) Has been cancelled
System Monitoring / Security Monitoring (push) Has been cancelled
System Monitoring / Monitoring Dashboard (push) Has been cancelled
System Monitoring / Alerting (push) Has been cancelled
Security Scanning / Dependency Scanning (push) Has been cancelled
Security Scanning / Code Security Scanning (push) Has been cancelled
Security Scanning / Secrets Scanning (push) Has been cancelled
Security Scanning / Container Security Scanning (push) Has been cancelled
Security Scanning / Compliance Checking (push) Has been cancelled
Security Scanning / Security Dashboard (push) Has been cancelled
Security Scanning / Security Remediation (push) Has been cancelled
627 lines
22 KiB
Python
627 lines
22 KiB
Python
"""
|
|
Database Configuration Optimization
|
|
|
|
This module provides optimized database configuration settings for the multi-tenant SaaS platform,
|
|
including connection pooling, query optimization, caching strategies, and performance tuning
|
|
specifically designed for Malaysian deployment scenarios.
|
|
"""
|
|
|
|
import os
from dataclasses import asdict, dataclass, field
from enum import Enum
from typing import Any, Dict, List, Optional
|
|
|
|
|
|
class DatabaseEngine(Enum):
    """Database engines the platform can be configured against.

    Note that the SQLite member's value is "sqlite3" (the driver/backend
    name), not "sqlite".
    """

    POSTGRESQL = "postgresql"
    MYSQL = "mysql"
    SQLITE = "sqlite3"
|
|
|
|
|
|
class CacheBackend(Enum):
    """Cache backends the platform knows how to wire into Django.

    DUMMY disables caching entirely (useful for development), DATABASE
    stores entries in a database table.
    """

    REDIS = "redis"
    MEMCACHED = "memcached"
    DATABASE = "database"
    DUMMY = "dummy"
|
|
|
|
|
|
@dataclass
class ConnectionPoolConfig:
    """Tunable parameters for database connection pooling.

    All timeout and interval values are expressed in seconds.
    """

    max_connections: int = 100        # upper bound on pooled connections
    min_connections: int = 2          # connections kept open at all times
    connect_timeout: int = 10         # max wait when opening a connection
    idle_timeout: int = 300           # reclaim connections idle longer than this
    max_lifetime: int = 3600          # hard cap on any connection's age
    reuse_timeout: int = 30           # window in which a released connection may be reused
    health_check_interval: int = 60   # cadence of pool health probes
    health_check_timeout: int = 5     # per-probe timeout
|
|
|
|
|
|
@dataclass
class QueryOptimizationConfig:
    """Knobs controlling query optimization and slow-query monitoring."""

    slow_query_threshold: float = 1.0   # queries slower than this (seconds) are "slow"
    query_cache_timeout: int = 3600     # TTL for cached query results (seconds)
    enable_query_logging: bool = True   # whether query logging is on
    max_query_length: int = 10000       # longest query text handled/logged
    force_index_hints: bool = False     # inject index hints into generated SQL
    optimize_joins: bool = True         # apply join-order optimizations
    batch_size: int = 1000              # rows per batch for bulk operations
|
|
|
|
|
|
@dataclass
class CacheConfig:
    """Configuration for the application cache layer.

    ``options`` previously used the ``None``-default-plus-``__post_init__``
    workaround for mutable defaults; ``field(default_factory=dict)`` is the
    idiomatic dataclass fix and guarantees a fresh dict per instance. An
    explicit ``options=None`` is still normalised to ``{}`` so existing
    callers keep working.
    """

    backend: CacheBackend = CacheBackend.REDIS   # which cache backend to use
    location: str = "redis://127.0.0.1:6379/1"   # backend connection URL
    timeout: int = 300                           # default entry TTL (seconds)
    key_prefix: str = "saas_"                    # namespace prefix for all keys
    version: int = 1                             # cache key version
    # Backend-specific option dict; never shared between instances.
    options: Optional[Dict[str, Any]] = field(default_factory=dict)

    def __post_init__(self) -> None:
        # Backward compatibility: callers that pass None get an empty dict,
        # exactly as before.
        if self.options is None:
            self.options = {}
|
|
|
|
|
|
def _default_shared_tables() -> List[str]:
    """Return a fresh copy of the tables shared across all tenant schemas."""
    return [
        "public.tenant",
        "public.django_migrations",
        "public.django_content_type",
        "public.django_admin_log",
    ]


@dataclass
class MultiTenantConfig:
    """Configuration for multi-tenant database optimization.

    ``shared_tables`` was previously annotated ``List[str]`` with a ``None``
    default; the annotation is corrected to ``Optional[List[str]]`` and the
    default now comes from a ``default_factory`` so each instance gets its
    own list. An explicit ``shared_tables=None`` is still replaced with the
    default list, preserving the old contract.
    """

    # Tables that live in the shared (public) schema rather than per tenant.
    shared_tables: Optional[List[str]] = field(default_factory=_default_shared_tables)
    tenant_table_prefix: str = "tenant_"        # prefix for tenant-scoped tables
    enable_tenant_caching: bool = True          # cache tenant lookups
    tenant_cache_timeout: int = 1800            # tenant cache TTL (seconds)
    enable_cross_tenant_queries: bool = False   # allow queries spanning tenants
    tenant_isolation_level: str = "strict"      # "strict" or "moderate"

    def __post_init__(self) -> None:
        # Backward compatibility: explicit None falls back to the defaults.
        if self.shared_tables is None:
            self.shared_tables = _default_shared_tables()
|
|
|
|
|
|
@dataclass
class MalaysianConfig:
    """Settings specific to a Malaysian deployment.

    Covers localisation (timezone, locale, currency) plus feature toggles
    for the Malaysia-specific caching and index optimisations.
    """

    timezone: str = "Asia/Kuala_Lumpur"      # server/display timezone
    locale: str = "ms_MY"                    # Bahasa Malaysia locale
    currency: str = "MYR"                    # Malaysian Ringgit
    enable_local_caching: bool = True        # cache Malaysia-specific lookups
    local_cache_timeout: int = 900           # TTL for that cache (seconds)
    malaysian_indexes_enabled: bool = True   # maintain MY-specific DB indexes
    sst_calculation_cache: bool = True       # cache SST calculation results
    ic_validation_cache: bool = True         # cache IC-number validation results
    address_optimization: bool = True        # enable MY address optimisation
|
|
|
|
|
|
@dataclass
class PerformanceConfig:
    """Master switches for the performance-related subsystems."""

    enable_connection_pooling: bool = True    # use pooled DB connections
    enable_query_optimization: bool = True    # apply query optimizations
    enable_caching: bool = True               # enable the cache layer
    enable_monitoring: bool = True            # collect runtime metrics
    log_slow_queries: bool = True             # record slow-query log entries
    enable_query_profiling: bool = False      # detailed per-query profiling
    enable_database_maintenance: bool = True  # run scheduled maintenance tasks
|
|
|
|
|
|
class DatabaseConfig:
    """
    Centralized database configuration management for the multi-tenant SaaS platform.

    This class provides optimized configuration settings for different deployment
    scenarios with specific optimizations for Malaysian market requirements.

    Recognized environments are "production" and "staging"; any other value
    is treated as development.

    Bug fixed: ``get_database_optimization_settings`` called
    ``dataclasses.asdict`` without it ever being imported, which raised a
    ``NameError`` at runtime; the import is now explicit.
    """

    def __init__(self, environment: str = "production"):
        self.environment = environment
        # Resolve every sub-configuration once, up front, from the environment.
        self.connection_pool = self._get_connection_pool_config()
        self.query_optimization = self._get_query_optimization_config()
        self.cache = self._get_cache_config()
        self.multi_tenant = self._get_multi_tenant_config()
        self.malaysian = self._get_malaysian_config()
        self.performance = self._get_performance_config()

    def _get_connection_pool_config(self) -> ConnectionPoolConfig:
        """Return connection-pool settings tuned for the current environment."""
        if self.environment == "production":
            # Large pool, long-lived connections, relaxed probe cadence.
            return ConnectionPoolConfig(
                max_connections=200,
                min_connections=10,
                connect_timeout=10,
                idle_timeout=600,
                max_lifetime=7200,
                reuse_timeout=60,
                health_check_interval=120,
                health_check_timeout=10,
            )
        elif self.environment == "staging":
            return ConnectionPoolConfig(
                max_connections=100,
                min_connections=5,
                connect_timeout=15,
                idle_timeout=300,
                max_lifetime=3600,
                reuse_timeout=30,
                health_check_interval=60,
                health_check_timeout=5,
            )
        else:  # development
            # Small pool with aggressive recycling for fast local iteration.
            return ConnectionPoolConfig(
                max_connections=50,
                min_connections=2,
                connect_timeout=5,
                idle_timeout=60,
                max_lifetime=1800,
                reuse_timeout=15,
                health_check_interval=30,
                health_check_timeout=3,
            )

    def _get_query_optimization_config(self) -> QueryOptimizationConfig:
        """Return query-optimization settings tuned for the current environment."""
        if self.environment == "production":
            # Tighter slow-query threshold and larger batches in production.
            return QueryOptimizationConfig(
                slow_query_threshold=0.5,
                query_cache_timeout=7200,
                enable_query_logging=True,
                max_query_length=50000,
                force_index_hints=True,
                optimize_joins=True,
                batch_size=2000,
            )
        elif self.environment == "staging":
            return QueryOptimizationConfig(
                slow_query_threshold=1.0,
                query_cache_timeout=3600,
                enable_query_logging=True,
                max_query_length=10000,
                force_index_hints=False,
                optimize_joins=True,
                batch_size=1000,
            )
        else:  # development
            return QueryOptimizationConfig(
                slow_query_threshold=2.0,
                query_cache_timeout=1800,
                enable_query_logging=False,
                max_query_length=10000,
                force_index_hints=False,
                optimize_joins=False,
                batch_size=500,
            )

    def _get_cache_config(self) -> CacheConfig:
        """Return cache settings tuned for the current environment."""
        if self.environment == "production":
            return CacheConfig(
                backend=CacheBackend.REDIS,
                location=os.getenv("REDIS_URL", "redis://127.0.0.1:6379/1"),
                timeout=3600,
                key_prefix="saas_prod_",
                version=1,
                options={
                    "CLIENT_KWARGS": {
                        "socket_connect_timeout": 5,
                        "socket_timeout": 5,
                        "retry_on_timeout": True,
                    }
                },
            )
        elif self.environment == "staging":
            # Staging shares the Redis host but uses database index /2.
            return CacheConfig(
                backend=CacheBackend.REDIS,
                location=os.getenv("REDIS_URL", "redis://127.0.0.1:6379/2"),
                timeout=1800,
                key_prefix="saas_staging_",
                version=1,
                options={
                    "CLIENT_KWARGS": {
                        "socket_connect_timeout": 10,
                        "socket_timeout": 10,
                    }
                },
            )
        else:  # development — no real cache backend
            return CacheConfig(
                backend=CacheBackend.DUMMY,
                location="",
                timeout=300,
                key_prefix="saas_dev_",
                version=1,
                options={},
            )

    def _get_multi_tenant_config(self) -> MultiTenantConfig:
        """Return multi-tenant settings tuned for the current environment."""
        # Note: this list includes django_session on top of the dataclass's
        # own default shared-table list.
        shared_tables = [
            "public.tenant",
            "public.django_migrations",
            "public.django_content_type",
            "public.django_admin_log",
            "public.django_session",
        ]

        if self.environment == "production":
            # Strict isolation: no cross-tenant queries allowed.
            return MultiTenantConfig(
                shared_tables=shared_tables,
                tenant_table_prefix="tenant_",
                enable_tenant_caching=True,
                tenant_cache_timeout=1800,
                enable_cross_tenant_queries=False,
                tenant_isolation_level="strict",
            )
        else:
            # Non-production relaxes isolation to ease debugging/testing.
            return MultiTenantConfig(
                shared_tables=shared_tables,
                tenant_table_prefix="tenant_",
                enable_tenant_caching=True,
                tenant_cache_timeout=900,
                enable_cross_tenant_queries=True,
                tenant_isolation_level="moderate",
            )

    def _get_malaysian_config(self) -> MalaysianConfig:
        """Return the Malaysian-specific settings (identical in all environments)."""
        return MalaysianConfig(
            timezone="Asia/Kuala_Lumpur",
            locale="ms_MY",
            currency="MYR",
            enable_local_caching=True,
            local_cache_timeout=900,
            malaysian_indexes_enabled=True,
            sst_calculation_cache=True,
            ic_validation_cache=True,
            address_optimization=True,
        )

    def _get_performance_config(self) -> PerformanceConfig:
        """Return the performance master switches for the current environment."""
        if self.environment == "production":
            return PerformanceConfig(
                enable_connection_pooling=True,
                enable_query_optimization=True,
                enable_caching=True,
                enable_monitoring=True,
                log_slow_queries=True,
                enable_query_profiling=True,
                enable_database_maintenance=True,
            )
        elif self.environment == "staging":
            # Same as production except query profiling stays off.
            return PerformanceConfig(
                enable_connection_pooling=True,
                enable_query_optimization=True,
                enable_caching=True,
                enable_monitoring=True,
                log_slow_queries=True,
                enable_query_profiling=False,
                enable_database_maintenance=True,
            )
        else:  # development — everything off for simplicity
            return PerformanceConfig(
                enable_connection_pooling=False,
                enable_query_optimization=False,
                enable_caching=False,
                enable_monitoring=False,
                log_slow_queries=False,
                enable_query_profiling=False,
                enable_database_maintenance=False,
            )

    def get_django_database_config(self) -> Dict[str, Any]:
        """
        Get Django database configuration dictionary.

        Returns:
            Dictionary suitable for the Django DATABASES setting, keyed by
            the "default" alias.
        """
        base_config = {
            # django-tenants backend provides schema-per-tenant routing.
            "ENGINE": "django_tenants.postgresql_backend",
            "NAME": os.getenv("DB_NAME", "saas_platform"),
            "USER": os.getenv("DB_USER", "postgres"),
            "PASSWORD": os.getenv("DB_PASSWORD", ""),
            "HOST": os.getenv("DB_HOST", "localhost"),
            "PORT": os.getenv("DB_PORT", "5432"),
            "CONN_MAX_AGE": self.connection_pool.max_lifetime,
            "OPTIONS": {
                "connect_timeout": self.connection_pool.connect_timeout,
                "application_name": f"saas_platform_{self.environment}",
                # Server-side timeouts below are in milliseconds.
                "tcp_user_timeout": 10000,
                "statement_timeout": 30000,
                "idle_in_transaction_session_timeout": 60000,
            },
        }

        # Add connection pooling options if enabled.
        if self.performance.enable_connection_pooling:
            base_config["OPTIONS"].update({
                "MAX_CONNS": self.connection_pool.max_connections,
                "MIN_CONNS": self.connection_pool.min_connections,
                "REUSE_CONNS": self.connection_pool.reuse_timeout,
                "IDLE_TIMEOUT": self.connection_pool.idle_timeout,
            })

        return {
            "default": base_config,
        }

    def get_django_cache_config(self) -> Dict[str, Any]:
        """
        Get Django cache configuration dictionary.

        Returns:
            Dictionary suitable for the Django CACHES setting. With a Redis
            backend this includes dedicated "tenant_cache" and
            "malaysian_cache" aliases on separate Redis databases.
        """
        if not self.performance.enable_caching:
            # Caching disabled entirely: dummy backend only.
            return {
                "default": {
                    "BACKEND": "django.core.cache.backends.dummy.DummyCache",
                },
            }

        if self.cache.backend == CacheBackend.REDIS:
            # NOTE(review): the "/1" → "/2"/"/3" substitution assumes the
            # configured location ends in Redis DB index 1 — confirm when
            # REDIS_URL is overridden.
            return {
                "default": {
                    "BACKEND": "django_redis.cache.RedisCache",
                    "LOCATION": self.cache.location,
                    "TIMEOUT": self.cache.timeout,
                    "KEY_PREFIX": self.cache.key_prefix,
                    "VERSION": self.cache.version,
                    "OPTIONS": self.cache.options,
                },
                "tenant_cache": {
                    "BACKEND": "django_redis.cache.RedisCache",
                    "LOCATION": self.cache.location.replace("/1", "/2"),
                    "TIMEOUT": self.multi_tenant.tenant_cache_timeout,
                    "KEY_PREFIX": "tenant_",
                    "VERSION": 1,
                    "OPTIONS": self.cache.options,
                },
                "malaysian_cache": {
                    "BACKEND": "django_redis.cache.RedisCache",
                    "LOCATION": self.cache.location.replace("/1", "/3"),
                    "TIMEOUT": self.malaysian.local_cache_timeout,
                    "KEY_PREFIX": "malaysian_",
                    "VERSION": 1,
                    "OPTIONS": self.cache.options,
                },
            }
        elif self.cache.backend == CacheBackend.MEMCACHED:
            return {
                "default": {
                    "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
                    "LOCATION": self.cache.location,
                    "TIMEOUT": self.cache.timeout,
                    "KEY_PREFIX": self.cache.key_prefix,
                    "VERSION": self.cache.version,
                },
            }
        else:
            # DATABASE (and any other) backend falls back to a cache table.
            return {
                "default": {
                    "BACKEND": "django.core.cache.backends.db.DatabaseCache",
                    "LOCATION": "cache_table",
                    "TIMEOUT": self.cache.timeout,
                    "KEY_PREFIX": self.cache.key_prefix,
                    "VERSION": self.cache.version,
                },
            }

    def get_database_optimization_settings(self) -> Dict[str, Any]:
        """
        Get database optimization settings.

        Returns:
            Dictionary with every sub-configuration serialized to a plain dict.
        """
        # Bug fix: asdict was previously referenced without being imported
        # anywhere, raising NameError on first call.
        from dataclasses import asdict

        return {
            "connection_pool": asdict(self.connection_pool),
            "query_optimization": asdict(self.query_optimization),
            "cache": asdict(self.cache),
            "multi_tenant": asdict(self.multi_tenant),
            "malaysian": asdict(self.malaysian),
            "performance": asdict(self.performance),
        }

    def get_postgresql_settings(self) -> List[str]:
        """
        Get PostgreSQL configuration settings.

        Returns:
            List of "ALTER SYSTEM SET ..." commands for the current
            environment; empty for development.
        """
        settings = []

        if self.environment == "production":
            settings.extend([
                "ALTER SYSTEM SET shared_buffers = '256MB'",
                "ALTER SYSTEM SET effective_cache_size = '1GB'",
                "ALTER SYSTEM SET maintenance_work_mem = '64MB'",
                "ALTER SYSTEM SET checkpoint_completion_target = 0.9",
                "ALTER SYSTEM SET wal_buffers = '16MB'",
                "ALTER SYSTEM SET default_statistics_target = 100",
                "ALTER SYSTEM SET random_page_cost = 1.1",
                "ALTER SYSTEM SET effective_io_concurrency = 200",
                "ALTER SYSTEM SET work_mem = '4MB'",
                "ALTER SYSTEM SET min_wal_size = '1GB'",
                "ALTER SYSTEM SET max_wal_size = '4GB'",
                "ALTER SYSTEM SET max_worker_processes = 8",
                "ALTER SYSTEM SET max_parallel_workers_per_gather = 4",
                "ALTER SYSTEM SET max_parallel_workers = 8",
                "ALTER SYSTEM SET max_parallel_maintenance_workers = 4",
                "ALTER SYSTEM SET log_statement = 'mod'",
                "ALTER SYSTEM SET log_min_duration_statement = '500'",
                "ALTER SYSTEM SET log_checkpoints = 'on'",
                "ALTER SYSTEM SET log_connections = 'on'",
                "ALTER SYSTEM SET log_disconnections = 'on'",
                "ALTER SYSTEM SET log_lock_waits = 'on'",
                "ALTER SYSTEM SET log_temp_files = '0'",
                "ALTER SYSTEM SET log_timezone = 'Asia/Kuala_Lumpur'",
                "ALTER SYSTEM SET timezone = 'Asia/Kuala_Lumpur'",
            ])
        elif self.environment == "staging":
            settings.extend([
                "ALTER SYSTEM SET shared_buffers = '128MB'",
                "ALTER SYSTEM SET effective_cache_size = '512MB'",
                "ALTER SYSTEM SET maintenance_work_mem = '32MB'",
                "ALTER SYSTEM SET checkpoint_completion_target = 0.7",
                "ALTER SYSTEM SET default_statistics_target = 50",
                "ALTER SYSTEM SET work_mem = '2MB'",
                "ALTER SYSTEM SET log_min_duration_statement = '1000'",
                "ALTER SYSTEM SET log_timezone = 'Asia/Kuala_Lumpur'",
                "ALTER SYSTEM SET timezone = 'Asia/Kuala_Lumpur'",
            ])

        return settings

    def get_environment_overrides(self) -> Dict[str, Any]:
        """
        Get environment-specific overrides.

        Reads the DB_CONFIG_OVERRIDES environment variable as JSON.

        Returns:
            Parsed overrides dict, or {} when the variable is unset or
            contains invalid JSON (deliberately best-effort).
        """
        env_overrides = os.getenv("DB_CONFIG_OVERRIDES")
        if env_overrides:
            try:
                import json
                return json.loads(env_overrides)
            except json.JSONDecodeError:
                # Malformed overrides are silently ignored by design.
                pass
        return {}

    def validate_configuration(self) -> List[str]:
        """
        Validate the current configuration.

        Returns:
            List of validation warnings (empty when everything checks out).
        """
        warnings = []

        # Check connection pool settings.
        if self.performance.enable_connection_pooling:
            if self.connection_pool.max_connections < 10:
                warnings.append("Max connections might be too low for production")
            if self.connection_pool.min_connections > self.connection_pool.max_connections // 2:
                warnings.append("Min connections should not exceed half of max connections")

        # Check cache settings.
        if self.performance.enable_caching:
            if self.cache.backend == CacheBackend.REDIS:
                if not self.cache.location.startswith("redis://"):
                    warnings.append("Redis URL format is incorrect")

        # Check query optimization settings.
        if self.query_optimization.slow_query_threshold < 0.1:
            warnings.append("Slow query threshold might be too aggressive")

        # Check multi-tenant settings.
        if not self.multi_tenant.shared_tables:
            warnings.append("No shared tables configured for multi-tenant setup")

        return warnings

    def get_performance_recommendations(self) -> List[str]:
        """
        Get performance recommendations based on current configuration.

        Returns:
            List of human-readable performance recommendations.
        """
        recommendations = []

        if self.environment == "production":
            if self.connection_pool.max_connections < 100:
                recommendations.append("Consider increasing max_connections for better concurrency")
            if self.query_optimization.slow_query_threshold > 1.0:
                recommendations.append("Consider reducing slow_query_threshold for better monitoring")
            if not self.performance.enable_query_profiling:
                recommendations.append("Consider enabling query profiling for production optimization")

        # Malaysian-specific recommendations.
        if self.malaysian.enable_local_caching:
            recommendations.append("Malaysian local caching enabled - monitor cache hit rates")
        if self.malaysian.malaysian_indexes_enabled:
            recommendations.append("Ensure Malaysian-specific indexes are created and maintained")

        # Multi-tenant recommendations.
        if self.multi_tenant.enable_tenant_caching:
            recommendations.append("Monitor tenant cache hit rates and memory usage")

        return recommendations
|
|
|
|
|
|
# Configuration factory functions
|
|
def get_config(environment: Optional[str] = None) -> DatabaseConfig:
    """
    Get database configuration for the specified environment.

    Args:
        environment: Environment name ("production", "staging",
            "development"). When None (the default), the DJANGO_ENV
            environment variable is used, falling back to "development".
            (Annotation corrected from ``str`` to ``Optional[str]``.)

    Returns:
        DatabaseConfig instance for the resolved environment.
    """
    if environment is None:
        environment = os.getenv("DJANGO_ENV", "development")

    return DatabaseConfig(environment)
|
|
|
|
|
|
def get_production_config() -> DatabaseConfig:
    """Build a DatabaseConfig tuned for the production environment."""
    return DatabaseConfig("production")
|
|
|
|
|
|
def get_staging_config() -> DatabaseConfig:
    """Build a DatabaseConfig tuned for the staging environment."""
    return DatabaseConfig("staging")
|
|
|
|
|
|
def get_development_config() -> DatabaseConfig:
    """Build a DatabaseConfig tuned for local development."""
    return DatabaseConfig("development")
|
|
|
|
|
|
# Configuration validation
|
|
def validate_environment_config(environment: str) -> bool:
    """
    Validate configuration for the specified environment.

    Args:
        environment: Environment name.

    Returns:
        True when the configuration produced no validation warnings.
    """
    config = get_config(environment)
    # An empty warning list means the configuration is considered valid.
    return not config.validate_configuration()
|
|
|
|
|
|
# Export classes and functions
|
|
# Public API of this module.
__all__ = [
    # Core configuration classes
    'DatabaseConfig',
    'ConnectionPoolConfig',
    'QueryOptimizationConfig',
    'CacheConfig',
    'MultiTenantConfig',
    'MalaysianConfig',
    'PerformanceConfig',
    # Enumerations
    'DatabaseEngine',
    'CacheBackend',
    # Factory and validation helpers
    'get_config',
    'get_production_config',
    'get_staging_config',
    'get_development_config',
    'validate_environment_config',
]