Backend Changes:
- Add tier-based max_sessions config (free: 1, basic: 2, premium: 3, elite: 5)
- Add DELETE /api/v1/sessions/{id} endpoint for hard session deletion
- Cascade delete chat messages when session is deleted
- Add GET /api/v1/usage endpoint for daily turn limit info
- Replace hardcoded TIER_LIMITS with config-based ai_calls_per_day
- Handle unlimited (-1) tier in rate limiter service (see the sketch after this list)
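A minimal sketch of how the unlimited tier could be handled when the rate limiter checks the daily budget; `is_ai_call_allowed` and its arguments are hypothetical names for illustration, not the actual service code:

    def is_ai_call_allowed(tier: RateLimitTier, calls_made_today: int) -> bool:
        """Return True if the user may make another AI call today."""
        if tier.ai_calls_per_day == -1:
            # -1 marks an unlimited tier, so never block on the daily counter.
            return True
        return calls_made_today < tier.ai_calls_per_day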
Frontend Changes:
- Add inline session delete buttons with HTMX on character list
- Add usage_display.html component showing remaining daily turns
- Display usage indicator on character list and game play pages
- Refresh the page after session deletion to update UI state (see the sketch after this list)
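One way the delete-and-refresh flow could be wired on the server side, assuming a Flask route and HTMX's HX-Refresh response header; the blueprint name and the data-layer helper are hypothetical placeholders, not the actual implementation:

    from flask import Blueprint

    sessions_bp = Blueprint("sessions", __name__)

    def _delete_session_and_messages(session_id: str) -> None:
        # Placeholder for the real data-layer call that removes the session
        # document and cascade-deletes its chat messages.
        ...

    @sessions_bp.route("/api/v1/sessions/<session_id>", methods=["DELETE"])
    def delete_session(session_id: str):
        _delete_session_and_messages(session_id)
        # HX-Refresh tells HTMX to reload the page so the session list and
        # usage indicator reflect the deletion.
        return "", 200, {"HX-Refresh": "true"}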
Documentation:
- Update API_REFERENCE.md with new endpoints and tier limits
- Update API_TESTING.md with session endpoint examples
- Update SESSION_MANAGEMENT.md with tier-based limits
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
"""
|
|
Configuration loader for Code of Conquest.
|
|
|
|
Loads configuration from YAML files and environment variables,
|
|
providing typed access to all configuration values.
|
|
"""
|
|
|
|
import os
|
|
from dataclasses import dataclass, field
|
|
from typing import Dict, List, Optional
|
|
import yaml
|
|
from dotenv import load_dotenv
|
|
|
|
|
|
@dataclass
|
|
class AppConfig:
|
|
"""Application configuration."""
|
|
name: str
|
|
version: str
|
|
environment: str
|
|
debug: bool
|
|
|
|
|
|
@dataclass
|
|
class ServerConfig:
|
|
"""Server configuration."""
|
|
host: str
|
|
port: int
|
|
workers: int
|
|
|
|
|
|
@dataclass
|
|
class RedisConfig:
|
|
"""Redis configuration."""
|
|
host: str
|
|
port: int
|
|
db: int
|
|
max_connections: int
|
|
|
|
@property
|
|
def url(self) -> str:
|
|
"""Generate Redis URL."""
|
|
return f"redis://{self.host}:{self.port}/{self.db}"
|
|
|
|
|
|
@dataclass
|
|
class RQConfig:
|
|
"""RQ (Redis Queue) configuration."""
|
|
queues: List[str]
|
|
worker_timeout: int
|
|
job_timeout: int
|
|
|
|
|
|
@dataclass
|
|
class AIModelConfig:
|
|
"""AI model configuration."""
|
|
provider: str
|
|
model: str
|
|
max_tokens: int
|
|
temperature: float
|
|
|
|
|
|
@dataclass
|
|
class AIConfig:
|
|
"""AI service configuration."""
|
|
timeout: int
|
|
max_retries: int
|
|
cost_alert_threshold: float
|
|
models: Dict[str, AIModelConfig] = field(default_factory=dict)
|
|
|
|
|
|
@dataclass
|
|
class RateLimitTier:
|
|
"""Rate limit configuration for a subscription tier."""
|
|
requests_per_minute: int
|
|
ai_calls_per_day: int
|
|
custom_actions_per_day: int # -1 for unlimited
|
|
custom_action_char_limit: int
|
|
max_sessions: int = 1 # Maximum active game sessions allowed
|
|
|
|
|
|
@dataclass
|
|
class RateLimitingConfig:
|
|
"""Rate limiting configuration."""
|
|
enabled: bool
|
|
storage_url: str
|
|
tiers: Dict[str, RateLimitTier] = field(default_factory=dict)
|
|
|
|
|
|
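
# Illustrative shape of the YAML that Config.load() maps onto these classes.
# The max_sessions values follow the tier limits described in the change
# summary (free: 1, basic: 2, premium: 3, elite: 5); all other numbers are
# placeholders, not the real per-environment settings:
#
#   rate_limiting:
#     enabled: true
#     storage_url: "redis://localhost:6379/1"
#     tiers:
#       free:
#         requests_per_minute: 10
#         ai_calls_per_day: 20
#         custom_actions_per_day: 5
#         custom_action_char_limit: 200
#         max_sessions: 1
#       elite:
#         requests_per_minute: 60
#         ai_calls_per_day: -1        # -1 = unlimited
#         custom_actions_per_day: -1
#         custom_action_char_limit: 500
#         max_sessions: 5
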
@dataclass
class SessionCacheConfig:
    """Session cache configuration for reducing Appwrite API calls."""
    enabled: bool = True
    ttl_seconds: int = 300  # 5 minutes
    redis_db: int = 2  # Separate from RQ (db 0) and rate limiting (db 1)


@dataclass
class AuthConfig:
    """Authentication configuration."""
    cookie_name: str
    duration_normal: int
    duration_remember_me: int
    http_only: bool
    secure: bool
    same_site: str
    path: str
    password_min_length: int
    password_require_uppercase: bool
    password_require_lowercase: bool
    password_require_number: bool
    password_require_special: bool
    name_min_length: int
    name_max_length: int
    email_max_length: int
    session_cache: SessionCacheConfig = field(default_factory=SessionCacheConfig)


@dataclass
class SessionConfig:
    """Game session configuration."""
    timeout_minutes: int
    auto_save_interval: int
    min_players: int
    max_players_by_tier: Dict[str, int] = field(default_factory=dict)


@dataclass
class MarketplaceConfig:
    """Marketplace configuration."""
    auction_check_interval: int
    max_listings_by_tier: Dict[str, int] = field(default_factory=dict)


@dataclass
class CORSConfig:
    """CORS configuration."""
    origins: List[str]


@dataclass
class LoggingConfig:
    """Logging configuration."""
    level: str
    format: str
    handlers: List[str]
    file_path: str


@dataclass
class Config:
    """
    Main configuration container.

    Loads configuration from YAML file based on environment,
    with overrides from environment variables.
    """
    app: AppConfig
    server: ServerConfig
    redis: RedisConfig
    rq: RQConfig
    ai: AIConfig
    rate_limiting: RateLimitingConfig
    auth: AuthConfig
    session: SessionConfig
    marketplace: MarketplaceConfig
    cors: CORSConfig
    logging: LoggingConfig

    # Environment variables (loaded from .env)
    secret_key: str = ""
    appwrite_endpoint: str = ""
    appwrite_project_id: str = ""
    appwrite_api_key: str = ""
    appwrite_database_id: str = ""
    anthropic_api_key: str = ""
    replicate_api_token: str = ""

    @classmethod
    def load(cls, environment: Optional[str] = None) -> 'Config':
        """
        Load configuration from YAML file and environment variables.

        Args:
            environment: Environment name (development, production, etc.).
                If not provided, uses FLASK_ENV from environment.

        Returns:
            Config: Loaded configuration object.

        Raises:
            FileNotFoundError: If config file not found.
            ValueError: If required environment variables missing.
        """
        # Load environment variables from .env file
        load_dotenv()

        # Determine environment
        if environment is None:
            environment = os.getenv('FLASK_ENV', 'development')

        # Load YAML configuration
        config_path = os.path.join('config', f'{environment}.yaml')

        if not os.path.exists(config_path):
            raise FileNotFoundError(
                f"Configuration file not found: {config_path}"
            )

        with open(config_path, 'r') as f:
            config_data = yaml.safe_load(f)

        # Parse configuration sections
        app_config = AppConfig(**config_data['app'])
        server_config = ServerConfig(**config_data['server'])
        redis_config = RedisConfig(**config_data['redis'])
        rq_config = RQConfig(**config_data['rq'])

        # Parse AI models
        ai_models = {}
        for tier, model_data in config_data['ai']['models'].items():
            ai_models[tier] = AIModelConfig(**model_data)

        ai_config = AIConfig(
            timeout=config_data['ai']['timeout'],
            max_retries=config_data['ai']['max_retries'],
            cost_alert_threshold=config_data['ai']['cost_alert_threshold'],
            models=ai_models
        )

        # Parse rate limiting tiers
        rate_limit_tiers = {}
        for tier, tier_data in config_data['rate_limiting']['tiers'].items():
            rate_limit_tiers[tier] = RateLimitTier(**tier_data)

        rate_limiting_config = RateLimitingConfig(
            enabled=config_data['rate_limiting']['enabled'],
            storage_url=config_data['rate_limiting']['storage_url'],
            tiers=rate_limit_tiers
        )

        # Parse auth config with nested session_cache
        auth_data = config_data['auth'].copy()
        session_cache_data = auth_data.pop('session_cache', {})
        session_cache_config = SessionCacheConfig(**session_cache_data) if session_cache_data else SessionCacheConfig()
        auth_config = AuthConfig(**auth_data, session_cache=session_cache_config)

        session_config = SessionConfig(**config_data['session'])
        marketplace_config = MarketplaceConfig(**config_data['marketplace'])
        cors_config = CORSConfig(**config_data['cors'])
        logging_config = LoggingConfig(**config_data['logging'])

        # Load environment variables (secrets)
        secret_key = os.getenv('SECRET_KEY')
        if not secret_key:
            raise ValueError("SECRET_KEY environment variable is required")

        appwrite_endpoint = os.getenv('APPWRITE_ENDPOINT', '')
        appwrite_project_id = os.getenv('APPWRITE_PROJECT_ID', '')
        appwrite_api_key = os.getenv('APPWRITE_API_KEY', '')
        appwrite_database_id = os.getenv('APPWRITE_DATABASE_ID', 'main')
        anthropic_api_key = os.getenv('ANTHROPIC_API_KEY', '')
        replicate_api_token = os.getenv('REPLICATE_API_TOKEN', '')

        # Create and return config object
        return cls(
            app=app_config,
            server=server_config,
            redis=redis_config,
            rq=rq_config,
            ai=ai_config,
            rate_limiting=rate_limiting_config,
            auth=auth_config,
            session=session_config,
            marketplace=marketplace_config,
            cors=cors_config,
            logging=logging_config,
            secret_key=secret_key,
            appwrite_endpoint=appwrite_endpoint,
            appwrite_project_id=appwrite_project_id,
            appwrite_api_key=appwrite_api_key,
            appwrite_database_id=appwrite_database_id,
            anthropic_api_key=anthropic_api_key,
            replicate_api_token=replicate_api_token
        )

    def validate(self) -> None:
        """
        Validate configuration values.

        Raises:
            ValueError: If configuration is invalid.
        """
        # Validate AI API keys if needed
        if self.app.environment == 'production':
            if not self.anthropic_api_key:
                raise ValueError(
                    "ANTHROPIC_API_KEY required in production environment"
                )
            if not self.appwrite_endpoint or not self.appwrite_project_id:
                raise ValueError(
                    "Appwrite configuration required in production environment"
                )

        # Validate logging level
        valid_log_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
        if self.logging.level not in valid_log_levels:
            raise ValueError(
                f"Invalid log level: {self.logging.level}. "
                f"Must be one of {valid_log_levels}"
            )


# Global config instance (loaded lazily)
_config: Optional[Config] = None


def get_config(environment: Optional[str] = None) -> Config:
    """
    Get the global configuration instance.

    Args:
        environment: Optional environment override.

    Returns:
        Config: Configuration object.
    """
    global _config

    if _config is None or environment is not None:
        _config = Config.load(environment)
        _config.validate()

    return _config
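

if __name__ == "__main__":
    # Illustrative smoke test: load the configuration for the current FLASK_ENV
    # and print a few values. Assumes config/<environment>.yaml exists and the
    # required environment variables (e.g. SECRET_KEY) are set.
    cfg = get_config()
    print(f"{cfg.app.name} v{cfg.app.version} ({cfg.app.environment})")
    print(f"Redis URL: {cfg.redis.url}")
    for tier_name, tier in cfg.rate_limiting.tiers.items():
        print(
            f"tier {tier_name}: max_sessions={tier.max_sessions}, "
            f"ai_calls_per_day={tier.ai_calls_per_day}"
        )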