init commit
app/utils/extensions.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# app/utils/extensions.py

from flask_login import LoginManager
from flask import session
from dataclasses import dataclass
from typing import Optional

from app.services.appwrite_client import AppwriteAccountClient

login_manager = LoginManager()


@dataclass
class User:
    id: str
    email: str
    name: Optional[str] = None
    email_verification: bool = False

    # Flask-Login reads these as attributes, so they are exposed as properties
    # rather than plain methods (a bound method would always be truthy).
    @property
    def is_active(self) -> bool:
        return True

    @property
    def is_authenticated(self) -> bool:
        return True

    @property
    def is_anonymous(self) -> bool:
        return False

    def get_id(self) -> str:
        return self.id


@login_manager.user_loader
def load_user(user_id: str) -> Optional[User]:
    # First: use the cached profile stored in the Flask session.
    u = session.get("user_profile")
    if u and u.get("$id") == user_id:
        return User(
            id=u["$id"],
            email=u["email"],
            name=u.get("name"),
            email_verification=bool(u.get("emailVerification", False)),
        )

    # Next: fall back to the Appwrite session secret stored at login.
    secret = session.get("appwrite_cookies")
    if not secret:
        return None

    aw = AppwriteAccountClient(cookies=secret)
    try:
        acc = aw.get_account()
        session["user_profile"] = acc
        return User(
            id=acc["$id"],
            email=acc["email"],
            name=acc.get("name"),
            email_verification=bool(acc.get("emailVerification", False)),
        )
    except Exception:
        return None


def get_client_from_session() -> AppwriteAccountClient:
    secret = session.get("appwrite_cookies")
    if not secret:
        raise RuntimeError("No Appwrite session is available. Please log in.")
    return AppwriteAccountClient(cookies=secret)
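
Usage sketch (not part of this commit): wiring login_manager into a Flask application factory. The create_app() name and the "auth.login" endpoint are illustrative assumptions, not taken from the repo.

from flask import Flask

from app.utils.extensions import login_manager
from app.utils.settings import get_settings


def create_app() -> Flask:
    # Hypothetical factory; only the login_manager wiring is the point here.
    settings = get_settings()
    app = Flask(__name__)
    app.secret_key = settings.flask_secret_key

    # load_user() above resolves users from the cached profile or the stored
    # Appwrite session secret on every request.
    login_manager.init_app(app)
    login_manager.login_view = "auth.login"  # assumed endpoint name

    return app
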
app/utils/logging.py (new file, 149 lines)
@@ -0,0 +1,149 @@
"""
Structured logging setup for Code of Conquest.

Environment-aware behavior:
- dev       → colorful console output (human-friendly)
- test/prod → JSON logs (machine-friendly)

Features:
- Includes logger name, level, filename, and line number
- Unified stdlib + structlog integration
- Respects LOG_LEVEL from the environment
- Works with both ChatGPT/Ollama components and Web/TUI layers
"""

from __future__ import annotations

import logging
import sys
from dataclasses import dataclass
from typing import Optional

import structlog
from structlog.dev import ConsoleRenderer
from structlog.processors import JSONRenderer, TimeStamper, CallsiteParameterAdder, CallsiteParameter
from structlog.stdlib import ProcessorFormatter


@dataclass
class _ConfiguredState:
    configured: bool = False
    level_name: str = "INFO"


_state = _ConfiguredState()


def _level_from_name(name: str) -> int:
    mapping = {
        "CRITICAL": logging.CRITICAL,
        "ERROR": logging.ERROR,
        "WARNING": logging.WARNING,
        "INFO": logging.INFO,
        "DEBUG": logging.DEBUG,
        "NOTSET": logging.NOTSET,
    }
    return mapping.get((name or "INFO").upper(), logging.INFO)


def _shared_processors():
    """
    Processors common to both console and JSON pipelines.
    Adds level, logger name, and callsite metadata.
    """
    return [
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        CallsiteParameterAdder(
            parameters=[
                CallsiteParameter.FILENAME,
                CallsiteParameter.LINENO,
                CallsiteParameter.FUNC_NAME,
            ]
        ),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]


def _console_renderer():
    """Pretty colored logs for development."""
    return ConsoleRenderer()


def _json_renderer():
    """Machine-friendly JSON logs for test/prod."""
    return JSONRenderer(sort_keys=True)


def configure_logging(settings=None) -> None:
    """
    Configure structlog + stdlib logging once for the process.
    """
    if _state.configured:
        return

    if settings is None:
        from app.utils.settings import get_settings  # lazy import to avoid cycles

        settings = get_settings()

    env = settings.env.value
    level_name = settings.log_level or "INFO"
    level = _level_from_name(level_name)

    # Choose the renderer per environment
    if env == "dev":
        renderer = _console_renderer()
        foreign_pre_chain = _shared_processors()
    else:
        renderer = _json_renderer()
        foreign_pre_chain = _shared_processors() + [
            TimeStamper(fmt="iso", utc=True, key="ts")
        ]

    # stdlib -> structlog bridge
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(level)
    handler.setFormatter(
        ProcessorFormatter(
            processor=renderer,
            foreign_pre_chain=foreign_pre_chain,
        )
    )

    root = logging.getLogger()
    root.handlers.clear()
    root.setLevel(level)
    root.addHandler(handler)

    # Quiet noisy libraries
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("uvicorn").setLevel(logging.INFO)

    # structlog pipeline
    structlog.configure(
        processors=[
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.filter_by_level,
            *foreign_pre_chain,
            ProcessorFormatter.wrap_for_formatter,  # hand off to the renderer above
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    _state.configured = True
    _state.level_name = level_name


def get_logger(name: Optional[str] = None) -> structlog.stdlib.BoundLogger:
    """
    Retrieve a structlog logger.
    """
    if name is None:
        return structlog.get_logger()
    return structlog.get_logger(name)
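
Usage sketch (not part of this commit): configuring logging once at startup and emitting a structured event. The event name and fields are placeholders.

from app.utils.logging import configure_logging, get_logger

configure_logging()  # dev → colored console; test/prod → JSON lines on stdout
log = get_logger(__name__)

# Key/value pairs show up as columns in dev and as JSON fields in test/prod.
log.info("user_login", user_id="abc123", source="web")
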
app/utils/settings.py (new file, 129 lines)
@@ -0,0 +1,129 @@
"""
Environment-aware settings for Code of Conquest.

- Loads environment variables from the OS and `.env` (OS wins).
- Provides repo-relative default paths for data storage.
- Validates a few key fields (env, model backend).
- Ensures important directories exist on first load.
- Exposes a tiny singleton: get_settings().

Style:
- Python 3.11+
- Dataclasses (no Pydantic)
- Docstrings + inline comments
"""

from __future__ import annotations

import os
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Optional

from dotenv import load_dotenv


class Environment(str, Enum):
    DEV = "dev"
    TEST = "test"
    PROD = "prod"


def _repo_root_from_here() -> Path:
    """
    Resolve the repository root by walking up from this file.

    This file lives at: project/app/utils/settings.py
    So parents[2] is the repo root:
        parents[0] = utils
        parents[1] = app
        parents[2] = project root
    """
    here = Path(__file__).resolve()
    repo_root = here.parents[2]
    return repo_root


@dataclass
class Settings:
    """
    Settings container for Code of Conquest.

    Load order:
        1) OS environment
        2) .env file (at repo root)
        3) Defaults below

    Paths default into the repo under ./data unless overridden.
    """

    # --- Core tunables ---
    env: Environment = Environment.DEV
    log_level: str = "INFO"
    flask_secret_key: str = os.getenv("FLASK_SECRET_KEY", "change-me-for-prod")

    # --- Appwrite ---
    appwrite_endpoint: str = os.getenv("APPWRITE_ENDPOINT", "NOT SET")
    appwrite_project_id: str = os.getenv("APPWRITE_PROJECT_ID", "NOT SET")
    appwrite_api_key: str = os.getenv("APPWRITE_API_KEY", "NOT SET")

    app_name: str = "Code of Conquest"
    app_version: str = "v 0.0.1"

    # --- Paths (default under ./data) ---
    repo_root: Path = field(default_factory=_repo_root_from_here)

    # --- Build paths for convenience (not env-controlled directly) ---

    def __post_init__(self) -> None:
        # Basic validation
        if self.env not in (Environment.DEV, Environment.TEST, Environment.PROD):
            raise ValueError(f"Invalid COC_ENV: {self.env}")

    @staticmethod
    def _ensure_dir(path: Path) -> None:
        if path is None:
            return
        if not path.exists():
            path.mkdir(parents=True, exist_ok=True)


# ---- Singleton loader ----
_settings_singleton: Optional[Settings] = None


def get_settings() -> Settings:
    """
    Load settings from environment and `.env` once, then reuse.
    OS env always takes precedence over `.env`.

    Returns:
        Settings: A process-wide singleton instance.
    """
    global _settings_singleton
    if _settings_singleton is not None:
        return _settings_singleton

    # Load .env from the repo root; existing OS variables are not overridden.
    repo_root = _repo_root_from_here()
    dotenv_path = repo_root / ".env"
    load_dotenv(dotenv_path=dotenv_path, override=False)

    # Environment
    env_str = os.getenv("COC_ENV", "dev").strip().lower()
    if env_str == "dev":
        env_val = Environment.DEV
    elif env_str == "test":
        env_val = Environment.TEST
    elif env_str == "prod":
        env_val = Environment.PROD
    else:
        raise ValueError(f"COC_ENV must be one of dev|test|prod, got '{env_str}'")

    # Construct the singleton
    _settings_singleton = Settings(
        env=env_val,
        log_level=os.getenv("LOG_LEVEL", "INFO").strip().upper(),
    )

    return _settings_singleton
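
Usage sketch (not part of this commit): reading the process-wide singleton. All fields shown exist on Settings above; the values depend on COC_ENV, LOG_LEVEL, and the .env file at the repo root.

from app.utils.settings import get_settings

settings = get_settings()
print(settings.env.value)           # "dev", "test", or "prod" (from COC_ENV)
print(settings.log_level)           # from LOG_LEVEL, defaults to "INFO"
print(settings.repo_root / ".env")  # the dotenv file get_settings() loads
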
app/utils/tokens.py (new file, 27 lines)
@@ -0,0 +1,27 @@
import time

from flask import session

from ..services.appwrite_client import AppwriteAccountClient


def ensure_fresh_appwrite_jwt(skew_seconds: int = 120) -> str:
    """
    Return a valid Appwrite JWT, refreshing it if it is missing or expiring soon.
    Relies on the Appwrite session cookie saved in Flask's session at login.
    """
    jwt_info = session.get("appwrite_jwt")
    now = int(time.time())

    if jwt_info and isinstance(jwt_info, dict):
        exp = int(jwt_info.get("expire", 0))
        # If the token is still safely valid, reuse it.
        if exp - now > skew_seconds and "jwt" in jwt_info:
            return jwt_info["jwt"]

    # Otherwise mint a new JWT using the user's Appwrite session cookie.
    cookies = session.get("appwrite_cookies")
    if not cookies:
        raise RuntimeError("Missing Appwrite session; user must sign in again.")

    aw = AppwriteAccountClient(cookies=cookies)
    new_jwt = aw.create_jwt()  # -> {"jwt": "...", "expire": <unix timestamp>}
    session["appwrite_jwt"] = new_jwt
    return new_jwt["jwt"]
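
Usage sketch (not part of this commit): calling ensure_fresh_appwrite_jwt from a route before hitting the Appwrite REST API directly. The blueprint, the /account call, and the use of requests are illustrative assumptions; real code would likely go through AppwriteAccountClient instead.

import requests
from flask import Blueprint, jsonify

from app.utils.settings import get_settings
from app.utils.tokens import ensure_fresh_appwrite_jwt

bp = Blueprint("account", __name__)


@bp.get("/me")
def me():
    settings = get_settings()
    jwt = ensure_fresh_appwrite_jwt()  # reuses the cached JWT or mints a new one

    # Appwrite accepts a user JWT via the X-Appwrite-JWT header.
    resp = requests.get(
        f"{settings.appwrite_endpoint}/account",
        headers={
            "X-Appwrite-Project": settings.appwrite_project_id,
            "X-Appwrite-JWT": jwt,
        },
        timeout=10,
    )
    return jsonify(resp.json()), resp.status_code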