init commit

This commit is contained in:
2025-11-02 01:14:41 -05:00
commit 7bf81109b3
31 changed files with 2387 additions and 0 deletions

279
app/utils/api_response.py Normal file
View File

@@ -0,0 +1,279 @@
from __future__ import annotations
from dataclasses import asdict, is_dataclass
from datetime import datetime, timezone
from typing import Any, Callable, Dict, Optional, Tuple, Union
import uuid
from flask import jsonify, make_response, request, Flask, Response
class ApiResponder:
    """
    Centralized JSON response builder for Flask APIs.

    This class enforces a consistent envelope for all responses:
        {
          "app": "<APP NAME>",
          "version": "<APP VERSION>",
          "status": <HTTP STATUS CODE>,
          "timestamp": "<UTC ISO8601>",
          "request_id": "<optional request id>",
          "result": <your data OR null>,
          "error": {
            "code": "<optional machine code>",
            "message": "<human message>",
            "details": {...}  # optional extras (validation fields, etc.)
          },
          "meta": { ... }  # optional metadata (pagination, etc.)
        }

    Usage:
        responder = ApiResponder(app_name="Code of Conquest",
                                 version_provider=lambda: CURRENT_VERSION)
        return responder.ok({"hello": "world"})
        return responder.created({"id": 123})
        return responder.bad_request("Missing field `name`", details={"field": "name"})
    """

    def __init__(
        self,
        app_name: str,
        version_provider: Union[str, Callable[[], str]],
        include_request_id: bool = True,
        default_headers: Optional[Dict[str, str]] = None,
    ) -> None:
        """
        :param app_name: Human-friendly app name included in every response.
        :param version_provider: Either a version string, or a zero-arg callable
            returning the version string at response-build time (the callable
            form keeps the version fresh if it changes at runtime).
        :param include_request_id: When True, include request id (from
            X-Request-ID header if present, else generated).
        :param default_headers: Extra headers to attach to every response
            (e.g., CORS, caching).
        """
        self.app_name = app_name
        self.version_provider = version_provider
        self.include_request_id = include_request_id
        self.default_headers = default_headers or {}

    # ---------- Public helpers for common statuses ----------
    def ok(self, result: Any = None, meta: Optional[Dict[str, Any]] = None,
           headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """200 OK."""
        return self._build(status=200, result=result, meta=meta, headers=headers)

    def created(self, result: Any = None, meta: Optional[Dict[str, Any]] = None,
                headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """201 Created."""
        return self._build(status=201, result=result, meta=meta, headers=headers)

    def accepted(self, result: Any = None, meta: Optional[Dict[str, Any]] = None,
                 headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """202 Accepted."""
        return self._build(status=202, result=result, meta=meta, headers=headers)

    def no_content(self, headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """
        204 No Content. Returns the standard envelope with result=null for consistency.
        (If you prefer an empty body, switch to make_response(("", 204)) in your code.)
        """
        return self._build(status=204, result=None, headers=headers)

    def bad_request(self, message: str, code: Optional[str] = None,
                    details: Optional[Dict[str, Any]] = None,
                    headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """400 Bad Request."""
        return self._build_error(400, message, code, details, headers)

    def unauthorized(self, message: str = "Unauthorized", code: Optional[str] = None,
                     details: Optional[Dict[str, Any]] = None,
                     headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """401 Unauthorized."""
        return self._build_error(401, message, code, details, headers)

    def forbidden(self, message: str = "Forbidden", code: Optional[str] = None,
                  details: Optional[Dict[str, Any]] = None,
                  headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """403 Forbidden."""
        return self._build_error(403, message, code, details, headers)

    def not_found(self, message: str = "Not Found", code: Optional[str] = None,
                  details: Optional[Dict[str, Any]] = None,
                  headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """404 Not Found."""
        return self._build_error(404, message, code, details, headers)

    def conflict(self, message: str = "Conflict", code: Optional[str] = None,
                 details: Optional[Dict[str, Any]] = None,
                 headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """409 Conflict."""
        return self._build_error(409, message, code, details, headers)

    def unprocessable(self, message: str = "Unprocessable Entity", code: Optional[str] = None,
                      details: Optional[Dict[str, Any]] = None,
                      headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """422 Unprocessable Entity (great for validation errors)."""
        return self._build_error(422, message, code, details, headers)

    def error(self, message: str = "Internal Server Error", code: Optional[str] = None,
              details: Optional[Dict[str, Any]] = None,
              headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """500 Internal Server Error."""
        return self._build_error(500, message, code, details, headers)

    # ---------- Pagination helper ----------
    def paginate(self,
                 items: Any,
                 total: int,
                 page: int,
                 per_page: int,
                 extra_meta: Optional[Dict[str, Any]] = None,
                 headers: Optional[Dict[str, str]] = None) -> Tuple[Response, int]:
        """
        200 OK with pagination metadata.

        :param items: The current page of items (list or serializable value).
        :param total: Total count of items across all pages.
        :param page: 1-based page index.
        :param per_page: Page size.
        :param extra_meta: Optional extra metadata to merge into the meta block.
        """
        meta: Dict[str, Any] = {
            "total": int(total),
            "page": int(page),
            "per_page": int(per_page),
            # Ceiling division via integer math; 0 pages when per_page <= 0.
            "total_pages": int((total + per_page - 1) // per_page) if per_page > 0 else 0,
        }
        if extra_meta is not None:
            meta.update(extra_meta)
        return self._build(status=200, result=items, meta=meta, headers=headers)

    # ---------- Exception binding (optional but handy) ----------
    def register_error_handlers(self, app: Flask) -> None:
        """
        Registers generic error handlers that convert exceptions into standard JSON.
        Override selectively in your app as needed.
        """
        @app.errorhandler(400)
        def _h400(e):  # pragma: no cover
            return self.bad_request(getattr(e, "description", "Bad Request"))

        @app.errorhandler(401)
        def _h401(e):  # pragma: no cover
            return self.unauthorized(getattr(e, "description", "Unauthorized"))

        @app.errorhandler(403)
        def _h403(e):  # pragma: no cover
            return self.forbidden(getattr(e, "description", "Forbidden"))

        @app.errorhandler(404)
        def _h404(e):  # pragma: no cover
            return self.not_found(getattr(e, "description", "Not Found"))

        @app.errorhandler(422)
        def _h422(e):  # pragma: no cover
            message = getattr(e, "description", "Unprocessable Entity")
            # Marshmallow/WTF often attach data on e.data; include if present.
            details = getattr(e, "data", None)
            return self.unprocessable(message=message, details=details)

        @app.errorhandler(500)
        def _h500(e):  # pragma: no cover
            return self.error()

    # ---------- Core builder ----------
    def _resolve_version(self) -> str:
        """
        Resolve the version string, calling version_provider when it is a
        callable (the documented usage passes a lambda; the previous code
        embedded the function object itself into the JSON body).
        """
        provider = self.version_provider
        if callable(provider):
            return str(provider())
        return provider or ""

    def _build(self,
               status: int,
               result: Any = None,
               meta: Optional[Dict[str, Any]] = None,
               headers: Optional[Dict[str, str]] = None,
               error: Optional[Dict[str, Any]] = None) -> Tuple[Response, int]:
        """
        Build the canonical JSON body and Response object.

        :param error: Optional error block placed at the envelope's top-level
            "error" key (matching the documented envelope shape).
        """
        # Convert dataclasses to plain dicts to keep jsonify happy.
        safe_result = self._to_plain(result)
        body: Dict[str, Any] = {
            "app": self.app_name,
            "version": self._resolve_version(),
            "status": int(status),
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }
        if self.include_request_id:
            # Prefer inbound request id if the client provided one.
            req_id = request.headers.get("X-Request-ID")
            if not req_id:
                req_id = str(uuid.uuid4())
            body["request_id"] = req_id
        body["result"] = safe_result
        if error is not None:
            body["error"] = error
        if meta is not None:
            body["meta"] = meta
        response = make_response(jsonify(body), status)
        # Attach default headers first, then per-call overrides.
        for key, value in self.default_headers.items():
            response.headers[key] = value
        if headers is not None:
            for key, value in headers.items():
                response.headers[key] = value
        return response, status

    def _build_error(self,
                     status: int,
                     message: str,
                     code: Optional[str],
                     details: Optional[Dict[str, Any]],
                     headers: Optional[Dict[str, str]]) -> Tuple[Response, int]:
        """
        Build a standardized error envelope.

        The error block now lives at the top-level "error" key as documented
        in the class docstring (previously it was nested under meta["error"],
        contradicting the published contract).
        """
        error_block: Dict[str, Any] = {"message": message}
        if code:
            error_block["code"] = code
        if details is not None:
            # Convert nested dataclasses if any.
            error_block["details"] = self._to_plain(details)
        # Errors carry result=null.
        return self._build(status=status, result=None, headers=headers, error=error_block)

    def _to_plain(self, value: Any) -> Any:
        """
        Convert dataclasses to dicts recursively; leave other JSON-serializable
        values as-is.
        """
        # is_dataclass() is also True for dataclass *types*; asdict() would
        # raise on those, so only convert instances.
        if is_dataclass(value) and not isinstance(value, type):
            return asdict(value)
        if isinstance(value, (list, tuple)):
            return [self._to_plain(item) for item in value]
        if isinstance(value, dict):
            return {k: self._to_plain(v) for k, v in value.items()}
        # Let Flask jsonify handle the rest (numbers, strings, None, bool).
        return value

View File

@@ -0,0 +1,181 @@
# hero_catalog.py
from __future__ import annotations
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, List, Optional, Literal, Any
import yaml # PyYAML
from app.models.enums import HeroClass
from app.models.primitives import Attributes, Resources
# Allowed targeting modes for spells (serialized as plain strings in YAML sheets).
TargetType = Literal["self", "ally", "all_allies", "single_enemy", "all_enemies", "jumps_3_targets"]
@dataclass
class StatusSpec:
    """Chance/potency/duration triple for a status effect attached to a spell."""
    chance: float  # application probability — presumably in [0, 1]; confirm against the combat engine
    potency: int  # effect strength per tick/application
    duration_turns: int  # how many turns the status persists
@dataclass
class SkillRef:
    """Reference to a skill id inside a skill tree, gated by a minimum level."""
    id: str  # id into the skill catalog
    min_level: int = 1  # lowest character level at which the node unlocks
@dataclass
class SpellDef:
    """
    Data-driven spell definition parsed from one YAML mapping.

    `scaling` maps attribute keys to multipliers (e.g. {"int": 0.9, "luk": 0.03});
    `status_chance` maps status ids to StatusSpec entries.
    """
    id: str
    element: Optional[str] = None
    cost_mp: int = 0
    power: int = 0
    scaling: Dict[str, float] = field(default_factory=dict)
    variance: float = 0.0
    target: TargetType = "single_enemy"
    tags: List[str] = field(default_factory=list)
    status_chance: Dict[str, StatusSpec] = field(default_factory=dict)
    crit_bonus: Optional[Dict[str, float]] = None  # e.g. {"chance": 0.05, "multiplier": 1.5}

    @staticmethod
    def from_yaml(id_: str, raw: Dict[str, Any]) -> "SpellDef":
        """Build a SpellDef from a raw YAML mapping, tolerating absent keys."""
        cost_block = raw.get("cost") or {}
        statuses = {
            name: StatusSpec(**spec)
            for name, spec in (raw.get("status_chance") or {}).items()
        }
        attribute_scaling = {
            attr: float(mult) for attr, mult in (raw.get("scaling") or {}).items()
        }
        return SpellDef(
            id=id_,
            element=raw.get("element"),
            cost_mp=int(cost_block.get("mp", 0)),
            power=int(raw.get("power", 0)),
            scaling=attribute_scaling,
            variance=float(raw.get("variance", 0.0)),
            target=(raw.get("target") or "single_enemy"),
            tags=list(raw.get("tags") or []),
            status_chance=statuses,
            crit_bonus=raw.get("crit_bonus"),
        )
@dataclass
class SkillDef:
    """
    Data-driven skill definition parsed from one YAML mapping.

    `passive_modifiers` and `effects` are free-form payloads that the game
    engine interprets at runtime.
    """
    id: str
    type: Literal["active", "passive", "buff", "debuff", "utility"] = "active"
    cost_mp: int = 0
    tags: List[str] = field(default_factory=list)
    passive_modifiers: Dict[str, Any] = field(default_factory=dict)
    effects: List[Dict[str, Any]] = field(default_factory=list)

    @staticmethod
    def from_yaml(id_: str, raw: Dict[str, Any]) -> "SkillDef":
        """Build a SkillDef from a raw YAML mapping, tolerating absent keys."""
        cost_block = raw.get("cost") or {}
        kind = raw.get("type") or "active"
        return SkillDef(
            id=id_,
            type=kind,
            cost_mp=int(cost_block.get("mp", 0)),
            tags=list(raw.get("tags") or []),
            passive_modifiers=dict(raw.get("passive_modifiers") or {}),
            effects=list(raw.get("effects") or []),
        )
@dataclass
class HeroArchetype:
    """One playable class sheet, parsed from a YAML file by HeroDataRegistry."""
    key: HeroClass  # validated enum member from the sheet's 'key' field
    display_name: str
    background: str
    # Additive base stats / resources this class contributes.
    base_attributes: Attributes = field(default_factory=Attributes)
    starting_resources: Resources = field(default_factory=Resources)
    # Ids granted at character creation (resolved against the catalogs below).
    starting_skills: List[str] = field(default_factory=list)
    starting_spells: List[str] = field(default_factory=list)
    # Tree name -> ordered skill references (each gated by min_level).
    skill_trees: Dict[str, List[SkillRef]] = field(default_factory=dict)
    # Character level -> spell ids unlocked at that level.
    spells_by_level: Dict[int, List[str]] = field(default_factory=dict)
    bonus_abilities: List[str] = field(default_factory=list)
    # Local, per-class catalogs (optional—can be empty if you centralize elsewhere)
    skills: Dict[str, SkillDef] = field(default_factory=dict)
    spells: Dict[str, SpellDef] = field(default_factory=dict)
class HeroDataRegistry:
    """
    In-memory catalog of YAML-defined class sheets, keyed by HeroClass (enum).

    `load_dir` parses every *.yml / *.yaml sheet in a directory into a
    HeroArchetype; local skill/spell definitions are also promoted into global
    maps so they can be looked up across classes (last definition wins on
    duplicate ids).
    """

    def __init__(self) -> None:
        self._by_class: Dict[HeroClass, HeroArchetype] = {}
        self._skills: Dict[str, SkillDef] = {}
        self._spells: Dict[str, SpellDef] = {}

    def load_dir(self, directory: Path | str) -> None:
        """
        Load every YAML class sheet directly inside *directory* (sorted order).

        :raises ValueError: when a sheet is not a YAML mapping, is missing its
            required 'key' field, or 'key' is not a valid HeroClass value.
        """
        directory = Path(directory)
        for path in sorted(directory.glob("*.y*ml")):
            with path.open("r", encoding="utf-8") as f:
                raw = yaml.safe_load(f) or {}
            # Guard against valid-but-non-mapping YAML (e.g. a bare list), which
            # would otherwise fail later with an opaque AttributeError.
            if not isinstance(raw, dict):
                raise ValueError(f"{path.name}: expected a YAML mapping at top level")
            # --- required ---
            key_raw = raw.get("key")
            if not key_raw:
                raise ValueError(f"{path.name}: missing required 'key' field")
            key = HeroClass(key_raw)  # validate against Enum
            # --- optional / nested ---
            base_attributes = Attributes(**(raw.get("base_attributes") or {}))
            starting_resources = Resources(**(raw.get("starting_resources") or {}))
            # Skill trees: tree name -> list of typed SkillRef nodes.
            raw_trees = raw.get("skill_trees") or {}
            trees: Dict[str, List[SkillRef]] = {}
            for tree_name, nodes in raw_trees.items():
                typed_nodes: List[SkillRef] = []
                if nodes:
                    for node in nodes:
                        typed_nodes.append(SkillRef(**node))
                trees[tree_name] = typed_nodes
            # Spells by level (YAML keys may be strings, so coerce to int).
            sbl_in = raw.get("spells_by_level") or {}
            spells_by_level: Dict[int, List[str]] = {}
            for lvl_key, spell_list in sbl_in.items():
                spells_by_level[int(lvl_key)] = list(spell_list or [])
            arch = HeroArchetype(
                key=key,
                display_name=raw.get("display_name", key.value.title()),
                background=raw.get("background", "A person with an unknown origin"),
                base_attributes=base_attributes,
                starting_resources=starting_resources,
                starting_skills=list(raw.get("starting_skills") or []),
                starting_spells=list(raw.get("starting_spells") or []),
                skill_trees=trees,
                spells_by_level=spells_by_level,
                bonus_abilities=list(raw.get("bonus_abilities") or []),
            )
            # Parse local catalogs if present and promote them to the global maps.
            for sid, sdef in (raw.get("skills") or {}).items():
                arch.skills[sid] = SkillDef.from_yaml(sid, sdef)
                self._skills[sid] = arch.skills[sid]
            for spid, spdef in (raw.get("spells") or {}).items():
                arch.spells[spid] = SpellDef.from_yaml(spid, spdef)
                self._spells[spid] = arch.spells[spid]
            self._by_class[key] = arch

    # ---- Lookups (prefer global catalogs for cross-class reuse) ----
    def get_spell(self, spell_id: str) -> SpellDef:
        """Return the SpellDef for *spell_id*; raise a descriptive KeyError otherwise."""
        try:
            return self._spells[spell_id]
        except KeyError:
            # `from None`: the bland original KeyError adds nothing, and the
            # implicit chain produces a noisy double traceback.
            raise KeyError(f"Unknown spell id '{spell_id}'") from None

    def get_skill(self, skill_id: str) -> SkillDef:
        """Return the SkillDef for *skill_id*; raise a descriptive KeyError otherwise."""
        try:
            return self._skills[skill_id]
        except KeyError:
            raise KeyError(f"Unknown skill id '{skill_id}'") from None

    def for_class(self, hero_class: HeroClass) -> HeroArchetype:
        """Return the loaded archetype for *hero_class*; KeyError when not loaded."""
        if hero_class not in self._by_class:
            raise KeyError(f"No archetype loaded for class {hero_class.value}")
        return self._by_class[hero_class]

View File

@@ -0,0 +1,72 @@
# race_catalog.py
from __future__ import annotations
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Any
import yaml
from app.models.enums import Race
from app.models.primitives import Attributes, Resources
from app.models.races import RaceSheet, RacialTrait
from app.utils.hero_catalog import SkillDef, SpellDef
class RaceDataRegistry:
    """
    In-memory catalog of YAML-defined race sheets, keyed by Race (enum).

    Mirrors HeroDataRegistry: per-race skill/spell definitions are also
    promoted into global maps for cross-use at runtime.
    """

    def __init__(self) -> None:
        self._by_race: Dict[Race, RaceSheet] = {}
        # Promoted global catalogs (last definition wins on duplicate ids).
        self._skills: Dict[str, SkillDef] = {}
        self._spells: Dict[str, SpellDef] = {}

    def load_dir(self, directory: str | Path) -> None:
        """
        Load every YAML race sheet directly inside *directory* (sorted order).

        :raises ValueError: when a sheet is not a YAML mapping, is missing its
            required 'key' field, or 'key' is not a valid Race value.
        """
        directory = Path(directory)
        for path in sorted(directory.glob("*.y*ml")):
            with path.open("r", encoding="utf-8") as f:
                raw = yaml.safe_load(f) or {}
            # Same guard as HeroDataRegistry: reject non-mapping YAML early.
            if not isinstance(raw, dict):
                raise ValueError(f"{path.name}: expected a YAML mapping at top level")
            key_raw = raw.get("key")
            if not key_raw:
                raise ValueError(f"{path.name}: missing required 'key'")
            race = Race(key_raw)  # validates enum
            base_attributes = Attributes(**(raw.get("base_attributes") or {}))
            starting_resources = Resources(**(raw.get("starting_resources") or {}))
            sheet = RaceSheet(
                key=race,
                display_name=raw.get("display_name", race.value.title()),
                base_attributes=base_attributes,
                starting_resources=starting_resources,
                starting_skills=list(raw.get("starting_skills") or []),
                starting_spells=list(raw.get("starting_spells") or []),
            )
            # Local skill/spell catalogs (optional); promote to global maps.
            for sid, sdef in (raw.get("skills") or {}).items():
                s = SkillDef.from_yaml(sid, sdef)
                sheet.skills[sid] = s
                self._skills[sid] = s
            for spid, spdef in (raw.get("spells") or {}).items():
                sp = SpellDef.from_yaml(spid, spdef)
                sheet.spells[spid] = sp
                self._spells[spid] = sp
            # Racial traits: free-form id + data payload.
            for t in (raw.get("traits") or []):
                sheet.traits.append(RacialTrait(id=t.get("id"), data=dict(t.get("data") or {})))
            self._by_race[race] = sheet

    def for_race(self, race: Race) -> RaceSheet:
        """Return the loaded sheet for *race*; KeyError when not loaded."""
        if race not in self._by_race:
            raise KeyError(f"No race sheet loaded for {race.value}")
        return self._by_race[race]

    # ---- Global lookups (consistent with HeroDataRegistry's error style) ----
    def get_skill(self, skill_id: str) -> SkillDef:
        """Return the SkillDef for *skill_id*; raise a descriptive KeyError otherwise."""
        try:
            return self._skills[skill_id]
        except KeyError:
            raise KeyError(f"Unknown skill id '{skill_id}'") from None

    def get_spell(self, spell_id: str) -> SpellDef:
        """Return the SpellDef for *spell_id*; raise a descriptive KeyError otherwise."""
        try:
            return self._spells[spell_id]
        except KeyError:
            raise KeyError(f"Unknown spell id '{spell_id}'") from None

View File

@@ -0,0 +1,30 @@
# app/utils/skill_registry.py
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional
import yaml
@dataclass
class Skill:
    """One entry in the flat skill catalog loaded by SkillRegistry."""
    id: str  # unique catalog key
    name: str
    description: str
    tags: List[str]
    max_rank: int = 1  # presumably the upgrade cap for the skill — confirm against engine
class SkillRegistry:
    """Flat id -> Skill lookup, populated from YAML files containing lists of skills."""

    def __init__(self) -> None:
        self.by_id: Dict[str, Skill] = {}

    def load_file(self, path: Path) -> None:
        """Load one YAML file; each list entry becomes a Skill keyed by its id."""
        # Explicit UTF-8: bare read_text() uses the platform default encoding,
        # which breaks non-ASCII YAML on Windows; the other loaders in this
        # project already open their files with encoding="utf-8".
        data = yaml.safe_load(path.read_text(encoding="utf-8")) or []
        for raw in data:
            skill = Skill(**raw)
            self.by_id[skill.id] = skill  # last definition wins on duplicate ids

    def load_dir(self, root: Path) -> None:
        """Load every *.yml / *.yaml file in *root* (sorted for determinism)."""
        for p in sorted(root.glob("*.y*ml")):
            self.load_file(p)

    def get(self, skill_id: str) -> Optional[Skill]:
        """Return the Skill for *skill_id*, or None when unknown."""
        return self.by_id.get(skill_id)

View File

@@ -0,0 +1,33 @@
# app/utils/spell_registry.py
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional
import yaml
@dataclass
class Spell:
    """One entry in the flat spell catalog loaded by SpellRegistry."""
    id: str  # unique catalog key
    name: str
    school: str  # school of magic the spell belongs to
    element: Optional[str] = None
    rank: int = 1
    cost_mp: int = 0  # MP cost to cast
    description: str = ""
    aoe: Optional[str] = None  # area-of-effect descriptor — schema not visible here; confirm in YAML files
class SpellRegistry:
    """Flat id -> Spell lookup, populated from YAML files containing lists of spells."""

    def __init__(self) -> None:
        self.by_id: Dict[str, Spell] = {}

    def load_file(self, path: Path) -> None:
        """Load one YAML file; each list entry becomes a Spell keyed by its id."""
        # Explicit UTF-8: bare read_text() uses the platform default encoding,
        # which breaks non-ASCII YAML on Windows; the other loaders in this
        # project already open their files with encoding="utf-8".
        data = yaml.safe_load(path.read_text(encoding="utf-8")) or []
        for raw in data:
            spell = Spell(**raw)
            self.by_id[spell.id] = spell  # last definition wins on duplicate ids

    def load_dir(self, root: Path) -> None:
        """Load every *.yml / *.yaml file in *root* (sorted for determinism)."""
        for p in sorted(root.glob("*.y*ml")):
            self.load_file(p)

    def get(self, spell_id: str) -> Optional[Spell]:
        """Return the Spell for *spell_id*, or None when unknown."""
        return self.by_id.get(spell_id)

149
app/utils/logging.py Normal file
View File

@@ -0,0 +1,149 @@
"""
Structured logging setup for Code of Conquest.
Environment-aware behavior:
- dev → colorful console output (human-friendly)
- test/prod → JSON logs (machine-friendly)
Features:
- Includes logger name, level, filename, and line number
- Unified stdlib + structlog integration
- Respects LOG_LEVEL from environment
- Works with both ChatGPT/Ollama components and Web/TUI layers
"""
from __future__ import annotations
import logging
import sys
from dataclasses import dataclass
from typing import Optional
import structlog
from structlog.dev import ConsoleRenderer
from structlog.processors import JSONRenderer, TimeStamper, CallsiteParameterAdder, CallsiteParameter
from structlog.stdlib import ProcessorFormatter
@dataclass
class _ConfiguredState:
    """Module-level record of whether configure_logging() has already run."""
    configured: bool = False  # set True after the one-shot configuration
    level_name: str = "INFO"  # the level name chosen at configure time
# Process-wide singleton; configure_logging() is one-shot per process.
_state = _ConfiguredState()
def _level_from_name(name: str) -> int:
mapping = {
"CRITICAL": logging.CRITICAL,
"ERROR": logging.ERROR,
"WARNING": logging.WARNING,
"INFO": logging.INFO,
"DEBUG": logging.DEBUG,
"NOTSET": logging.NOTSET,
}
return mapping.get((name or "INFO").upper(), logging.INFO)
def _shared_processors():
    """
    Processors common to both console and JSON pipelines.

    Adds level, logger name, and callsite metadata (filename/lineno/function).
    """
    callsite = CallsiteParameterAdder(
        parameters=[
            CallsiteParameter.FILENAME,
            CallsiteParameter.LINENO,
            CallsiteParameter.FUNC_NAME,
        ]
    )
    chain = [structlog.stdlib.add_log_level, structlog.stdlib.add_logger_name]
    chain.append(callsite)
    chain.append(structlog.processors.StackInfoRenderer())
    chain.append(structlog.processors.format_exc_info)
    return chain
def _console_renderer():
    """Human-friendly, colorized renderer used in dev."""
    renderer = ConsoleRenderer()
    return renderer
def _json_renderer():
    """Machine-friendly JSON renderer used in test/prod (keys sorted for stable output)."""
    renderer = JSONRenderer(sort_keys=True)
    return renderer
def configure_logging(settings=None) -> None:
    """
    Configure structlog + stdlib logging once for the process.

    Idempotent: returns immediately on subsequent calls once _state.configured
    is set.

    :param settings: Optional settings object (must expose .env.value and
        .log_level); when None, loaded lazily via app.utils.settings.get_settings().
    """
    if _state.configured:
        return
    if settings is None:
        from app.utils.settings import get_settings  # lazy import avoids an import cycle at module load
        settings = get_settings()
    env = settings.env.value
    level_name = settings.log_level or "INFO"
    level = _level_from_name(level_name)
    # Choose renderers: pretty console in dev, JSON elsewhere.
    # NOTE(review): only the non-dev chain adds a TimeStamper, so dev console
    # lines carry no timestamp — confirm that is intentional.
    if env == "dev":
        renderer = _console_renderer()
        foreign_pre_chain = _shared_processors()
    else:
        renderer = _json_renderer()
        foreign_pre_chain = _shared_processors() + [
            TimeStamper(fmt="iso", utc=True, key="ts")
        ]
    # stdlib -> structlog bridge: stdlib LogRecords pass through
    # foreign_pre_chain before reaching the final renderer.
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(level)
    handler.setFormatter(
        ProcessorFormatter(
            processor=renderer,
            foreign_pre_chain=foreign_pre_chain,
        )
    )
    root = logging.getLogger()
    # Replace any handlers installed earlier (e.g. by basicConfig) so output
    # goes through exactly one configured pipeline.
    root.handlers.clear()
    root.setLevel(level)
    root.addHandler(handler)
    # Quiet noisy libs
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("uvicorn").setLevel(logging.INFO)
    # structlog pipeline for loggers obtained via structlog.get_logger()
    structlog.configure(
        processors=[
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.filter_by_level,
            *foreign_pre_chain,
            ProcessorFormatter.wrap_for_formatter,  # hand off to renderer
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )
    _state.configured = True
    _state.level_name = level_name
def get_logger(name: Optional[str] = None) -> structlog.stdlib.BoundLogger:
    """Return a structlog logger, optionally created under *name*."""
    if name is not None:
        return structlog.get_logger(name)
    return structlog.get_logger()

69
app/utils/merging.py Normal file
View File

@@ -0,0 +1,69 @@
# merging.py
from __future__ import annotations
from typing import Iterable, Dict, List, Tuple
from app.models.primitives import Attributes, Resources
from app.models.races import RaceSheet
from app.utils.hero_catalog import HeroArchetype, SkillDef, SpellDef
def _add_attributes(a: Attributes, b: Attributes) -> Attributes:
    """Component-wise sum of two Attributes blocks (race + class)."""
    stat_names = ("base_str", "base_dex", "base_int", "base_wis", "base_luk", "base_cha")
    summed = {name: getattr(a, name) + getattr(b, name) for name in stat_names}
    return Attributes(**summed)
def _add_resources(a: Resources, b: Resources) -> Resources:
    """
    Component-wise sum of two Resources blocks (race + class).

    Gold: the original expression
        a.gold + b.gold if hasattr(a, "gold") else getattr(b, "gold", 0)
    parsed as `(a.gold + b.gold) if hasattr(a, "gold") else ...`, so it raised
    AttributeError whenever `a` carried gold but `b` did not. Using getattr
    with a 0 default on BOTH sides sums gold safely regardless of which side
    defines it.
    """
    return Resources(
        maxhp=a.maxhp + b.maxhp,
        hp=a.hp + b.hp,
        maxmp=a.maxmp + b.maxmp,
        mp=a.mp + b.mp,
        gold=getattr(a, "gold", 0) + getattr(b, "gold", 0),
    )
def _unique_chain(strings: Iterable[str]) -> List[str]:
seen = set()
out: List[str] = []
for s in strings:
if s not in seen:
out.append(s)
seen.add(s)
return out
def merge_catalogs(
    race_skills: Dict[str, "SkillDef"],
    race_spells: Dict[str, "SpellDef"],
    class_skills: Dict[str, "SkillDef"],
    class_spells: Dict[str, "SpellDef"],
) -> Tuple[Dict[str, "SkillDef"], Dict[str, "SpellDef"]]:
    """
    Return merged skill and spell maps. By default, CLASS overrides RACE on conflicts.
    """
    # Later unpacking wins, so class entries shadow race entries.
    merged_skills = {**race_skills, **class_skills}
    merged_spells = {**race_spells, **class_spells}
    return merged_skills, merged_spells
def merge_sheets(race: RaceSheet, cls: HeroArchetype):
    """
    Compute final base Attributes/Resources and starting lists, plus merged catalogs.

    Returns a 6-tuple:
        (attributes, resources, starting_skills, starting_spells,
         skills_catalog, spells_catalog)
    """
    merged_attrs = _add_attributes(race.base_attributes, cls.base_attributes)
    merged_res = _add_resources(race.starting_resources, cls.starting_resources)
    # Preserve order: race entries first, then class entries, dropping repeats.
    skill_ids = _unique_chain(list(race.starting_skills) + list(cls.starting_skills))
    spell_ids = _unique_chain(list(race.starting_spells) + list(cls.starting_spells))
    skills_catalog, spells_catalog = merge_catalogs(
        race.skills, race.spells, cls.skills, cls.spells
    )
    return merged_attrs, merged_res, skill_ids, spell_ids, skills_catalog, spells_catalog

129
app/utils/settings.py Normal file
View File

@@ -0,0 +1,129 @@
"""
Environment-aware settings for Code of Conquest.
- Loads environment variables from OS and `.env` (OS wins).
- Provides repo-relative default paths for data storage.
- Validates a few key fields (env, model backend).
- Ensures important directories exist on first load.
- Exposes a tiny singleton: get_settings().
Style:
- Python 3.11+
- Dataclasses (no Pydantic)
- Docstrings + inline comments
"""
from __future__ import annotations
import os
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Optional
from dotenv import load_dotenv
class Environment(str, Enum):
    """Deployment environment; str-valued so members compare equal to their value strings."""
    DEV = "dev"
    TEST = "test"
    PROD = "prod"
def _repo_root_from_here() -> Path:
"""
Resolve the repository root by walking up from this file.
This file lives at: project/app/core/utils/settings.py
So parents[3] should be the repo root:
parents[0] = utils
parents[1] = core
parents[2] = app
parents[3] = project root
"""
here = Path(__file__).resolve()
repo_root = here.parents[3]
return repo_root
@dataclass
class Settings:
    """
    Settings container for Code of Conquest.

    Load order:
        1) OS environment
        2) .env file (at repo root)
        3) Defaults below

    Env-derived fields use default_factory so os.getenv runs at *instance*
    creation time — i.e. AFTER get_settings() has called load_dotenv() —
    instead of being evaluated once at import time, before `.env` was ever
    loaded (which silently ignored `.env` values for these fields).

    Paths default into the repo under ./data unless overridden.
    """
    # --- Core Tunables ---
    env: Environment = Environment.DEV
    log_level: str = "INFO"
    flask_secret_key: str = field(
        default_factory=lambda: os.getenv("FLASK_SECRET_KEY", "change-me-for-prod")
    )
    # --- Appwrite backend ---
    appwrite_endpoint: str = field(
        default_factory=lambda: os.getenv("APPWRITE_ENDPOINT", "NOT SET")
    )
    appwrite_project_id: str = field(
        default_factory=lambda: os.getenv("APPWRITE_PROJECT_ID", "NOT SET")
    )
    appwrite_api_key: str = field(
        default_factory=lambda: os.getenv("APPWRITE_API_KEY", "NOT SET")
    )
    app_name: str = "Code of Conquest"
    app_version: str = "0.0.1"
    # --- Paths (default under ./data) ---
    repo_root: Path = field(default_factory=_repo_root_from_here)

    def __post_init__(self) -> None:
        # Defensive check: env must be one of the known Environment members.
        if self.env not in (Environment.DEV, Environment.TEST, Environment.PROD):
            raise ValueError(f"Invalid COC_ENV: {self.env}")

    @staticmethod
    def _ensure_dir(path: Path) -> None:
        """Create *path* (and parents) if it does not exist; no-op for None."""
        if path is None:
            return
        if not path.exists():
            path.mkdir(parents=True, exist_ok=True)
# ---- Singleton loader ----
_settings_singleton: Optional[Settings] = None
def get_settings() -> Settings:
    """
    Load settings from environment and `.env` once, then reuse.
    OS env always takes precedence over `.env`.

    Returns:
        Settings: A process-wide singleton instance.
    """
    global _settings_singleton
    if _settings_singleton is not None:
        return _settings_singleton
    # Load .env from repo root; override=False keeps OS env authoritative.
    dotenv_path = _repo_root_from_here() / ".env"
    load_dotenv(dotenv_path=dotenv_path, override=False)
    # Environment selection via table lookup.
    env_str = os.getenv("COC_ENV", "dev").strip().lower()
    env_map = {
        "dev": Environment.DEV,
        "test": Environment.TEST,
        "prod": Environment.PROD,
    }
    env_val = env_map.get(env_str)
    if env_val is None:
        raise ValueError(f"COC_ENV must be one of dev|test|prod, got '{env_str}'")
    # Construct settings
    _settings_singleton = Settings(
        env=env_val,
        log_level=os.getenv("LOG_LEVEL", "INFO").strip().upper(),
    )
    return _settings_singleton

10
app/utils/typed_flask.py Normal file
View File

@@ -0,0 +1,10 @@
# app/core/typed_flask.py
from flask import Flask
from typing import Optional
from app.utils.api_response import ApiResponder
class CoCFlask(Flask):
    """
    A typed subclass of Flask that includes an `api` attribute for IDE support.

    The default None keeps construction side-effect free; the shared
    ApiResponder is presumably assigned during app setup — confirm in the
    app factory.
    """
    # Shared JSON response builder for the app.
    api: Optional[ApiResponder] = None