init commit

3  app/__init__.py  Normal file
@@ -0,0 +1,3 @@
"""Weather Alerts application package."""

__version__ = "1.0.0"

5  app/config/__init__.py  Normal file
@@ -0,0 +1,5 @@
"""Configuration loading and management."""

from app.config.loader import load_config, AppConfig

__all__ = ["load_config", "AppConfig"]

159  app/config/loader.py  Normal file
@@ -0,0 +1,159 @@
"""Typed configuration loader with YAML and environment variable support."""

import os
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Optional

import yaml
from dotenv import load_dotenv

from app.models.alerts import AlertRules


@dataclass
class AppSettings:
    """Application-level settings."""

    name: str = "weather-alerts"
    version: str = "1.0.0"
    log_level: str = "INFO"


@dataclass
class WeatherSettings:
    """Weather API settings."""

    location: str = "viola,tn"
    hours_ahead: int = 24
    unit_group: str = "us"
    api_key: str = ""


@dataclass
class NtfySettings:
    """Ntfy notification settings."""

    server_url: str = "https://ntfy.sneakygeek.net"
    topic: str = "weather-alerts"
    priority: str = "high"
    tags: list[str] = field(default_factory=lambda: ["cloud", "warning"])
    access_token: str = ""


@dataclass
class NotificationSettings:
    """Notification settings container."""

    ntfy: NtfySettings = field(default_factory=NtfySettings)


@dataclass
class StateSettings:
    """State management settings."""

    file_path: str = "./data/state.json"
    dedup_window_hours: int = 24


@dataclass
class AlertSettings:
    """Alert configuration settings."""

    rules: AlertRules = field(default_factory=AlertRules)


@dataclass
class AppConfig:
    """Complete application configuration."""

    app: AppSettings = field(default_factory=AppSettings)
    weather: WeatherSettings = field(default_factory=WeatherSettings)
    alerts: AlertSettings = field(default_factory=AlertSettings)
    notifications: NotificationSettings = field(default_factory=NotificationSettings)
    state: StateSettings = field(default_factory=StateSettings)


def load_config(
    config_path: Optional[str] = None,
    env_path: Optional[str] = None,
) -> AppConfig:
    """Load configuration from YAML file and environment variables.

    Args:
        config_path: Path to the YAML config file. Defaults to app/config/settings.yaml.
        env_path: Path to the .env file. Defaults to .env in the project root.

    Returns:
        A fully populated AppConfig instance.
    """
    # Load environment variables from .env file
    if env_path:
        load_dotenv(env_path)
    else:
        load_dotenv()

    # Determine config file path
    if config_path is None:
        config_path = os.environ.get(
            "WEATHER_ALERTS_CONFIG",
            str(Path(__file__).parent / "settings.yaml"),
        )

    # Load YAML config
    config_data: dict[str, Any] = {}
    config_file = Path(config_path)
    if config_file.exists():
        with open(config_file) as f:
            config_data = yaml.safe_load(f) or {}

    # Build configuration with defaults
    app_data = config_data.get("app", {})
    weather_data = config_data.get("weather", {})
    alerts_data = config_data.get("alerts", {})
    notifications_data = config_data.get("notifications", {})
    state_data = config_data.get("state", {})

    # Build app settings
    app_settings = AppSettings(
        name=app_data.get("name", "weather-alerts"),
        version=app_data.get("version", "1.0.0"),
        log_level=app_data.get("log_level", "INFO"),
    )

    # Build weather settings with API key from environment
    weather_settings = WeatherSettings(
        location=weather_data.get("location", "viola,tn"),
        hours_ahead=weather_data.get("hours_ahead", 24),
        unit_group=weather_data.get("unit_group", "us"),
        api_key=os.environ.get("VISUALCROSSING_API_KEY", ""),
    )

    # Build alert settings
    rules_data = alerts_data.get("rules", {})
    alert_settings = AlertSettings(rules=AlertRules.from_dict(rules_data))

    # Build notification settings with token from environment
    ntfy_data = notifications_data.get("ntfy", {})
    ntfy_settings = NtfySettings(
        server_url=ntfy_data.get("server_url", "https://ntfy.sneakygeek.net"),
        topic=ntfy_data.get("topic", "weather-alerts"),
        priority=ntfy_data.get("priority", "high"),
        tags=ntfy_data.get("tags", ["cloud", "warning"]),
        access_token=os.environ.get("NTFY_ACCESS_TOKEN", ""),
    )
    notification_settings = NotificationSettings(ntfy=ntfy_settings)

    # Build state settings
    state_settings = StateSettings(
        file_path=state_data.get("file_path", "./data/state.json"),
        dedup_window_hours=state_data.get("dedup_window_hours", 24),
    )

    return AppConfig(
        app=app_settings,
        weather=weather_settings,
        alerts=alert_settings,
        notifications=notification_settings,
        state=state_settings,
    )
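
A minimal usage sketch of the loader above (not part of the commit; it uses only names defined in this diff, and the printed values assume the shipped settings.yaml and defaults):

# Sketch: load settings.yaml plus .env overrides and read a few typed fields.
from app.config.loader import load_config

config = load_config()                         # falls back to app/config/settings.yaml
print(config.weather.location)                 # "viola,tn"
print(config.alerts.rules.wind.speed_above)    # 25
print(bool(config.weather.api_key))            # True only if VISUALCROSSING_API_KEY is set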

40  app/config/settings.example.yaml  Normal file
@@ -0,0 +1,40 @@
# Weather Alerts Configuration Example
# Copy this file to settings.yaml and customize as needed.
# Secrets (API keys, tokens) should be in .env file, not here.

app:
  name: "weather-alerts"
  version: "1.0.0"
  log_level: "INFO"  # DEBUG, INFO, WARNING, ERROR

weather:
  location: "viola,tn"  # City,State or ZIP code
  hours_ahead: 24  # Number of forecast hours to check
  unit_group: "us"  # "us" for Fahrenheit/mph, "metric" for Celsius/kph

alerts:
  rules:
    temperature:
      enabled: true
      below: 32  # Alert when temp falls below this (freezing)
      above: 100  # Alert when temp exceeds this (extreme heat)
    precipitation:
      enabled: true
      probability_above: 70  # Alert when precipitation chance exceeds this %
    wind:
      enabled: true
      speed_above: 25  # Alert when sustained wind exceeds this (mph)
      gust_above: 40  # Alert when wind gusts exceed this (mph)
    severe_weather:
      enabled: true  # Forward severe weather alerts from the API

notifications:
  ntfy:
    server_url: "https://ntfy.sneakygeek.net"
    topic: "weather-alerts"
    priority: "high"  # min, low, default, high, urgent
    tags: ["cloud", "warning"]  # Emoji tags for notification

state:
  file_path: "./data/state.json"
  dedup_window_hours: 24  # Don't repeat same alert within this window

36  app/config/settings.yaml  Normal file
@@ -0,0 +1,36 @@
app:
  name: "weather-alerts"
  version: "1.0.0"
  log_level: "INFO"

weather:
  location: "viola,tn"
  hours_ahead: 24
  unit_group: "us"

alerts:
  rules:
    temperature:
      enabled: true
      below: 32
      above: 95
    precipitation:
      enabled: true
      probability_above: 60
    wind:
      enabled: true
      speed_above: 25
      gust_above: 30
    severe_weather:
      enabled: true

notifications:
  ntfy:
    server_url: "https://ntfy.sneakygeek.net"
    topic: "weather-alerts"
    priority: "high"
    tags: ["cloud", "warning"]

state:
  file_path: "./data/state.json"
  dedup_window_hours: 24

185  app/main.py  Normal file
@@ -0,0 +1,185 @@
"""Main orchestration module for weather alerts application."""

import sys
from typing import Optional

from app.config.loader import AppConfig, load_config
from app.services.alert_aggregator import AlertAggregator
from app.services.notification_service import NotificationService
from app.services.rule_engine import RuleEngine
from app.services.state_manager import StateManager
from app.services.weather_service import WeatherService, WeatherServiceError
from app.utils.http_client import HttpClient
from app.utils.logging_config import configure_logging, get_logger


class WeatherAlertsApp:
    """Main application class for weather alerts."""

    def __init__(self, config: AppConfig) -> None:
        """Initialize the application with configuration.

        Args:
            config: The application configuration.
        """
        self.config = config
        self.logger = get_logger(__name__)

        # Initialize HTTP client (shared across services)
        self.http_client = HttpClient()

        # Initialize services
        self.weather_service = WeatherService(
            api_key=config.weather.api_key,
            http_client=self.http_client,
        )

        self.rule_engine = RuleEngine(rules=config.alerts.rules)

        self.state_manager = StateManager(
            file_path=config.state.file_path,
            dedup_window_hours=config.state.dedup_window_hours,
        )

        self.alert_aggregator = AlertAggregator()

        self.notification_service = NotificationService(
            server_url=config.notifications.ntfy.server_url,
            topic=config.notifications.ntfy.topic,
            access_token=config.notifications.ntfy.access_token,
            priority=config.notifications.ntfy.priority,
            default_tags=config.notifications.ntfy.tags,
            http_client=self.http_client,
        )

    def run(self) -> int:
        """Execute the main application flow.

        Returns:
            Exit code (0 for success, 1 for error).
        """
        self.logger.info(
            "app_starting",
            version=self.config.app.version,
            location=self.config.weather.location,
        )

        try:
            # Step 1: Fetch weather forecast
            self.logger.info("step_fetch_forecast")
            forecast = self.weather_service.get_forecast(
                location=self.config.weather.location,
                hours_ahead=self.config.weather.hours_ahead,
                unit_group=self.config.weather.unit_group,
            )

            # Step 2: Evaluate rules against forecast
            self.logger.info("step_evaluate_rules")
            triggered_alerts = self.rule_engine.evaluate(forecast)

            if not triggered_alerts:
                self.logger.info("no_alerts_triggered")
                return 0

            self.logger.info(
                "alerts_triggered",
                count=len(triggered_alerts),
            )

            # Step 2.5: Aggregate alerts by type
            self.logger.info("step_aggregate_alerts")
            aggregated_alerts = self.alert_aggregator.aggregate(triggered_alerts)

            self.logger.info(
                "alerts_aggregated",
                input_count=len(triggered_alerts),
                output_count=len(aggregated_alerts),
            )

            # Step 3: Filter duplicates
            self.logger.info("step_filter_duplicates")
            new_alerts = self.state_manager.filter_duplicates(aggregated_alerts)

            if not new_alerts:
                self.logger.info("all_alerts_are_duplicates")
                return 0

            # Step 4: Send notifications
            self.logger.info(
                "step_send_notifications",
                count=len(new_alerts),
            )
            results = self.notification_service.send_batch(new_alerts)

            # Step 5: Record sent alerts
            self.logger.info("step_record_sent")
            for result in results:
                if result.success:
                    self.state_manager.record_sent(result.alert)

            # Step 6: Purge old records and save state
            self.state_manager.purge_old_records()
            self.state_manager.save()

            # Report results
            success_count = sum(1 for r in results if r.success)
            failed_count = len(results) - success_count

            self.logger.info(
                "app_complete",
                alerts_sent=success_count,
                alerts_failed=failed_count,
            )

            return 0 if failed_count == 0 else 1

        except WeatherServiceError as e:
            self.logger.error("weather_service_error", error=str(e))
            return 1

        except Exception as e:
            self.logger.exception("unexpected_error", error=str(e))
            return 1

        finally:
            self.http_client.close()


def main(config_path: Optional[str] = None) -> int:
    """Main entry point for the application.

    Args:
        config_path: Optional path to configuration file.

    Returns:
        Exit code (0 for success, 1 for error).
    """
    # Load configuration
    try:
        config = load_config(config_path)
    except Exception as e:
        print(f"Failed to load configuration: {e}", file=sys.stderr)
        return 1

    # Configure logging
    configure_logging(config.app.log_level)
    logger = get_logger(__name__)

    # Validate required secrets
    if not config.weather.api_key:
        logger.error("missing_api_key", hint="Set VISUALCROSSING_API_KEY environment variable")
        return 1

    if not config.notifications.ntfy.access_token:
        logger.warning(
            "missing_ntfy_token",
            hint="Set NTFY_ACCESS_TOKEN if your server requires auth",
        )

    # Run the application
    app = WeatherAlertsApp(config)
    return app.run()


if __name__ == "__main__":
    sys.exit(main())
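
Because the module guards on __name__, one fetch/evaluate/notify cycle can be run directly (e.g. `python -m app.main`, typically on a schedule). A programmatic sketch of the same thing, using only the entry point defined above:

# Sketch: run a single cycle and propagate the exit code.
from app.main import main

exit_code = main()          # 0 = success or nothing to send, 1 = failure
raise SystemExit(exit_code)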

17  app/models/__init__.py  Normal file
@@ -0,0 +1,17 @@
"""Data models for weather alerts."""

from app.models.weather import HourlyForecast, WeatherForecast, WeatherAlert
from app.models.alerts import AggregatedAlert, AlertRules, AlertType, TriggeredAlert
from app.models.state import AlertState, SentAlertRecord

__all__ = [
    "HourlyForecast",
    "WeatherForecast",
    "WeatherAlert",
    "AggregatedAlert",
    "AlertRules",
    "AlertType",
    "TriggeredAlert",
    "AlertState",
    "SentAlertRecord",
]

181  app/models/alerts.py  Normal file
@@ -0,0 +1,181 @@
"""Alert rule and triggered alert models."""

from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from typing import Any, Optional


class AlertType(Enum):
    """Types of weather alerts that can be triggered."""

    TEMPERATURE_LOW = "temperature_low"
    TEMPERATURE_HIGH = "temperature_high"
    PRECIPITATION = "precipitation"
    WIND_SPEED = "wind_speed"
    WIND_GUST = "wind_gust"
    SEVERE_WEATHER = "severe_weather"


@dataclass
class TemperatureRule:
    """Temperature alert rule configuration."""

    enabled: bool = True
    below: Optional[float] = 32
    above: Optional[float] = 100


@dataclass
class PrecipitationRule:
    """Precipitation alert rule configuration."""

    enabled: bool = True
    probability_above: float = 70


@dataclass
class WindRule:
    """Wind alert rule configuration."""

    enabled: bool = True
    speed_above: float = 25
    gust_above: float = 40


@dataclass
class SevereWeatherRule:
    """Severe weather alert rule configuration."""

    enabled: bool = True


@dataclass
class AlertRules:
    """Collection of all alert rules."""

    temperature: TemperatureRule = field(default_factory=TemperatureRule)
    precipitation: PrecipitationRule = field(default_factory=PrecipitationRule)
    wind: WindRule = field(default_factory=WindRule)
    severe_weather: SevereWeatherRule = field(default_factory=SevereWeatherRule)

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "AlertRules":
        """Create AlertRules from a configuration dict.

        Args:
            data: The rules configuration dict.

        Returns:
            An AlertRules instance.
        """
        temp_data = data.get("temperature", {})
        precip_data = data.get("precipitation", {})
        wind_data = data.get("wind", {})
        severe_data = data.get("severe_weather", {})

        return cls(
            temperature=TemperatureRule(
                enabled=temp_data.get("enabled", True),
                below=temp_data.get("below", 32),
                above=temp_data.get("above", 100),
            ),
            precipitation=PrecipitationRule(
                enabled=precip_data.get("enabled", True),
                probability_above=precip_data.get("probability_above", 70),
            ),
            wind=WindRule(
                enabled=wind_data.get("enabled", True),
                speed_above=wind_data.get("speed_above", 25),
                gust_above=wind_data.get("gust_above", 40),
            ),
            severe_weather=SevereWeatherRule(
                enabled=severe_data.get("enabled", True),
            ),
        )


@dataclass
class TriggeredAlert:
    """Represents an alert that was triggered by a rule evaluation."""

    alert_type: AlertType
    title: str
    message: str
    forecast_hour: str
    value: float
    threshold: float
    created_at: datetime = field(default_factory=datetime.now)

    @property
    def dedup_key(self) -> str:
        """Generate a deduplication key for this alert.

        Format: {alert_type}:{forecast_hour}
        This allows re-alerting for different time periods while preventing
        duplicate alerts for the same hour.
        """
        return f"{self.alert_type.value}:{self.forecast_hour}"

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for serialization."""
        return {
            "alert_type": self.alert_type.value,
            "title": self.title,
            "message": self.message,
            "forecast_hour": self.forecast_hour,
            "value": self.value,
            "threshold": self.threshold,
            "created_at": self.created_at.isoformat(),
            "dedup_key": self.dedup_key,
        }


@dataclass
class AggregatedAlert:
    """Represents multiple alerts of the same type aggregated into one notification."""

    alert_type: AlertType
    title: str
    message: str
    triggered_hours: list[str]
    start_time: str
    end_time: str
    extreme_value: float
    extreme_hour: str
    threshold: float
    created_at: datetime = field(default_factory=datetime.now)

    @property
    def dedup_key(self) -> str:
        """Generate a deduplication key for this aggregated alert.

        Format: {alert_type}:{date}
        Day-level deduplication prevents re-sending aggregated alerts
        for the same alert type on the same day.
        """
        # Extract date from the first triggered hour (format: YYYY-MM-DD-HH)
        date_part = self.start_time.rsplit("-", 1)[0] if self.start_time else ""
        return f"{self.alert_type.value}:{date_part}"

    @property
    def hour_count(self) -> int:
        """Number of hours that triggered this alert."""
        return len(self.triggered_hours)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for serialization."""
        return {
            "alert_type": self.alert_type.value,
            "title": self.title,
            "message": self.message,
            "triggered_hours": self.triggered_hours,
            "start_time": self.start_time,
            "end_time": self.end_time,
            "extreme_value": self.extreme_value,
            "extreme_hour": self.extreme_hour,
            "threshold": self.threshold,
            "created_at": self.created_at.isoformat(),
            "dedup_key": self.dedup_key,
            "hour_count": self.hour_count,
        }
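
A quick sketch of the two deduplication granularities defined above (hour key and values are illustrative, not from the commit): TriggeredAlert keys on the hour, while AggregatedAlert drops the hour so the same alert type is only re-sent once per day.

# Sketch: hour-level vs day-level dedup keys.
from app.models.alerts import AlertType, TriggeredAlert

alert = TriggeredAlert(
    alert_type=AlertType.WIND_GUST,
    title="Wind Gust Alert",
    message="Wind gusts up to 45 mph expected",
    forecast_hour="2024-01-15-14",   # illustrative YYYY-MM-DD-HH hour key
    value=45.0,
    threshold=40.0,
)
print(alert.dedup_key)  # "wind_gust:2024-01-15-14"
# An AggregatedAlert built from the same hours would key on "wind_gust:2024-01-15".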

133  app/models/state.py  Normal file
@@ -0,0 +1,133 @@
"""State management models for alert deduplication."""

from dataclasses import dataclass, field
from datetime import datetime
from typing import Any


@dataclass
class SentAlertRecord:
    """Record of a sent alert for deduplication."""

    dedup_key: str
    alert_type: str
    sent_at: datetime
    forecast_hour: str

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "dedup_key": self.dedup_key,
            "alert_type": self.alert_type,
            "sent_at": self.sent_at.isoformat(),
            "forecast_hour": self.forecast_hour,
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "SentAlertRecord":
        """Create from dictionary.

        Args:
            data: The serialized record dict.

        Returns:
            A SentAlertRecord instance.
        """
        return cls(
            dedup_key=data["dedup_key"],
            alert_type=data["alert_type"],
            sent_at=datetime.fromisoformat(data["sent_at"]),
            forecast_hour=data["forecast_hour"],
        )


@dataclass
class AlertState:
    """State container for tracking sent alerts."""

    sent_alerts: dict[str, SentAlertRecord] = field(default_factory=dict)
    last_updated: datetime = field(default_factory=datetime.now)

    def is_duplicate(self, dedup_key: str) -> bool:
        """Check if an alert with this dedup key has already been sent.

        Args:
            dedup_key: The deduplication key to check.

        Returns:
            True if this alert has already been sent.
        """
        return dedup_key in self.sent_alerts

    def record_sent(self, dedup_key: str, alert_type: str, forecast_hour: str) -> None:
        """Record that an alert was sent.

        Args:
            dedup_key: The deduplication key.
            alert_type: The type of alert.
            forecast_hour: The forecast hour this alert was for.
        """
        self.sent_alerts[dedup_key] = SentAlertRecord(
            dedup_key=dedup_key,
            alert_type=alert_type,
            sent_at=datetime.now(),
            forecast_hour=forecast_hour,
        )
        self.last_updated = datetime.now()

    def purge_old_records(self, window_hours: int) -> int:
        """Remove records older than the deduplication window.

        Args:
            window_hours: Number of hours to retain records.

        Returns:
            Number of records purged.
        """
        cutoff = datetime.now()
        original_count = len(self.sent_alerts)

        self.sent_alerts = {
            key: record
            for key, record in self.sent_alerts.items()
            if (cutoff - record.sent_at).total_seconds() < (window_hours * 3600)
        }

        purged = original_count - len(self.sent_alerts)
        if purged > 0:
            self.last_updated = datetime.now()

        return purged

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "sent_alerts": {
                key: record.to_dict() for key, record in self.sent_alerts.items()
            },
            "last_updated": self.last_updated.isoformat(),
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "AlertState":
        """Create from dictionary.

        Args:
            data: The serialized state dict.

        Returns:
            An AlertState instance.
        """
        sent_alerts = {
            key: SentAlertRecord.from_dict(record_data)
            for key, record_data in data.get("sent_alerts", {}).items()
        }

        last_updated_str = data.get("last_updated")
        last_updated = (
            datetime.fromisoformat(last_updated_str)
            if last_updated_str
            else datetime.now()
        )

        return cls(sent_alerts=sent_alerts, last_updated=last_updated)

160  app/models/weather.py  Normal file
@@ -0,0 +1,160 @@
"""Weather data models for VisualCrossing API responses."""

from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, Optional


@dataclass
class HourlyForecast:
    """Represents a single hour's weather forecast."""

    datetime_str: str
    datetime_epoch: int
    temp: float
    feelslike: float
    humidity: float
    precip: float
    precip_prob: float
    snow: float
    snow_depth: float
    wind_speed: float
    wind_gust: float
    wind_dir: float
    pressure: float
    visibility: float
    cloud_cover: float
    uv_index: float
    conditions: str
    icon: str

    @property
    def datetime(self) -> datetime:
        """Convert epoch to datetime."""
        return datetime.fromtimestamp(self.datetime_epoch)

    @property
    def hour_key(self) -> str:
        """Get a key representing this forecast hour (YYYY-MM-DD-HH)."""
        return self.datetime.strftime("%Y-%m-%d-%H")

    @classmethod
    def from_api_data(cls, data: dict[str, Any]) -> "HourlyForecast":
        """Create an HourlyForecast from VisualCrossing API data.

        Args:
            data: The hourly data dict from the API response.

        Returns:
            An HourlyForecast instance.
        """
        return cls(
            datetime_str=data.get("datetime", ""),
            datetime_epoch=data.get("datetimeEpoch", 0),
            temp=float(data.get("temp", 0)),
            feelslike=float(data.get("feelslike", 0)),
            humidity=float(data.get("humidity", 0)),
            precip=float(data.get("precip") or 0),
            precip_prob=float(data.get("precipprob") or 0),
            snow=float(data.get("snow") or 0),
            snow_depth=float(data.get("snowdepth") or 0),
            wind_speed=float(data.get("windspeed") or 0),
            wind_gust=float(data.get("windgust") or 0),
            wind_dir=float(data.get("winddir") or 0),
            pressure=float(data.get("pressure") or 0),
            visibility=float(data.get("visibility") or 0),
            cloud_cover=float(data.get("cloudcover") or 0),
            uv_index=float(data.get("uvindex") or 0),
            conditions=data.get("conditions", ""),
            icon=data.get("icon", ""),
        )


@dataclass
class WeatherAlert:
    """Represents a severe weather alert from the API."""

    event: str
    headline: str
    description: str
    onset: Optional[str]
    ends: Optional[str]
    id: str
    language: str
    link: str

    @classmethod
    def from_api_data(cls, data: dict[str, Any]) -> "WeatherAlert":
        """Create a WeatherAlert from VisualCrossing API data.

        Args:
            data: The alert data dict from the API response.

        Returns:
            A WeatherAlert instance.
        """
        return cls(
            event=data.get("event", "Unknown"),
            headline=data.get("headline", ""),
            description=data.get("description", ""),
            onset=data.get("onset"),
            ends=data.get("ends"),
            id=data.get("id", ""),
            language=data.get("language", "en"),
            link=data.get("link", ""),
        )


@dataclass
class WeatherForecast:
    """Complete weather forecast response."""

    location: str
    resolved_address: str
    timezone: str
    hourly_forecasts: list[HourlyForecast] = field(default_factory=list)
    alerts: list[WeatherAlert] = field(default_factory=list)

    @classmethod
    def from_api_data(
        cls,
        data: dict[str, Any],
        hours_ahead: int = 24,
    ) -> "WeatherForecast":
        """Create a WeatherForecast from VisualCrossing API data.

        Args:
            data: The full API response dict.
            hours_ahead: Number of hours of forecast to include.

        Returns:
            A WeatherForecast instance.
        """
        # Extract hourly forecasts from days
        hourly_forecasts: list[HourlyForecast] = []
        now = datetime.now()

        for day in data.get("days", []):
            for hour_data in day.get("hours", []):
                forecast = HourlyForecast.from_api_data(hour_data)
                # Only include future hours up to hours_ahead
                if forecast.datetime > now:
                    hourly_forecasts.append(forecast)
                if len(hourly_forecasts) >= hours_ahead:
                    break
            if len(hourly_forecasts) >= hours_ahead:
                break

        # Extract alerts
        alerts = [
            WeatherAlert.from_api_data(alert_data)
            for alert_data in data.get("alerts", [])
        ]

        return cls(
            location=data.get("address", ""),
            resolved_address=data.get("resolvedAddress", ""),
            timezone=data.get("timezone", ""),
            hourly_forecasts=hourly_forecasts,
            alerts=alerts,
        )

13  app/services/__init__.py  Normal file
@@ -0,0 +1,13 @@
"""Service layer for weather alerts."""

from app.services.weather_service import WeatherService
from app.services.notification_service import NotificationService
from app.services.rule_engine import RuleEngine
from app.services.state_manager import StateManager

__all__ = [
    "WeatherService",
    "NotificationService",
    "RuleEngine",
    "StateManager",
]

283  app/services/alert_aggregator.py  Normal file
@@ -0,0 +1,283 @@
"""Alert aggregator service for combining multiple alerts of the same type."""

from collections import defaultdict
from datetime import datetime

from app.models.alerts import AggregatedAlert, AlertType, TriggeredAlert
from app.utils.logging_config import get_logger


class AlertAggregator:
    """Aggregates multiple alerts of the same type into single notifications."""

    # Alert types where lower values are worse (e.g., low temperature)
    LOWER_IS_WORSE: set[AlertType] = {AlertType.TEMPERATURE_LOW}

    def __init__(self) -> None:
        """Initialize the alert aggregator."""
        self.logger = get_logger(__name__)

    def aggregate(
        self,
        alerts: list[TriggeredAlert],
    ) -> list[AggregatedAlert]:
        """Aggregate alerts by type into summary notifications.

        Severe weather alerts pass through unchanged.
        Other alert types are grouped and combined into a single notification
        per type with time range and extreme value information.

        Args:
            alerts: List of triggered alerts to aggregate.

        Returns:
            List of aggregated alerts (one per alert type).
        """
        if not alerts:
            return []

        # Separate severe weather alerts from regular hourly alerts
        severe_alerts: list[TriggeredAlert] = []
        hourly_alerts: list[TriggeredAlert] = []

        for alert in alerts:
            if alert.alert_type == AlertType.SEVERE_WEATHER:
                severe_alerts.append(alert)
            else:
                hourly_alerts.append(alert)

        aggregated: list[AggregatedAlert] = []

        # Convert severe weather alerts to AggregatedAlert (pass through)
        aggregated.extend(self._convert_severe_alerts(severe_alerts))

        # Aggregate hourly alerts by type
        aggregated.extend(self._aggregate_by_type(hourly_alerts))

        self.logger.info(
            "alerts_aggregated",
            input_count=len(alerts),
            output_count=len(aggregated),
            severe_count=len(severe_alerts),
        )

        return aggregated

    def _convert_severe_alerts(
        self,
        alerts: list[TriggeredAlert],
    ) -> list[AggregatedAlert]:
        """Convert severe weather alerts to AggregatedAlert format.

        Severe weather alerts are not aggregated - each one becomes
        its own AggregatedAlert for individual notification.

        Args:
            alerts: List of severe weather triggered alerts.

        Returns:
            List of AggregatedAlert, one per severe weather alert.
        """
        return [
            AggregatedAlert(
                alert_type=alert.alert_type,
                title=alert.title,
                message=alert.message,
                triggered_hours=[alert.forecast_hour],
                start_time=alert.forecast_hour,
                end_time=alert.forecast_hour,
                extreme_value=alert.value,
                extreme_hour=alert.forecast_hour,
                threshold=alert.threshold,
                created_at=alert.created_at,
            )
            for alert in alerts
        ]

    def _aggregate_by_type(
        self,
        alerts: list[TriggeredAlert],
    ) -> list[AggregatedAlert]:
        """Aggregate hourly alerts by alert type.

        Args:
            alerts: List of hourly triggered alerts.

        Returns:
            List of AggregatedAlert, one per alert type.
        """
        # Group alerts by type
        by_type: dict[AlertType, list[TriggeredAlert]] = defaultdict(list)

        for alert in alerts:
            by_type[alert.alert_type].append(alert)

        aggregated: list[AggregatedAlert] = []

        for alert_type, type_alerts in by_type.items():
            aggregated_alert = self._aggregate_type_group(alert_type, type_alerts)
            aggregated.append(aggregated_alert)

        return aggregated

    def _aggregate_type_group(
        self,
        alert_type: AlertType,
        alerts: list[TriggeredAlert],
    ) -> AggregatedAlert:
        """Create a single AggregatedAlert from a group of same-type alerts.

        Args:
            alert_type: The type of all alerts in the group.
            alerts: List of alerts of the same type.

        Returns:
            A single AggregatedAlert summarizing the group.
        """
        # Sort by forecast hour for chronological ordering
        sorted_alerts = sorted(alerts, key=lambda a: a.forecast_hour)

        # Collect all triggered hours
        triggered_hours = [a.forecast_hour for a in sorted_alerts]
        start_time = sorted_alerts[0].forecast_hour
        end_time = sorted_alerts[-1].forecast_hour

        # Find extreme value (lowest for low temp, highest for others)
        if alert_type in self.LOWER_IS_WORSE:
            extreme_alert = min(sorted_alerts, key=lambda a: a.value)
        else:
            extreme_alert = max(sorted_alerts, key=lambda a: a.value)

        extreme_value = extreme_alert.value
        extreme_hour = extreme_alert.forecast_hour
        threshold = sorted_alerts[0].threshold  # Same for all alerts of same type

        # Build summary message
        message = self._build_summary_message(
            alert_type=alert_type,
            start_time=start_time,
            end_time=end_time,
            extreme_value=extreme_value,
            extreme_hour=extreme_hour,
            threshold=threshold,
            hour_count=len(sorted_alerts),
        )

        # Build title
        title = self._build_title(alert_type)

        return AggregatedAlert(
            alert_type=alert_type,
            title=title,
            message=message,
            triggered_hours=triggered_hours,
            start_time=start_time,
            end_time=end_time,
            extreme_value=extreme_value,
            extreme_hour=extreme_hour,
            threshold=threshold,
        )

    def _build_title(self, alert_type: AlertType) -> str:
        """Build a title for the aggregated alert.

        Args:
            alert_type: The type of alert.

        Returns:
            Title string.
        """
        titles = {
            AlertType.TEMPERATURE_LOW: "Low Temperature Alert",
            AlertType.TEMPERATURE_HIGH: "High Temperature Alert",
            AlertType.PRECIPITATION: "Precipitation Alert",
            AlertType.WIND_SPEED: "High Wind Alert",
            AlertType.WIND_GUST: "Wind Gust Alert",
        }
        return titles.get(alert_type, f"{alert_type.value} Alert")

    def _build_summary_message(
        self,
        alert_type: AlertType,
        start_time: str,
        end_time: str,
        extreme_value: float,
        extreme_hour: str,
        threshold: float,
        hour_count: int,
    ) -> str:
        """Build a summary message for the aggregated alert.

        Args:
            alert_type: The type of alert.
            start_time: First hour that triggered (YYYY-MM-DD-HH format).
            end_time: Last hour that triggered (YYYY-MM-DD-HH format).
            extreme_value: The most extreme value recorded.
            extreme_hour: Hour when extreme value occurred.
            threshold: The threshold that was exceeded.
            hour_count: Number of hours that triggered.

        Returns:
            Human-readable summary message.
        """
        # Format times for display
        start_display = self._format_hour_display(start_time)
        end_display = self._format_hour_display(end_time)
        extreme_display = self._format_hour_display(extreme_hour)

        # Build type-specific message
        if alert_type == AlertType.TEMPERATURE_LOW:
            return (
                f"Low temps from {start_display} - {end_display}. "
                f"Lowest: {extreme_value:.0f}°F at {extreme_display}. "
                f"({hour_count} hours below {threshold:.0f}°F)"
            )

        elif alert_type == AlertType.TEMPERATURE_HIGH:
            return (
                f"High temps from {start_display} - {end_display}. "
                f"Highest: {extreme_value:.0f}°F at {extreme_display}. "
                f"({hour_count} hours above {threshold:.0f}°F)"
            )

        elif alert_type == AlertType.PRECIPITATION:
            return (
                f"Precipitation likely from {start_display} - {end_display}. "
                f"Peak: {extreme_value:.0f}% at {extreme_display}. "
                f"({hour_count} hours above {threshold:.0f}%)"
            )

        elif alert_type == AlertType.WIND_SPEED:
            return (
                f"High winds from {start_display} - {end_display}. "
                f"Peak: {extreme_value:.0f} mph at {extreme_display}. "
                f"({hour_count} hours above {threshold:.0f} mph)"
            )

        elif alert_type == AlertType.WIND_GUST:
            return (
                f"Wind gusts from {start_display} - {end_display}. "
                f"Peak: {extreme_value:.0f} mph at {extreme_display}. "
                f"({hour_count} hours above {threshold:.0f} mph)"
            )

        # Fallback for unknown types
        return (
            f"Alert from {start_display} - {end_display}. "
            f"({hour_count} hours affected)"
        )

    def _format_hour_display(self, hour_key: str) -> str:
        """Format an hour key for human display.

        Args:
            hour_key: Hour key in YYYY-MM-DD-HH format.

        Returns:
            Human-readable time string (e.g., "3 PM" or "6 AM").
        """
        try:
            dt = datetime.strptime(hour_key, "%Y-%m-%d-%H")
            return dt.strftime("%-I %p")
        except ValueError:
            return hour_key
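
A brief sketch of the aggregation behavior above (hours and temperatures are illustrative): two same-type hourly alerts collapse into one summary, and for TEMPERATURE_LOW the minimum value is reported as the extreme.

# Sketch: aggregate two low-temperature hours into a single notification.
from app.models.alerts import AlertType, TriggeredAlert
from app.services.alert_aggregator import AlertAggregator

hours = [("2024-01-15-06", 28.0), ("2024-01-15-07", 26.0)]  # illustrative values
alerts = [
    TriggeredAlert(
        alert_type=AlertType.TEMPERATURE_LOW,
        title="Low Temperature Alert",
        message="",
        forecast_hour=hour,
        value=temp,
        threshold=32.0,
    )
    for hour, temp in hours
]

summary = AlertAggregator().aggregate(alerts)[0]
print(summary.hour_count, summary.extreme_value)  # 2 26.0 (lowest hour wins for low temp)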

177  app/services/notification_service.py  Normal file
@@ -0,0 +1,177 @@
"""Notification service for sending alerts via ntfy."""

from dataclasses import dataclass
from typing import Optional, Union

from app.models.alerts import AggregatedAlert, AlertType, TriggeredAlert
from app.utils.http_client import HttpClient
from app.utils.logging_config import get_logger

# Type alias for alerts that can be sent
SendableAlert = Union[TriggeredAlert, AggregatedAlert]


@dataclass
class NotificationResult:
    """Result of sending a notification."""

    alert: SendableAlert
    success: bool
    error: Optional[str] = None


class NotificationServiceError(Exception):
    """Raised when notification service encounters an error."""

    pass


class NotificationService:
    """Service for sending notifications via ntfy."""

    # Map alert types to emoji tags
    ALERT_TYPE_TAGS: dict[AlertType, list[str]] = {
        AlertType.TEMPERATURE_LOW: ["cold_face", "thermometer"],
        AlertType.TEMPERATURE_HIGH: ["hot_face", "thermometer"],
        AlertType.PRECIPITATION: ["cloud_with_rain", "umbrella"],
        AlertType.WIND_SPEED: ["wind_face", "dash"],
        AlertType.WIND_GUST: ["tornado", "dash"],
        AlertType.SEVERE_WEATHER: ["rotating_light", "warning"],
    }

    def __init__(
        self,
        server_url: str,
        topic: str,
        access_token: str = "",
        priority: str = "high",
        default_tags: Optional[list[str]] = None,
        http_client: Optional[HttpClient] = None,
    ) -> None:
        """Initialize the notification service.

        Args:
            server_url: The ntfy server URL.
            topic: The topic to publish to.
            access_token: Optional bearer token for authentication.
            priority: Default notification priority.
            default_tags: Default tags to include with notifications.
            http_client: Optional HTTP client instance.
        """
        self.server_url = server_url.rstrip("/")
        self.topic = topic
        self.access_token = access_token
        self.priority = priority
        self.default_tags = default_tags or ["cloud", "warning"]
        self.http_client = http_client or HttpClient()
        self.logger = get_logger(__name__)

    def send(self, alert: SendableAlert) -> NotificationResult:
        """Send a single alert notification.

        Args:
            alert: The triggered or aggregated alert to send.

        Returns:
            NotificationResult indicating success or failure.
        """
        url = f"{self.server_url}/{self.topic}"

        # Build headers
        headers = {
            "Title": alert.title,
            "Priority": self._get_priority(alert),
            "Tags": ",".join(self._get_tags(alert)),
        }

        if self.access_token:
            headers["Authorization"] = f"Bearer {self.access_token}"

        self.logger.debug(
            "sending_notification",
            alert_type=alert.alert_type.value,
            title=alert.title,
        )

        response = self.http_client.post(
            url,
            data=alert.message.encode("utf-8"),
            headers=headers,
        )

        if response.success:
            self.logger.info(
                "notification_sent",
                alert_type=alert.alert_type.value,
                dedup_key=alert.dedup_key,
            )
            return NotificationResult(alert=alert, success=True)
        else:
            error_msg = f"HTTP {response.status_code}: {response.text[:100]}"
            self.logger.error(
                "notification_failed",
                alert_type=alert.alert_type.value,
                error=error_msg,
            )
            return NotificationResult(alert=alert, success=False, error=error_msg)

    def send_batch(
        self,
        alerts: list[SendableAlert],
    ) -> list[NotificationResult]:
        """Send multiple alert notifications.

        Args:
            alerts: List of triggered or aggregated alerts to send.

        Returns:
            List of NotificationResult for each alert.
        """
        if not alerts:
            self.logger.info("no_alerts_to_send")
            return []

        results: list[NotificationResult] = []

        for alert in alerts:
            result = self.send(alert)
            results.append(result)

        success_count = sum(1 for r in results if r.success)
        self.logger.info(
            "batch_send_complete",
            total=len(alerts),
            success=success_count,
            failed=len(alerts) - success_count,
        )

        return results

    def _get_priority(self, alert: SendableAlert) -> str:
        """Determine notification priority based on alert type.

        Args:
            alert: The triggered or aggregated alert.

        Returns:
            Priority string for ntfy.
        """
        # Severe weather always gets urgent priority
        if alert.alert_type == AlertType.SEVERE_WEATHER:
            return "urgent"

        return self.priority

    def _get_tags(self, alert: SendableAlert) -> list[str]:
        """Get notification tags for an alert.

        Args:
            alert: The triggered or aggregated alert.

        Returns:
            List of emoji tags for ntfy.
        """
        # Start with alert-specific tags
        tags = list(self.ALERT_TYPE_TAGS.get(alert.alert_type, self.default_tags))

        return tags

231  app/services/rule_engine.py  Normal file
@@ -0,0 +1,231 @@
"""Rule engine for evaluating weather conditions against alert rules."""

from typing import Optional

from app.models.alerts import AlertRules, AlertType, TriggeredAlert
from app.models.weather import HourlyForecast, WeatherAlert, WeatherForecast
from app.utils.logging_config import get_logger


class RuleEngine:
    """Evaluates weather forecasts against configured alert rules."""

    def __init__(self, rules: AlertRules) -> None:
        """Initialize the rule engine.

        Args:
            rules: The alert rules configuration.
        """
        self.rules = rules
        self.logger = get_logger(__name__)

    def evaluate(self, forecast: WeatherForecast) -> list[TriggeredAlert]:
        """Evaluate a forecast against all enabled rules.

        Args:
            forecast: The weather forecast to evaluate.

        Returns:
            List of triggered alerts.
        """
        alerts: list[TriggeredAlert] = []

        # Evaluate hourly forecasts
        for hourly in forecast.hourly_forecasts:
            alerts.extend(self._evaluate_hourly(hourly))

        # Evaluate severe weather alerts from API
        if self.rules.severe_weather.enabled:
            alerts.extend(self._evaluate_severe_alerts(forecast.alerts))

        self.logger.info(
            "rules_evaluated",
            hourly_count=len(forecast.hourly_forecasts),
            triggered_count=len(alerts),
        )

        return alerts

    def _evaluate_hourly(self, hourly: HourlyForecast) -> list[TriggeredAlert]:
        """Evaluate a single hourly forecast against rules.

        Args:
            hourly: The hourly forecast data.

        Returns:
            List of triggered alerts for this hour.
        """
        alerts: list[TriggeredAlert] = []

        # Temperature rules
        if self.rules.temperature.enabled:
            alert = self._check_temperature(hourly)
            if alert:
                alerts.append(alert)

        # Precipitation rules
        if self.rules.precipitation.enabled:
            alert = self._check_precipitation(hourly)
            if alert:
                alerts.append(alert)

        # Wind rules
        if self.rules.wind.enabled:
            wind_alerts = self._check_wind(hourly)
            alerts.extend(wind_alerts)

        return alerts

    def _check_temperature(
        self,
        hourly: HourlyForecast,
    ) -> Optional[TriggeredAlert]:
        """Check temperature thresholds.

        Args:
            hourly: The hourly forecast data.

        Returns:
            TriggeredAlert if threshold exceeded, None otherwise.
        """
        temp_rule = self.rules.temperature

        # Check low temperature
        if temp_rule.below is not None and hourly.temp < temp_rule.below:
            return TriggeredAlert(
                alert_type=AlertType.TEMPERATURE_LOW,
                title="Low Temperature Alert",
                message=(
                    f"Temperature expected to drop to {hourly.temp:.0f}°F "
                    f"at {hourly.datetime.strftime('%I:%M %p on %b %d')}. "
                    f"Threshold: {temp_rule.below:.0f}°F"
                ),
                forecast_hour=hourly.hour_key,
                value=hourly.temp,
                threshold=temp_rule.below,
            )

        # Check high temperature
        if temp_rule.above is not None and hourly.temp > temp_rule.above:
            return TriggeredAlert(
                alert_type=AlertType.TEMPERATURE_HIGH,
                title="High Temperature Alert",
                message=(
                    f"Temperature expected to reach {hourly.temp:.0f}°F "
                    f"at {hourly.datetime.strftime('%I:%M %p on %b %d')}. "
                    f"Threshold: {temp_rule.above:.0f}°F"
                ),
                forecast_hour=hourly.hour_key,
                value=hourly.temp,
                threshold=temp_rule.above,
            )

        return None

    def _check_precipitation(
        self,
        hourly: HourlyForecast,
    ) -> Optional[TriggeredAlert]:
        """Check precipitation probability threshold.

        Args:
            hourly: The hourly forecast data.

        Returns:
            TriggeredAlert if threshold exceeded, None otherwise.
        """
        precip_rule = self.rules.precipitation
        threshold = precip_rule.probability_above

        if hourly.precip_prob > threshold:
            return TriggeredAlert(
                alert_type=AlertType.PRECIPITATION,
                title="Precipitation Alert",
                message=(
                    f"{hourly.precip_prob:.0f}% chance of precipitation "
                    f"at {hourly.datetime.strftime('%I:%M %p on %b %d')}. "
                    f"Conditions: {hourly.conditions}"
                ),
                forecast_hour=hourly.hour_key,
                value=hourly.precip_prob,
                threshold=threshold,
            )

        return None

    def _check_wind(self, hourly: HourlyForecast) -> list[TriggeredAlert]:
        """Check wind speed and gust thresholds.

        Args:
            hourly: The hourly forecast data.

        Returns:
            List of triggered wind alerts.
        """
        alerts: list[TriggeredAlert] = []
        wind_rule = self.rules.wind

        # Check sustained wind speed
        if hourly.wind_speed > wind_rule.speed_above:
            alerts.append(
                TriggeredAlert(
                    alert_type=AlertType.WIND_SPEED,
                    title="High Wind Alert",
                    message=(
                        f"Sustained winds of {hourly.wind_speed:.0f} mph expected "
                        f"at {hourly.datetime.strftime('%I:%M %p on %b %d')}. "
                        f"Threshold: {wind_rule.speed_above:.0f} mph"
                    ),
                    forecast_hour=hourly.hour_key,
                    value=hourly.wind_speed,
                    threshold=wind_rule.speed_above,
                )
            )

        # Check wind gusts
        if hourly.wind_gust > wind_rule.gust_above:
            alerts.append(
                TriggeredAlert(
                    alert_type=AlertType.WIND_GUST,
                    title="Wind Gust Alert",
                    message=(
                        f"Wind gusts up to {hourly.wind_gust:.0f} mph expected "
                        f"at {hourly.datetime.strftime('%I:%M %p on %b %d')}. "
                        f"Threshold: {wind_rule.gust_above:.0f} mph"
                    ),
                    forecast_hour=hourly.hour_key,
                    value=hourly.wind_gust,
                    threshold=wind_rule.gust_above,
                )
            )

        return alerts

    def _evaluate_severe_alerts(
        self,
        api_alerts: list[WeatherAlert],
    ) -> list[TriggeredAlert]:
        """Convert API severe weather alerts to triggered alerts.

        Args:
            api_alerts: List of WeatherAlert from the API.

        Returns:
            List of triggered severe weather alerts.
        """
        triggered: list[TriggeredAlert] = []

        for api_alert in api_alerts:
            # Use alert ID as the hour key for deduplication
            triggered.append(
                TriggeredAlert(
                    alert_type=AlertType.SEVERE_WEATHER,
                    title=f"Severe Weather: {api_alert.event}",
                    message=api_alert.headline or api_alert.description[:200],
                    forecast_hour=api_alert.id or api_alert.event,
                    value=1.0,  # Placeholder - severe alerts don't have numeric values
                    threshold=0.0,
                )
            )

        return triggered
185
app/services/state_manager.py
Normal file
185
app/services/state_manager.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""State manager for alert deduplication with atomic file persistence."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Optional, Union
|
||||
|
||||
from app.models.alerts import AggregatedAlert, TriggeredAlert
|
||||
from app.models.state import AlertState
|
||||
from app.utils.logging_config import get_logger
|
||||
|
||||
# Type alias for alerts that can be deduplicated
|
||||
DeduplicableAlert = Union[TriggeredAlert, AggregatedAlert]
|
||||
|
||||
|
||||
class StateManagerError(Exception):
|
||||
"""Raised when state management encounters an error."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class StateManager:
|
||||
"""Manages alert state persistence for deduplication."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
file_path: str,
|
||||
        dedup_window_hours: int = 24,
    ) -> None:
        """Initialize the state manager.

        Args:
            file_path: Path to the state JSON file.
            dedup_window_hours: Hours to retain sent alert records.
        """
        self.file_path = Path(file_path)
        self.dedup_window_hours = dedup_window_hours
        self.logger = get_logger(__name__)
        self._state: Optional[AlertState] = None

    @property
    def state(self) -> AlertState:
        """Get the current state, loading from file if necessary."""
        if self._state is None:
            self._state = self.load()
        return self._state

    def load(self) -> AlertState:
        """Load state from file.

        Returns:
            AlertState instance, empty if file doesn't exist.
        """
        if not self.file_path.exists():
            self.logger.info("state_file_not_found", path=str(self.file_path))
            return AlertState()

        try:
            with open(self.file_path) as f:
                data = json.load(f)

            state = AlertState.from_dict(data)
            self.logger.info(
                "state_loaded",
                path=str(self.file_path),
                record_count=len(state.sent_alerts),
            )
            return state

        except json.JSONDecodeError as e:
            self.logger.warning(
                "state_file_corrupt",
                path=str(self.file_path),
                error=str(e),
            )
            return AlertState()

    def save(self) -> None:
        """Save state to file with atomic write.

        Uses write-to-temp-then-rename for crash safety.
        """
        if self._state is None:
            return

        # Ensure directory exists
        self.file_path.parent.mkdir(parents=True, exist_ok=True)

        # Write to temp file first
        dir_path = self.file_path.parent
        try:
            fd, temp_path = tempfile.mkstemp(
                suffix=".tmp",
                prefix="state_",
                dir=dir_path,
            )
            try:
                with os.fdopen(fd, "w") as f:
                    json.dump(self._state.to_dict(), f, indent=2)

                # Atomic rename
                os.replace(temp_path, self.file_path)

                self.logger.debug(
                    "state_saved",
                    path=str(self.file_path),
                    record_count=len(self._state.sent_alerts),
                )

            except Exception:
                # Clean up temp file on error
                if os.path.exists(temp_path):
                    os.unlink(temp_path)
                raise

        except OSError as e:
            self.logger.error("state_save_failed", error=str(e))
            raise StateManagerError(f"Failed to save state: {e}")

    def filter_duplicates(
        self,
        alerts: list[DeduplicableAlert],
    ) -> list[DeduplicableAlert]:
        """Filter out alerts that have already been sent.

        Args:
            alerts: List of triggered or aggregated alerts.

        Returns:
            List of alerts that haven't been sent within the dedup window.
        """
        new_alerts: list[DeduplicableAlert] = []

        for alert in alerts:
            if not self.state.is_duplicate(alert.dedup_key):
                new_alerts.append(alert)
            else:
                self.logger.debug(
                    "alert_filtered_duplicate",
                    dedup_key=alert.dedup_key,
                )

        filtered_count = len(alerts) - len(new_alerts)
        if filtered_count > 0:
            self.logger.info(
                "duplicates_filtered",
                total=len(alerts),
                new=len(new_alerts),
                duplicates=filtered_count,
            )

        return new_alerts

    def record_sent(self, alert: DeduplicableAlert) -> None:
        """Record that an alert was sent.

        Args:
            alert: The triggered or aggregated alert that was sent.
        """
        # Get the forecast hour - AggregatedAlert uses start_time, TriggeredAlert uses forecast_hour
        if isinstance(alert, AggregatedAlert):
            forecast_hour = alert.start_time
        else:
            forecast_hour = alert.forecast_hour

        self.state.record_sent(
            dedup_key=alert.dedup_key,
            alert_type=alert.alert_type.value,
            forecast_hour=forecast_hour,
        )
        self.logger.debug("alert_recorded", dedup_key=alert.dedup_key)

    def purge_old_records(self) -> int:
        """Remove records older than the deduplication window.

        Returns:
            Number of records purged.
        """
        purged = self.state.purge_old_records(self.dedup_window_hours)

        if purged > 0:
            self.logger.info("old_records_purged", count=purged)

        return purged
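A minimal sketch of how this deduplication state is meant to be driven per polling run. The enclosing class name StateManager is an assumption (only StateManagerError is visible in this hunk), and triggered_alerts / send_notification stand in for the rule engine's output and the notifier:

# Illustrative only: StateManager, triggered_alerts, and send_notification are assumed names.
manager = StateManager(file_path="./data/state.json", dedup_window_hours=24)

new_alerts = manager.filter_duplicates(triggered_alerts)  # drop alerts already sent in the window
for alert in new_alerts:
    send_notification(alert)       # hypothetical notifier call
    manager.record_sent(alert)     # remember this alert's dedup_key

manager.purge_old_records()        # trim records older than the dedup window
manager.save()                     # persist via the write-temp-then-rename path above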
101
app/services/weather_service.py
Normal file
101
app/services/weather_service.py
Normal file
@@ -0,0 +1,101 @@
"""Weather service for fetching forecasts from VisualCrossing API."""

from typing import Optional
from urllib.parse import quote

from app.models.weather import WeatherForecast
from app.utils.http_client import HttpClient
from app.utils.logging_config import get_logger


class WeatherServiceError(Exception):
    """Raised when the weather service encounters an error."""

    pass


class WeatherService:
    """Client for the VisualCrossing Weather API."""

    BASE_URL = "https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline"

    def __init__(
        self,
        api_key: str,
        http_client: Optional[HttpClient] = None,
    ) -> None:
        """Initialize the weather service.

        Args:
            api_key: VisualCrossing API key.
            http_client: Optional HTTP client instance. Creates one if not provided.
        """
        if not api_key:
            raise WeatherServiceError("VisualCrossing API key is required")

        self.api_key = api_key
        self.http_client = http_client or HttpClient()
        self.logger = get_logger(__name__)

    def get_forecast(
        self,
        location: str,
        hours_ahead: int = 24,
        unit_group: str = "us",
    ) -> WeatherForecast:
        """Fetch weather forecast for a location.

        Args:
            location: Location string (e.g., "viola,tn" or ZIP code).
            hours_ahead: Number of hours of forecast to retrieve.
            unit_group: Unit system ("us" for imperial, "metric" for metric).

        Returns:
            WeatherForecast with hourly data and any active alerts.

        Raises:
            WeatherServiceError: If the API request fails.
        """
        self.logger.info(
            "fetching_forecast",
            location=location,
            hours_ahead=hours_ahead,
        )

        # Build API URL
        encoded_location = quote(location, safe="")
        url = f"{self.BASE_URL}/{encoded_location}"

        params = {
            "unitGroup": unit_group,
            "include": "days,hours,alerts,current,events",
            "key": self.api_key,
            "contentType": "json",
        }

        response = self.http_client.get(url, params=params)

        if not response.success:
            self.logger.error(
                "forecast_fetch_failed",
                status_code=response.status_code,
                error=response.text,
            )
            raise WeatherServiceError(
                f"Failed to fetch forecast: {response.status_code} - {response.text}"
            )

        if response.json_data is None:
            self.logger.error("forecast_invalid_json", response_text=response.text[:200])
            raise WeatherServiceError("Invalid JSON response from weather API")

        forecast = WeatherForecast.from_api_data(response.json_data, hours_ahead)

        self.logger.info(
            "forecast_fetched",
            location=forecast.resolved_address,
            hourly_count=len(forecast.hourly_forecasts),
            alert_count=len(forecast.alerts),
        )

        return forecast
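For reference, a small usage sketch of WeatherService. The API key value is a placeholder, and wiring the key and location through the config loader is assumed rather than shown in this file:

# Illustrative only: the api_key value is a placeholder.
service = WeatherService(api_key="YOUR_VISUALCROSSING_KEY")
forecast = service.get_forecast(location="viola,tn", hours_ahead=24, unit_group="us")

print(forecast.resolved_address)          # attributes referenced by the logging calls above
print(len(forecast.hourly_forecasts))
print(len(forecast.alerts))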
6
app/utils/__init__.py
Normal file
6
app/utils/__init__.py
Normal file
@@ -0,0 +1,6 @@
"""Utility modules for weather alerts."""

from app.utils.http_client import HttpClient
from app.utils.logging_config import configure_logging, get_logger

__all__ = ["HttpClient", "configure_logging", "get_logger"]
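These re-exports let callers import from the package root rather than the individual modules; a one-line sketch of that import:

from app.utils import HttpClient, configure_logging, get_logger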
162
app/utils/http_client.py
Normal file
162
app/utils/http_client.py
Normal file
@@ -0,0 +1,162 @@
"""Centralized HTTP client wrapper with retries and consistent error handling."""

from dataclasses import dataclass
from typing import Any, Optional

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from app.utils.logging_config import get_logger


@dataclass
class HttpResponse:
    """Wrapper for HTTP response data."""

    status_code: int
    json_data: Optional[dict[str, Any]]
    text: str
    success: bool


class HttpClient:
    """HTTP client with automatic retries, timeouts, and logging."""

    def __init__(
        self,
        timeout: int = 30,
        max_retries: int = 3,
        backoff_factor: float = 0.5,
    ) -> None:
        """Initialize the HTTP client.

        Args:
            timeout: Request timeout in seconds.
            max_retries: Maximum number of retry attempts.
            backoff_factor: Multiplier for exponential backoff between retries.
        """
        self.timeout = timeout
        self.logger = get_logger(__name__)

        # Configure retry strategy
        retry_strategy = Retry(
            total=max_retries,
            backoff_factor=backoff_factor,
            status_forcelist=[429, 500, 502, 503, 504],
            allowed_methods=["GET", "POST"],
        )

        # Create session with retry adapter
        self.session = requests.Session()
        adapter = HTTPAdapter(max_retries=retry_strategy)
        self.session.mount("http://", adapter)
        self.session.mount("https://", adapter)

    def get(
        self,
        url: str,
        params: Optional[dict[str, Any]] = None,
        headers: Optional[dict[str, str]] = None,
    ) -> HttpResponse:
        """Perform a GET request.

        Args:
            url: The URL to request.
            params: Optional query parameters.
            headers: Optional HTTP headers.

        Returns:
            HttpResponse with status, data, and success flag.
        """
        self.logger.debug("http_get", url=url, params=params)

        try:
            response = self.session.get(
                url,
                params=params,
                headers=headers,
                timeout=self.timeout,
            )
            return self._build_response(response)
        except requests.RequestException as e:
            self.logger.error("http_get_failed", url=url, error=str(e))
            return HttpResponse(
                status_code=0,
                json_data=None,
                text=str(e),
                success=False,
            )

    def post(
        self,
        url: str,
        data: Optional[dict[str, Any]] = None,
        json_data: Optional[dict[str, Any]] = None,
        headers: Optional[dict[str, str]] = None,
    ) -> HttpResponse:
        """Perform a POST request.

        Args:
            url: The URL to request.
            data: Optional form data.
            json_data: Optional JSON data.
            headers: Optional HTTP headers.

        Returns:
            HttpResponse with status, data, and success flag.
        """
        self.logger.debug("http_post", url=url)

        try:
            response = self.session.post(
                url,
                data=data,
                json=json_data,
                headers=headers,
                timeout=self.timeout,
            )
            return self._build_response(response)
        except requests.RequestException as e:
            self.logger.error("http_post_failed", url=url, error=str(e))
            return HttpResponse(
                status_code=0,
                json_data=None,
                text=str(e),
                success=False,
            )

    def _build_response(self, response: requests.Response) -> HttpResponse:
        """Build an HttpResponse from a requests Response.

        Args:
            response: The requests library Response object.

        Returns:
            HttpResponse wrapper.
        """
        json_data = None
        if response.headers.get("content-type", "").startswith("application/json"):
            try:
                json_data = response.json()
            except ValueError:
                pass

        success = 200 <= response.status_code < 300

        self.logger.debug(
            "http_response",
            status_code=response.status_code,
            success=success,
        )

        return HttpResponse(
            status_code=response.status_code,
            json_data=json_data,
            text=response.text,
            success=success,
        )

    def close(self) -> None:
        """Close the HTTP session."""
        self.session.close()
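A short sketch of the client in use. The endpoint URL is a placeholder, and the constructor arguments simply restate the defaults defined above:

# Illustrative only: example.com is a placeholder endpoint.
client = HttpClient(timeout=30, max_retries=3, backoff_factor=0.5)
resp = client.get("https://example.com/api/data", params={"q": "demo"})

if resp.success:
    data = resp.json_data or {}   # parsed only when the response is application/json
else:
    print(f"request failed: {resp.status_code} {resp.text}")

client.close()

Network errors never raise here: both get() and post() swallow requests.RequestException and return an HttpResponse with status_code=0 and success=False, so callers branch on the success flag.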
54
app/utils/logging_config.py
Normal file
54
app/utils/logging_config.py
Normal file
@@ -0,0 +1,54 @@
"""Structlog configuration for consistent logging throughout the application."""

import logging
import sys
from typing import Optional

import structlog


def configure_logging(log_level: str = "INFO") -> None:
    """Configure structlog with console output.

    Args:
        log_level: The logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).
    """
    level = getattr(logging, log_level.upper(), logging.INFO)

    # Configure standard library logging
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=level,
    )

    # Configure structlog
    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.dev.ConsoleRenderer(colors=sys.stdout.isatty()),
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )


def get_logger(name: Optional[str] = None) -> structlog.stdlib.BoundLogger:
    """Get a configured logger instance.

    Args:
        name: Optional name for the logger. If not provided, uses the calling module.

    Returns:
        A configured structlog logger.
    """
    return structlog.get_logger(name)
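A minimal sketch of the intended call order: configure once at startup, then fetch bound loggers per module. The event name and key-value pairs are examples in the same style as the log calls used elsewhere in this commit:

configure_logging(log_level="DEBUG")
logger = get_logger(__name__)
logger.info("app_started", version="1.0.0")   # structlog event name plus structured fields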