init commit
This commit is contained in:
30
app/utils/models.py
Normal file
30
app/utils/models.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Optional
|
||||
|
||||
|
||||
@dataclass
class PortFinding:
    """
    A single discovered port on a host.

    Attributes:
        port: port number as parsed from nmap's XML output.
        protocol: 'tcp' or 'udp'.
        state: 'open', 'closed', 'filtered', 'open|filtered', etc.
        service: optional nmap-reported service name (e.g., 'ssh', 'http').
    """
    port: int
    protocol: str  # 'tcp' or 'udp'
    state: str  # nmap port-state string
    service: Optional[str] = None  # None when nmap reports no service name
|
||||
|
||||
|
||||
@dataclass
class HostResult:
    """
    Results for a single host.

    Attributes:
        address: IP address (e.g., '192.0.2.10').
        host: primary hostname if reported by nmap (may be None).
        ports: list of PortFinding instances for this host.
    """
    address: str
    host: Optional[str] = None  # nmap does not always resolve a hostname
    ports: List[PortFinding] = field(default_factory=list)  # empty when no ports were found
|
||||
210
app/utils/scan_config_loader.py
Normal file
210
app/utils/scan_config_loader.py
Normal file
@@ -0,0 +1,210 @@
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Optional, Tuple
|
||||
import os
|
||||
import yaml
|
||||
import logging
|
||||
|
||||
# Module-level logger for this loader.
logger = logging.getLogger(__name__)
# NOTE(review): calling basicConfig() at import time configures the root
# logger as a side effect; libraries normally leave this to the application
# entry point — confirm this is intended.
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
|
||||
@dataclass
class ScanTarget:
    """
    One IP and its expected ports.

    Attributes:
        ip: target address as a string (taken verbatim from the YAML file).
        expected_tcp: TCP ports expected to be open on this host.
        expected_udp: UDP ports expected to be open on this host.
    """
    ip: str
    expected_tcp: List[int] = field(default_factory=list)  # empty = no expectations
    expected_udp: List[int] = field(default_factory=list)  # empty = no expectations
|
||||
|
||||
|
||||
@dataclass
class ScanOptions:
    """
    Feature toggles that affect how scans are executed.

    Defaults match the values applied by the loader when the YAML
    'scan_options' section is missing.
    """
    udp_scan: bool = False  # UDP scanning is opt-in (it is slow)
    tls_security_scan: bool = True
    tls_exp_check: bool = True  # TLS certificate expiry check
|
||||
|
||||
|
||||
@dataclass
class Reporting:
    """
    Output/report preferences for this config file.
    """
    report_name: str = "Scan Report"  # human-readable title
    report_filename: str = "report.html"  # output file name
    full_details: bool = False  # include full details in the rendered report
|
||||
|
||||
|
||||
@dataclass
class ScanConfigFile:
    """
    Full configuration for a single logical scan "set" (e.g., DMZ, WAN).

    Built by ScanConfigRepository._parse_config from one YAML file.
    """
    name: str = "Unnamed"  # falls back to the YAML file stem when absent
    scan_options: ScanOptions = field(default_factory=ScanOptions)
    reporting: Reporting = field(default_factory=Reporting)
    scan_targets: List[ScanTarget] = field(default_factory=list)
|
||||
|
||||
|
||||
class ScanConfigRepository:
    """
    Loads and validates *.yaml scan configuration files from a directory.

    Search order for the config directory:
      1) Explicit path argument to load_all()
      2) Environment variable SCAN_TARGETS_DIR
      3) Default: /data/scan_targets
    """

    # Recognized YAML extensions (compared case-insensitively).
    SUPPORTED_EXT = (".yaml", ".yml")

    def __init__(self) -> None:
        # Most recent load_all() result; consumed by list_configs()/get_by_name().
        self._loaded: List[ScanConfigFile] = []

    def load_all(self, directory: Optional[Path] = None) -> List[ScanConfigFile]:
        """
        Load all YAML configs from the given directory and return them.

        Individual files that fail to parse or validate are logged and
        skipped (fail-open) so one bad file does not block the rest.

        :param directory: Optional explicit directory path.
        :raises FileNotFoundError: if the resolved directory does not exist.
        """
        root = self._resolve_directory(directory)
        # Fail early with a clear message instead of an opaque iterdir() error.
        if not root.is_dir():
            raise FileNotFoundError(f"Scan config directory not found: {root}")
        logger.info("Loading scan configs from: %s", root)

        # Only regular files with a supported extension (a directory named
        # 'x.yaml' would otherwise crash open()); sorted for deterministic order.
        files = sorted(
            p for p in root.iterdir()
            if p.is_file() and p.suffix.lower() in self.SUPPORTED_EXT
        )
        logger.info("Found %d config file(s).", len(files))

        configs: List[ScanConfigFile] = []
        for fpath in files:
            try:
                data = self._read_yaml(fpath)
                cfg = self._parse_config(data, default_name=fpath.stem)
                self._validate_config(cfg, source=str(fpath))
                configs.append(cfg)
                logger.info("Loaded config: %s (%s targets)", cfg.name, len(cfg.scan_targets))
            except Exception as exc:
                # Fail-open vs fail-fast is up to you; here we log and continue.
                logger.error("Failed to load %s: %s", fpath, exc)

        self._loaded = configs
        return configs

    def _resolve_directory(self, directory: Optional[Path]) -> Path:
        """
        Decide which directory to load from (argument > env var > default).
        """
        if directory:
            return directory
        env = os.getenv("SCAN_TARGETS_DIR")
        if env:
            return Path(env)
        return Path("/data/scan_targets")

    @staticmethod
    def _read_yaml(path: Path) -> Dict[str, Any]:
        """
        Safely read a YAML file into a Python dict.

        :raises ValueError: if the top-level YAML value is not a mapping.
        """
        with path.open("r", encoding="utf-8") as f:
            # 'or {}' turns an empty file (safe_load -> None) into an empty mapping.
            data = yaml.safe_load(f) or {}
        if not isinstance(data, dict):
            raise ValueError("Top-level YAML must be a mapping (dict).")
        return data

    @staticmethod
    def _as_int_list(value: Any, field_name: str) -> List[int]:
        """
        Coerce a sequence to a list of ints; raise TypeError if invalid.

        :param value: raw YAML value (None/[]/list/tuple accepted).
        :param field_name: used only in error messages.
        """
        if value in (None, []):
            return []
        if not isinstance(value, (list, tuple)):
            raise TypeError(f"'{field_name}' must be a list of integers.")
        out: List[int] = []
        for v in value:
            if isinstance(v, bool):
                # Avoid True/False being treated as 1/0
                raise TypeError(f"'{field_name}' must contain integers, not booleans.")
            try:
                out.append(int(v))
            except Exception as exc:
                raise TypeError(f"'{field_name}' contains a non-integer: {v!r}") from exc
        return out

    def _parse_config(self, data: Dict[str, Any], default_name: str) -> ScanConfigFile:
        """
        Convert a raw dict (from YAML) into a validated ScanConfigFile.

        :param data: top-level YAML mapping.
        :param default_name: used when the file declares no 'name'.
        """
        name = str(data.get("name", default_name))

        # Parse scan_options; 'or {}' guards against an explicit null in YAML.
        so_raw = data.get("scan_options", {}) or {}
        scan_options = ScanOptions(
            udp_scan=bool(so_raw.get("udp_scan", False)),
            tls_security_scan=bool(so_raw.get("tls_security_scan", True)),
            tls_exp_check=bool(so_raw.get("tls_exp_check", True)),
        )

        # Parse reporting
        rep_raw = data.get("reporting", {}) or {}
        reporting = Reporting(
            report_name=str(rep_raw.get("report_name", "Scan Report")),
            report_filename=str(rep_raw.get("report_filename", "report.html")),
            full_details=bool(rep_raw.get("full_details", False)),
        )

        # Parse targets (1-based index in error messages for readability).
        targets_raw = data.get("scan_targets", []) or []
        if not isinstance(targets_raw, list):
            raise TypeError("'scan_targets' must be a list.")
        targets: List[ScanTarget] = []
        for idx, item in enumerate(targets_raw, start=1):
            if not isinstance(item, dict):
                raise TypeError(f"scan_targets[{idx}] must be a mapping (dict).")
            ip = item.get("ip")
            if not ip or not isinstance(ip, str):
                raise ValueError(f"scan_targets[{idx}].ip must be a non-empty string.")
            expected_tcp = self._as_int_list(item.get("expected_tcp", []), "expected_tcp")
            expected_udp = self._as_int_list(item.get("expected_udp", []), "expected_udp")
            targets.append(ScanTarget(ip=ip, expected_tcp=expected_tcp, expected_udp=expected_udp))

        return ScanConfigFile(
            name=name,
            scan_options=scan_options,
            reporting=reporting,
            scan_targets=targets,
        )

    @staticmethod
    def _validate_config(cfg: ScanConfigFile, source: str) -> None:
        """
        Lightweight semantic checks.

        :param source: file path, used only in error messages.
        :raises ValueError: if the same IP appears more than once in the file.
        """
        # Disallow duplicate IPs within a single file.
        seen: Dict[str, int] = {}
        for t in cfg.scan_targets:
            seen[t.ip] = seen.get(t.ip, 0) + 1
        dups = [ip for ip, count in seen.items() if count > 1]
        if dups:
            raise ValueError(f"{source}: duplicate IP(s) in scan_targets: {dups}")

    # Optional helpers

    def list_configs(self) -> List[str]:
        """
        Return names of loaded configs for UI selection.
        """
        return [c.name for c in self._loaded]

    def get_by_name(self, name: str) -> Optional[ScanConfigFile]:
        """
        Fetch a loaded config by its name, or None if not loaded.
        """
        for c in self._loaded:
            if c.name == name:
                return c
        return None
|
||||
179
app/utils/scanner.py
Normal file
179
app/utils/scanner.py
Normal file
@@ -0,0 +1,179 @@
|
||||
from __future__ import annotations
|
||||
import os
|
||||
import subprocess
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
from typing import Iterable, List, Dict, Optional, Tuple
|
||||
|
||||
from utils.models import HostResult, PortFinding
|
||||
|
||||
|
||||
class nmap_scanner:
    """
    Thin wrapper around the nmap CLI: runs TCP/UDP scans against a set of
    targets and parses the XML output into HostResult objects.

    NOTE(review): class name keeps the original lower_snake_case to avoid
    breaking callers; PascalCase (NmapScanner) would be conventional.
    """

    # XML report locations, relative to the current working directory.
    TCP_REPORT_PATH = Path() / "data" / "nmap-tcp-results.xml"
    UDP_REPORT_PATH = Path() / "data" / "nmap-udp-results.xml"
    NMAP_RESULTS_PATH = Path() / "data" / "nmap-results.xml"

    def __init__(self, targets: Iterable[str], scan_udp: bool = False):
        """
        :param targets: iterable of host addresses/hostnames to scan.
        :param scan_udp: when True, scan_targets() also runs a UDP scan.
        """
        self.targets = list(targets)
        self.scan_udp = scan_udp

    def scan_targets(self) -> List[HostResult]:
        """
        Run the configured scans (TCP always; UDP when enabled) and return
        the merged results.
        """
        tcp_results = self.run_nmap_tcp_all()

        if self.scan_udp:
            udp_results = self.run_nmap_udp()
            # BUG FIX: original read 'all_results = List[HostResult] = ...',
            # a chained assignment into typing.List that fails at runtime;
            # the intent was a variable annotation.
            all_results: List[HostResult] = self.merge_host_results(tcp_results, udp_results)
        else:
            all_results = tcp_results

        return all_results

    def run_nmap_tcp_all(self, min_rate: int = 1000, assume_up: bool = True) -> List[HostResult]:
        """
        Run a TCP SYN scan across all ports (0-65535) for the given targets and parse results.

        :param min_rate: nmap --min-rate packets/second floor.
        :param assume_up: when True, pass -Pn (skip host discovery).
        :returns: list of HostResult objects (empty when there are no targets).
        """
        targets_list = self.targets
        if not targets_list:
            return []

        cmd = [
            "nmap",
            "-sS",  # TCP SYN scan
            "-p-",  # all TCP ports
            "-T4",
            "--min-rate", str(min_rate),
            "-oX", str(self.TCP_REPORT_PATH),
        ]
        if assume_up:
            cmd.append("-Pn")
        cmd.extend(targets_list)

        self._run_nmap(cmd)
        return self.parse_nmap_xml(self.TCP_REPORT_PATH)

    def run_nmap_udp(self, ports: Optional[Iterable[int]] = None, min_rate: int = 500, assume_up: bool = True) -> List[HostResult]:
        """
        Run a UDP scan for the provided ports (recommended to keep this list small).

        If 'ports' is None, nmap defaults to its "top" UDP ports; full -p- UDP is very slow.

        :param ports: optional explicit UDP port set.
        :param min_rate: nmap --min-rate packets/second floor.
        :param assume_up: when True, pass -Pn (skip host discovery).
        """
        targets_list = self.targets
        if not targets_list:
            return []

        cmd = [
            "nmap",
            "-sU",  # UDP scan
            "-T3",  # less aggressive timing by default for UDP
            "--min-rate", str(min_rate),
            "-oX", str(self.UDP_REPORT_PATH),
        ]
        if assume_up:
            cmd.append("-Pn")

        if ports:
            # Explicit port set: de-duplicate and sort for a stable command line.
            port_list = sorted(set(int(p) for p in ports))
            port_str = ",".join(str(p) for p in port_list)
            cmd.extend(["-p", port_str])

        cmd.extend(targets_list)

        self._run_nmap(cmd)
        return self.parse_nmap_xml(self.UDP_REPORT_PATH)

    def merge_host_results(self, *result_sets: List[HostResult]) -> List[HostResult]:
        """
        Merge multiple lists of HostResult (e.g., TCP set + UDP set) by address.

        Ports are combined (duplicates dropped); hostnames preserved if found
        in any set. Output is sorted by address, ports by (protocol, port).
        """
        merged: Dict[str, HostResult] = {}
        for results in result_sets:
            for hr in results:
                if hr.address not in merged:
                    # Copy the port list so later merges don't mutate the input.
                    merged[hr.address] = HostResult(address=hr.address, host=hr.host, ports=list(hr.ports))
                else:
                    existing = merged[hr.address]
                    # prefer a hostname if we didn't have one yet
                    if not existing.host and hr.host:
                        existing.host = hr.host
                    # merge ports (avoid dupes)
                    existing_ports_key = {(p.protocol, p.port, p.state, p.service) for p in existing.ports}
                    for p in hr.ports:
                        key = (p.protocol, p.port, p.state, p.service)
                        if key not in existing_ports_key:
                            existing.ports.append(p)
                            existing_ports_key.add(key)
        # Sort ports in each host for stable output
        for hr in merged.values():
            hr.ports.sort(key=lambda p: (p.protocol, p.port))
        return sorted(merged.values(), key=lambda h: h.address)

    def parse_nmap_xml(self, xml_path: Path) -> List[HostResult]:
        """
        Parse an Nmap XML file into a list of HostResult objects.

        Captures per-port protocol, state, and optional service name.
        Hosts without an <address> element and ports with a non-numeric
        portid are skipped.
        """
        tree = ET.parse(str(xml_path))
        root = tree.getroot()

        results: List[HostResult] = []

        for host_el in root.findall("host"):
            # Address
            addr_el = host_el.find("address")
            if addr_el is None:
                continue
            address = addr_el.get("addr", "")

            # Hostname (if present)
            hostname: Optional[str] = None
            hostnames_el = host_el.find("hostnames")
            if hostnames_el is not None:
                hn_el = hostnames_el.find("hostname")
                if hn_el is not None:
                    hostname = hn_el.get("name")

            findings: List[PortFinding] = []
            ports_el = host_el.find("ports")
            if ports_el is not None:
                for port_el in ports_el.findall("port"):
                    protocol = (port_el.get("protocol") or "").lower()  # 'tcp' or 'udp'
                    portid_str = port_el.get("portid", "0")
                    try:
                        portid = int(portid_str)
                    except ValueError:
                        continue

                    # BUG FIX: get("state") can return None when the attribute
                    # is absent, which crashed .lower(); fall back to 'unknown'.
                    state_el = port_el.find("state")
                    raw_state = state_el.get("state") if state_el is not None else None
                    state = (raw_state or "unknown").lower()

                    # optional service info
                    service_el = port_el.find("service")
                    service_name = service_el.get("name") if service_el is not None else None

                    findings.append(PortFinding(port=portid, protocol=protocol, state=state, service=service_name))

            results.append(HostResult(address=address, host=hostname, ports=findings))

        return results

    def _run_nmap(self, cmd: List[str]) -> None:
        """
        Run a command and raise on non-zero exit with a readable message.

        :raises RuntimeError: wrapping CalledProcessError on failure.
        """
        try:
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError as exc:
            raise RuntimeError(f"Command failed ({exc.returncode}): {' '.join(cmd)}") from exc

    def cleanup(self) -> None:
        """
        Remove any report files left behind by previous runs.
        """
        # BUG FIX: original tested 'self.NMAP_RESULTS_PATH.exists' without
        # calling it — a bound method is always truthy, so unlink() raised
        # FileNotFoundError whenever the file was absent.
        for report in (self.TCP_REPORT_PATH, self.UDP_REPORT_PATH, self.NMAP_RESULTS_PATH):
            if report.exists():
                report.unlink()
|
||||
|
||||
127
app/utils/settings.py
Normal file
127
app/utils/settings.py
Normal file
@@ -0,0 +1,127 @@
|
||||
#
|
||||
# Note: the settings file path is hardcoded below, just after the imports.
|
||||
#
|
||||
# To make a new settings section, just add the setting dict to your yaml
|
||||
# and then define the data class below in the config data classes area.
|
||||
#
|
||||
# Example use from anywhere - this will always return the same singleton
|
||||
# from settings import get_settings
|
||||
# def main():
|
||||
# settings = get_settings()
|
||||
# print(settings.database.host) # Autocomplete works
|
||||
# print(settings.logging.level)
|
||||
|
||||
# if __name__ == "__main__":
|
||||
# main()
|
||||
|
||||
import functools
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, TypeVar
|
||||
from dataclasses import dataclass, fields, is_dataclass, field, MISSING
|
||||
|
||||
try:
    import yaml
except ModuleNotFoundError:
    # PyYAML is a hard requirement: without it no settings can be loaded,
    # so abort with a clear message on stderr and a non-zero exit status.
    import logging
    import sys

    logger = logging.getLogger(__name__)
    msg = (
        "Required modules are not installed. "
        "Can not continue with module / application loading.\n"
        "Install it with: pip install -r requirements.txt"
    )
    print(msg, file=sys.stderr)
    logger.error(msg)
    # BUG FIX: bare exit() returned status 0 (success) on a fatal failure.
    sys.exit(1)

# Default settings file location, resolved relative to the CWD at import time.
DEFAULT_SETTINGS_FILE = Path.cwd() / "config" / "settings.yaml"
|
||||
|
||||
# ---------- CONFIG DATA CLASSES ----------
|
||||
@dataclass
class DatabaseConfig:
    """Connection settings for the application database."""
    host: str = "localhost"
    port: int = 5432  # NOTE(review): 5432 is PostgreSQL's default — confirm backend
    username: str = "root"
    password: str = ""  # empty string means no password configured
|
||||
|
||||
|
||||
@dataclass
class AppConfig:
    """General application metadata and feature flags."""
    name: str = "MyApp"
    version_major: int = 1
    version_minor: int = 0
    production: bool = False  # True when running in a production environment
    enabled: bool = True
    token_expiry: int = 3600  # NOTE(review): presumably seconds — confirm with callers
|
||||
|
||||
|
||||
@dataclass
class Settings:
    """
    Top-level application settings: one dataclass field per YAML section.

    The field name is the YAML key (e.g. the 'database' mapping populates
    DatabaseConfig). Missing sections fall back to the field defaults.
    """
    database: DatabaseConfig = field(default_factory=DatabaseConfig)
    app: AppConfig = field(default_factory=AppConfig)

    @classmethod
    def from_yaml(cls, path: str | Path) -> "Settings":
        """Load settings from YAML file into a Settings object.

        Missing sections/keys keep their dataclass defaults.
        NOTE(review): unknown keys inside a section are forwarded to the
        section dataclass constructor and will raise TypeError — confirm
        that strictness is intended.
        """
        # 'or {}' turns an empty YAML file (safe_load -> None) into an empty mapping.
        with open(path, "r", encoding="utf-8") as f:
            raw: dict[str, Any] = yaml.safe_load(f) or {}

        init_kwargs = {}
        for f_def in fields(cls):
            yaml_value = raw.get(f_def.name, None)

            # Determine default value from default_factory or default
            if f_def.default_factory is not MISSING:
                default_value = f_def.default_factory()
            elif f_def.default is not MISSING:
                default_value = f_def.default
            else:
                default_value = None

            # Handle nested dataclasses.
            # NOTE(review): this relies on f_def.type being the actual class
            # object; adding 'from __future__ import annotations' to this
            # module would turn it into a string and silently skip this branch.
            if is_dataclass(f_def.type):
                if isinstance(yaml_value, dict):
                    # Merge YAML values with defaults
                    merged_data = {fld.name: getattr(default_value, fld.name) for fld in fields(f_def.type)}
                    merged_data.update(yaml_value)
                    init_kwargs[f_def.name] = f_def.type(**merged_data)
                else:
                    # Section missing or not a mapping: keep the defaults.
                    init_kwargs[f_def.name] = default_value
            else:
                init_kwargs[f_def.name] = yaml_value if yaml_value is not None else default_value

        return cls(**init_kwargs)
|
||||
|
||||
|
||||
# ---------- SINGLETON DECORATOR ----------
T = TypeVar("T")


def singleton_loader(func: Callable[..., T]) -> Callable[..., T]:
    """Run *func* at most once; every later call returns the cached value."""
    _unset = object()  # sentinel: distinguishes "never called" from any real result
    result = _unset

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> T:
        nonlocal result
        if result is _unset:
            result = func(*args, **kwargs)
        return result

    return wrapper
|
||||
|
||||
|
||||
@singleton_loader
def get_settings(config_path: str | Path | None = None) -> Settings:
    """
    Return the process-wide singleton Settings instance.

    Args:
        config_path: Optional path to the YAML config file. If not provided,
            defaults to 'config/settings.yaml' in the current working directory.
    """
    resolved = DEFAULT_SETTINGS_FILE if config_path is None else Path(config_path)
    return Settings.from_yaml(resolved)
|
||||
Reference in New Issue
Block a user