restructure of dirs, huge docs update
This commit is contained in:
327
app/src/report_generator.py
Executable file
327
app/src/report_generator.py
Executable file
@@ -0,0 +1,327 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
HTML Report Generator for SneakyScanner
|
||||
|
||||
Generates comprehensive HTML reports from JSON scan results with:
|
||||
- Summary dashboard (statistics, drift alerts, security warnings)
|
||||
- Site-by-site breakdown with service details
|
||||
- SSL/TLS certificate and cipher suite information
|
||||
- Visual badges for expected vs. unexpected services
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Any, Optional
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader, select_autoescape
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HTMLReportGenerator:
    """Generates HTML reports from SneakyScanner JSON output."""

    def __init__(self, json_report_path: str, template_dir: str = 'templates'):
        """
        Initialize the HTML report generator.

        Args:
            json_report_path: Path to the JSON scan report
            template_dir: Directory containing Jinja2 templates
        """
        self.json_report_path = Path(json_report_path)
        self.template_dir = Path(template_dir)
        # Populated by _load_json_report() at generate_report() time.
        self.report_data = None

        # Autoescaping keeps scan data from injecting markup into the report.
        self.jinja_env = Environment(
            loader=FileSystemLoader(self.template_dir),
            autoescape=select_autoescape(['html', 'xml'])
        )

        # Custom filters referenced by the report template.
        self.jinja_env.filters['format_date'] = self._format_date
        self.jinja_env.filters['format_duration'] = self._format_duration

    def generate_report(self, output_path: Optional[str] = None) -> str:
        """
        Generate HTML report from JSON scan data.

        Args:
            output_path: Path for output HTML file. If None, derives from JSON filename.

        Returns:
            Path to generated HTML report

        Raises:
            FileNotFoundError: If the JSON report does not exist.
            json.JSONDecodeError: If the report file is not valid JSON.
        """
        logger.info(f"Loading JSON report from {self.json_report_path}")
        self._load_json_report()

        logger.info("Calculating summary statistics")
        summary_stats = self._calculate_summary_stats()

        logger.info("Identifying drift alerts")
        drift_alerts = self._identify_drift_alerts()

        logger.info("Identifying security warnings")
        security_warnings = self._identify_security_warnings()

        # Template context: raw report fields plus the derived dashboards.
        context = {
            'title': self.report_data.get('title', 'SneakyScanner Report'),
            'scan_time': self.report_data.get('scan_time'),
            'scan_duration': self.report_data.get('scan_duration'),
            'config_file': self.report_data.get('config_file'),
            'sites': self.report_data.get('sites', []),
            'summary_stats': summary_stats,
            'drift_alerts': drift_alerts,
            'security_warnings': security_warnings,
        }

        # Default the HTML next to the JSON input when no path is given.
        if output_path is None:
            output_path = self.json_report_path.with_suffix('.html')
        else:
            output_path = Path(output_path)

        logger.info("Rendering HTML template")
        template = self.jinja_env.get_template('report_template.html')
        html_content = template.render(**context)

        logger.info(f"Writing HTML report to {output_path}")
        output_path.write_text(html_content, encoding='utf-8')

        logger.info(f"Successfully generated HTML report: {output_path}")
        return str(output_path)

    def _load_json_report(self) -> None:
        """Load and parse the JSON scan report into self.report_data.

        Raises:
            FileNotFoundError: If the report path does not exist.
            json.JSONDecodeError: If the file is not valid JSON.
        """
        if not self.json_report_path.exists():
            raise FileNotFoundError(f"JSON report not found: {self.json_report_path}")

        # Explicit encoding: reports are written as UTF-8; without it the
        # read would depend on the host locale.
        with open(self.json_report_path, 'r', encoding='utf-8') as f:
            self.report_data = json.load(f)

    def _calculate_summary_stats(self) -> Dict[str, int]:
        """
        Calculate summary statistics for the dashboard.

        Returns:
            Dictionary with stat counts (IPs, open ports, services,
            web services, screenshots).
        """
        stats = {
            'total_ips': 0,
            'tcp_ports': 0,
            'udp_ports': 0,
            'services': 0,
            'web_services': 0,
            'screenshots': 0,
        }

        for site in self.report_data.get('sites', []):
            for ip_data in site.get('ips', []):
                stats['total_ips'] += 1

                actual = ip_data.get('actual', {})
                stats['tcp_ports'] += len(actual.get('tcp_ports', []))
                stats['udp_ports'] += len(actual.get('udp_ports', []))

                services = actual.get('services', [])
                stats['services'] += len(services)

                # A service with http_info attached is a web service; it may
                # additionally carry a screenshot path.
                for service in services:
                    if service.get('http_info'):
                        stats['web_services'] += 1
                        if service['http_info'].get('screenshot'):
                            stats['screenshots'] += 1

        return stats

    def _identify_drift_alerts(self) -> Dict[str, int]:
        """
        Identify infrastructure drift (unexpected/missing items).

        Compares each IP's expected TCP/UDP port sets against what the
        scan actually observed.

        Returns:
            Dictionary with drift alert counts
        """
        alerts = {
            'unexpected_tcp': 0,
            'unexpected_udp': 0,
            'missing_tcp': 0,
            'missing_udp': 0,
            'new_services': 0,
        }

        for site in self.report_data.get('sites', []):
            for ip_data in site.get('ips', []):
                expected = ip_data.get('expected', {})
                actual = ip_data.get('actual', {})

                expected_tcp = set(expected.get('tcp_ports', []))
                actual_tcp = set(actual.get('tcp_ports', []))
                expected_udp = set(expected.get('udp_ports', []))
                actual_udp = set(actual.get('udp_ports', []))

                # Unexpected = observed but not declared in config.
                alerts['unexpected_tcp'] += len(actual_tcp - expected_tcp)
                alerts['unexpected_udp'] += len(actual_udp - expected_udp)

                # Missing = declared in config but not observed.
                alerts['missing_tcp'] += len(expected_tcp - actual_tcp)
                alerts['missing_udp'] += len(expected_udp - actual_udp)

                # Any service listening on an unexpected port counts as new.
                unexpected_ports = (actual_tcp - expected_tcp) | (actual_udp - expected_udp)
                for service in actual.get('services', []):
                    if service.get('port') in unexpected_ports:
                        alerts['new_services'] += 1

        return alerts

    def _identify_security_warnings(self) -> Dict[str, Any]:
        """
        Identify security issues (cert expiry, weak TLS, etc.).

        Returns:
            Dictionary with security warning counts and details
        """
        warnings = {
            'expiring_certs': 0,
            'weak_tls': 0,
            'self_signed': 0,
            'high_ports': 0,
            'expiring_cert_details': [],  # List of IPs with expiring certs
        }

        for site in self.report_data.get('sites', []):
            for ip_data in site.get('ips', []):
                actual = ip_data.get('actual', {})

                for service in actual.get('services', []):
                    port = service.get('port')

                    # Services on very high ports (>10000) are flagged as unusual.
                    if port and port > 10000:
                        warnings['high_ports'] += 1

                    http_info = service.get('http_info', {})
                    ssl_tls = http_info.get('ssl_tls', {})

                    if ssl_tls:
                        # Certificate expiring within 30 days.
                        cert = ssl_tls.get('certificate', {})
                        days_until_expiry = cert.get('days_until_expiry')

                        if days_until_expiry is not None and days_until_expiry < 30:
                            warnings['expiring_certs'] += 1
                            warnings['expiring_cert_details'].append({
                                'ip': ip_data.get('address'),
                                'port': port,
                                'days': days_until_expiry,
                                'subject': cert.get('subject'),
                            })

                        # Issuer == subject is the self-signed heuristic.
                        issuer = cert.get('issuer', '')
                        subject = cert.get('subject', '')
                        if issuer and subject and issuer == subject:
                            warnings['self_signed'] += 1

                        # Weak-protocol check: the service counts once if it
                        # accepts any legacy TLS version (1.0 or 1.1) — same
                        # semantics as the old if/elif chain, without the
                        # duplicated branches.
                        tls_versions = ssl_tls.get('tls_versions', {})
                        if any(tls_versions.get(v, {}).get('supported')
                               for v in ('TLS 1.0', 'TLS 1.1')):
                            warnings['weak_tls'] += 1

        return warnings

    @staticmethod
    def _format_date(date_str: Optional[str]) -> str:
        """
        Format ISO date string for display.

        Args:
            date_str: ISO format date string

        Returns:
            Formatted date string, or 'N/A' when missing, or the raw
            value when unparseable.
        """
        if not date_str:
            return 'N/A'

        try:
            # fromisoformat (pre-3.11) cannot parse the trailing 'Z'.
            dt = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
            return dt.strftime('%Y-%m-%d %H:%M:%S UTC')
        except (ValueError, AttributeError):
            return str(date_str)

    @staticmethod
    def _format_duration(duration: Optional[float]) -> str:
        """
        Format scan duration for display.

        Args:
            duration: Duration in seconds

        Returns:
            Human-readable duration string (seconds, minutes, or hours).
        """
        if duration is None:
            return 'N/A'

        if duration < 60:
            return f"{duration:.1f} seconds"
        elif duration < 3600:
            minutes = duration / 60
            return f"{minutes:.1f} minutes"
        else:
            hours = duration / 3600
            return f"{hours:.2f} hours"
|
||||
|
||||
|
||||
def main():
    """Command-line entry point for standalone usage."""
    if len(sys.argv) < 2:
        # No report path given: show usage and bail out.
        print("Usage: python report_generator.py <json_report_path> [output_html_path]")
        print("\nExample:")
        print(" python report_generator.py output/scan_report_20251114_103000.json")
        print(" python report_generator.py output/scan_report.json custom_report.html")
        sys.exit(1)

    source_json = sys.argv[1]
    dest_html = sys.argv[2] if len(sys.argv) > 2 else None

    try:
        # Templates live one level above this script's directory.
        templates = Path(__file__).parent.parent / 'templates'

        generator = HTMLReportGenerator(source_json, template_dir=str(templates))
        written = generator.generate_report(dest_html)

        print(f"\n✓ Successfully generated HTML report:")
        print(f" {written}")

    except FileNotFoundError as e:
        logger.error(f"File not found: {e}")
        sys.exit(1)
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in report file: {e}")
        sys.exit(1)
    except Exception as e:
        # Catch-all boundary: log with traceback, exit non-zero.
        logger.error(f"Error generating report: {e}", exc_info=True)
        sys.exit(1)


if __name__ == '__main__':
    main()
|
||||
826
app/src/scanner.py
Normal file
826
app/src/scanner.py
Normal file
@@ -0,0 +1,826 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
SneakyScanner - Masscan-based network scanner with YAML configuration
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Any
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
import yaml
|
||||
from libnmap.process import NmapProcess
|
||||
from libnmap.parser import NmapParser
|
||||
|
||||
from src.screenshot_capture import ScreenshotCapture
|
||||
from src.report_generator import HTMLReportGenerator
|
||||
|
||||
# Force unbuffered output for Docker
|
||||
sys.stdout.reconfigure(line_buffering=True)
|
||||
sys.stderr.reconfigure(line_buffering=True)
|
||||
|
||||
|
||||
class SneakyScanner:
|
||||
"""Wrapper for masscan to perform network scans based on YAML config"""
|
||||
|
||||
def __init__(self, config_path: str, output_dir: str = "/app/output"):
    """Set up the scanner: parse the YAML config and ensure the output dir exists."""
    self.config_path = Path(config_path)
    self.output_dir = Path(output_dir)
    # Scan artifacts (JSON/HTML/screenshots) land here; create it up front.
    self.output_dir.mkdir(parents=True, exist_ok=True)
    # Parsed configuration; raises if the file is missing or invalid.
    self.config = self._load_config()
    # Screenshot helper is constructed lazily when scan() starts.
    self.screenshot_capture = None
|
||||
|
||||
def _load_config(self) -> Dict[str, Any]:
    """Load and validate the YAML configuration.

    Returns:
        Parsed configuration dictionary.

    Raises:
        FileNotFoundError: If the config file does not exist.
        ValueError: If 'title' or 'sites' is missing/empty.
    """
    if not self.config_path.exists():
        raise FileNotFoundError(f"Config file not found: {self.config_path}")

    # Explicit encoding so parsing does not depend on the host locale.
    with open(self.config_path, 'r', encoding='utf-8') as f:
        # An empty YAML file parses to None; normalize to {} so the
        # validation below raises ValueError instead of AttributeError.
        config = yaml.safe_load(f) or {}

    if not config.get('title'):
        raise ValueError("Config must include 'title' field")
    if not config.get('sites'):
        raise ValueError("Config must include 'sites' field")

    return config
|
||||
|
||||
def _run_masscan(self, targets: List[str], ports: str, protocol: str) -> List[Dict]:
    """Run masscan against *targets* and return the parsed JSON records.

    Args:
        targets: List of IP addresses to scan
        ports: Port range string (e.g., "0-65535")
        protocol: "tcp" or "udp"

    Returns:
        One dict per masscan JSON output record.
    """
    if not targets:
        return []

    # masscan reads targets from a file and writes results to a file;
    # both are temporaries removed in the finally block below.
    with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.txt') as tf:
        tf.write('\n'.join(targets))
        target_file = tf.name
    with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.json') as tf:
        output_file = tf.name

    try:
        # The two protocols differ only in the port-selection flag.
        if protocol == 'tcp':
            port_args = ['-p', ports]
        elif protocol == 'udp':
            port_args = ['--udp-ports', ports]
        else:
            raise ValueError(f"Invalid protocol: {protocol}")

        cmd = (['masscan', '-iL', target_file] + port_args +
               ['--rate', '10000', '-oJ', output_file, '--wait', '0'])

        print(f"Running: {' '.join(cmd)}", flush=True)
        proc = subprocess.run(cmd, capture_output=True, text=True)
        print(f"Masscan {protocol.upper()} scan completed", flush=True)

        if proc.returncode != 0:
            print(f"Masscan stderr: {proc.stderr}", file=sys.stderr)

        # masscan -oJ emits one JSON object per line (with trailing commas
        # and '#' comment lines); parse defensively, skipping bad lines.
        records = []
        with open(output_file, 'r') as fh:
            for raw in fh:
                raw = raw.strip()
                if not raw or raw.startswith('#'):
                    continue
                try:
                    records.append(json.loads(raw.rstrip(',')))
                except json.JSONDecodeError:
                    continue

        return records

    finally:
        # Always remove the temp files, even on error.
        Path(target_file).unlink(missing_ok=True)
        Path(output_file).unlink(missing_ok=True)
|
||||
|
||||
def _run_ping_scan(self, targets: List[str]) -> Dict[str, bool]:
    """Ping-sweep *targets* using masscan ICMP echo.

    Returns:
        Dict mapping every target IP to True when it answered the ping,
        False otherwise (silent hosts still get an entry).
    """
    if not targets:
        return {}

    # Temp files: one holding the target list, one reserved for output.
    with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.txt') as tf:
        tf.write('\n'.join(targets))
        target_file = tf.name
    with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.json') as tf:
        output_file = tf.name

    try:
        cmd = ['masscan', '-iL', target_file, '--ping',
               '--rate', '10000', '-oJ', output_file, '--wait', '0']

        print(f"Running: {' '.join(cmd)}", flush=True)
        proc = subprocess.run(cmd, capture_output=True, text=True)
        print(f"Masscan PING scan completed", flush=True)

        if proc.returncode != 0:
            print(f"Masscan stderr: {proc.stderr}", file=sys.stderr, flush=True)

        # Collect the set of IPs that produced any record.
        responders = set()
        with open(output_file, 'r') as fh:
            for raw in fh:
                raw = raw.strip()
                if not raw or raw.startswith('#'):
                    continue
                try:
                    record = json.loads(raw.rstrip(','))
                except json.JSONDecodeError:
                    continue
                if 'ip' in record:
                    responders.add(record['ip'])

        return {ip: ip in responders for ip in targets}

    finally:
        Path(target_file).unlink(missing_ok=True)
        Path(output_file).unlink(missing_ok=True)
|
||||
|
||||
def _run_nmap_service_detection(self, ip_ports: Dict[str, List[int]]) -> Dict[str, List[Dict]]:
    """Run nmap -sV against each host's discovered TCP ports.

    Args:
        ip_ports: Dict mapping IP addresses to list of TCP ports

    Returns:
        Dict mapping IP addresses to list of service info dicts (empty
        list on timeout/error or when the host had no open ports).
    """
    if not ip_ports:
        return {}

    detected = {}

    for host, open_ports in ip_ports.items():
        if not open_ports:
            # Nothing open: record an empty result and move on.
            detected[host] = []
            continue

        joined_ports = ','.join(str(p) for p in sorted(open_ports))
        print(f" Scanning {host} ports {joined_ports}...", flush=True)

        # nmap writes its results as XML to a temp file we parse afterwards.
        with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.xml') as tf:
            xml_path = tf.name

        try:
            cmd = [
                'nmap',
                '-sV',                          # service version detection
                '--version-intensity', '5',     # balanced speed/accuracy
                '-p', joined_ports,
                '-oX', xml_path,                # XML output
                '--host-timeout', '5m',         # per-host timeout
                host,
            ]

            proc = subprocess.run(cmd, capture_output=True, text=True, timeout=600)

            if proc.returncode != 0:
                print(f" Nmap warning for {host}: {proc.stderr}", file=sys.stderr, flush=True)

            detected[host] = self._parse_nmap_xml(xml_path)

        except subprocess.TimeoutExpired:
            print(f" Nmap timeout for {host}, skipping service detection", file=sys.stderr, flush=True)
            detected[host] = []
        except Exception as e:
            # Best-effort: an nmap failure on one host never aborts the scan.
            print(f" Nmap error for {host}: {e}", file=sys.stderr, flush=True)
            detected[host] = []
        finally:
            Path(xml_path).unlink(missing_ok=True)

    return detected
|
||||
|
||||
def _parse_nmap_xml(self, xml_file: str) -> List[Dict]:
|
||||
"""
|
||||
Parse nmap XML output to extract service information
|
||||
|
||||
Args:
|
||||
xml_file: Path to nmap XML output file
|
||||
|
||||
Returns:
|
||||
List of service info dictionaries
|
||||
"""
|
||||
services = []
|
||||
|
||||
try:
|
||||
tree = ET.parse(xml_file)
|
||||
root = tree.getroot()
|
||||
|
||||
# Find all ports
|
||||
for port_elem in root.findall('.//port'):
|
||||
port_id = port_elem.get('portid')
|
||||
protocol = port_elem.get('protocol', 'tcp')
|
||||
|
||||
# Get state
|
||||
state_elem = port_elem.find('state')
|
||||
if state_elem is None or state_elem.get('state') != 'open':
|
||||
continue
|
||||
|
||||
# Get service info
|
||||
service_elem = port_elem.find('service')
|
||||
if service_elem is not None:
|
||||
service_info = {
|
||||
'port': int(port_id),
|
||||
'protocol': protocol,
|
||||
'service': service_elem.get('name', 'unknown'),
|
||||
'product': service_elem.get('product', ''),
|
||||
'version': service_elem.get('version', ''),
|
||||
'extrainfo': service_elem.get('extrainfo', ''),
|
||||
'ostype': service_elem.get('ostype', '')
|
||||
}
|
||||
|
||||
# Clean up empty fields
|
||||
service_info = {k: v for k, v in service_info.items() if v}
|
||||
|
||||
services.append(service_info)
|
||||
else:
|
||||
# Port is open but no service info
|
||||
services.append({
|
||||
'port': int(port_id),
|
||||
'protocol': protocol,
|
||||
'service': 'unknown'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
print(f" Error parsing nmap XML: {e}", file=sys.stderr, flush=True)
|
||||
|
||||
return services
|
||||
|
||||
def _is_likely_web_service(self, service: Dict) -> bool:
|
||||
"""
|
||||
Check if a service is likely HTTP/HTTPS based on nmap detection or common web ports
|
||||
|
||||
Args:
|
||||
service: Service dictionary from nmap results
|
||||
|
||||
Returns:
|
||||
True if service appears to be web-related
|
||||
"""
|
||||
# Check service name
|
||||
web_services = ['http', 'https', 'ssl', 'http-proxy', 'https-alt',
|
||||
'http-alt', 'ssl/http', 'ssl/https']
|
||||
service_name = service.get('service', '').lower()
|
||||
|
||||
if service_name in web_services:
|
||||
return True
|
||||
|
||||
# Check common non-standard web ports
|
||||
web_ports = [80, 443, 8000, 8006, 8008, 8080, 8081, 8443, 8888, 9443]
|
||||
port = service.get('port')
|
||||
|
||||
return port in web_ports
|
||||
|
||||
def _detect_http_https(self, ip: str, port: int, timeout: int = 5) -> str:
|
||||
"""
|
||||
Detect if a port is HTTP or HTTPS
|
||||
|
||||
Args:
|
||||
ip: IP address
|
||||
port: Port number
|
||||
timeout: Connection timeout in seconds
|
||||
|
||||
Returns:
|
||||
'http', 'https', or 'unknown'
|
||||
"""
|
||||
import socket
|
||||
import ssl as ssl_module
|
||||
|
||||
# Try HTTPS first
|
||||
try:
|
||||
context = ssl_module.create_default_context()
|
||||
context.check_hostname = False
|
||||
context.verify_mode = ssl_module.CERT_NONE
|
||||
|
||||
with socket.create_connection((ip, port), timeout=timeout) as sock:
|
||||
with context.wrap_socket(sock, server_hostname=ip) as ssock:
|
||||
return 'https'
|
||||
except ssl_module.SSLError:
|
||||
# Not HTTPS, try HTTP
|
||||
pass
|
||||
except (socket.timeout, socket.error, ConnectionRefusedError):
|
||||
return 'unknown'
|
||||
|
||||
# Try HTTP
|
||||
try:
|
||||
with socket.create_connection((ip, port), timeout=timeout) as sock:
|
||||
sock.send(b'HEAD / HTTP/1.0\r\n\r\n')
|
||||
response = sock.recv(1024)
|
||||
if b'HTTP' in response:
|
||||
return 'http'
|
||||
except (socket.timeout, socket.error, ConnectionRefusedError):
|
||||
pass
|
||||
|
||||
return 'unknown'
|
||||
|
||||
def _analyze_ssl_tls(self, ip: str, port: int) -> Dict[str, Any]:
    """Inspect the TLS endpoint at ip:port using sslyze.

    Collects leaf-certificate details (subject, issuer, validity window,
    SANs, days until expiry) and which TLS protocol versions the server
    accepts, with the cipher suites negotiated for each.

    Args:
        ip: IP address
        port: Port number

    Returns:
        Dict with 'certificate', 'tls_versions', and 'errors' keys.
        On failure the dict is returned with the error text appended
        rather than raising.
    """
    # sslyze/cryptography are imported lazily: only HTTPS services need them.
    from sslyze import (
        Scanner,
        ServerScanRequest,
        ServerNetworkLocation,
        ScanCommand,
        ScanCommandAttemptStatusEnum,
        ServerScanStatusEnum
    )
    from cryptography import x509
    from datetime import datetime

    analysis = {'certificate': {}, 'tls_versions': {}, 'errors': []}

    try:
        location = ServerNetworkLocation(hostname=ip, port=port)

        # One request covering the certificate plus every SSL/TLS version probe.
        request = ServerScanRequest(
            server_location=location,
            scan_commands={
                ScanCommand.CERTIFICATE_INFO,
                ScanCommand.SSL_2_0_CIPHER_SUITES,
                ScanCommand.SSL_3_0_CIPHER_SUITES,
                ScanCommand.TLS_1_0_CIPHER_SUITES,
                ScanCommand.TLS_1_1_CIPHER_SUITES,
                ScanCommand.TLS_1_2_CIPHER_SUITES,
                ScanCommand.TLS_1_3_CIPHER_SUITES,
            }
        )

        scanner = Scanner()
        scanner.queue_scans([request])

        for outcome in scanner.get_results():
            if outcome.scan_status != ServerScanStatusEnum.COMPLETED:
                analysis['errors'].append('Connection failed')
                return analysis

            scan_data = outcome.scan_result

            # --- Leaf certificate details ---
            cert_attempt = getattr(scan_data, 'certificate_info', None)
            if cert_attempt and cert_attempt.status == ScanCommandAttemptStatusEnum.COMPLETED:
                deployments = cert_attempt.result.certificate_deployments
                if deployments:
                    leaf = deployments[0].received_certificate_chain[0]

                    expires = leaf.not_valid_after_utc
                    remaining_days = (expires - datetime.now(expires.tzinfo)).days

                    # Subject Alternative Names are optional on a certificate.
                    alt_names = []
                    try:
                        san_ext = leaf.extensions.get_extension_for_class(
                            x509.SubjectAlternativeName
                        )
                        alt_names = [entry.value for entry in san_ext.value]
                    except x509.ExtensionNotFound:
                        pass

                    analysis['certificate'] = {
                        'subject': leaf.subject.rfc4514_string(),
                        'issuer': leaf.issuer.rfc4514_string(),
                        'serial_number': str(leaf.serial_number),
                        'not_valid_before': leaf.not_valid_before_utc.isoformat(),
                        'not_valid_after': leaf.not_valid_after_utc.isoformat(),
                        'days_until_expiry': remaining_days,
                        'sans': alt_names
                    }

            # --- Per-version protocol support ---
            version_attrs = {
                'TLS 1.0': 'tls_1_0_cipher_suites',
                'TLS 1.1': 'tls_1_1_cipher_suites',
                'TLS 1.2': 'tls_1_2_cipher_suites',
                'TLS 1.3': 'tls_1_3_cipher_suites'
            }

            for label, attr in version_attrs.items():
                attempt = getattr(scan_data, attr, None)
                if attempt and attempt.status == ScanCommandAttemptStatusEnum.COMPLETED:
                    accepted = attempt.result.accepted_cipher_suites
                    analysis['tls_versions'][label] = {
                        'supported': len(accepted) > 0,
                        'cipher_suites': [s.cipher_suite.name for s in accepted]
                    }
                else:
                    analysis['tls_versions'][label] = {
                        'supported': False,
                        'cipher_suites': []
                    }

    except Exception as e:
        analysis['errors'].append(str(e))

    return analysis
|
||||
|
||||
def _run_http_analysis(self, ip_services: Dict[str, List[Dict]]) -> Dict[str, Dict[int, Dict]]:
    """Classify web services, then capture screenshots and TLS data per port.

    Args:
        ip_services: Dict mapping IP addresses to their service lists

    Returns:
        Dict mapping IPs to {port: analysis dict}; IPs with no analyzable
        web services are omitted entirely.
    """
    if not ip_services:
        return {}

    findings = {}

    for ip, services in ip_services.items():
        per_port = {}

        for svc in services:
            # Skip anything that doesn't look like a web service.
            if not self._is_likely_web_service(svc):
                continue

            port = svc['port']
            print(f" Analyzing {ip}:{port}...", flush=True)

            scheme = self._detect_http_https(ip, port, timeout=5)
            if scheme == 'unknown':
                continue

            entry = {'protocol': scheme}

            # Screenshots are best-effort; a failure never aborts the scan.
            if self.screenshot_capture:
                try:
                    shot = self.screenshot_capture.capture(ip, port, scheme)
                    if shot:
                        entry['screenshot'] = shot
                except Exception as e:
                    print(f" Screenshot capture error for {ip}:{port}: {e}",
                          file=sys.stderr, flush=True)

            # HTTPS endpoints additionally get a TLS configuration review.
            if scheme == 'https':
                try:
                    ssl_info = self._analyze_ssl_tls(ip, port)
                    if ssl_info.get('certificate') or ssl_info.get('tls_versions'):
                        entry['ssl_tls'] = ssl_info
                    elif ssl_info.get('errors'):
                        # Nothing useful came back; log the errors instead.
                        print(f" SSL/TLS analysis failed for {ip}:{port}: {ssl_info['errors']}",
                              file=sys.stderr, flush=True)
                except Exception as e:
                    print(f" SSL/TLS analysis error for {ip}:{port}: {e}",
                          file=sys.stderr, flush=True)

            per_port[port] = entry

        if per_port:
            findings[ip] = per_port

    return findings
|
||||
|
||||
def scan(self) -> "Tuple[Dict[str, Any], str]":
    """Perform the complete scan pipeline described by the configuration.

    Pipeline: ping sweep -> TCP masscan -> UDP masscan -> nmap service
    detection -> HTTP/HTTPS + SSL/TLS analysis.

    Returns:
        Tuple of (report dict, scan timestamp string).  NOTE: the previous
        annotation claimed a bare dict, but this method has always
        returned a 2-tuple; the annotation is now honest (string form to
        avoid importing Tuple).
    """
    from datetime import timezone

    print(f"Starting scan: {self.config['title']}", flush=True)
    print(f"Config: {self.config_path}", flush=True)

    start_time = time.time()
    scan_timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')

    # Screenshot capture shares the scan timestamp so its files group with this run.
    self.screenshot_capture = ScreenshotCapture(
        output_dir=str(self.output_dir),
        scan_timestamp=scan_timestamp,
        timeout=15
    )

    # Collect unique IPs plus per-IP site membership and expectations.
    all_ips = set()
    ip_to_site = {}
    ip_expected = {}
    for site in self.config['sites']:
        site_name = site['name']
        for ip_config in site['ips']:
            ip = ip_config['address']
            all_ips.add(ip)
            ip_to_site[ip] = site_name
            ip_expected[ip] = ip_config.get('expected', {})

    all_ips = sorted(all_ips)
    print(f"Total IPs to scan: {len(all_ips)}", flush=True)

    print(f"\n[1/5] Performing ping scan on {len(all_ips)} IPs...", flush=True)
    ping_results = self._run_ping_scan(all_ips)

    print(f"\n[2/5] Performing TCP scan on {len(all_ips)} IPs (ports 0-65535)...", flush=True)
    tcp_results = self._run_masscan(all_ips, '0-65535', 'tcp')

    print(f"\n[3/5] Performing UDP scan on {len(all_ips)} IPs (ports 0-65535)...", flush=True)
    udp_results = self._run_masscan(all_ips, '0-65535', 'udp')

    # Scaffold per-IP result records.
    results_by_ip = {}
    for ip in all_ips:
        results_by_ip[ip] = {
            'site': ip_to_site[ip],
            'expected': ip_expected[ip],
            'actual': {
                'ping': ping_results.get(ip, False),
                'tcp_ports': [],
                'udp_ports': [],
                'services': []
            }
        }

    # Collect open ports as sets: masscan can emit the same port more than
    # once (fix: the old append-based code kept duplicates in the report).
    tcp_by_ip = {ip: set() for ip in all_ips}
    udp_by_ip = {ip: set() for ip in all_ips}
    for records, bucket in ((tcp_results, tcp_by_ip), (udp_results, udp_by_ip)):
        for record in records:
            ip = record.get('ip')
            # Guard against a present-but-empty 'ports' list (the old
            # .get('ports', [{}])[0] raised IndexError in that case).
            ports_list = record.get('ports') or [{}]
            port = ports_list[0].get('port')
            if ip in bucket and port:
                bucket[ip].add(port)

    # Store sorted, de-duplicated port lists.
    for ip in all_ips:
        results_by_ip[ip]['actual']['tcp_ports'] = sorted(tcp_by_ip[ip])
        results_by_ip[ip]['actual']['udp_ports'] = sorted(udp_by_ip[ip])

    print(f"\n[4/5] Performing service detection on discovered TCP ports...", flush=True)
    ip_ports = {ip: results_by_ip[ip]['actual']['tcp_ports'] for ip in all_ips}
    service_results = self._run_nmap_service_detection(ip_ports)

    for ip, services in service_results.items():
        if ip in results_by_ip:
            results_by_ip[ip]['actual']['services'] = services

    print(f"\n[5/5] Analyzing HTTP/HTTPS services and SSL/TLS configuration...", flush=True)
    http_results = self._run_http_analysis(service_results)

    # Attach the HTTP analysis to the matching service entries.
    for ip, port_results in http_results.items():
        if ip in results_by_ip:
            for service in results_by_ip[ip]['actual']['services']:
                port = service['port']
                if port in port_results:
                    service['http_info'] = port_results[port]

    scan_duration = round(time.time() - start_time, 2)

    # Timezone-aware replacement for the deprecated datetime.utcnow();
    # produces the exact same '...Z'-suffixed string as before.
    scan_time = datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + 'Z'

    report = {
        'title': self.config['title'],
        'scan_time': scan_time,
        'scan_duration': scan_duration,
        'config_file': str(self.config_path),
        'sites': []
    }

    # Re-group per-IP results back into the config's site structure.
    for site in self.config['sites']:
        site_result = {
            'name': site['name'],
            'ips': []
        }
        for ip_config in site['ips']:
            ip = ip_config['address']
            site_result['ips'].append({
                'address': ip,
                'expected': ip_expected[ip],
                'actual': results_by_ip[ip]['actual']
            })
        report['sites'].append(site_result)

    # Release the headless browser held by the screenshot helper.
    if self.screenshot_capture:
        self.screenshot_capture._close_browser()

    return report, scan_timestamp
|
||||
|
||||
def save_report(self, report: Dict[str, Any], scan_timestamp: str) -> Path:
    """Persist the scan *report* as pretty-printed JSON.

    Args:
        report: Fully assembled scan report dictionary.
        scan_timestamp: Timestamp string (YYYYMMDD_HHMMSS) used in the filename.

    Returns:
        Path to the JSON file that was written.
    """
    destination = self.output_dir / f"scan_report_{scan_timestamp}.json"

    with open(destination, 'w') as handle:
        json.dump(report, handle, indent=2)

    print(f"\nReport saved to: {destination}", flush=True)
    return destination
||||
def generate_outputs(self, report: Dict[str, Any], scan_timestamp: str) -> Dict[str, Path]:
    """Produce every output artifact for a finished scan.

    Writes the JSON report, renders the HTML report from it, and bundles
    both (plus any captured screenshots) into a ZIP archive.  HTML and ZIP
    generation are best-effort: a failure in either is reported on stderr
    and its key omitted from the result rather than aborting the run.

    Args:
        report: Scan report dictionary.
        scan_timestamp: Timestamp string in format YYYYMMDD_HHMMSS.

    Returns:
        Mapping of generated artifacts, e.g. {'json': Path, 'html': Path,
        'zip': Path}; keys are absent for artifacts that failed.
    """
    artifacts: Dict[str, Path] = {}
    divider = "=" * 60

    # Step 1: the JSON report is mandatory and always written first.
    print("\n" + divider, flush=True)
    print("Generating outputs...", flush=True)
    print(divider, flush=True)

    json_path = self.save_report(report, scan_timestamp)
    artifacts['json'] = json_path

    # Step 2: render the HTML report from the JSON we just wrote.
    html_path = self.output_dir / f"scan_report_{scan_timestamp}.html"
    try:
        print("\nGenerating HTML report...", flush=True)

        # Templates live alongside this package, one level up from src/.
        template_dir = Path(__file__).parent.parent / 'templates'

        html_generator = HTMLReportGenerator(
            json_report_path=str(json_path),
            template_dir=str(template_dir)
        )
        artifacts['html'] = Path(html_generator.generate_report(output_path=str(html_path)))

        print(f"HTML report saved to: {html_path}", flush=True)
    except Exception as e:
        # Non-fatal: the JSON report alone is still useful.
        print(f"Warning: HTML report generation failed: {e}", file=sys.stderr, flush=True)
        print("Continuing with JSON output only...", file=sys.stderr, flush=True)

    # Step 3: bundle everything into a single ZIP for easy retrieval.
    zip_path = self.output_dir / f"scan_report_{scan_timestamp}.zip"
    try:
        print("\nCreating ZIP archive...", flush=True)

        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as archive:
            archive.write(json_path, json_path.name)

            # HTML is optional -- only include it if generation succeeded.
            if 'html' in artifacts and html_path.exists():
                archive.write(html_path, html_path.name)

            # Screenshots, when present, keep their directory inside the ZIP.
            screenshot_dir = self.output_dir / f"scan_report_{scan_timestamp}_screenshots"
            if screenshot_dir.exists() and screenshot_dir.is_dir():
                for entry in screenshot_dir.iterdir():
                    if entry.is_file():
                        archive.write(entry, f"{screenshot_dir.name}/{entry.name}")

        artifacts['zip'] = zip_path
        print(f"ZIP archive saved to: {zip_path}", flush=True)
    except Exception as e:
        print(f"Warning: ZIP archive creation failed: {e}", file=sys.stderr, flush=True)

    return artifacts
||||
|
||||
def main():
    """CLI entry point: parse arguments, run a scan, and emit all outputs.

    Returns:
        int: 0 on success, 1 on any failure (the error is printed to stderr).
    """
    # Log to stderr so stdout stays clean for progress output.
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[logging.StreamHandler(sys.stderr)],
    )

    arg_parser = argparse.ArgumentParser(
        description='SneakyScanner - Masscan-based network scanner')
    arg_parser.add_argument(
        'config',
        help='Path to YAML configuration file')
    arg_parser.add_argument(
        '-o', '--output-dir',
        default='/app/output',
        help='Output directory for scan results (default: /app/output)')
    cli_args = arg_parser.parse_args()

    try:
        scanner = SneakyScanner(cli_args.config, cli_args.output_dir)
        report, scan_timestamp = scanner.scan()
        output_paths = scanner.generate_outputs(report, scan_timestamp)

        banner = "=" * 60
        print("\n" + banner, flush=True)
        print("Scan completed successfully!", flush=True)
        print(banner, flush=True)
        print(f" JSON Report: {output_paths.get('json', 'N/A')}", flush=True)
        print(f" HTML Report: {output_paths.get('html', 'N/A')}", flush=True)
        print(f" ZIP Archive: {output_paths.get('zip', 'N/A')}", flush=True)
        print(banner, flush=True)
        return 0

    except Exception as exc:
        print(f"Error: {exc}", file=sys.stderr, flush=True)
        return 1


if __name__ == '__main__':
    sys.exit(main())
|
||||
201
app/src/screenshot_capture.py
Normal file
201
app/src/screenshot_capture.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""
|
||||
Screenshot capture module for SneakyScanner.
|
||||
|
||||
Uses Playwright with Chromium to capture screenshots of discovered web services.
|
||||
"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from playwright.sync_api import sync_playwright, TimeoutError as PlaywrightTimeout
|
||||
|
||||
|
||||
class ScreenshotCapture:
    """
    Handles webpage screenshot capture for web services discovered during scanning.

    Uses Playwright with Chromium in headless mode to capture viewport screenshots
    of HTTP and HTTPS services. Handles SSL certificate errors gracefully.

    The browser is launched lazily on first capture (or eagerly when used as a
    context manager) and must be released with _close_browser() / ``with``.
    """

    def __init__(self, output_dir, scan_timestamp, timeout=15, viewport=None):
        """
        Initialize the screenshot capture handler.

        Args:
            output_dir (str): Base output directory for scan reports
            scan_timestamp (str): Timestamp string for this scan (format: YYYYMMDD_HHMMSS)
            timeout (int): Timeout in seconds for page load and screenshot (default: 15)
            viewport (dict): Viewport size dict with 'width' and 'height' keys
                             (default: {'width': 1280, 'height': 720})
        """
        self.output_dir = output_dir
        self.scan_timestamp = scan_timestamp
        self.timeout = timeout * 1000  # Playwright expects milliseconds
        self.viewport = viewport or {'width': 1280, 'height': 720}

        # Lazily initialized Playwright handles; created on first use.
        self.playwright = None
        self.browser = None
        self.screenshot_dir = None

        self.logger = logging.getLogger('SneakyScanner.Screenshot')

    def _get_screenshot_dir(self):
        """
        Create (once) and return the screenshots subdirectory for this scan.

        Returns:
            Path: Path object for the screenshots directory
        """
        if self.screenshot_dir is None:
            dir_name = f"scan_report_{self.scan_timestamp}_screenshots"
            self.screenshot_dir = Path(self.output_dir) / dir_name
            self.screenshot_dir.mkdir(parents=True, exist_ok=True)
            self.logger.info(f"Created screenshot directory: {self.screenshot_dir}")

        return self.screenshot_dir

    def _generate_filename(self, ip, port):
        """
        Generate a filesystem-safe filename for the screenshot.

        Args:
            ip (str): IP address of the service
            port (int): Port number of the service

        Returns:
            str: Filename in format: {ip_with_underscores}_{port}.png
        """
        # Replace dots in IP with underscores for filesystem compatibility
        safe_ip = ip.replace('.', '_')
        return f"{safe_ip}_{port}.png"

    def _launch_browser(self):
        """
        Launch Playwright and Chromium browser in headless mode (idempotent).

        Returns:
            bool: True if browser launched successfully, False otherwise
        """
        if self.browser is not None:
            return True  # Already launched

        try:
            self.logger.info("Launching Chromium browser...")
            self.playwright = sync_playwright().start()
            # Sandbox/GPU flags are required for containerized execution.
            self.browser = self.playwright.chromium.launch(
                headless=True,
                args=[
                    '--no-sandbox',
                    '--disable-setuid-sandbox',
                    '--disable-dev-shm-usage',
                    '--disable-gpu',
                ]
            )
            self.logger.info("Chromium browser launched successfully")
            return True

        except Exception as e:
            self.logger.error(f"Failed to launch browser: {e}")
            return False

    def _close_browser(self):
        """
        Close the browser and cleanup Playwright resources, ignoring
        errors during teardown so shutdown always completes.
        """
        if self.browser:
            try:
                self.browser.close()
                self.logger.info("Browser closed")
            except Exception as e:
                self.logger.warning(f"Error closing browser: {e}")
            finally:
                self.browser = None

        if self.playwright:
            try:
                self.playwright.stop()
            except Exception as e:
                self.logger.warning(f"Error stopping playwright: {e}")
            finally:
                self.playwright = None

    def capture(self, ip, port, protocol):
        """
        Capture a screenshot of a web service.

        Args:
            ip (str): IP address of the service
            port (int): Port number of the service
            protocol (str): Protocol to use ('http' or 'https')

        Returns:
            str: Path to the screenshot file relative to the output
                 directory, or None if capture failed
        """
        # Validate protocol
        if protocol not in ('http', 'https'):
            self.logger.warning(f"Invalid protocol '{protocol}' for {ip}:{port}")
            return None

        # Launch browser if not already running
        if not self._launch_browser():
            return None

        url = f"{protocol}://{ip}:{port}"
        filename = self._generate_filename(ip, port)
        screenshot_dir = self._get_screenshot_dir()
        screenshot_path = screenshot_dir / filename

        context = None
        try:
            self.logger.info(f"Capturing screenshot: {url}")

            # Fresh context per capture: isolates cookies/cache and lets us
            # tolerate self-signed certificates on scanned hosts.
            context = self.browser.new_context(
                viewport=self.viewport,
                ignore_https_errors=True,  # Handle self-signed certs
                user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
            )
            page = context.new_page()
            page.set_default_timeout(self.timeout)

            # Navigate and capture the viewport only (not the full page).
            page.goto(url, wait_until='networkidle', timeout=self.timeout)
            page.screenshot(path=str(screenshot_path), type='png')
            page.close()

            self.logger.info(f"Screenshot saved: {screenshot_path}")

            # Bug fix: return the actual screenshot filename instead of a
            # hard-coded placeholder, so callers can link to the image.
            return f"{screenshot_dir.name}/{filename}"

        except PlaywrightTimeout:
            self.logger.warning(f"Timeout capturing screenshot for {url}")
            return None

        except Exception as e:
            self.logger.warning(f"Failed to capture screenshot for {url}: {e}")
            return None

        finally:
            # Always release the per-capture context, even on failure
            # (previously it leaked on timeout/error paths).
            if context is not None:
                try:
                    context.close()
                except Exception:
                    pass

    def __enter__(self):
        """Context manager entry: launch the browser eagerly."""
        self._launch_browser()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit - cleanup browser resources."""
        self._close_browser()
        return False  # Don't suppress exceptions
|
||||
Reference in New Issue
Block a user