Compare commits
b8c3e4e2d8...master (54 commits)
| SHA1 |
|---|
| b802779faa |
| 52378eaaf4 |
| 7667d80d2f |
| 9a0b7c7920 |
| d02a065bde |
| 4c22948ea2 |
| 51fa4caaf5 |
| 8c34f8b2eb |
| 136276497d |
| 6bc733fefd |
| 4b197e0b3d |
| 30f0987a99 |
| 9e2fc348b7 |
| 847e05abbe |
| 07c2bcfd11 |
| a560bae800 |
| 56828e4184 |
| 5e3a70f837 |
| 451c7e92ff |
| 8b89fd506d |
| f24bd11dfd |
| 9bd2f67150 |
| 3058c69c39 |
| 04dc238aea |
| c592000c96 |
| 4c6b4bf35d |
| 3adb51ece2 |
| c4cbbee280 |
| 889e1eaac3 |
| a682e5233c |
| 7a14f1602b |
| 949bccf644 |
| 801ddc8d81 |
| db5c828b5f |
| a044c19a46 |
| a5e2b43944 |
| 3219f8a861 |
| 480065ed14 |
| 73a3b95834 |
| 8d8e53c903 |
| 12d5aff7a5 |
| cc3758f92d |
| 9804f9c032 |
| e3b647521e |
| 7460c9e23e |
| 66b02edc84 |
| f8b89c46c2 |
| 6d5005403c |
| 05f846809e |
| 7c26824aa1 |
| 91507cc8f8 |
| 7437716613 |
| 657f4784bf |
| 73d04cae5e |
.env.example (15 lines changed)
File diff suppressed because one or more lines are too long
@@ -39,13 +39,12 @@ COPY app/web/ ./web/
COPY app/migrations/ ./migrations/
COPY app/alembic.ini .
COPY app/init_db.py .
COPY app/docker-entrypoint.sh /docker-entrypoint.sh

# Create required directories
RUN mkdir -p /app/output /app/logs

# Make scripts executable
-RUN chmod +x /app/src/scanner.py /app/init_db.py /docker-entrypoint.sh
+RUN chmod +x /app/src/scanner.py /app/init_db.py

# Force Python unbuffered output
ENV PYTHONUNBUFFERED=1
README.md (30 lines changed)
@@ -3,7 +3,7 @@
A comprehensive network scanning and infrastructure monitoring platform with web interface and CLI scanner. SneakyScanner uses masscan for fast port discovery, nmap for service detection, sslyze for SSL/TLS analysis, and Playwright for webpage screenshots to perform comprehensive infrastructure audits.

**Primary Interface**: Web Application (Flask-based GUI)
-**Alternative**: Standalone CLI Scanner (for testing and CI/CD)
+**Scripting/Automation**: REST API (see [API Reference](docs/API_REFERENCE.md))

---

@@ -12,7 +12,7 @@ A comprehensive network scanning and infrastructure monitoring platform with web
- 🌐 **Web Dashboard** - Modern web UI for scan management, scheduling, and historical analysis
- 📊 **Database Storage** - SQLite-based scan history with trend analysis and comparison
- ⏰ **Scheduled Scans** - Cron-based automated scanning with APScheduler
-- 🔧 **Config Creator** - CIDR-to-YAML configuration builder for quick setup
+- 🔧 **Config Creator** - Web-based target configuration builder for quick setup
- 🔍 **Network Discovery** - Fast port scanning with masscan (all 65535 ports, TCP/UDP)
- 🎯 **Service Detection** - Nmap-based service enumeration with version detection
- 🔒 **SSL/TLS Analysis** - Certificate extraction, TLS version testing, cipher suite analysis

@@ -27,7 +27,7 @@ A comprehensive network scanning and infrastructure monitoring platform with web

## Quick Start

-### Web Application (Recommended)
+### Web Application

**Easy Setup (One Command):**

@@ -69,28 +69,13 @@ docker compose up --build -d

**See [Deployment Guide](docs/DEPLOYMENT.md) for detailed setup instructions.**

-### CLI Scanner (Standalone)
-
-For quick one-off scans without the web interface:
-
-```bash
-# Build and run
-docker compose -f docker-compose-standalone.yml build
-docker compose -f docker-compose-standalone.yml up
-
-# Results saved to ./output/
-```
-
-**See [CLI Scanning Guide](docs/CLI_SCANNING.md) for detailed usage.**

---

## Documentation

### User Guides
- **[Deployment Guide](docs/DEPLOYMENT.md)** - Installation, configuration, and production deployment
- **[CLI Scanning Guide](docs/CLI_SCANNING.md)** - Standalone scanner usage, configuration, and output formats
-- **[API Reference](docs/API_REFERENCE.md)** - Complete REST API documentation
+- **[API Reference](docs/API_REFERENCE.md)** - Complete REST API documentation for scripting and automation

### Developer Resources
- **[Roadmap](docs/ROADMAP.md)** - Project roadmap, architecture, and planned features

@@ -107,7 +92,7 @@ docker compose -f docker-compose-standalone.yml up
- ✅ **Phase 1**: Database schema, SQLAlchemy models, settings system
- ✅ **Phase 2**: REST API, background jobs, authentication, web UI
- ✅ **Phase 3**: Dashboard, scheduling, trend charts
-- ✅ **Phase 4**: Config creator, YAML editor, config management UI
+- ✅ **Phase 4**: Config creator, target editor, config management UI
- ✅ **Phase 5**: Webhooks & alerting, notification templates, alert rules

### Next Up: Phase 6 - CLI as API Client

@@ -188,7 +173,7 @@ See [Deployment Guide](docs/DEPLOYMENT.md) for production security checklist.

## Contributing

-This is a personal/small team project. For bugs or feature requests:
+This is a personal project. For bugs or feature requests:

1. Check existing issues
2. Create detailed bug reports with reproduction steps

@@ -206,7 +191,6 @@ MIT License - See LICENSE file for details

**Documentation**:
- [Deployment Guide](docs/DEPLOYMENT.md)
- [CLI Scanning Guide](docs/CLI_SCANNING.md)
- [API Reference](docs/API_REFERENCE.md)
- [Roadmap](docs/ROADMAP.md)

@@ -214,5 +198,5 @@ MIT License - See LICENSE file for details

---

-**Version**: Phase 5 Complete
+**Version**: 1.0.0-beta
**Last Updated**: 2025-11-19
@@ -69,8 +69,12 @@ def run_migrations_online() -> None:
    )

    with connectable.connect() as connection:
+        # Enable batch mode for SQLite to support ALTER TABLE operations
+        # like DROP COLUMN which SQLite doesn't natively support
        context.configure(
-            connection=connection, target_metadata=target_metadata
+            connection=connection,
+            target_metadata=target_metadata,
+            render_as_batch=True
        )

        with context.begin_transaction():
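With `render_as_batch=True`, Alembic runs ALTER TABLE operations through its batch mode: on SQLite, which cannot drop or alter columns in place, batch mode recreates the table with the new definition and copies the rows across, while on other backends it degrades to plain ALTER TABLE. A minimal sketch of a migration that depends on this setting (the `notes` column is hypothetical, added only to illustrate; `scans` and `config_file` come from the migrations below):

```python
from alembic import op
import sqlalchemy as sa


def upgrade():
    # Inside a batch block, SQLite gets a table rebuild; other databases
    # get ordinary ALTER TABLE statements.
    with op.batch_alter_table('scans') as batch_op:
        batch_op.add_column(sa.Column('notes', sa.Text(), nullable=True))  # hypothetical column
        batch_op.drop_column('config_file')  # DROP COLUMN is what SQLite can't do natively
```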
@@ -1,125 +1,214 @@
"""Initial database schema for SneakyScanner
"""Initial schema for SneakyScanner

Revision ID: 001
Revises:
Create Date: 2025-11-13 18:00:00.000000
Revises: None
Create Date: 2025-12-24

This is the complete initial schema for SneakyScanner. All tables are created
in the correct order to satisfy foreign key constraints.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
# revision identifiers, used by Alembic
revision = '001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create all initial tables for SneakyScanner."""
def upgrade():
    """Create all tables for SneakyScanner."""

    # Create schedules table first (referenced by scans)
    op.create_table('schedules',
    # =========================================================================
    # Settings Table (no dependencies)
    # =========================================================================
    op.create_table(
        'settings',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False, comment='Schedule name (e.g., \'Daily prod scan\')'),
        sa.Column('config_file', sa.Text(), nullable=False, comment='Path to YAML config'),
        sa.Column('cron_expression', sa.String(length=100), nullable=False, comment='Cron-like schedule (e.g., \'0 2 * * *\')'),
        sa.Column('enabled', sa.Boolean(), nullable=False, comment='Is schedule active?'),
        sa.Column('last_run', sa.DateTime(), nullable=True, comment='Last execution time'),
        sa.Column('next_run', sa.DateTime(), nullable=True, comment='Next scheduled execution'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Schedule creation time'),
        sa.Column('key', sa.String(length=255), nullable=False, comment='Setting key'),
        sa.Column('value', sa.Text(), nullable=True, comment='Setting value (JSON for complex values)'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('key')
    )
    op.create_index('ix_settings_key', 'settings', ['key'], unique=True)
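Since `settings.value` holds JSON for complex values and `key` is unique, reads and writes reduce to `json` plus an upsert on the key; a rough sketch with the standard library (the database path is an assumption, and this bypasses whatever settings layer the app actually uses):

```python
import json
import sqlite3
from datetime import datetime, timezone

conn = sqlite3.connect('sneakyscanner.db')  # assumed path
# The UNIQUE constraint on settings.key makes ON CONFLICT upserts possible.
conn.execute(
    """
    INSERT INTO settings (key, value, updated_at)
    VALUES (?, ?, ?)
    ON CONFLICT(key) DO UPDATE
    SET value = excluded.value, updated_at = excluded.updated_at
    """,
    ('smtp', json.dumps({'server': 'localhost', 'port': 25}),
     datetime.now(timezone.utc).isoformat()),
)
conn.commit()
```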
    # =========================================================================
    # Reusable Site Definition Tables
    # =========================================================================
    op.create_table(
        'sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False, comment='Unique site name'),
        sa.Column('description', sa.Text(), nullable=True, comment='Site description'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Site creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_index('ix_sites_name', 'sites', ['name'], unique=True)

    op.create_table(
        'site_ips',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Expected ping response'),
        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports'),
        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='IP creation time'),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('site_id', 'ip_address', name='uix_site_ip_address')
    )
    op.create_index('ix_site_ips_site_id', 'site_ips', ['site_id'])

    # =========================================================================
    # Scan Configuration Tables
    # =========================================================================
    op.create_table(
        'scan_configs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False, comment='Configuration title'),
        sa.Column('description', sa.Text(), nullable=True, comment='Configuration description'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Config creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create scans table
    op.create_table('scans',
    op.create_table(
        'scan_config_sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('config_id', sa.Integer(), nullable=False),
        sa.Column('site_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('config_id', 'site_id', name='uix_config_site')
    )
    op.create_index('ix_scan_config_sites_config_id', 'scan_config_sites', ['config_id'])
    op.create_index('ix_scan_config_sites_site_id', 'scan_config_sites', ['site_id'])

    # =========================================================================
    # Scheduling Tables
    # =========================================================================
    op.create_table(
        'schedules',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False, comment='Schedule name'),
        sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'),
        sa.Column('cron_expression', sa.String(length=100), nullable=False, comment='Cron-like schedule'),
        sa.Column('enabled', sa.Boolean(), nullable=False, default=True, comment='Is schedule active?'),
        sa.Column('last_run', sa.DateTime(), nullable=True, comment='Last execution time'),
        sa.Column('next_run', sa.DateTime(), nullable=True, comment='Next scheduled execution'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Schedule creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_schedules_config_id', 'schedules', ['config_id'])
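The `schedules` table pairs a `cron_expression` with a precomputed `next_run`. With APScheduler (the scheduler the README names), the next fire time can be derived as in this sketch; `'0 2 * * *'` is the daily-at-02:00 example from the old schema comment:

```python
from datetime import datetime, timezone

from apscheduler.triggers.cron import CronTrigger

trigger = CronTrigger.from_crontab('0 2 * * *')
# Candidate value for schedules.next_run (None = no previous fire time).
next_run = trigger.get_next_fire_time(None, datetime.now(timezone.utc))
print(next_run)
```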
    # =========================================================================
    # Core Scan Tables
    # =========================================================================
    op.create_table(
        'scans',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False, comment='Scan start time (UTC)'),
        sa.Column('duration', sa.Float(), nullable=True, comment='Total scan duration in seconds'),
        sa.Column('status', sa.String(length=20), nullable=False, comment='running, completed, failed'),
        sa.Column('config_file', sa.Text(), nullable=True, comment='Path to YAML config used'),
        sa.Column('status', sa.String(length=20), nullable=False, default='running', comment='running, finalizing, completed, failed, cancelled'),
        sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'),
        sa.Column('title', sa.Text(), nullable=True, comment='Scan title from config'),
        sa.Column('json_path', sa.Text(), nullable=True, comment='Path to JSON report'),
        sa.Column('html_path', sa.Text(), nullable=True, comment='Path to HTML report'),
        sa.Column('zip_path', sa.Text(), nullable=True, comment='Path to ZIP archive'),
        sa.Column('screenshot_dir', sa.Text(), nullable=True, comment='Path to screenshot directory'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Record creation time'),
        sa.Column('triggered_by', sa.String(length=50), nullable=False, comment='manual, scheduled, api'),
        sa.Column('triggered_by', sa.String(length=50), nullable=False, default='manual', comment='manual, scheduled, api'),
        sa.Column('schedule_id', sa.Integer(), nullable=True, comment='FK to schedules if triggered by schedule'),
        sa.ForeignKeyConstraint(['schedule_id'], ['schedules.id'], ),
        sa.Column('started_at', sa.DateTime(), nullable=True, comment='Scan execution start time'),
        sa.Column('completed_at', sa.DateTime(), nullable=True, comment='Scan execution completion time'),
        sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if scan failed'),
        sa.Column('current_phase', sa.String(length=50), nullable=True, comment='Current scan phase'),
        sa.Column('total_ips', sa.Integer(), nullable=True, comment='Total number of IPs to scan'),
        sa.Column('completed_ips', sa.Integer(), nullable=True, default=0, comment='Number of IPs completed'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
        sa.ForeignKeyConstraint(['schedule_id'], ['schedules.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_scans_timestamp'), 'scans', ['timestamp'], unique=False)
    op.create_index('ix_scans_timestamp', 'scans', ['timestamp'])
    op.create_index('ix_scans_config_id', 'scans', ['config_id'])
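The `scans` indexes above are aimed at dashboard-style listing queries; a sketch of the kind of query they serve (plain sqlite3, path assumed):

```python
import sqlite3

conn = sqlite3.connect('sneakyscanner.db')  # assumed path
recent = conn.execute(
    """
    SELECT id, timestamp, status, completed_ips, total_ips
    FROM scans
    ORDER BY timestamp DESC   -- served by ix_scans_timestamp
    LIMIT 10
    """
).fetchall()
```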
    # Create scan_sites table
    op.create_table('scan_sites',
    op.create_table(
        'scan_sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('site_name', sa.String(length=255), nullable=False, comment='Site name from config'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_scan_sites_scan_id'), 'scan_sites', ['scan_id'], unique=False)
    op.create_index('ix_scan_sites_scan_id', 'scan_sites', ['scan_id'])

    # Create scan_ips table
    op.create_table('scan_ips',
    op.create_table(
        'scan_ips',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to scan_sites'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('site_id', sa.Integer(), nullable=False),
        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
        sa.Column('ping_expected', sa.Boolean(), nullable=True, comment='Expected ping response'),
        sa.Column('ping_actual', sa.Boolean(), nullable=True, comment='Actual ping response'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.ForeignKeyConstraint(['site_id'], ['scan_sites.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['site_id'], ['scan_sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('scan_id', 'ip_address', name='uix_scan_ip')
    )
    op.create_index(op.f('ix_scan_ips_scan_id'), 'scan_ips', ['scan_id'], unique=False)
    op.create_index(op.f('ix_scan_ips_site_id'), 'scan_ips', ['site_id'], unique=False)
    op.create_index('ix_scan_ips_scan_id', 'scan_ips', ['scan_id'])
    op.create_index('ix_scan_ips_site_id', 'scan_ips', ['site_id'])

    # Create scan_ports table
    op.create_table('scan_ports',
    op.create_table(
        'scan_ports',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('ip_id', sa.Integer(), nullable=False, comment='FK to scan_ips'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('ip_id', sa.Integer(), nullable=False),
        sa.Column('port', sa.Integer(), nullable=False, comment='Port number (1-65535)'),
        sa.Column('protocol', sa.String(length=10), nullable=False, comment='tcp or udp'),
        sa.Column('expected', sa.Boolean(), nullable=True, comment='Was this port expected?'),
        sa.Column('state', sa.String(length=20), nullable=False, comment='open, closed, filtered'),
        sa.ForeignKeyConstraint(['ip_id'], ['scan_ips.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.Column('state', sa.String(length=20), nullable=False, default='open', comment='open, closed, filtered'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['ip_id'], ['scan_ips.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('scan_id', 'ip_id', 'port', 'protocol', name='uix_scan_ip_port')
    )
    op.create_index(op.f('ix_scan_ports_ip_id'), 'scan_ports', ['ip_id'], unique=False)
    op.create_index(op.f('ix_scan_ports_scan_id'), 'scan_ports', ['scan_id'], unique=False)
    op.create_index('ix_scan_ports_scan_id', 'scan_ports', ['scan_id'])
    op.create_index('ix_scan_ports_ip_id', 'scan_ports', ['ip_id'])

    # Create scan_services table
    op.create_table('scan_services',
    op.create_table(
        'scan_services',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('port_id', sa.Integer(), nullable=False, comment='FK to scan_ports'),
        sa.Column('service_name', sa.String(length=100), nullable=True, comment='Service name (e.g., ssh, http)'),
        sa.Column('product', sa.String(length=255), nullable=True, comment='Product name (e.g., OpenSSH)'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('port_id', sa.Integer(), nullable=False),
        sa.Column('service_name', sa.String(length=100), nullable=True, comment='Service name'),
        sa.Column('product', sa.String(length=255), nullable=True, comment='Product name'),
        sa.Column('version', sa.String(length=100), nullable=True, comment='Version string'),
        sa.Column('extrainfo', sa.Text(), nullable=True, comment='Additional nmap info'),
        sa.Column('ostype', sa.String(length=100), nullable=True, comment='OS type if detected'),
        sa.Column('http_protocol', sa.String(length=10), nullable=True, comment='http or https (if web service)'),
        sa.Column('http_protocol', sa.String(length=10), nullable=True, comment='http or https'),
        sa.Column('screenshot_path', sa.Text(), nullable=True, comment='Relative path to screenshot'),
        sa.ForeignKeyConstraint(['port_id'], ['scan_ports.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['port_id'], ['scan_ports.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_scan_services_port_id'), 'scan_services', ['port_id'], unique=False)
    op.create_index(op.f('ix_scan_services_scan_id'), 'scan_services', ['scan_id'], unique=False)
    op.create_index('ix_scan_services_scan_id', 'scan_services', ['scan_id'])
    op.create_index('ix_scan_services_port_id', 'scan_services', ['port_id'])

    # Create scan_certificates table
    op.create_table('scan_certificates',
    op.create_table(
        'scan_certificates',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('service_id', sa.Integer(), nullable=False, comment='FK to scan_services'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=False),
        sa.Column('subject', sa.Text(), nullable=True, comment='Certificate subject (CN)'),
        sa.Column('issuer', sa.Text(), nullable=True, comment='Certificate issuer'),
        sa.Column('serial_number', sa.Text(), nullable=True, comment='Serial number'),
@@ -127,95 +216,177 @@ def upgrade() -> None:
        sa.Column('not_valid_after', sa.DateTime(), nullable=True, comment='Validity end date'),
        sa.Column('days_until_expiry', sa.Integer(), nullable=True, comment='Days until expiration'),
        sa.Column('sans', sa.Text(), nullable=True, comment='JSON array of SANs'),
        sa.Column('is_self_signed', sa.Boolean(), nullable=True, comment='Self-signed certificate flag'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.ForeignKeyConstraint(['service_id'], ['scan_services.id'], ),
        sa.PrimaryKeyConstraint('id'),
        comment='Index on expiration date for alert queries'
        sa.Column('is_self_signed', sa.Boolean(), nullable=True, default=False, comment='Self-signed flag'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['service_id'], ['scan_services.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_scan_certificates_scan_id'), 'scan_certificates', ['scan_id'], unique=False)
    op.create_index(op.f('ix_scan_certificates_service_id'), 'scan_certificates', ['service_id'], unique=False)
    op.create_index('ix_scan_certificates_scan_id', 'scan_certificates', ['scan_id'])
    op.create_index('ix_scan_certificates_service_id', 'scan_certificates', ['service_id'])
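`days_until_expiry` makes certificate-expiry alerting a single indexed join; a sketch of the query an alert rule with a 30-day threshold would run (threshold value illustrative, database path assumed):

```python
import sqlite3

conn = sqlite3.connect('sneakyscanner.db')  # assumed path
expiring = conn.execute(
    """
    SELECT c.subject, c.days_until_expiry, s.timestamp
    FROM scan_certificates AS c
    JOIN scans AS s ON s.id = c.scan_id   -- ix_scan_certificates_scan_id
    WHERE c.days_until_expiry IS NOT NULL
      AND c.days_until_expiry <= 30
    ORDER BY c.days_until_expiry
    """
).fetchall()
```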
    # Create scan_tls_versions table
    op.create_table('scan_tls_versions',
    op.create_table(
        'scan_tls_versions',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('certificate_id', sa.Integer(), nullable=False, comment='FK to scan_certificates'),
        sa.Column('tls_version', sa.String(length=20), nullable=False, comment='TLS 1.0, TLS 1.1, TLS 1.2, TLS 1.3'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('certificate_id', sa.Integer(), nullable=False),
        sa.Column('tls_version', sa.String(length=20), nullable=False, comment='TLS 1.0, 1.1, 1.2, 1.3'),
        sa.Column('supported', sa.Boolean(), nullable=False, comment='Is this version supported?'),
        sa.Column('cipher_suites', sa.Text(), nullable=True, comment='JSON array of cipher suites'),
        sa.ForeignKeyConstraint(['certificate_id'], ['scan_certificates.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['certificate_id'], ['scan_certificates.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_scan_tls_versions_certificate_id'), 'scan_tls_versions', ['certificate_id'], unique=False)
    op.create_index(op.f('ix_scan_tls_versions_scan_id'), 'scan_tls_versions', ['scan_id'], unique=False)
    op.create_index('ix_scan_tls_versions_scan_id', 'scan_tls_versions', ['scan_id'])
    op.create_index('ix_scan_tls_versions_certificate_id', 'scan_tls_versions', ['certificate_id'])

    # Create alerts table
    op.create_table('alerts',
    op.create_table(
        'scan_progress',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('alert_type', sa.String(length=50), nullable=False, comment='new_port, cert_expiry, service_change, ping_failed'),
        sa.Column('severity', sa.String(length=20), nullable=False, comment='info, warning, critical'),
        sa.Column('message', sa.Text(), nullable=False, comment='Human-readable alert message'),
        sa.Column('ip_address', sa.String(length=45), nullable=True, comment='Related IP (optional)'),
        sa.Column('port', sa.Integer(), nullable=True, comment='Related port (optional)'),
        sa.Column('email_sent', sa.Boolean(), nullable=False, comment='Was email notification sent?'),
        sa.Column('email_sent_at', sa.DateTime(), nullable=True, comment='Email send timestamp'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Alert creation time'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IP address being scanned'),
        sa.Column('site_name', sa.String(length=255), nullable=True, comment='Site name'),
        sa.Column('phase', sa.String(length=50), nullable=False, comment='Phase: ping, tcp_scan, etc.'),
        sa.Column('status', sa.String(length=20), nullable=False, default='pending', comment='pending, in_progress, completed, failed'),
        sa.Column('ping_result', sa.Boolean(), nullable=True, comment='Ping response result'),
        sa.Column('tcp_ports', sa.Text(), nullable=True, comment='JSON array of TCP ports'),
        sa.Column('udp_ports', sa.Text(), nullable=True, comment='JSON array of UDP ports'),
        sa.Column('services', sa.Text(), nullable=True, comment='JSON array of services'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Entry creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last update time'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.PrimaryKeyConstraint('id'),
        comment='Indexes for alert filtering'
        sa.UniqueConstraint('scan_id', 'ip_address', name='uix_scan_progress_ip')
    )
    op.create_index(op.f('ix_alerts_scan_id'), 'alerts', ['scan_id'], unique=False)
    op.create_index('ix_scan_progress_scan_id', 'scan_progress', ['scan_id'])

    # Create alert_rules table
    op.create_table('alert_rules',
    op.create_table(
        'scan_site_associations',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('rule_type', sa.String(length=50), nullable=False, comment='unexpected_port, cert_expiry, service_down, etc.'),
        sa.Column('enabled', sa.Boolean(), nullable=False, comment='Is rule active?'),
        sa.Column('threshold', sa.Integer(), nullable=True, comment='Threshold value (e.g., days for cert expiry)'),
        sa.Column('email_enabled', sa.Boolean(), nullable=False, comment='Send email for this rule?'),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('site_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('scan_id', 'site_id', name='uix_scan_site')
    )
    op.create_index('ix_scan_site_associations_scan_id', 'scan_site_associations', ['scan_id'])
    op.create_index('ix_scan_site_associations_site_id', 'scan_site_associations', ['site_id'])

    # =========================================================================
    # Alert Tables
    # =========================================================================
    op.create_table(
        'alert_rules',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True, comment='User-friendly rule name'),
        sa.Column('rule_type', sa.String(length=50), nullable=False, comment='unexpected_port, cert_expiry, etc.'),
        sa.Column('enabled', sa.Boolean(), nullable=False, default=True, comment='Is rule active?'),
        sa.Column('threshold', sa.Integer(), nullable=True, comment='Threshold value'),
        sa.Column('email_enabled', sa.Boolean(), nullable=False, default=False, comment='Send email?'),
        sa.Column('webhook_enabled', sa.Boolean(), nullable=False, default=False, comment='Send webhook?'),
        sa.Column('severity', sa.String(length=20), nullable=True, comment='critical, warning, info'),
        sa.Column('filter_conditions', sa.Text(), nullable=True, comment='JSON filter conditions'),
        sa.Column('config_id', sa.Integer(), nullable=True, comment='Optional: specific config this rule applies to'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Rule creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=True, comment='Last update time'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_alert_rules_config_id', 'alert_rules', ['config_id'])

    op.create_table(
        'alerts',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False),
        sa.Column('rule_id', sa.Integer(), nullable=True, comment='Associated alert rule'),
        sa.Column('alert_type', sa.String(length=50), nullable=False, comment='Alert type'),
        sa.Column('severity', sa.String(length=20), nullable=False, comment='info, warning, critical'),
        sa.Column('message', sa.Text(), nullable=False, comment='Human-readable message'),
        sa.Column('ip_address', sa.String(length=45), nullable=True, comment='Related IP'),
        sa.Column('port', sa.Integer(), nullable=True, comment='Related port'),
        sa.Column('email_sent', sa.Boolean(), nullable=False, default=False, comment='Was email sent?'),
        sa.Column('email_sent_at', sa.DateTime(), nullable=True, comment='Email send timestamp'),
        sa.Column('webhook_sent', sa.Boolean(), nullable=False, default=False, comment='Was webhook sent?'),
        sa.Column('webhook_sent_at', sa.DateTime(), nullable=True, comment='Webhook send timestamp'),
        sa.Column('acknowledged', sa.Boolean(), nullable=False, default=False, comment='Was alert acknowledged?'),
        sa.Column('acknowledged_at', sa.DateTime(), nullable=True, comment='Acknowledgment timestamp'),
        sa.Column('acknowledged_by', sa.String(length=255), nullable=True, comment='User who acknowledged'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Alert creation time'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['rule_id'], ['alert_rules.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_alerts_scan_id', 'alerts', ['scan_id'])
    op.create_index('ix_alerts_rule_id', 'alerts', ['rule_id'])
    op.create_index('ix_alerts_acknowledged', 'alerts', ['acknowledged'])

    # =========================================================================
    # Webhook Tables
    # =========================================================================
    op.create_table(
        'webhooks',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False, comment='Webhook name'),
        sa.Column('url', sa.Text(), nullable=False, comment='Webhook URL'),
        sa.Column('enabled', sa.Boolean(), nullable=False, default=True, comment='Is webhook enabled?'),
        sa.Column('auth_type', sa.String(length=20), nullable=True, comment='none, bearer, basic, custom'),
        sa.Column('auth_token', sa.Text(), nullable=True, comment='Encrypted auth token'),
        sa.Column('custom_headers', sa.Text(), nullable=True, comment='JSON custom headers'),
        sa.Column('alert_types', sa.Text(), nullable=True, comment='JSON array of alert types'),
        sa.Column('severity_filter', sa.Text(), nullable=True, comment='JSON array of severities'),
        sa.Column('timeout', sa.Integer(), nullable=True, default=10, comment='Request timeout'),
        sa.Column('retry_count', sa.Integer(), nullable=True, default=3, comment='Retry attempts'),
        sa.Column('template', sa.Text(), nullable=True, comment='Jinja2 template for payload'),
        sa.Column('template_format', sa.String(length=20), nullable=True, default='json', comment='json, text'),
        sa.Column('content_type_override', sa.String(length=100), nullable=True, comment='Custom Content-Type'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last update time'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create settings table
    op.create_table('settings',
    op.create_table(
        'webhook_delivery_log',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False, comment='Setting key (e.g., smtp_server)'),
        sa.Column('value', sa.Text(), nullable=True, comment='Setting value (JSON for complex values)'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('key')
        sa.Column('webhook_id', sa.Integer(), nullable=False, comment='Associated webhook'),
        sa.Column('alert_id', sa.Integer(), nullable=False, comment='Associated alert'),
        sa.Column('status', sa.String(length=20), nullable=True, comment='success, failed, retrying'),
        sa.Column('response_code', sa.Integer(), nullable=True, comment='HTTP response code'),
        sa.Column('response_body', sa.Text(), nullable=True, comment='Response body'),
        sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if failed'),
        sa.Column('attempt_number', sa.Integer(), nullable=True, comment='Which attempt'),
        sa.Column('delivered_at', sa.DateTime(), nullable=False, comment='Delivery timestamp'),
        sa.ForeignKeyConstraint(['webhook_id'], ['webhooks.id']),
        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id']),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_settings_key'), 'settings', ['key'], unique=True)
    op.create_index('ix_webhook_delivery_log_webhook_id', 'webhook_delivery_log', ['webhook_id'])
    op.create_index('ix_webhook_delivery_log_alert_id', 'webhook_delivery_log', ['alert_id'])
    op.create_index('ix_webhook_delivery_log_status', 'webhook_delivery_log', ['status'])

    print("\n✓ Initial schema created successfully")


def downgrade() -> None:
    """Drop all tables."""
    op.drop_index(op.f('ix_settings_key'), table_name='settings')
    op.drop_table('settings')
    op.drop_table('alert_rules')
    op.drop_index(op.f('ix_alerts_scan_id'), table_name='alerts')
def downgrade():
    """Drop all tables in reverse order."""
    op.drop_table('webhook_delivery_log')
    op.drop_table('webhooks')
    op.drop_table('alerts')
    op.drop_index(op.f('ix_scan_tls_versions_scan_id'), table_name='scan_tls_versions')
    op.drop_index(op.f('ix_scan_tls_versions_certificate_id'), table_name='scan_tls_versions')
    op.drop_table('alert_rules')
    op.drop_table('scan_site_associations')
    op.drop_table('scan_progress')
    op.drop_table('scan_tls_versions')
    op.drop_index(op.f('ix_scan_certificates_service_id'), table_name='scan_certificates')
    op.drop_index(op.f('ix_scan_certificates_scan_id'), table_name='scan_certificates')
    op.drop_table('scan_certificates')
    op.drop_index(op.f('ix_scan_services_scan_id'), table_name='scan_services')
    op.drop_index(op.f('ix_scan_services_port_id'), table_name='scan_services')
    op.drop_table('scan_services')
    op.drop_index(op.f('ix_scan_ports_scan_id'), table_name='scan_ports')
    op.drop_index(op.f('ix_scan_ports_ip_id'), table_name='scan_ports')
    op.drop_table('scan_ports')
    op.drop_index(op.f('ix_scan_ips_site_id'), table_name='scan_ips')
    op.drop_index(op.f('ix_scan_ips_scan_id'), table_name='scan_ips')
    op.drop_table('scan_ips')
    op.drop_index(op.f('ix_scan_sites_scan_id'), table_name='scan_sites')
    op.drop_table('scan_sites')
    op.drop_index(op.f('ix_scans_timestamp'), table_name='scans')
    op.drop_table('scans')
    op.drop_table('schedules')
    op.drop_table('scan_config_sites')
    op.drop_table('scan_configs')
    op.drop_table('site_ips')
    op.drop_table('sites')
    op.drop_table('settings')

    print("\n✓ All tables dropped")
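One detail worth flagging in the consolidated schema: `default=...` in these `sa.Column(...)` calls is a Python-side default, applied only to inserts that go through SQLAlchemy, and Alembic does not render it into the CREATE TABLE DDL. Only `server_default=...` becomes a database-level DEFAULT clause, which is why the deleted migrations below use `server_default='0'` for booleans. A two-line illustration:

```python
import sqlalchemy as sa

# Applied in Python at INSERT time; raw SQL inserts bypass it entirely.
sa.Column('enabled', sa.Boolean(), nullable=False, default=True)

# Rendered into the DDL as DEFAULT '1'; raw SQL inserts get it too.
sa.Column('enabled', sa.Boolean(), nullable=False, server_default='1')
```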
@@ -1,28 +0,0 @@
"""Add indexes for scan queries

Revision ID: 002
Revises: 001
Create Date: 2025-11-14 00:30:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add database indexes for better query performance."""
    # Add index on scans.status for filtering
    # Note: index on scans.timestamp already exists from migration 001
    op.create_index('ix_scans_status', 'scans', ['status'], unique=False)


def downgrade() -> None:
    """Remove indexes."""
    op.drop_index('ix_scans_status', table_name='scans')
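Whether a filter such as `status = 'completed'` actually uses `ix_scans_status` can be checked with SQLite's EXPLAIN QUERY PLAN; a quick sketch (database path assumed):

```python
import sqlite3

conn = sqlite3.connect('sneakyscanner.db')  # assumed path
plan = conn.execute(
    "EXPLAIN QUERY PLAN SELECT id FROM scans WHERE status = 'completed'"
).fetchall()
# Expect a row whose detail reads roughly:
#   SEARCH scans USING INDEX ix_scans_status (status=?)
print(plan)
```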
@@ -1,39 +0,0 @@
"""Add timing and error fields to scans table

Revision ID: 003
Revises: 002
Create Date: 2025-11-14

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '003'
down_revision = '002'
branch_labels = None
depends_on = None


def upgrade():
    """
    Add fields for tracking scan execution timing and errors.

    New fields:
    - started_at: When scan execution actually started
    - completed_at: When scan execution finished (success or failure)
    - error_message: Error message if scan failed
    """
    with op.batch_alter_table('scans') as batch_op:
        batch_op.add_column(sa.Column('started_at', sa.DateTime(), nullable=True, comment='Scan execution start time'))
        batch_op.add_column(sa.Column('completed_at', sa.DateTime(), nullable=True, comment='Scan execution completion time'))
        batch_op.add_column(sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if scan failed'))


def downgrade():
    """Remove the timing and error fields."""
    with op.batch_alter_table('scans') as batch_op:
        batch_op.drop_column('error_message')
        batch_op.drop_column('completed_at')
        batch_op.drop_column('started_at')
@@ -1,120 +0,0 @@
"""Add enhanced alert features for Phase 5

Revision ID: 004
Revises: 003
Create Date: 2025-11-18

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '004'
down_revision = '003'
branch_labels = None
depends_on = None


def upgrade():
    """
    Add enhancements for Phase 5 Alert Rule Engine:
    - Enhanced alert_rules fields
    - Enhanced alerts fields
    - New webhooks table
    - New webhook_delivery_log table
    """

    # Enhance alert_rules table
    with op.batch_alter_table('alert_rules') as batch_op:
        batch_op.add_column(sa.Column('name', sa.String(255), nullable=True, comment='User-friendly rule name'))
        batch_op.add_column(sa.Column('webhook_enabled', sa.Boolean(), nullable=False, server_default='0', comment='Whether to send webhooks for this rule'))
        batch_op.add_column(sa.Column('severity', sa.String(20), nullable=True, comment='Alert severity level (critical, warning, info)'))
        batch_op.add_column(sa.Column('filter_conditions', sa.Text(), nullable=True, comment='JSON filter conditions for the rule'))
        batch_op.add_column(sa.Column('config_file', sa.String(255), nullable=True, comment='Optional: specific config file this rule applies to'))
        batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=True, comment='Last update timestamp'))

    # Enhance alerts table
    with op.batch_alter_table('alerts') as batch_op:
        batch_op.add_column(sa.Column('rule_id', sa.Integer(), nullable=True, comment='Associated alert rule'))
        batch_op.add_column(sa.Column('webhook_sent', sa.Boolean(), nullable=False, server_default='0', comment='Whether webhook was sent'))
        batch_op.add_column(sa.Column('webhook_sent_at', sa.DateTime(), nullable=True, comment='When webhook was sent'))
        batch_op.add_column(sa.Column('acknowledged', sa.Boolean(), nullable=False, server_default='0', comment='Whether alert was acknowledged'))
        batch_op.add_column(sa.Column('acknowledged_at', sa.DateTime(), nullable=True, comment='When alert was acknowledged'))
        batch_op.add_column(sa.Column('acknowledged_by', sa.String(255), nullable=True, comment='User who acknowledged the alert'))
        batch_op.create_foreign_key('fk_alerts_rule_id', 'alert_rules', ['rule_id'], ['id'])
        batch_op.create_index('idx_alerts_rule_id', ['rule_id'])
        batch_op.create_index('idx_alerts_acknowledged', ['acknowledged'])

    # Create webhooks table
    op.create_table('webhooks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False, comment='Webhook name'),
        sa.Column('url', sa.Text(), nullable=False, comment='Webhook URL'),
        sa.Column('enabled', sa.Boolean(), nullable=False, server_default='1', comment='Whether webhook is enabled'),
        sa.Column('auth_type', sa.String(20), nullable=True, comment='Authentication type: none, bearer, basic, custom'),
        sa.Column('auth_token', sa.Text(), nullable=True, comment='Encrypted authentication token'),
        sa.Column('custom_headers', sa.Text(), nullable=True, comment='JSON custom headers'),
        sa.Column('alert_types', sa.Text(), nullable=True, comment='JSON array of alert types to trigger on'),
        sa.Column('severity_filter', sa.Text(), nullable=True, comment='JSON array of severities to trigger on'),
        sa.Column('timeout', sa.Integer(), nullable=True, server_default='10', comment='Request timeout in seconds'),
        sa.Column('retry_count', sa.Integer(), nullable=True, server_default='3', comment='Number of retry attempts'),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )

    # Create webhook_delivery_log table
    op.create_table('webhook_delivery_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('webhook_id', sa.Integer(), nullable=False, comment='Associated webhook'),
        sa.Column('alert_id', sa.Integer(), nullable=False, comment='Associated alert'),
        sa.Column('status', sa.String(20), nullable=True, comment='Delivery status: success, failed, retrying'),
        sa.Column('response_code', sa.Integer(), nullable=True, comment='HTTP response code'),
        sa.Column('response_body', sa.Text(), nullable=True, comment='Response body from webhook'),
        sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if failed'),
        sa.Column('attempt_number', sa.Integer(), nullable=True, comment='Which attempt this was'),
        sa.Column('delivered_at', sa.DateTime(), nullable=False, comment='Delivery timestamp'),
        sa.ForeignKeyConstraint(['webhook_id'], ['webhooks.id'], ),
        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for webhook_delivery_log
    op.create_index('idx_webhook_delivery_alert_id', 'webhook_delivery_log', ['alert_id'])
    op.create_index('idx_webhook_delivery_webhook_id', 'webhook_delivery_log', ['webhook_id'])
    op.create_index('idx_webhook_delivery_status', 'webhook_delivery_log', ['status'])


def downgrade():
    """Remove Phase 5 alert enhancements."""

    # Drop webhook_delivery_log table and its indexes
    op.drop_index('idx_webhook_delivery_status', table_name='webhook_delivery_log')
    op.drop_index('idx_webhook_delivery_webhook_id', table_name='webhook_delivery_log')
    op.drop_index('idx_webhook_delivery_alert_id', table_name='webhook_delivery_log')
    op.drop_table('webhook_delivery_log')

    # Drop webhooks table
    op.drop_table('webhooks')

    # Remove enhancements from alerts table
    with op.batch_alter_table('alerts') as batch_op:
        batch_op.drop_index('idx_alerts_acknowledged')
        batch_op.drop_index('idx_alerts_rule_id')
        batch_op.drop_constraint('fk_alerts_rule_id', type_='foreignkey')
        batch_op.drop_column('acknowledged_by')
        batch_op.drop_column('acknowledged_at')
        batch_op.drop_column('acknowledged')
        batch_op.drop_column('webhook_sent_at')
        batch_op.drop_column('webhook_sent')
        batch_op.drop_column('rule_id')

    # Remove enhancements from alert_rules table
    with op.batch_alter_table('alert_rules') as batch_op:
        batch_op.drop_column('updated_at')
        batch_op.drop_column('config_file')
        batch_op.drop_column('filter_conditions')
        batch_op.drop_column('severity')
        batch_op.drop_column('webhook_enabled')
        batch_op.drop_column('name')
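The `timeout` and `retry_count` columns imply a delivery loop along these lines; a hedged sketch with `requests`, not the project's actual delivery code (payload and logging simplified):

```python
import requests


def deliver(webhook, payload):
    """Attempt delivery, honoring webhooks.timeout and webhooks.retry_count."""
    for attempt in range(1, (webhook.retry_count or 3) + 1):
        try:
            resp = requests.post(webhook.url, json=payload,
                                 timeout=webhook.timeout or 10)
            # Each attempt maps to a webhook_delivery_log row: attempt_number,
            # response_code, response_body, status.
            if resp.ok:
                return 'success', attempt
        except requests.RequestException:
            pass  # fall through to the next retry
    return 'failed', attempt
```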
@@ -1,83 +0,0 @@
"""Add webhook template support

Revision ID: 005
Revises: 004
Create Date: 2025-11-18

"""
from alembic import op
import sqlalchemy as sa
import json


# revision identifiers, used by Alembic
revision = '005'
down_revision = '004'
branch_labels = None
depends_on = None


# Default template that matches the current JSON payload structure
DEFAULT_TEMPLATE = """{
    "event": "alert.created",
    "alert": {
        "id": {{ alert.id }},
        "type": "{{ alert.type }}",
        "severity": "{{ alert.severity }}",
        "message": "{{ alert.message }}",
        {% if alert.ip_address %}"ip_address": "{{ alert.ip_address }}",{% endif %}
        {% if alert.port %}"port": {{ alert.port }},{% endif %}
        "acknowledged": {{ alert.acknowledged|lower }},
        "created_at": "{{ alert.created_at.isoformat() }}"
    },
    "scan": {
        "id": {{ scan.id }},
        "title": "{{ scan.title }}",
        "timestamp": "{{ scan.timestamp.isoformat() }}",
        "status": "{{ scan.status }}"
    },
    "rule": {
        "id": {{ rule.id }},
        "name": "{{ rule.name }}",
        "type": "{{ rule.type }}",
        "threshold": {{ rule.threshold if rule.threshold else 'null' }}
    }
}"""


def upgrade():
    """
    Add webhook template fields:
    - template: Jinja2 template for payload
    - template_format: Output format (json, text)
    - content_type_override: Optional custom Content-Type
    """

    # Add new columns to webhooks table
    with op.batch_alter_table('webhooks') as batch_op:
        batch_op.add_column(sa.Column('template', sa.Text(), nullable=True, comment='Jinja2 template for webhook payload'))
        batch_op.add_column(sa.Column('template_format', sa.String(20), nullable=True, server_default='json', comment='Template output format: json, text'))
        batch_op.add_column(sa.Column('content_type_override', sa.String(100), nullable=True, comment='Optional custom Content-Type header'))

    # Populate existing webhooks with default template
    # This ensures backward compatibility by converting existing webhooks to use the
    # same JSON structure they're currently sending
    connection = op.get_bind()
    connection.execute(
        sa.text("""
            UPDATE webhooks
            SET template = :template,
                template_format = 'json'
            WHERE template IS NULL
        """),
        {"template": DEFAULT_TEMPLATE}
    )


def downgrade():
    """Remove webhook template fields."""

    with op.batch_alter_table('webhooks') as batch_op:
        batch_op.drop_column('content_type_override')
        batch_op.drop_column('template_format')
        batch_op.drop_column('template')
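Rendering the stored template is plain Jinja2 with the same context names `DEFAULT_TEMPLATE` uses (`alert`, `scan`, `rule`); a sketch, with the passed objects standing in for the real model instances:

```python
import json

from jinja2 import Template


def render_payload(template_source, alert, scan, rule, template_format='json'):
    body = Template(template_source).render(alert=alert, scan=scan, rule=rule)
    # For template_format == 'json', parse to catch template errors
    # before the payload goes out.
    return json.loads(body) if template_format == 'json' else body
```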
@@ -1,161 +0,0 @@
|
||||
"""Add reusable site definitions
|
||||
|
||||
Revision ID: 006
|
||||
Revises: 005
|
||||
Create Date: 2025-11-19
|
||||
|
||||
This migration introduces reusable site definitions that can be shared across
|
||||
multiple scans. Sites are defined once with CIDR ranges and can be referenced
|
||||
in multiple scan configurations.
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import text
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic
|
||||
revision = '006'
|
||||
down_revision = '005'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""
|
||||
Create new site tables and migrate existing scan_sites data to the new structure.
|
||||
"""
|
||||
|
||||
# Create sites table (master site definitions)
|
||||
op.create_table('sites',
|
||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False, comment='Unique site name'),
|
||||
sa.Column('description', sa.Text(), nullable=True, comment='Site description'),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=False, comment='Site creation time'),
|
||||
sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
        sa.UniqueConstraint('name', name='uix_site_name')
    )
    op.create_index(op.f('ix_sites_name'), 'sites', ['name'], unique=True)

    # Create site_cidrs table (CIDR ranges for each site)
    op.create_table('site_cidrs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('cidr', sa.String(length=45), nullable=False, comment='CIDR notation (e.g., 10.0.0.0/24)'),
        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Expected ping response for this CIDR'),
        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports'),
        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='CIDR creation time'),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('site_id', 'cidr', name='uix_site_cidr')
    )
    op.create_index(op.f('ix_site_cidrs_site_id'), 'site_cidrs', ['site_id'], unique=False)

    # Create site_ips table (IP-level overrides within CIDRs)
    op.create_table('site_ips',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('site_cidr_id', sa.Integer(), nullable=False, comment='FK to site_cidrs'),
        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Override ping expectation for this IP'),
        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports (overrides CIDR)'),
        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports (overrides CIDR)'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='IP override creation time'),
        sa.ForeignKeyConstraint(['site_cidr_id'], ['site_cidrs.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('site_cidr_id', 'ip_address', name='uix_site_cidr_ip')
    )
    op.create_index(op.f('ix_site_ips_site_cidr_id'), 'site_ips', ['site_cidr_id'], unique=False)

    # Create scan_site_associations table (many-to-many between scans and sites)
    op.create_table('scan_site_associations',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('scan_id', 'site_id', name='uix_scan_site')
    )
    op.create_index(op.f('ix_scan_site_associations_scan_id'), 'scan_site_associations', ['scan_id'], unique=False)
    op.create_index(op.f('ix_scan_site_associations_site_id'), 'scan_site_associations', ['site_id'], unique=False)

    # Migrate existing data
    connection = op.get_bind()

    # 1. Extract unique site names from existing scan_sites and create master Site records.
    #    This groups all historical scan sites by name and creates one master site per unique name.
    connection.execute(text("""
        INSERT INTO sites (name, description, created_at, updated_at)
        SELECT DISTINCT
            site_name,
            'Migrated from scan_sites' as description,
            datetime('now') as created_at,
            datetime('now') as updated_at
        FROM scan_sites
        WHERE site_name NOT IN (SELECT name FROM sites)
    """))

    # 2. Create scan_site_associations linking scans to their sites.
    #    This maintains the historical relationship between scans and the sites they used.
    connection.execute(text("""
        INSERT INTO scan_site_associations (scan_id, site_id, created_at)
        SELECT DISTINCT
            ss.scan_id,
            s.id as site_id,
            datetime('now') as created_at
        FROM scan_sites ss
        INNER JOIN sites s ON s.name = ss.site_name
        WHERE NOT EXISTS (
            SELECT 1 FROM scan_site_associations ssa
            WHERE ssa.scan_id = ss.scan_id AND ssa.site_id = s.id
        )
    """))

    # 3. For each migrated site, create a CIDR entry from the IPs in scan_ips.
    #    Since historical data has individual IPs, we create /32 CIDRs for each unique IP.
    #    This preserves the exact IP addresses while fitting them into the new CIDR-based model.
    connection.execute(text("""
        INSERT INTO site_cidrs (site_id, cidr, expected_ping, expected_tcp_ports, expected_udp_ports, created_at)
        SELECT DISTINCT
            s.id as site_id,
            si.ip_address || '/32' as cidr,
            si.ping_expected,
            '[]' as expected_tcp_ports,
            '[]' as expected_udp_ports,
            datetime('now') as created_at
        FROM scan_ips si
        INNER JOIN scan_sites ss ON ss.id = si.site_id
        INNER JOIN sites s ON s.name = ss.site_name
        WHERE NOT EXISTS (
            SELECT 1 FROM site_cidrs sc
            WHERE sc.site_id = s.id AND sc.cidr = si.ip_address || '/32'
        )
        GROUP BY s.id, si.ip_address, si.ping_expected
    """))

    print("✓ Migration complete: Reusable sites created from historical scan data")
    print(f" - Created {connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()} master site(s)")
    print(f" - Created {connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()} CIDR range(s)")
    print(f" - Created {connection.execute(text('SELECT COUNT(*) FROM scan_site_associations')).scalar()} scan-site association(s)")


def downgrade():
    """Remove reusable site tables."""

    # Drop tables in reverse order of creation (respecting foreign keys)
    op.drop_index(op.f('ix_scan_site_associations_site_id'), table_name='scan_site_associations')
    op.drop_index(op.f('ix_scan_site_associations_scan_id'), table_name='scan_site_associations')
    op.drop_table('scan_site_associations')

    op.drop_index(op.f('ix_site_ips_site_cidr_id'), table_name='site_ips')
    op.drop_table('site_ips')

    op.drop_index(op.f('ix_site_cidrs_site_id'), table_name='site_cidrs')
    op.drop_table('site_cidrs')

    op.drop_index(op.f('ix_sites_name'), table_name='sites')
    op.drop_table('sites')

    print("✓ Downgrade complete: Reusable site tables removed")

@@ -1,102 +0,0 @@
"""Add database-stored scan configurations

Revision ID: 007
Revises: 006
Create Date: 2025-11-19

This migration introduces database-stored scan configurations to replace YAML
config files. Configs reference sites from the sites table, enabling a visual
config builder and better data management.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '007'
down_revision = '006'
branch_labels = None
depends_on = None


def upgrade():
    """
    Create scan_configs and scan_config_sites tables.
    Add config_id foreign keys to scans and schedules tables.
    """

    # Create scan_configs table
    op.create_table('scan_configs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False, comment='Configuration title'),
        sa.Column('description', sa.Text(), nullable=True, comment='Configuration description'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Config creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create scan_config_sites table (many-to-many between configs and sites)
    op.create_table('scan_config_sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('config_id', sa.Integer(), nullable=False, comment='FK to scan_configs'),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('config_id', 'site_id', name='uix_config_site')
    )
    op.create_index(op.f('ix_scan_config_sites_config_id'), 'scan_config_sites', ['config_id'], unique=False)
    op.create_index(op.f('ix_scan_config_sites_site_id'), 'scan_config_sites', ['site_id'], unique=False)

    # Add config_id to scans table
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_scans_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_scans_config_id', 'scan_configs', ['config_id'], ['id'])
        # Mark config_file as deprecated in comment (already has nullable=True)

    # Add config_id to schedules table and make config_file nullable
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_schedules_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_schedules_config_id', 'scan_configs', ['config_id'], ['id'])
        # Make config_file nullable (it was required before)
        batch_op.alter_column('config_file', existing_type=sa.Text(), nullable=True)

    connection = op.get_bind()

    print("✓ Migration complete: Scan configs tables created")
    print(" - Created scan_configs table for database-stored configurations")
    print(" - Created scan_config_sites association table")
    print(" - Added config_id to scans table")
    print(" - Added config_id to schedules table")
    print(" - Existing YAML configs remain in config_file column for backward compatibility")


def downgrade():
    """Remove scan config tables and columns."""

    # Remove foreign keys and columns from schedules
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.drop_constraint('fk_schedules_config_id', type_='foreignkey')
        batch_op.drop_index('ix_schedules_config_id')
        batch_op.drop_column('config_id')
        # Restore config_file as required
        batch_op.alter_column('config_file', existing_type=sa.Text(), nullable=False)

    # Remove foreign keys and columns from scans
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.drop_constraint('fk_scans_config_id', type_='foreignkey')
        batch_op.drop_index('ix_scans_config_id')
        batch_op.drop_column('config_id')

    # Drop tables in reverse order
    op.drop_index(op.f('ix_scan_config_sites_site_id'), table_name='scan_config_sites')
    op.drop_index(op.f('ix_scan_config_sites_config_id'), table_name='scan_config_sites')
    op.drop_table('scan_config_sites')

    op.drop_table('scan_configs')

    print("✓ Downgrade complete: Scan config tables and columns removed")

@@ -1,270 +0,0 @@
"""Expand CIDRs to individual IPs with per-IP settings

Revision ID: 008
Revises: 007
Create Date: 2025-11-19

This migration changes the site architecture to automatically expand CIDRs into
individual IPs in the database. Each IP has its own port and ping settings.

Changes:
- Add site_id to site_ips (direct link to sites, support standalone IPs)
- Make site_cidr_id nullable (IPs can exist without a CIDR parent)
- Remove settings from site_cidrs (settings now only at IP level)
- Add unique constraint: no duplicate IPs within a site
- Expand existing CIDRs to individual IPs
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import ipaddress


# revision identifiers, used by Alembic
revision = '008'
down_revision = '007'
branch_labels = None
depends_on = None


def upgrade():
    """
    Modify schema to support per-IP settings and auto-expand CIDRs.
    """

    connection = op.get_bind()

    # Check if site_id column already exists
    inspector = sa.inspect(connection)
    site_ips_columns = [col['name'] for col in inspector.get_columns('site_ips')]
    site_cidrs_columns = [col['name'] for col in inspector.get_columns('site_cidrs')]

    # Step 1: Add site_id column to site_ips (will be populated from site_cidr_id)
    if 'site_id' not in site_ips_columns:
        print("Adding site_id column to site_ips...")
        op.add_column('site_ips', sa.Column('site_id', sa.Integer(), nullable=True, comment='FK to sites (direct link)'))
    else:
        print("site_id column already exists in site_ips, skipping...")

    # Step 2: Populate site_id from site_cidr_id (before we make it nullable)
    print("Populating site_id from existing site_cidr relationships...")
    connection.execute(text("""
        UPDATE site_ips
        SET site_id = (
            SELECT site_id
            FROM site_cidrs
            WHERE site_cidrs.id = site_ips.site_cidr_id
        )
        WHERE site_cidr_id IS NOT NULL
    """))

    # Step 3: Make site_id NOT NULL and add foreign key
    try:
        op.alter_column('site_ips', 'site_id', nullable=False)
        print("Made site_id NOT NULL")
    except Exception as e:
        print(f"site_id already NOT NULL or error: {e}")

    # Check if foreign key exists before creating
    try:
        op.create_foreign_key('fk_site_ips_site_id', 'site_ips', 'sites', ['site_id'], ['id'])
        print("Created foreign key fk_site_ips_site_id")
    except Exception as e:
        print(f"Foreign key already exists or error: {e}")

    # Check if index exists
    try:
        op.create_index(op.f('ix_site_ips_site_id'), 'site_ips', ['site_id'], unique=False)
        print("Created index ix_site_ips_site_id")
    except Exception as e:
        print(f"Index already exists or error: {e}")

    # Step 4: Make site_cidr_id nullable (for standalone IPs)
    try:
        op.alter_column('site_ips', 'site_cidr_id', nullable=True)
        print("Made site_cidr_id nullable")
    except Exception as e:
        print(f"site_cidr_id already nullable or error: {e}")

    # Step 5: Drop old unique constraint and create new one (site_id, ip_address).
    # This prevents duplicate IPs within a site (across all CIDRs and standalone).
    try:
        op.drop_constraint('uix_site_cidr_ip', 'site_ips', type_='unique')
        print("Dropped old constraint uix_site_cidr_ip")
    except Exception as e:
        print(f"Constraint already dropped or doesn't exist: {e}")

    try:
        op.create_unique_constraint('uix_site_ip_address', 'site_ips', ['site_id', 'ip_address'])
        print("Created new constraint uix_site_ip_address")
    except Exception as e:
        print(f"Constraint already exists or error: {e}")

    # Step 6: Expand existing CIDRs to individual IPs
    print("Expanding existing CIDRs to individual IPs...")

    # Get all existing CIDRs
    cidrs = connection.execute(text("""
        SELECT id, site_id, cidr, expected_ping, expected_tcp_ports, expected_udp_ports
        FROM site_cidrs
    """)).fetchall()

    expanded_count = 0
    skipped_count = 0

    for cidr_row in cidrs:
        cidr_id, site_id, cidr_str, expected_ping, expected_tcp_ports, expected_udp_ports = cidr_row

        try:
            # Parse CIDR
            network = ipaddress.ip_network(cidr_str, strict=False)

            # Check size - skip networks larger than a /24 (IPv4) or /64 (IPv6)
            if isinstance(network, ipaddress.IPv4Network) and network.prefixlen < 24:
                print(f" ⚠ Skipping large CIDR {cidr_str} ({network.num_addresses} IPs)")
                skipped_count += 1
                continue
            elif isinstance(network, ipaddress.IPv6Network) and network.prefixlen < 64:
                print(f" ⚠ Skipping large CIDR {cidr_str} ({network.num_addresses} IPs)")
                skipped_count += 1
                continue

            # Expand to individual IPs
            for ip in network.hosts() if network.num_addresses > 2 else [network.network_address]:
                ip_str = str(ip)

                # Check if this IP already exists (from previous IP overrides)
                existing = connection.execute(text("""
                    SELECT id FROM site_ips
                    WHERE site_cidr_id = :cidr_id AND ip_address = :ip_address
                """), {'cidr_id': cidr_id, 'ip_address': ip_str}).fetchone()

                if not existing:
                    # Insert new IP with settings from CIDR
                    connection.execute(text("""
                        INSERT INTO site_ips (
                            site_id, site_cidr_id, ip_address,
                            expected_ping, expected_tcp_ports, expected_udp_ports,
                            created_at
                        )
                        VALUES (
                            :site_id, :cidr_id, :ip_address,
                            :expected_ping, :expected_tcp_ports, :expected_udp_ports,
                            datetime('now')
                        )
                    """), {
                        'site_id': site_id,
                        'cidr_id': cidr_id,
                        'ip_address': ip_str,
                        'expected_ping': expected_ping,
                        'expected_tcp_ports': expected_tcp_ports,
                        'expected_udp_ports': expected_udp_ports
                    })
                    expanded_count += 1

        except Exception as e:
            print(f" ✗ Error expanding CIDR {cidr_str}: {e}")
            skipped_count += 1
            continue

    print(f" ✓ Expanded {expanded_count} IPs from CIDRs")
    if skipped_count > 0:
        print(f" ⚠ Skipped {skipped_count} CIDRs (too large or errors)")

    # Step 7: Remove settings columns from site_cidrs (now only at IP level)
    print("Removing settings columns from site_cidrs...")
    # Re-inspect to get current columns
    inspector = sa.inspect(connection)
    site_cidrs_columns = [col['name'] for col in inspector.get_columns('site_cidrs')]

    if 'expected_ping' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_ping')
            print("Dropped expected_ping from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_ping: {e}")
    else:
        print("expected_ping already dropped from site_cidrs")

    if 'expected_tcp_ports' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_tcp_ports')
            print("Dropped expected_tcp_ports from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_tcp_ports: {e}")
    else:
        print("expected_tcp_ports already dropped from site_cidrs")

    if 'expected_udp_ports' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_udp_ports')
            print("Dropped expected_udp_ports from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_udp_ports: {e}")
    else:
        print("expected_udp_ports already dropped from site_cidrs")

    # Print summary
    total_sites = connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()
    total_cidrs = connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()
    total_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()

    print("\n✓ Migration 008 complete: CIDRs expanded to individual IPs")
    print(f" - Total sites: {total_sites}")
    print(f" - Total CIDRs: {total_cidrs}")
    print(f" - Total IPs: {total_ips}")


def downgrade():
    """
    Revert schema changes (restore CIDR-level settings).
    Note: This will lose per-IP granularity!
    """

    connection = op.get_bind()

    print("Rolling back to CIDR-level settings...")

    # Step 1: Add settings columns back to site_cidrs
    op.add_column('site_cidrs', sa.Column('expected_ping', sa.Boolean(), nullable=True))
    op.add_column('site_cidrs', sa.Column('expected_tcp_ports', sa.Text(), nullable=True))
    op.add_column('site_cidrs', sa.Column('expected_udp_ports', sa.Text(), nullable=True))

    # Step 2: Populate CIDR settings from first IP in each CIDR (approximation)
    connection.execute(text("""
        UPDATE site_cidrs
        SET
            expected_ping = (
                SELECT expected_ping FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            ),
            expected_tcp_ports = (
                SELECT expected_tcp_ports FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            ),
            expected_udp_ports = (
                SELECT expected_udp_ports FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            )
    """))

    # Step 3: Delete auto-expanded IPs (keep only original overrides).
    # In practice, this is difficult to determine, so we keep all IPs
    # and just remove the schema changes.

    # Step 4: Drop new unique constraint and restore old one
    op.drop_constraint('uix_site_ip_address', 'site_ips', type_='unique')
    op.create_unique_constraint('uix_site_cidr_ip', 'site_ips', ['site_cidr_id', 'ip_address'])

    # Step 5: Make site_cidr_id NOT NULL again
    op.alter_column('site_ips', 'site_cidr_id', nullable=False)

    # Step 6: Drop site_id column and related constraints
    op.drop_index(op.f('ix_site_ips_site_id'), table_name='site_ips')
    op.drop_constraint('fk_site_ips_site_id', 'site_ips', type_='foreignkey')
    op.drop_column('site_ips', 'site_id')

    print("✓ Downgrade complete: Reverted to CIDR-level settings")

@@ -1,210 +0,0 @@
"""Remove CIDR table - make sites IP-only

Revision ID: 009
Revises: 008
Create Date: 2025-11-19

This migration removes the SiteCIDR table entirely, making sites purely
IP-based. CIDRs are now only used as a convenience for bulk IP addition,
not stored as permanent entities.

Changes:
- Set all site_ips.site_cidr_id to NULL (preserve all IPs)
- Drop foreign key from site_ips to site_cidrs
- Drop site_cidrs table
- Remove site_cidr_id column from site_ips

All existing IPs are preserved. They become "standalone" IPs without
a CIDR parent.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '009'
down_revision = '008'
branch_labels = None
depends_on = None


def upgrade():
    """
    Remove CIDR table and make all IPs standalone.
    """

    connection = op.get_bind()
    inspector = sa.inspect(connection)

    print("\n=== Migration 009: Remove CIDR Table ===\n")

    # Get counts before migration
    try:
        total_cidrs = connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()
        total_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()
        ips_with_cidr = connection.execute(text(
            'SELECT COUNT(*) FROM site_ips WHERE site_cidr_id IS NOT NULL'
        )).scalar()

        print("Before migration:")
        print(f" - Total CIDRs: {total_cidrs}")
        print(f" - Total IPs: {total_ips}")
        print(f" - IPs linked to CIDRs: {ips_with_cidr}")
        print(f" - Standalone IPs: {total_ips - ips_with_cidr}\n")
    except Exception as e:
        print(f"Could not get pre-migration stats: {e}\n")

    # Step 1: Set all site_cidr_id to NULL (preserve all IPs as standalone)
    print("Step 1: Converting all IPs to standalone (nulling CIDR associations)...")
    try:
        result = connection.execute(text("""
            UPDATE site_ips
            SET site_cidr_id = NULL
            WHERE site_cidr_id IS NOT NULL
        """))
        print(f" ✓ Converted {result.rowcount} IPs to standalone\n")
    except Exception as e:
        print(f" ⚠ Error or already done: {e}\n")

    # Step 2: Drop foreign key constraint from site_ips to site_cidrs
    print("Step 2: Dropping foreign key constraint from site_ips to site_cidrs...")
    foreign_keys = inspector.get_foreign_keys('site_ips')
    fk_to_drop = None

    for fk in foreign_keys:
        if fk['referred_table'] == 'site_cidrs':
            fk_to_drop = fk['name']
            break

    if fk_to_drop:
        try:
            op.drop_constraint(fk_to_drop, 'site_ips', type_='foreignkey')
            print(f" ✓ Dropped foreign key constraint: {fk_to_drop}\n")
        except Exception as e:
            print(f" ⚠ Could not drop foreign key: {e}\n")
    else:
        print(" ⚠ Foreign key constraint not found or already dropped\n")

    # Step 3: Drop index on site_cidr_id (if exists)
    print("Step 3: Dropping index on site_cidr_id...")
    indexes = inspector.get_indexes('site_ips')
    index_to_drop = None

    for idx in indexes:
        if 'site_cidr_id' in idx['column_names']:
            index_to_drop = idx['name']
            break

    if index_to_drop:
        try:
            op.drop_index(index_to_drop, table_name='site_ips')
            print(f" ✓ Dropped index: {index_to_drop}\n")
        except Exception as e:
            print(f" ⚠ Could not drop index: {e}\n")
    else:
        print(" ⚠ Index not found or already dropped\n")

    # Step 4: Drop site_cidrs table
    print("Step 4: Dropping site_cidrs table...")
    tables = inspector.get_table_names()

    if 'site_cidrs' in tables:
        try:
            op.drop_table('site_cidrs')
            print(" ✓ Dropped site_cidrs table\n")
        except Exception as e:
            print(f" ⚠ Could not drop table: {e}\n")
    else:
        print(" ⚠ Table site_cidrs not found or already dropped\n")

    # Step 5: Drop site_cidr_id column from site_ips
    print("Step 5: Dropping site_cidr_id column from site_ips...")
    site_ips_columns = [col['name'] for col in inspector.get_columns('site_ips')]

    if 'site_cidr_id' in site_ips_columns:
        try:
            op.drop_column('site_ips', 'site_cidr_id')
            print(" ✓ Dropped site_cidr_id column from site_ips\n")
        except Exception as e:
            print(f" ⚠ Could not drop column: {e}\n")
    else:
        print(" ⚠ Column site_cidr_id not found or already dropped\n")

    # Get counts after migration
    try:
        final_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()
        total_sites = connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()

        print("After migration:")
        print(f" - Total sites: {total_sites}")
        print(f" - Total IPs (all standalone): {final_ips}")
        print(" - CIDRs: N/A (table removed)")
    except Exception as e:
        print(f"Could not get post-migration stats: {e}")

    print("\n✓ Migration 009 complete: Sites are now IP-only")
    print(" All IPs preserved as standalone. CIDRs can still be used")
    print(" via the API/UI for bulk IP creation, but are not stored.\n")


def downgrade():
    """
    Recreate site_cidrs table (CANNOT restore original CIDR associations).

    WARNING: This downgrade creates an empty site_cidrs table structure but
    cannot restore the original CIDR-to-IP associations since that data was
    deleted. All IPs will remain standalone.
    """

    connection = op.get_bind()

    print("\n=== Downgrade 009: Recreate CIDR Table Structure ===\n")
    print("⚠ WARNING: Cannot restore original CIDR associations!")
    print(" The site_cidrs table structure will be recreated but will be empty.")
    print(" All IPs will remain standalone. This is a PARTIAL downgrade.\n")

    # Step 1: Recreate site_cidrs table (empty)
    print("Step 1: Recreating site_cidrs table structure...")
    try:
        op.create_table(
            'site_cidrs',
            sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
            sa.Column('site_id', sa.Integer(), nullable=False),
            sa.Column('cidr', sa.String(length=45), nullable=False, comment='CIDR notation (e.g., 10.0.0.0/24)'),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
            sa.UniqueConstraint('site_id', 'cidr', name='uix_site_cidr')
        )
        print(" ✓ Recreated site_cidrs table (empty)\n")
    except Exception as e:
        print(f" ⚠ Could not create table: {e}\n")

    # Step 2: Add site_cidr_id column back to site_ips (nullable)
    print("Step 2: Adding site_cidr_id column back to site_ips...")
    try:
        op.add_column('site_ips', sa.Column('site_cidr_id', sa.Integer(), nullable=True, comment='FK to site_cidrs (optional, for grouping)'))
        print(" ✓ Added site_cidr_id column (nullable)\n")
    except Exception as e:
        print(f" ⚠ Could not add column: {e}\n")

    # Step 3: Add foreign key constraint
    print("Step 3: Adding foreign key constraint...")
    try:
        op.create_foreign_key('fk_site_ips_site_cidr_id', 'site_ips', 'site_cidrs', ['site_cidr_id'], ['id'])
        print(" ✓ Created foreign key constraint\n")
    except Exception as e:
        print(f" ⚠ Could not create foreign key: {e}\n")

    # Step 4: Add index on site_cidr_id
    print("Step 4: Adding index on site_cidr_id...")
    try:
        op.create_index('ix_site_ips_site_cidr_id', 'site_ips', ['site_cidr_id'], unique=False)
        print(" ✓ Created index on site_cidr_id\n")
    except Exception as e:
        print(f" ⚠ Could not create index: {e}\n")

    print("✓ Downgrade complete: CIDR table structure restored (but empty)")
    print(" All IPs remain standalone. You would need to manually recreate")
    print(" CIDR records and associate IPs with them.\n")

@@ -1,53 +0,0 @@
"""Add config_id to alert_rules table

Revision ID: 010
Revises: 009
Create Date: 2025-11-19

This migration adds a config_id foreign key to the alert_rules table to replace
the config_file column, completing the migration from file-based to
database-based configurations.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '010'
down_revision = '009'
branch_labels = None
depends_on = None


def upgrade():
    """
    Add config_id to alert_rules table and remove config_file.
    """

    with op.batch_alter_table('alert_rules', schema=None) as batch_op:
        # Add config_id column with foreign key
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_alert_rules_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_alert_rules_config_id', 'scan_configs', ['config_id'], ['id'])

        # Remove the old config_file column
        batch_op.drop_column('config_file')

    print("✓ Migration complete: AlertRule now uses config_id")
    print(" - Added config_id foreign key to alert_rules table")
    print(" - Removed deprecated config_file column")


def downgrade():
    """Remove config_id and restore config_file on alert_rules."""

    with op.batch_alter_table('alert_rules', schema=None) as batch_op:
        # Remove foreign key and config_id column
        batch_op.drop_constraint('fk_alert_rules_config_id', type_='foreignkey')
        batch_op.drop_index('ix_alert_rules_config_id')
        batch_op.drop_column('config_id')

        # Restore config_file column
        batch_op.add_column(sa.Column('config_file', sa.String(255), nullable=True, comment='Optional: specific config file this rule applies to'))

    print("✓ Downgrade complete: AlertRule config_id removed, config_file restored")

@@ -1,86 +0,0 @@
"""Drop deprecated config_file columns

Revision ID: 011
Revises: 010
Create Date: 2025-11-19

This migration removes the deprecated config_file columns from scans and schedules
tables. All functionality now uses config_id to reference database-stored configs.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '011'
down_revision = '010'
branch_labels = None
depends_on = None


def upgrade():
    """
    Drop config_file columns from scans and schedules tables.

    Prerequisites:
    - All scans must have config_id set
    - All schedules must have config_id set
    - Code must be updated to no longer reference config_file
    """

    connection = op.get_bind()

    # Check for any records missing config_id
    result = connection.execute(sa.text(
        "SELECT COUNT(*) FROM scans WHERE config_id IS NULL"
    ))
    scans_without_config = result.scalar()

    result = connection.execute(sa.text(
        "SELECT COUNT(*) FROM schedules WHERE config_id IS NULL"
    ))
    schedules_without_config = result.scalar()

    if scans_without_config > 0:
        print(f"WARNING: {scans_without_config} scans have NULL config_id")
        print(" These scans will lose their config reference after migration")

    if schedules_without_config > 0:
        raise Exception(
            f"Cannot proceed: {schedules_without_config} schedules have NULL config_id. "
            "Please set config_id for all schedules before running this migration."
        )

    # Drop config_file from scans table
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.drop_column('config_file')

    # Drop config_file from schedules table
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.drop_column('config_file')

    print("✓ Migration complete: Dropped config_file columns")
    print(" - Removed config_file from scans table")
    print(" - Removed config_file from schedules table")
    print(" - All references should now use config_id")


def downgrade():
    """Re-add config_file columns (data will be lost)."""

    # Add config_file back to scans
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.add_column(
            sa.Column('config_file', sa.Text(), nullable=True,
                      comment='Path to YAML config used (deprecated)')
        )

    # Add config_file back to schedules
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.add_column(
            sa.Column('config_file', sa.Text(), nullable=True,
                      comment='Path to YAML config (deprecated)')
        )

    print("✓ Downgrade complete: Re-added config_file columns")
    print(" WARNING: config_file values are lost and will be NULL")

@@ -12,7 +12,7 @@ alembic==1.13.0

# Authentication & Security
Flask-Login==0.6.3
bcrypt==4.1.2
cryptography==41.0.7
cryptography>=46.0.0

# API & Serialization
Flask-CORS==4.0.0

@@ -1,5 +1,5 @@
PyYAML==6.0.1
python-libnmap==0.7.3
sslyze==6.0.0
sslyze==6.2.0
playwright==1.40.0
Jinja2==3.1.2

@@ -6,14 +6,17 @@ SneakyScanner - Masscan-based network scanner with YAML configuration

import argparse
import json
import logging
import os
import signal
import subprocess
import sys
import tempfile
import threading
import time
import zipfile
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Any
from typing import Dict, List, Any, Callable, Optional
import xml.etree.ElementTree as ET

import yaml

@@ -22,12 +25,18 @@ from libnmap.parser import NmapParser

from src.screenshot_capture import ScreenshotCapture
from src.report_generator import HTMLReportGenerator
from web.config import NMAP_HOST_TIMEOUT

# Force unbuffered output for Docker
sys.stdout.reconfigure(line_buffering=True)
sys.stderr.reconfigure(line_buffering=True)


class ScanCancelledError(Exception):
    """Raised when a scan is cancelled by the user."""
    pass


class SneakyScanner:
    """Wrapper for masscan to perform network scans based on YAML config or database config"""

@@ -61,6 +70,34 @@ class SneakyScanner:

        self.screenshot_capture = None

        # Cancellation support
        self._cancelled = False
        self._cancel_lock = threading.Lock()
        self._active_process = None
        self._process_lock = threading.Lock()

    def cancel(self):
        """
        Cancel the running scan.

        Terminates any active subprocess and sets cancellation flag.
        """
        with self._cancel_lock:
            self._cancelled = True

        with self._process_lock:
            if self._active_process and self._active_process.poll() is None:
                try:
                    # Terminate the process group
                    os.killpg(os.getpgid(self._active_process.pid), signal.SIGTERM)
                except (ProcessLookupError, OSError):
                    pass

    def is_cancelled(self) -> bool:
        """Check if scan has been cancelled."""
        with self._cancel_lock:
            return self._cancelled
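
# Aside: cancellation works because each scan subprocess is started with
# start_new_session=True, giving it its own process group that os.killpg() can
# signal as a unit (masscan/nmap plus any children). A standalone sketch of the
# pattern (illustrative, not code from this class; POSIX-only):
import os
import signal
import subprocess

proc = subprocess.Popen(['sleep', '60'], start_new_session=True)
# Later, e.g. from another thread:
os.killpg(os.getpgid(proc.pid), signal.SIGTERM)  # terminate the whole group
proc.wait()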

    def _load_config(self) -> Dict[str, Any]:
        """
        Load and validate configuration from file or database.

@@ -381,11 +418,31 @@ class SneakyScanner:
            raise ValueError(f"Invalid protocol: {protocol}")

        print(f"Running: {' '.join(cmd)}", flush=True)
        result = subprocess.run(cmd, capture_output=True, text=True)

        # Use Popen for cancellation support
        with self._process_lock:
            self._active_process = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                start_new_session=True
            )

        stdout, stderr = self._active_process.communicate()
        returncode = self._active_process.returncode

        with self._process_lock:
            self._active_process = None

        # Check if cancelled
        if self.is_cancelled():
            return []

        print(f"Masscan {protocol.upper()} scan completed", flush=True)

        if result.returncode != 0:
            print(f"Masscan stderr: {result.stderr}", file=sys.stderr)
        if returncode != 0:
            print(f"Masscan stderr: {stderr}", file=sys.stderr)

        # Parse masscan JSON output
        results = []

@@ -433,11 +490,31 @@ class SneakyScanner:
        ]

        print(f"Running: {' '.join(cmd)}", flush=True)
        result = subprocess.run(cmd, capture_output=True, text=True)

        # Use Popen for cancellation support
        with self._process_lock:
            self._active_process = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                start_new_session=True
            )

        stdout, stderr = self._active_process.communicate()
        returncode = self._active_process.returncode

        with self._process_lock:
            self._active_process = None

        # Check if cancelled
        if self.is_cancelled():
            return {}

        print("Masscan PING scan completed", flush=True)

        if result.returncode != 0:
            print(f"Masscan stderr: {result.stderr}", file=sys.stderr, flush=True)
        if returncode != 0:
            print(f"Masscan stderr: {stderr}", file=sys.stderr, flush=True)

        # Parse results
        responding_ips = set()

@@ -475,6 +552,10 @@ class SneakyScanner:
        all_services = {}

        for ip, ports in ip_ports.items():
            # Check if cancelled before each host
            if self.is_cancelled():
                break

            if not ports:
                all_services[ip] = []
                continue

@@ -496,14 +577,33 @@ class SneakyScanner:
                '--version-intensity', '5',  # Balanced speed/accuracy
                '-p', port_list,
                '-oX', xml_output,  # XML output
                '--host-timeout', '5m',  # Timeout per host
                '--host-timeout', NMAP_HOST_TIMEOUT,  # Timeout per host
                ip
            ]

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=600)
            # Use Popen for cancellation support
            with self._process_lock:
                self._active_process = subprocess.Popen(
                    cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    text=True,
                    start_new_session=True
                )

            if result.returncode != 0:
                print(f"  Nmap warning for {ip}: {result.stderr}", file=sys.stderr, flush=True)
            stdout, stderr = self._active_process.communicate(timeout=600)
            returncode = self._active_process.returncode

            with self._process_lock:
                self._active_process = None

            # Check if cancelled
            if self.is_cancelled():
                Path(xml_output).unlink(missing_ok=True)
                break

            if returncode != 0:
                print(f"  Nmap warning for {ip}: {stderr}", file=sys.stderr, flush=True)

            # Parse XML output
            services = self._parse_nmap_xml(xml_output)

@@ -576,29 +676,57 @@ class SneakyScanner:

        return services

    def _is_likely_web_service(self, service: Dict) -> bool:
    def _is_likely_web_service(self, service: Dict, ip: str = None) -> bool:
        """
        Check if a service is likely HTTP/HTTPS based on nmap detection or common web ports
        Check if a service is a web server by actually making an HTTP request

        Args:
            service: Service dictionary from nmap results
            ip: IP address to test (required for HTTP probe)

        Returns:
            True if service appears to be web-related
            True if service responds to HTTP/HTTPS requests
        """
        # Check service name
        import requests
        import urllib3
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        # Quick check for known web service names first
        web_services = ['http', 'https', 'ssl', 'http-proxy', 'https-alt',
                        'http-alt', 'ssl/http', 'ssl/https']
        service_name = service.get('service', '').lower()

        if service_name in web_services:
            return True

        # Check common non-standard web ports
        web_ports = [80, 443, 8000, 8006, 8008, 8080, 8081, 8443, 8888, 9443]
        # If no IP provided, we can't do the HTTP probe
        port = service.get('port')
        if not ip or not port:
            # Fall back to the service-name check when no IP is available;
            # we shouldn't normally get here, but just in case...
            if service_name in web_services:
                return True
            return False

        return port in web_ports
        # Actually try to connect - this is the definitive test.
        # Try HTTPS first, then HTTP.
        for protocol in ['https', 'http']:
            url = f"{protocol}://{ip}:{port}/"
            try:
                response = requests.get(
                    url,
                    timeout=3,
                    verify=False,
                    allow_redirects=False
                )
                # Any status code means it's a web server
                # (including 404, 500, etc. - still a web server)
                return True
            except requests.exceptions.SSLError:
                # SSL error on HTTPS, try HTTP next
                continue
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.Timeout,
                    requests.exceptions.RequestException):
                continue

        return False
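
    # For reference, a hypothetical call. 'scanner' stands in for a constructed
    # SneakyScanner, and the service dict shape mirrors the nmap results used
    # above; the address and port are made up.
    #
    #     svc = {'service': 'unknown', 'port': 8443}
    #     if scanner._is_likely_web_service(svc, ip='192.0.2.10'):
    #         print("Web service on 192.0.2.10:8443")   # any HTTP status counts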

    def _detect_http_https(self, ip: str, port: int, timeout: int = 5) -> str:
        """

@@ -786,7 +914,7 @@ class SneakyScanner:
        ip_results = {}

        for service in services:
            if not self._is_likely_web_service(service):
            if not self._is_likely_web_service(service, ip):
                continue

            port = service['port']

@@ -832,10 +960,17 @@ class SneakyScanner:

        return all_results

    def scan(self) -> Dict[str, Any]:
    def scan(self, progress_callback: Optional[Callable] = None) -> Dict[str, Any]:
        """
        Perform complete scan based on configuration

        Args:
            progress_callback: Optional callback function for progress updates.
                Called with (phase, ip, data) where:
                - phase: 'init', 'ping', 'tcp_scan', 'udp_scan', 'service_detection', 'http_analysis'
                - ip: IP address being processed (or None for phase start)
                - data: Dict with progress data (results, counts, etc.)

        Returns:
            Dictionary containing scan results
        """

@@ -872,17 +1007,61 @@ class SneakyScanner:
        all_ips = sorted(list(all_ips))
        print(f"Total IPs to scan: {len(all_ips)}", flush=True)

        # Report initialization with total IP count
        if progress_callback:
            progress_callback('init', None, {
                'total_ips': len(all_ips),
                'ip_to_site': ip_to_site
            })

        # Perform ping scan
        print(f"\n[1/5] Performing ping scan on {len(all_ips)} IPs...", flush=True)
        if progress_callback:
            progress_callback('ping', None, {'status': 'starting'})
        ping_results = self._run_ping_scan(all_ips)

        # Check for cancellation
        if self.is_cancelled():
            print("\nScan cancelled by user", flush=True)
            raise ScanCancelledError("Scan cancelled by user")

        # Report ping results
        if progress_callback:
            progress_callback('ping', None, {
                'status': 'completed',
                'results': ping_results
            })

        # Perform TCP scan (all ports)
        print(f"\n[2/5] Performing TCP scan on {len(all_ips)} IPs (ports 0-65535)...", flush=True)
        if progress_callback:
            progress_callback('tcp_scan', None, {'status': 'starting'})
        tcp_results = self._run_masscan(all_ips, '0-65535', 'tcp')

        # Perform UDP scan (all ports)
        print(f"\n[3/5] Performing UDP scan on {len(all_ips)} IPs (ports 0-65535)...", flush=True)
        udp_results = self._run_masscan(all_ips, '0-65535', 'udp')
        # Check for cancellation
        if self.is_cancelled():
            print("\nScan cancelled by user", flush=True)
            raise ScanCancelledError("Scan cancelled by user")

        # Perform UDP scan (if enabled)
        udp_enabled = os.environ.get('UDP_SCAN_ENABLED', 'false').lower() == 'true'
        udp_ports = os.environ.get('UDP_PORTS', '53,67,68,69,123,161,500,514,1900')

        if udp_enabled:
            print(f"\n[3/5] Performing UDP scan on {len(all_ips)} IPs (ports {udp_ports})...", flush=True)
            if progress_callback:
                progress_callback('udp_scan', None, {'status': 'starting'})
            udp_results = self._run_masscan(all_ips, udp_ports, 'udp')

            # Check for cancellation
            if self.is_cancelled():
                print("\nScan cancelled by user", flush=True)
                raise ScanCancelledError("Scan cancelled by user")
        else:
            print("\n[3/5] Skipping UDP scan (disabled)...", flush=True)
            if progress_callback:
                progress_callback('udp_scan', None, {'status': 'skipped'})
            udp_results = []
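
            # Both toggles above are plain environment variables; in a Docker
            # deployment they would typically be set in .env. Illustrative
            # values (when unset, UDP scanning stays off and the default port
            # list above applies):
            #
            #     UDP_SCAN_ENABLED=true
            #     UDP_PORTS=53,123,161,500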

        # Organize results by IP
        results_by_ip = {}

@@ -917,20 +1096,56 @@ class SneakyScanner:
            results_by_ip[ip]['actual']['tcp_ports'].sort()
            results_by_ip[ip]['actual']['udp_ports'].sort()

        # Report TCP/UDP scan results with discovered ports per IP
        if progress_callback:
            tcp_udp_results = {}
            for ip in all_ips:
                tcp_udp_results[ip] = {
                    'tcp_ports': results_by_ip[ip]['actual']['tcp_ports'],
                    'udp_ports': results_by_ip[ip]['actual']['udp_ports']
                }
            progress_callback('tcp_scan', None, {
                'status': 'completed',
                'results': tcp_udp_results
            })

        # Perform service detection on TCP ports
        print("\n[4/5] Performing service detection on discovered TCP ports...", flush=True)
        if progress_callback:
            progress_callback('service_detection', None, {'status': 'starting'})
        ip_ports = {ip: results_by_ip[ip]['actual']['tcp_ports'] for ip in all_ips}
        service_results = self._run_nmap_service_detection(ip_ports)

        # Check for cancellation
        if self.is_cancelled():
            print("\nScan cancelled by user", flush=True)
            raise ScanCancelledError("Scan cancelled by user")

        # Add service information to results
        for ip, services in service_results.items():
            if ip in results_by_ip:
                results_by_ip[ip]['actual']['services'] = services

        # Report service detection results
        if progress_callback:
            progress_callback('service_detection', None, {
                'status': 'completed',
                'results': service_results
            })

        # Perform HTTP/HTTPS analysis on web services
        print("\n[5/5] Analyzing HTTP/HTTPS services and SSL/TLS configuration...", flush=True)
        if progress_callback:
            progress_callback('http_analysis', None, {'status': 'starting'})
        http_results = self._run_http_analysis(service_results)

        # Report HTTP analysis completion
        if progress_callback:
            progress_callback('http_analysis', None, {
                'status': 'completed',
                'results': http_results
            })

        # Merge HTTP analysis into service results
        for ip, port_results in http_results.items():
            if ip in results_by_ip:

@@ -1054,6 +1269,8 @@ class SneakyScanner:
                # Preserve directory structure in ZIP
                arcname = f"{screenshot_dir.name}/{screenshot_file.name}"
                zipf.write(screenshot_file, arcname)
            # Track screenshot directory for database storage
            output_paths['screenshots'] = screenshot_dir

        output_paths['zip'] = zip_path
        print(f"ZIP archive saved to: {zip_path}", flush=True)

@@ -146,6 +146,47 @@ def acknowledge_alert(alert_id):
    }), 400


@bp.route('/acknowledge-all', methods=['POST'])
@api_auth_required
def acknowledge_all_alerts():
    """
    Acknowledge all unacknowledged alerts.

    Returns:
        JSON response with count of acknowledged alerts
    """
    acknowledged_by = request.json.get('acknowledged_by', 'api') if request.json else 'api'

    try:
        # Get all unacknowledged alerts
        unacked_alerts = current_app.db_session.query(Alert).filter(
            Alert.acknowledged == False
        ).all()

        count = 0
        for alert in unacked_alerts:
            alert.acknowledged = True
            alert.acknowledged_at = datetime.now(timezone.utc)
            alert.acknowledged_by = acknowledged_by
            count += 1

        current_app.db_session.commit()

        return jsonify({
            'status': 'success',
            'message': f'Acknowledged {count} alerts',
            'count': count,
            'acknowledged_by': acknowledged_by
        })

    except Exception as e:
        current_app.db_session.rollback()
        return jsonify({
            'status': 'error',
            'message': f'Failed to acknowledge alerts: {str(e)}'
        }), 500
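
# Illustrative client call for the endpoint above. The base URL is an
# assumption; authentication depends on the deployment (see the API reference).
import requests

resp = requests.post(
    'http://localhost:5000/api/alerts/acknowledge-all',  # assumed base URL
    json={'acknowledged_by': 'ops-team'},
    timeout=10,
)
print(resp.json())  # e.g. {'status': 'success', 'count': 12, ...}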


@bp.route('/rules', methods=['GET'])
@api_auth_required
def list_alert_rules():

@@ -5,18 +5,107 @@ Handles endpoints for triggering scans, listing scan history, and retrieving
scan results.
"""

import json
import logging
from datetime import datetime
from pathlib import Path

from flask import Blueprint, current_app, jsonify, request
from sqlalchemy.exc import SQLAlchemyError

from web.auth.decorators import api_auth_required
from web.models import Scan, ScanProgress
from web.services.scan_service import ScanService
from web.utils.pagination import validate_page_params
from web.jobs.scan_job import stop_scan

bp = Blueprint('scans', __name__)
logger = logging.getLogger(__name__)


def _recover_orphaned_scan(scan: Scan, session) -> dict:
    """
    Recover an orphaned scan by checking for output files.

    If output files exist: mark as 'completed' (smart recovery)
    If no output files: mark as 'cancelled'

    Args:
        scan: The orphaned Scan object
        session: Database session

    Returns:
        Dictionary with recovery result for API response
    """
    # Check for existing output files
    output_exists = False
    output_files_found = []

    # Check paths stored in database
    if scan.json_path and Path(scan.json_path).exists():
        output_exists = True
        output_files_found.append('json')
    if scan.html_path and Path(scan.html_path).exists():
        output_files_found.append('html')
    if scan.zip_path and Path(scan.zip_path).exists():
        output_files_found.append('zip')

    # Also check by timestamp pattern if paths not stored yet
    if not output_exists and scan.started_at:
        output_dir = Path('/app/output')
        if output_dir.exists():
            timestamp_pattern = scan.started_at.strftime('%Y%m%d')
            for json_file in output_dir.glob(f'scan_report_{timestamp_pattern}*.json'):
                output_exists = True
                output_files_found.append('json')
                # Update scan record with found paths
                scan.json_path = str(json_file)
                html_file = json_file.with_suffix('.html')
                if html_file.exists():
                    scan.html_path = str(html_file)
                    output_files_found.append('html')
                zip_file = json_file.with_suffix('.zip')
                if zip_file.exists():
                    scan.zip_path = str(zip_file)
                    output_files_found.append('zip')
                break

    if output_exists:
        # Smart recovery: outputs exist, mark as completed
        scan.status = 'completed'
        scan.completed_at = datetime.utcnow()
        if scan.started_at:
            scan.duration = (datetime.utcnow() - scan.started_at).total_seconds()
        scan.error_message = None
        session.commit()

        logger.info(f"Scan {scan.id}: Recovered as completed (files: {output_files_found})")

        return {
            'scan_id': scan.id,
            'status': 'completed',
            'message': f'Scan recovered as completed (output files found: {", ".join(output_files_found)})',
            'recovery_type': 'smart_recovery'
        }
    else:
        # No outputs: mark as cancelled
        scan.status = 'cancelled'
        scan.completed_at = datetime.utcnow()
        if scan.started_at:
            scan.duration = (datetime.utcnow() - scan.started_at).total_seconds()
        scan.error_message = 'Scan process was interrupted before completion. No output files were generated.'
        session.commit()

        logger.info(f"Scan {scan.id}: Marked as cancelled (orphaned, no output files)")

        return {
            'scan_id': scan.id,
            'status': 'cancelled',
            'message': 'Orphaned scan cancelled (no output files found)',
            'recovery_type': 'orphan_cleanup'
        }


@bp.route('', methods=['GET'])
@api_auth_required
def list_scans():

@@ -240,6 +329,77 @@ def delete_scan(scan_id):
    }), 500


@bp.route('/<int:scan_id>/stop', methods=['POST'])
@api_auth_required
def stop_running_scan(scan_id):
    """
    Stop a running scan with smart recovery for orphaned scans.

    If the scan is actively running in the registry, sends a cancel signal.
    If the scan shows as running/finalizing but is not in the registry (orphaned),
    performs smart recovery: marks as 'completed' if output files exist,
    otherwise marks as 'cancelled'.

    Args:
        scan_id: Scan ID to stop

    Returns:
        JSON response with stop status or recovery result
    """
    try:
        session = current_app.db_session

        # Check if scan exists
        scan = session.query(Scan).filter_by(id=scan_id).first()
        if not scan:
            logger.warning(f"Scan not found for stop request: {scan_id}")
            return jsonify({
                'error': 'Not found',
                'message': f'Scan with ID {scan_id} not found'
            }), 404

        # Allow stopping scans with status 'running' or 'finalizing'
        if scan.status not in ('running', 'finalizing'):
            logger.warning(f"Cannot stop scan {scan_id}: status is '{scan.status}'")
            return jsonify({
                'error': 'Invalid state',
                'message': f"Cannot stop scan: status is '{scan.status}'"
            }), 400

        # Get database URL from app config
        db_url = current_app.config['SQLALCHEMY_DATABASE_URI']

        # Attempt to stop the scan
        stopped = stop_scan(scan_id, db_url)

        if stopped:
            logger.info(f"Stop signal sent to scan {scan_id}")
            return jsonify({
                'scan_id': scan_id,
                'message': 'Stop signal sent to scan',
                'status': 'stopping'
            }), 200
        else:
            # Scanner not in registry - this is an orphaned scan.
            # Attempt smart recovery.
            logger.warning(f"Scan {scan_id} not in registry, attempting smart recovery")
            recovery_result = _recover_orphaned_scan(scan, session)
            return jsonify(recovery_result), 200

    except SQLAlchemyError as e:
        logger.error(f"Database error stopping scan {scan_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to stop scan'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error stopping scan {scan_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500
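
# Illustrative client call (assumed base URL; auth omitted). A live scan answers
# with status 'stopping'; an orphaned one answers with recovery_type
# 'smart_recovery' or 'orphan_cleanup', per the handler above.
import requests

resp = requests.post('http://localhost:5000/api/scans/42/stop', timeout=10)
print(resp.status_code, resp.json())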


@bp.route('/<int:scan_id>/status', methods=['GET'])
@api_auth_required
def get_scan_status(scan_id):

@@ -281,6 +441,141 @@ def get_scan_status(scan_id):
    }), 500


@bp.route('/<int:scan_id>/progress', methods=['GET'])
@api_auth_required
def get_scan_progress(scan_id):
    """
    Get detailed progress for a running scan including per-IP results.

    Args:
        scan_id: Scan ID

    Returns:
        JSON response with scan progress including:
        - current_phase: Current scan phase
        - total_ips: Total IPs being scanned
        - completed_ips: Number of IPs completed in current phase
        - progress_entries: List of per-IP progress with discovered results
    """
    try:
        session = current_app.db_session

        # Get scan record
        scan = session.query(Scan).filter_by(id=scan_id).first()
        if not scan:
            logger.warning(f"Scan not found for progress check: {scan_id}")
            return jsonify({
                'error': 'Not found',
                'message': f'Scan with ID {scan_id} not found'
            }), 404

        # Get progress entries
        progress_entries = session.query(ScanProgress).filter_by(scan_id=scan_id).all()

        # Build progress data
        entries = []
        for entry in progress_entries:
            entry_data = {
                'ip_address': entry.ip_address,
                'site_name': entry.site_name,
                'phase': entry.phase,
                'status': entry.status,
                'ping_result': entry.ping_result
            }

            # Parse JSON fields
            if entry.tcp_ports:
                entry_data['tcp_ports'] = json.loads(entry.tcp_ports)
            else:
                entry_data['tcp_ports'] = []

            if entry.udp_ports:
                entry_data['udp_ports'] = json.loads(entry.udp_ports)
            else:
                entry_data['udp_ports'] = []

            if entry.services:
                entry_data['services'] = json.loads(entry.services)
            else:
                entry_data['services'] = []

            entries.append(entry_data)

        # Sort entries by site name then IP (numerically)
        def ip_sort_key(ip_str):
            """Convert IP to tuple of integers for proper numeric sorting."""
            try:
                return tuple(int(octet) for octet in ip_str.split('.'))
            except (ValueError, AttributeError):
                return (0, 0, 0, 0)

        entries.sort(key=lambda x: (x['site_name'] or '', ip_sort_key(x['ip_address'])))

        response = {
            'scan_id': scan_id,
            'status': scan.status,
            'current_phase': scan.current_phase or 'pending',
            'total_ips': scan.total_ips or 0,
            'completed_ips': scan.completed_ips or 0,
            'progress_entries': entries
        }

        logger.debug(f"Retrieved progress for scan {scan_id}: phase={scan.current_phase}, {scan.completed_ips}/{scan.total_ips} IPs")
        return jsonify(response)

    except SQLAlchemyError as e:
        logger.error(f"Database error retrieving scan progress {scan_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve scan progress'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error retrieving scan progress {scan_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500
|
||||
|
||||
|
||||
@bp.route('/by-ip/<ip_address>', methods=['GET'])
|
||||
@api_auth_required
|
||||
def get_scans_by_ip(ip_address):
|
||||
"""
|
||||
Get last 10 scans containing a specific IP address.
|
||||
|
||||
Args:
|
||||
ip_address: IP address to search for
|
||||
|
||||
Returns:
|
||||
JSON response with list of scans containing the IP
|
||||
"""
|
||||
try:
|
||||
# Get scans from service
|
||||
scan_service = ScanService(current_app.db_session)
|
||||
scans = scan_service.get_scans_by_ip(ip_address)
|
||||
|
||||
logger.info(f"Retrieved {len(scans)} scans for IP: {ip_address}")
|
||||
|
||||
return jsonify({
|
||||
'ip_address': ip_address,
|
||||
'scans': scans,
|
||||
'count': len(scans)
|
||||
})
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
logger.error(f"Database error retrieving scans for IP {ip_address}: {str(e)}")
|
||||
return jsonify({
|
||||
'error': 'Database error',
|
||||
'message': 'Failed to retrieve scans'
|
||||
}), 500
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error retrieving scans for IP {ip_address}: {str(e)}", exc_info=True)
|
||||
return jsonify({
|
||||
'error': 'Internal server error',
|
||||
'message': 'An unexpected error occurred'
|
||||
}), 500
|
||||
|
||||
|
||||
@bp.route('/<int:scan_id1>/compare/<int:scan_id2>', methods=['GET'])
|
||||
@api_auth_required
|
||||
def compare_scans(scan_id1, scan_id2):
|
||||
|
||||
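A hedged sketch of polling the progress endpoint above until the scan settles. As before, the `/api/scans` prefix and `X-API-Key` header are assumptions consistent with the rest of this changeset.

```python
# Hypothetical progress poller for GET /<scan_id>/progress (placeholder values).
import time
import requests

BASE_URL = "http://localhost:5000"        # assumed dev address
HEADERS = {"X-API-Key": "your-api-key"}   # placeholder

def wait_for_scan(scan_id: int, interval: float = 5.0) -> dict:
    """Poll the progress endpoint until the scan leaves running/finalizing."""
    while True:
        data = requests.get(f"{BASE_URL}/api/scans/{scan_id}/progress",
                            headers=HEADERS, timeout=10).json()
        print(f"{data['current_phase']}: {data['completed_ips']}/{data['total_ips']} IPs")
        if data['status'] not in ('running', 'finalizing'):
            return data
        time.sleep(interval)
```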
@@ -36,9 +36,15 @@ def list_sites():
    if request.args.get('all', '').lower() == 'true':
        site_service = SiteService(current_app.db_session)
        sites = site_service.list_all_sites()
        ip_stats = site_service.get_global_ip_stats()

        logger.info(f"Listed all sites (count={len(sites)})")
        return jsonify({'sites': sites})
        return jsonify({
            'sites': sites,
            'total_ips': ip_stats['total_ips'],
            'unique_ips': ip_stats['unique_ips'],
            'duplicate_ips': ip_stats['duplicate_ips']
        })

    # Get and validate query parameters
    page = request.args.get('page', 1, type=int)
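For reference, the shape the `all=true` branch now returns. The route path and the contents of each site dict are assumptions; only the four top-level keys are confirmed by the code above.

```python
# Illustrative payload from the all-sites branch (made-up values).
{
    "sites": [{"id": 1, "name": "HQ"}],  # actual entries come from list_all_sites()
    "total_ips": 120,       # all SiteIP rows, duplicates included
    "unique_ips": 114,      # distinct IP addresses
    "duplicate_ips": 6      # total_ips - unique_ips
}
```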
@@ -307,9 +307,12 @@ def init_scheduler(app: Flask) -> None:
    with app.app_context():
        # Clean up any orphaned scans from previous crashes/restarts
        scan_service = ScanService(app.db_session)
        orphaned_count = scan_service.cleanup_orphaned_scans()
        if orphaned_count > 0:
            app.logger.warning(f"Cleaned up {orphaned_count} orphaned scan(s) on startup")
        cleanup_result = scan_service.cleanup_orphaned_scans()
        if cleanup_result['total'] > 0:
            app.logger.warning(
                f"Cleaned up {cleanup_result['total']} orphaned scan(s) on startup: "
                f"{cleanup_result['recovered']} recovered, {cleanup_result['failed']} failed"
            )

        # Load all enabled schedules from database
        scheduler.load_schedules_on_startup()

@@ -7,7 +7,10 @@ that are managed by developers, not stored in the database.

# Application metadata
APP_NAME = 'SneakyScanner'
APP_VERSION = '1.0.0-alpha'
APP_VERSION = '1.0.0-beta'

# Repository URL
REPO_URL = 'https://git.sneakygeek.net/sneakygeek/SneakyScan'

# Scanner settings
NMAP_HOST_TIMEOUT = '2m'  # Timeout per host for nmap service detection

@@ -5,7 +5,9 @@ This module handles the execution of scans in background threads,
updating database status and handling errors.
"""

import json
import logging
import threading
import traceback
from datetime import datetime
from pathlib import Path
@@ -13,13 +15,168 @@ from pathlib import Path
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from src.scanner import SneakyScanner
from web.models import Scan
from src.scanner import SneakyScanner, ScanCancelledError
from web.models import Scan, ScanProgress
from web.services.scan_service import ScanService
from web.services.alert_service import AlertService

logger = logging.getLogger(__name__)

# Registry for tracking running scanners (scan_id -> SneakyScanner instance)
_running_scanners = {}
_running_scanners_lock = threading.Lock()


def get_running_scanner(scan_id: int):
    """Get a running scanner instance by scan ID."""
    with _running_scanners_lock:
        return _running_scanners.get(scan_id)


def stop_scan(scan_id: int, db_url: str) -> bool:
    """
    Stop a running scan.

    Args:
        scan_id: ID of the scan to stop
        db_url: Database connection URL

    Returns:
        True if the scan was cancelled, False if not found or already stopped
    """
    logger.info(f"Attempting to stop scan {scan_id}")

    # Get the scanner instance
    scanner = get_running_scanner(scan_id)
    if not scanner:
        logger.warning(f"Scanner for scan {scan_id} not found in registry")
        return False

    # Cancel the scanner
    scanner.cancel()
    logger.info(f"Cancellation signal sent to scan {scan_id}")

    return True
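For context, a hedged sketch of the cooperative-cancellation pattern the registry implies. Only `cancel()` and `ScanCancelledError` appear in this diff; the internal event flag and the check points between phases are assumptions about `SneakyScanner`'s implementation.

```python
# Hypothetical standalone sketch of cooperative cancellation, assuming
# cancel() just sets a flag that the scan loop polls between phases.
import threading

class ScanCancelledError(Exception):
    """Raised inside the scanner when a cancel was requested (real class lives in src.scanner)."""

class CancellableScanner:
    def __init__(self):
        self._cancel_event = threading.Event()  # assumed internal flag

    def cancel(self):
        # Called from the web thread via stop_scan(); thread-safe by design.
        self._cancel_event.set()

    def _check_cancelled(self):
        # Presumably called between scan phases; raising here unwinds to
        # execute_scan(), whose `except ScanCancelledError` marks the scan 'cancelled'.
        if self._cancel_event.is_set():
            raise ScanCancelledError()
```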
def create_progress_callback(scan_id: int, session):
    """
    Create a progress callback function for updating scan progress in the database.

    Args:
        scan_id: ID of the scan record
        session: Database session

    Returns:
        Callback function that accepts (phase, ip, data)
    """
    ip_to_site = {}

    def progress_callback(phase: str, ip: str, data: dict):
        """Update scan progress in the database."""
        nonlocal ip_to_site

        try:
            # Get scan record
            scan = session.query(Scan).filter_by(id=scan_id).first()
            if not scan:
                return

            # Handle initialization phase
            if phase == 'init':
                scan.total_ips = data.get('total_ips', 0)
                scan.completed_ips = 0
                scan.current_phase = 'ping'
                ip_to_site = data.get('ip_to_site', {})

                # Create progress entries for all IPs
                for ip_addr, site_name in ip_to_site.items():
                    progress = ScanProgress(
                        scan_id=scan_id,
                        ip_address=ip_addr,
                        site_name=site_name,
                        phase='pending',
                        status='pending'
                    )
                    session.add(progress)

                session.commit()
                return

            # Update current phase
            if data.get('status') == 'starting':
                scan.current_phase = phase
                scan.completed_ips = 0
                session.commit()
                return

            # Handle phase completion with results
            if data.get('status') == 'completed':
                results = data.get('results', {})

                if phase == 'ping':
                    # Update progress entries with ping results
                    for ip_addr, ping_result in results.items():
                        progress = session.query(ScanProgress).filter_by(
                            scan_id=scan_id, ip_address=ip_addr
                        ).first()
                        if progress:
                            progress.ping_result = ping_result
                            progress.phase = 'ping'
                            progress.status = 'completed'

                    scan.completed_ips = len(results)

                elif phase == 'tcp_scan':
                    # Update progress entries with TCP/UDP port results
                    for ip_addr, port_data in results.items():
                        progress = session.query(ScanProgress).filter_by(
                            scan_id=scan_id, ip_address=ip_addr
                        ).first()
                        if progress:
                            progress.tcp_ports = json.dumps(port_data.get('tcp_ports', []))
                            progress.udp_ports = json.dumps(port_data.get('udp_ports', []))
                            progress.phase = 'tcp_scan'
                            progress.status = 'completed'

                    scan.completed_ips = len(results)

                elif phase == 'service_detection':
                    # Update progress entries with service detection results
                    for ip_addr, services in results.items():
                        progress = session.query(ScanProgress).filter_by(
                            scan_id=scan_id, ip_address=ip_addr
                        ).first()
                        if progress:
                            # Simplify service data for storage
                            service_list = []
                            for svc in services:
                                service_list.append({
                                    'port': svc.get('port'),
                                    'service': svc.get('service', 'unknown'),
                                    'product': svc.get('product', ''),
                                    'version': svc.get('version', '')
                                })
                            progress.services = json.dumps(service_list)
                            progress.phase = 'service_detection'
                            progress.status = 'completed'

                    scan.completed_ips = len(results)

                elif phase == 'http_analysis':
                    # Mark HTTP analysis as complete
                    scan.current_phase = 'completed'
                    scan.completed_ips = scan.total_ips

                session.commit()

        except Exception as e:
            logger.error(f"Progress callback error for scan {scan_id}: {str(e)}")
            # Don't re-raise - we don't want to break the scan
            session.rollback()

    return progress_callback
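Illustrative only: how the scanner is expected to drive this callback, given an open SQLAlchemy `session` and an existing scan row. The phase names and data shapes mirror the handler above; the IDs are placeholders.

```python
# Sketch of the callback protocol implemented above (assumes `session` is
# an open SQLAlchemy session bound to the app database).
callback = create_progress_callback(scan_id=42, session=session)  # placeholder scan_id

# 1. Initialization: total count plus an IP -> site-name mapping
callback('init', '', {'total_ips': 2,
                      'ip_to_site': {'10.0.0.1': 'HQ', '10.0.0.2': 'HQ'}})

# 2. A phase starts
callback('ping', '', {'status': 'starting'})

# 3. A phase completes with per-IP results
callback('ping', '', {'status': 'completed',
                      'results': {'10.0.0.1': True, '10.0.0.2': False}})
```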
def execute_scan(scan_id: int, config_id: int, db_url: str = None):
    """

@@ -66,23 +223,64 @@ def execute_scan(scan_id: int, config_id: int, db_url: str = None):
        # Initialize scanner with database config
        scanner = SneakyScanner(config_id=config_id)

        # Execute scan
        # Register scanner in the running registry
        with _running_scanners_lock:
            _running_scanners[scan_id] = scanner
        logger.debug(f"Scan {scan_id}: Registered in running scanners registry")

        # Create progress callback
        progress_callback = create_progress_callback(scan_id, session)

        # Execute scan with progress tracking
        logger.info(f"Scan {scan_id}: Running scanner...")
        start_time = datetime.utcnow()
        report, timestamp = scanner.scan()
        report, timestamp = scanner.scan(progress_callback=progress_callback)
        end_time = datetime.utcnow()

        scan_duration = (end_time - start_time).total_seconds()
        logger.info(f"Scan {scan_id}: Scanner completed in {scan_duration:.2f} seconds")

        # Generate output files (JSON, HTML, ZIP)
        logger.info(f"Scan {scan_id}: Generating output files...")
        scanner.generate_outputs(report, timestamp)
        # Transition to 'finalizing' status before output generation
        try:
            scan = session.query(Scan).filter_by(id=scan_id).first()
            if scan:
                scan.status = 'finalizing'
                scan.current_phase = 'generating_outputs'
                session.commit()
                logger.info(f"Scan {scan_id}: Status changed to 'finalizing'")
        except Exception as e:
            logger.error(f"Scan {scan_id}: Failed to update status to finalizing: {e}")
            session.rollback()

        # Save results to database
        logger.info(f"Scan {scan_id}: Saving results to database...")
        scan_service = ScanService(session)
        scan_service._save_scan_to_db(report, scan_id, status='completed')
        # Generate output files (JSON, HTML, ZIP) with error handling
        output_paths = {}
        output_generation_failed = False
        try:
            logger.info(f"Scan {scan_id}: Generating output files...")
            output_paths = scanner.generate_outputs(report, timestamp)
        except Exception as e:
            output_generation_failed = True
            logger.error(f"Scan {scan_id}: Output generation failed: {str(e)}")
            logger.error(f"Scan {scan_id}: Traceback:\n{traceback.format_exc()}")
            # Still mark the scan as completed, with a warning, since the scan data is valid
            try:
                scan = session.query(Scan).filter_by(id=scan_id).first()
                if scan:
                    scan.status = 'completed'
                    scan.error_message = f"Scan completed but output file generation failed: {str(e)}"
                    scan.completed_at = datetime.utcnow()
                    if scan.started_at:
                        scan.duration = (datetime.utcnow() - scan.started_at).total_seconds()
                    session.commit()
                    logger.info(f"Scan {scan_id}: Marked as completed with output generation warning")
            except Exception as db_error:
                logger.error(f"Scan {scan_id}: Failed to update status after output error: {db_error}")

        # Save results to database (only if output generation succeeded)
        if not output_generation_failed:
            logger.info(f"Scan {scan_id}: Saving results to database...")
            scan_service = ScanService(session)
            scan_service._save_scan_to_db(report, scan_id, status='completed', output_paths=output_paths)

        # Evaluate alert rules
        logger.info(f"Scan {scan_id}: Evaluating alert rules...")

@@ -97,6 +295,19 @@ def execute_scan(scan_id: int, config_id: int, db_url: str = None):

        logger.info(f"Scan {scan_id}: Completed successfully")

    except ScanCancelledError:
        # Scan was cancelled by user
        logger.info(f"Scan {scan_id}: Cancelled by user")

        scan = session.query(Scan).filter_by(id=scan_id).first()
        if scan:
            scan.status = 'cancelled'
            scan.error_message = 'Scan cancelled by user'
            scan.completed_at = datetime.utcnow()
            if scan.started_at:
                scan.duration = (datetime.utcnow() - scan.started_at).total_seconds()
            session.commit()

    except FileNotFoundError as e:
        # Config file not found
        error_msg = f"Configuration file not found: {str(e)}"

@@ -126,6 +337,12 @@ def execute_scan(scan_id: int, config_id: int, db_url: str = None):
        logger.error(f"Scan {scan_id}: Failed to update error status in database: {str(db_error)}")

    finally:
        # Unregister scanner from registry
        with _running_scanners_lock:
            if scan_id in _running_scanners:
                del _running_scanners[scan_id]
                logger.debug(f"Scan {scan_id}: Unregistered from running scanners registry")

        # Always close the session
        session.close()
        logger.info(f"Scan {scan_id}: Background job completed, session closed")
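`execute_scan()` is designed to run off the request thread; elsewhere in this codebase the work is queued via `SchedulerService.queue_scan()`. A bare-bones standard-library equivalent, for illustration only (all values are placeholders):

```python
# Hedged sketch: handing execute_scan to a background thread directly.
import threading

t = threading.Thread(
    target=execute_scan,
    kwargs={'scan_id': 42, 'config_id': 7,
            'db_url': 'sqlite:////app/data/app.db'},  # placeholder values
    daemon=True,  # don't block interpreter shutdown
)
t.start()
```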
@@ -45,7 +45,7 @@ class Scan(Base):
    id = Column(Integer, primary_key=True, autoincrement=True)
    timestamp = Column(DateTime, nullable=False, index=True, comment="Scan start time (UTC)")
    duration = Column(Float, nullable=True, comment="Total scan duration in seconds")
    status = Column(String(20), nullable=False, default='running', comment="running, completed, failed")
    status = Column(String(20), nullable=False, default='running', comment="running, finalizing, completed, failed, cancelled")
    config_id = Column(Integer, ForeignKey('scan_configs.id'), nullable=True, index=True, comment="FK to scan_configs table")
    title = Column(Text, nullable=True, comment="Scan title from config")
    json_path = Column(Text, nullable=True, comment="Path to JSON report")

@@ -59,6 +59,11 @@ class Scan(Base):
    completed_at = Column(DateTime, nullable=True, comment="Scan execution completion time")
    error_message = Column(Text, nullable=True, comment="Error message if scan failed")

    # Progress tracking fields
    current_phase = Column(String(50), nullable=True, comment="Current scan phase: ping, tcp_scan, udp_scan, service_detection, http_analysis")
    total_ips = Column(Integer, nullable=True, comment="Total number of IPs to scan")
    completed_ips = Column(Integer, nullable=True, default=0, comment="Number of IPs completed in current phase")

    # Relationships
    sites = relationship('ScanSite', back_populates='scan', cascade='all, delete-orphan')
    ips = relationship('ScanIP', back_populates='scan', cascade='all, delete-orphan')

@@ -70,6 +75,7 @@ class Scan(Base):
    schedule = relationship('Schedule', back_populates='scans')
    config = relationship('ScanConfig', back_populates='scans')
    site_associations = relationship('ScanSiteAssociation', back_populates='scan', cascade='all, delete-orphan')
    progress_entries = relationship('ScanProgress', back_populates='scan', cascade='all, delete-orphan')

    def __repr__(self):
        return f"<Scan(id={self.id}, title='{self.title}', status='{self.status}')>"

@@ -244,6 +250,43 @@ class ScanTLSVersion(Base):
        return f"<ScanTLSVersion(id={self.id}, tls_version='{self.tls_version}', supported={self.supported})>"


class ScanProgress(Base):
    """
    Real-time progress tracking for individual IPs during scan execution.

    Stores intermediate results as they become available, allowing users to
    see progress and results before the full scan completes.
    """
    __tablename__ = 'scan_progress'

    id = Column(Integer, primary_key=True, autoincrement=True)
    scan_id = Column(Integer, ForeignKey('scans.id'), nullable=False, index=True)
    ip_address = Column(String(45), nullable=False, comment="IP address being scanned")
    site_name = Column(String(255), nullable=True, comment="Site name this IP belongs to")
    phase = Column(String(50), nullable=False, comment="Phase: ping, tcp_scan, udp_scan, service_detection, http_analysis")
    status = Column(String(20), nullable=False, default='pending', comment="pending, in_progress, completed, failed")

    # Results data (stored as JSON)
    ping_result = Column(Boolean, nullable=True, comment="Ping response result")
    tcp_ports = Column(Text, nullable=True, comment="JSON array of discovered TCP ports")
    udp_ports = Column(Text, nullable=True, comment="JSON array of discovered UDP ports")
    services = Column(Text, nullable=True, comment="JSON array of detected services")

    created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="Entry creation time")
    updated_at = Column(DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow, comment="Last update time")

    # Relationships
    scan = relationship('Scan', back_populates='progress_entries')

    # One progress row per (scan, IP)
    __table_args__ = (
        UniqueConstraint('scan_id', 'ip_address', name='uix_scan_progress_ip'),
    )

    def __repr__(self):
        return f"<ScanProgress(id={self.id}, ip='{self.ip_address}', phase='{self.phase}', status='{self.status}')>"


# ============================================================================
# Reusable Site Definition Tables
# ============================================================================
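A small sketch of how the JSON-in-Text columns round-trip, matching the `json.dumps` writes in the progress callback and the `json.loads` reads in the progress endpoint. It assumes an open SQLAlchemy `session` for the app database.

```python
# Writing and reading ScanProgress JSON columns (illustrative values).
import json

entry = ScanProgress(scan_id=1, ip_address='10.0.0.5',
                     phase='tcp_scan', status='completed')
entry.tcp_ports = json.dumps([22, 80, 443])  # stored as a JSON string in a Text column
session.add(entry)
session.commit()

row = session.query(ScanProgress).filter_by(scan_id=1, ip_address='10.0.0.5').one()
open_ports = json.loads(row.tcp_ports) if row.tcp_ports else []
print(open_ports)  # [22, 80, 443]
```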
@@ -5,8 +5,9 @@ Provides dashboard and scan viewing pages.
"""

import logging
import os

from flask import Blueprint, current_app, redirect, render_template, url_for
from flask import Blueprint, current_app, redirect, render_template, request, send_from_directory, url_for

from web.auth.decorators import login_required

@@ -82,6 +83,19 @@ def compare_scans(scan_id1, scan_id2):
    return render_template('scan_compare.html', scan_id1=scan_id1, scan_id2=scan_id2)


@bp.route('/search/ip')
@login_required
def search_ip():
    """
    IP search results page - shows scans containing a specific IP address.

    Returns:
        Rendered search results template
    """
    ip_address = request.args.get('ip', '').strip()
    return render_template('ip_search_results.html', ip_address=ip_address)


@bp.route('/schedules')
@login_required
def schedules():

@@ -244,3 +258,31 @@ def alert_rules():
        'alert_rules.html',
        rules=rules
    )


@bp.route('/help')
@login_required
def help():
    """
    Help page - explains how to use the application.

    Returns:
        Rendered help template
    """
    return render_template('help.html')


@bp.route('/output/<path:filename>')
@login_required
def serve_output_file(filename):
    """
    Serve output files (JSON, HTML, ZIP) from the output directory.

    Args:
        filename: Name of the file to serve

    Returns:
        The requested file
    """
    output_dir = os.environ.get('OUTPUT_DIR', '/app/output')
    return send_from_directory(output_dir, filename)
@@ -16,7 +16,7 @@ from sqlalchemy.orm import Session, joinedload

from web.models import (
    Scan, ScanSite, ScanIP, ScanPort, ScanService as ScanServiceModel,
    ScanCertificate, ScanTLSVersion, Site, ScanSiteAssociation
    ScanCertificate, ScanTLSVersion, Site, ScanSiteAssociation, SiteIP
)
from web.utils.pagination import paginate, PaginatedResult
from web.utils.validators import validate_scan_status

@@ -257,58 +257,128 @@ class ScanService:
        elif scan.status == 'failed':
            status_info['progress'] = 'Failed'
            status_info['error_message'] = scan.error_message
        elif scan.status == 'cancelled':
            status_info['progress'] = 'Cancelled'
            status_info['error_message'] = scan.error_message

        return status_info

    def cleanup_orphaned_scans(self) -> int:
    def get_scans_by_ip(self, ip_address: str, limit: int = 10) -> List[Dict[str, Any]]:
        """
        Clean up orphaned scans that are stuck in 'running' status.
        Get the last N scans containing a specific IP address.

        Args:
            ip_address: IP address to search for
            limit: Maximum number of scans to return (default: 10)

        Returns:
            List of scan summary dictionaries, most recent first
        """
        scans = (
            self.db.query(Scan)
            .join(ScanIP, Scan.id == ScanIP.scan_id)
            .filter(ScanIP.ip_address == ip_address)
            .filter(Scan.status == 'completed')
            .order_by(Scan.timestamp.desc())
            .limit(limit)
            .all()
        )

        return [self._scan_to_summary_dict(scan) for scan in scans]

    def cleanup_orphaned_scans(self) -> dict:
        """
        Clean up orphaned scans with smart recovery.

        For scans stuck in 'running' or 'finalizing' status:
        - If output files exist: mark as 'completed' (smart recovery)
        - If no output files: mark as 'failed'

        This should be called on application startup to handle scans that
        were running when the system crashed or was restarted.

        Scans in 'running' status are marked as 'failed' with an appropriate
        error message indicating they were orphaned.

        Returns:
            Number of orphaned scans cleaned up
            Dictionary with cleanup results: {'recovered': N, 'failed': N, 'total': N}
        """
        # Find all scans with status='running'
        orphaned_scans = self.db.query(Scan).filter(Scan.status == 'running').all()
        # Find all scans with status='running' or 'finalizing'
        orphaned_scans = self.db.query(Scan).filter(
            Scan.status.in_(['running', 'finalizing'])
        ).all()

        if not orphaned_scans:
            logger.info("No orphaned scans found")
            return 0
            return {'recovered': 0, 'failed': 0, 'total': 0}

        count = len(orphaned_scans)
        logger.warning(f"Found {count} orphaned scan(s) in 'running' status, marking as failed")
        logger.warning(f"Found {count} orphaned scan(s), attempting smart recovery")

        recovered_count = 0
        failed_count = 0
        output_dir = Path('/app/output')

        # Mark each orphaned scan as failed
        for scan in orphaned_scans:
            scan.status = 'failed'
            # Check for existing output files
            output_exists = False
            output_files_found = []

            # Check paths stored in database
            if scan.json_path and Path(scan.json_path).exists():
                output_exists = True
                output_files_found.append('json')
            if scan.html_path and Path(scan.html_path).exists():
                output_files_found.append('html')
            if scan.zip_path and Path(scan.zip_path).exists():
                output_files_found.append('zip')

            # Also check by timestamp pattern if paths not stored yet
            if not output_exists and scan.started_at and output_dir.exists():
                timestamp_pattern = scan.started_at.strftime('%Y%m%d')
                for json_file in output_dir.glob(f'scan_report_{timestamp_pattern}*.json'):
                    output_exists = True
                    output_files_found.append('json')
                    # Update scan record with found paths
                    scan.json_path = str(json_file)
                    html_file = json_file.with_suffix('.html')
                    if html_file.exists():
                        scan.html_path = str(html_file)
                        output_files_found.append('html')
                    zip_file = json_file.with_suffix('.zip')
                    if zip_file.exists():
                        scan.zip_path = str(zip_file)
                        output_files_found.append('zip')
                    break

            if output_exists:
                # Smart recovery: outputs exist, mark as completed
                scan.status = 'completed'
                scan.error_message = f'Recovered from orphaned state (output files found: {", ".join(output_files_found)})'
                recovered_count += 1
                logger.info(f"Recovered orphaned scan {scan.id} as completed (files: {output_files_found})")
            else:
                # No outputs: mark as failed
                scan.status = 'failed'
                scan.error_message = (
                    "Scan was interrupted by system shutdown or crash. "
                    "No output files were generated."
                )
                failed_count += 1
                logger.info(f"Marked orphaned scan {scan.id} as failed (no output files)")

            scan.completed_at = datetime.utcnow()
            scan.error_message = (
                "Scan was interrupted by system shutdown or crash. "
                "The scan was running but did not complete normally."
            )

            # Calculate duration if we have a started_at time
            if scan.started_at:
                duration = (datetime.utcnow() - scan.started_at).total_seconds()
                scan.duration = duration

            logger.info(
                f"Marked orphaned scan {scan.id} as failed "
                f"(started: {scan.started_at.isoformat() if scan.started_at else 'unknown'})"
            )
                scan.duration = (datetime.utcnow() - scan.started_at).total_seconds()

        self.db.commit()
        logger.info(f"Cleaned up {count} orphaned scan(s)")
        logger.info(f"Cleaned up {count} orphaned scan(s): {recovered_count} recovered, {failed_count} failed")

        return count
        return {
            'recovered': recovered_count,
            'failed': failed_count,
            'total': count
        }

    def _save_scan_to_db(self, report: Dict[str, Any], scan_id: int,
                         status: str = 'completed') -> None:
                         status: str = 'completed', output_paths: Dict = None) -> None:
        """
        Save scan results to database.
@@ -319,6 +389,7 @@ class ScanService:
            report: Scan report dictionary from scanner
            scan_id: Scan ID to update
            status: Final scan status (completed or failed)
            output_paths: Dictionary with paths to generated files {'json': Path, 'html': Path, 'zip': Path}
        """
        scan = self.db.query(Scan).filter(Scan.id == scan_id).first()
        if not scan:

@@ -329,6 +400,17 @@ class ScanService:
        scan.duration = report.get('scan_duration')
        scan.completed_at = datetime.utcnow()

        # Save output file paths
        if output_paths:
            if 'json' in output_paths:
                scan.json_path = str(output_paths['json'])
            if 'html' in output_paths:
                scan.html_path = str(output_paths['html'])
            if 'zip' in output_paths:
                scan.zip_path = str(output_paths['zip'])
            if 'screenshots' in output_paths:
                scan.screenshot_dir = str(output_paths['screenshots'])

        # Map report data to database models
        self._map_report_to_models(report, scan)

@@ -439,9 +521,10 @@ class ScanService:

            # Process certificate and TLS info if present
            http_info = service_data.get('http_info', {})
            if http_info.get('certificate'):
            ssl_tls = http_info.get('ssl_tls', {})
            if ssl_tls.get('certificate'):
                self._process_certificate(
                    http_info['certificate'],
                    ssl_tls,
                    scan_obj.id,
                    service.id
                )

@@ -479,16 +562,19 @@ class ScanService:
                return service
        return None

    def _process_certificate(self, cert_data: Dict[str, Any], scan_id: int,
    def _process_certificate(self, ssl_tls_data: Dict[str, Any], scan_id: int,
                             service_id: int) -> None:
        """
        Process certificate and TLS version data.

        Args:
            cert_data: Certificate data dictionary
            ssl_tls_data: SSL/TLS data dictionary containing 'certificate' and 'tls_versions'
            scan_id: Scan ID
            service_id: Service ID
        """
        # Extract certificate data from the ssl_tls structure
        cert_data = ssl_tls_data.get('certificate', {})

        # Create ScanCertificate record
        cert = ScanCertificate(
            scan_id=scan_id,
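For orientation, a sketch of the `ssl_tls` structure the reworked `_process_certificate` consumes. Only the `certificate` and `tls_versions` keys are confirmed by the accesses above; the inner field names are illustrative guesses.

```python
# Illustrative ssl_tls payload shape (inner certificate fields are assumptions).
ssl_tls = {
    'certificate': {
        'subject': 'CN=example.local',       # hypothetical field
        'issuer': 'CN=Example CA',           # hypothetical field
        'not_after': '2026-01-01T00:00:00Z', # hypothetical field
    },
    'tls_versions': {
        'TLS 1.2': {'supported': True},
        'TLS 1.3': {'supported': True},
    },
}
```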
@@ -506,7 +592,7 @@ class ScanService:
        self.db.flush()

        # Process TLS versions
        tls_versions = cert_data.get('tls_versions', {})
        tls_versions = ssl_tls_data.get('tls_versions', {})
        for version, version_data in tls_versions.items():
            tls = ScanTLSVersion(
                scan_id=scan_id,

@@ -588,17 +674,47 @@ class ScanService:

    def _site_to_dict(self, site: ScanSite) -> Dict[str, Any]:
        """Convert ScanSite to dictionary."""
        # Look up the master Site ID from ScanSiteAssociation
        master_site_id = None
        assoc = (
            self.db.query(ScanSiteAssociation)
            .filter(
                ScanSiteAssociation.scan_id == site.scan_id,
            )
            .join(Site)
            .filter(Site.name == site.site_name)
            .first()
        )
        if assoc:
            master_site_id = assoc.site_id

        return {
            'id': site.id,
            'name': site.site_name,
            'ips': [self._ip_to_dict(ip) for ip in site.ips]
            'site_id': master_site_id,  # The actual Site ID for config updates
            'ips': [self._ip_to_dict(ip, master_site_id) for ip in site.ips]
        }

    def _ip_to_dict(self, ip: ScanIP) -> Dict[str, Any]:
    def _ip_to_dict(self, ip: ScanIP, site_id: Optional[int] = None) -> Dict[str, Any]:
        """Convert ScanIP to dictionary."""
        # Look up the SiteIP ID for this IP address in the master Site
        site_ip_id = None
        if site_id:
            site_ip = (
                self.db.query(SiteIP)
                .filter(
                    SiteIP.site_id == site_id,
                    SiteIP.ip_address == ip.ip_address
                )
                .first()
            )
            if site_ip:
                site_ip_id = site_ip.id

        return {
            'id': ip.id,
            'address': ip.ip_address,
            'site_ip_id': site_ip_id,  # The actual SiteIP ID for config updates
            'ping_expected': ip.ping_expected,
            'ping_actual': ip.ping_actual,
            'ports': [self._port_to_dict(port) for port in ip.ports]
@@ -6,7 +6,7 @@ scheduled scans with cron expressions.
"""

import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional, Tuple

from croniter import croniter

@@ -71,6 +71,7 @@ class ScheduleService:
        next_run = self.calculate_next_run(cron_expression) if enabled else None

        # Create schedule record
        now_utc = datetime.now(timezone.utc)
        schedule = Schedule(
            name=name,
            config_id=config_id,

@@ -78,8 +79,8 @@ class ScheduleService:
            enabled=enabled,
            last_run=None,
            next_run=next_run,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow()
            created_at=now_utc,
            updated_at=now_utc
        )

        self.db.add(schedule)

@@ -103,7 +104,14 @@ class ScheduleService:
        Raises:
            ValueError: If schedule not found
        """
        schedule = self.db.query(Schedule).filter(Schedule.id == schedule_id).first()
        from sqlalchemy.orm import joinedload

        schedule = (
            self.db.query(Schedule)
            .options(joinedload(Schedule.config))
            .filter(Schedule.id == schedule_id)
            .first()
        )

        if not schedule:
            raise ValueError(f"Schedule {schedule_id} not found")

@@ -138,8 +146,10 @@ class ScheduleService:
                'pages': int
            }
        """
        # Build query
        query = self.db.query(Schedule)
        from sqlalchemy.orm import joinedload

        # Build query and eagerly load the config relationship
        query = self.db.query(Schedule).options(joinedload(Schedule.config))

        # Apply filter
        if enabled_filter is not None:

@@ -215,7 +225,7 @@ class ScheduleService:
            if hasattr(schedule, key):
                setattr(schedule, key, value)

        schedule.updated_at = datetime.utcnow()
        schedule.updated_at = datetime.now(timezone.utc)

        self.db.commit()
        self.db.refresh(schedule)

@@ -298,7 +308,7 @@ class ScheduleService:

        schedule.last_run = last_run
        schedule.next_run = next_run
        schedule.updated_at = datetime.utcnow()
        schedule.updated_at = datetime.now(timezone.utc)

        self.db.commit()
@@ -311,23 +321,43 @@ class ScheduleService:
        Validate a cron expression.

        Args:
            cron_expr: Cron expression to validate
            cron_expr: Cron expression to validate, in standard crontab format
                Format: minute hour day month day_of_week
                Day of week: 0=Sunday, 1=Monday, ..., 6=Saturday
                (APScheduler will convert this to its internal format automatically)

        Returns:
            Tuple of (is_valid, error_message)
            - (True, None) if valid
            - (False, error_message) if invalid

        Note:
            This validates using croniter, which uses standard crontab format.
            APScheduler's from_crontab() will handle the conversion when the
            schedule is registered with the scheduler.
        """
        try:
            # Try to create a croniter instance
            base_time = datetime.utcnow()
            # croniter uses standard crontab format (Sunday=0)
            from datetime import timezone
            base_time = datetime.now(timezone.utc)
            cron = croniter(cron_expr, base_time)

            # Try to get the next run time (validates the expression)
            cron.get_next(datetime)

            # Validate basic format (5 fields)
            fields = cron_expr.split()
            if len(fields) != 5:
                return (False, f"Cron expression must have 5 fields (minute hour day month day_of_week), got {len(fields)}")

            return (True, None)
        except (ValueError, KeyError) as e:
            error_msg = str(e)
            # Add a helpful hint for day_of_week errors
            if "day" in error_msg.lower() and len(cron_expr.split()) >= 5:
                hint = "\nNote: Use standard crontab format where 0=Sunday, 1=Monday, ..., 6=Saturday"
                return (False, f"{error_msg}{hint}")
            return (False, str(e))
        except Exception as e:
            return (False, f"Unexpected error: {str(e)}")
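A quick, self-contained check of the standard-crontab convention described above, using croniter directly (croniter is already a dependency here):

```python
# Standard crontab format: minute hour day month day_of_week (0 = Sunday).
from datetime import datetime, timezone
from croniter import croniter

expr = "0 2 * * 0"  # 02:00 every Sunday
base = datetime.now(timezone.utc)
cron = croniter(expr, base)
print(cron.get_next(datetime))  # next Sunday 02:00; note croniter returns a naive datetime
```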
@@ -345,17 +375,24 @@ class ScheduleService:
            from_time: Base time (defaults to now UTC)

        Returns:
            Next run datetime (UTC)
            Next run datetime (UTC, timezone-aware)

        Raises:
            ValueError: If cron expression is invalid
        """
        if from_time is None:
            from_time = datetime.utcnow()
            from_time = datetime.now(timezone.utc)

        try:
            cron = croniter(cron_expr, from_time)
            return cron.get_next(datetime)
            next_run = cron.get_next(datetime)

            # croniter returns a naive datetime, so we need to add timezone info.
            # Since we're using UTC for all calculations, add the UTC timezone.
            if next_run.tzinfo is None:
                next_run = next_run.replace(tzinfo=timezone.utc)

            return next_run
        except Exception as e:
            raise ValueError(f"Invalid cron expression '{cron_expr}': {str(e)}")

@@ -403,10 +440,16 @@ class ScheduleService:
        Returns:
            Dictionary representation
        """
        # Get config title if the relationship is loaded
        config_name = None
        if schedule.config:
            config_name = schedule.config.title

        return {
            'id': schedule.id,
            'name': schedule.name,
            'config_id': schedule.config_id,
            'config_name': config_name,
            'cron_expression': schedule.cron_expression,
            'enabled': schedule.enabled,
            'last_run': schedule.last_run.isoformat() if schedule.last_run else None,

@@ -421,7 +464,7 @@ class ScheduleService:
        Format datetime as relative time.

        Args:
            dt: Datetime to format (UTC)
            dt: Datetime to format (UTC, can be naive or aware)

        Returns:
            Human-readable relative time (e.g., "in 2 hours", "yesterday")

@@ -429,7 +472,13 @@ class ScheduleService:
        if dt is None:
            return None

        now = datetime.utcnow()
        # Ensure both datetimes are timezone-aware for comparison
        now = datetime.now(timezone.utc)

        # If dt is naive, assume it's UTC and add timezone info
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)

        diff = dt - now

        # Future times
@@ -149,6 +149,51 @@ class SchedulerService:
        except Exception as e:
            logger.error(f"Error loading schedules on startup: {str(e)}", exc_info=True)

    @staticmethod
    def validate_cron_expression(cron_expression: str) -> tuple[bool, str]:
        """
        Validate a cron expression and provide helpful feedback.

        Args:
            cron_expression: Cron expression to validate

        Returns:
            Tuple of (is_valid: bool, message: str)
            - If valid: (True, "Valid cron expression")
            - If invalid: (False, "Error message with details")

        Note:
            Standard crontab format: minute hour day month day_of_week
            Day of week: 0=Sunday, 1=Monday, ..., 6=Saturday (or 7=Sunday)
        """
        from apscheduler.triggers.cron import CronTrigger

        try:
            # Try to parse the expression
            trigger = CronTrigger.from_crontab(cron_expression)

            # Validate basic format (5 fields)
            fields = cron_expression.split()
            if len(fields) != 5:
                return False, f"Cron expression must have 5 fields (minute hour day month day_of_week), got {len(fields)}"

            return True, "Valid cron expression"

        except (ValueError, KeyError) as e:
            error_msg = str(e)

            # Provide helpful hints for common errors
            if "day_of_week" in error_msg.lower() or (len(cron_expression.split()) >= 5):
                # Check if the day_of_week field might be using APScheduler format by mistake
                fields = cron_expression.split()
                if len(fields) == 5:
                    dow_field = fields[4]
                    if dow_field.isdigit() and int(dow_field) >= 0:
                        hint = "\nNote: Use standard crontab format where 0=Sunday, 1=Monday, ..., 6=Saturday"
                        return False, f"Invalid cron expression: {error_msg}{hint}"

            return False, f"Invalid cron expression: {error_msg}"

    def queue_scan(self, scan_id: int, config_id: int) -> str:
        """
        Queue a scan for immediate background execution.

@@ -188,6 +233,10 @@ class SchedulerService:
            schedule_id: Database ID of the schedule
            config_id: Database config ID
            cron_expression: Cron expression (e.g., "0 2 * * *" for 2am daily)
                IMPORTANT: Use standard crontab format where:
                - Day of week: 0 = Sunday, 1 = Monday, ..., 6 = Saturday
                - APScheduler automatically converts this to its internal format
                - from_crontab() handles the conversion properly

        Returns:
            Job ID from APScheduler

@@ -195,18 +244,29 @@ class SchedulerService:
        Raises:
            RuntimeError: If scheduler not initialized
            ValueError: If cron expression is invalid

        Note:
            APScheduler internally uses Monday=0, but from_crontab() accepts
            standard crontab format (Sunday=0) and converts it automatically.
        """
        if not self.scheduler:
            raise RuntimeError("Scheduler not initialized. Call init_scheduler() first.")

        from apscheduler.triggers.cron import CronTrigger

        # Validate the cron expression first to provide helpful error messages
        is_valid, message = self.validate_cron_expression(cron_expression)
        if not is_valid:
            raise ValueError(message)

        # Create a cron trigger from the expression using the local timezone.
        # This allows users to specify times in their local timezone.
        # from_crontab() parses standard crontab format (Sunday=0)
        # and converts to APScheduler's internal format (Monday=0) automatically.
        try:
            trigger = CronTrigger.from_crontab(cron_expression)
            # timezone defaults to the local system timezone
        except (ValueError, KeyError) as e:
            # This should not happen due to the validation above, but catch it anyway
            raise ValueError(f"Invalid cron expression '{cron_expression}': {str(e)}")

        # Add cron job

@@ -294,11 +354,16 @@ class SchedulerService:

        # Update schedule's last_run and next_run
        from croniter import croniter
        next_run = croniter(schedule['cron_expression'], datetime.utcnow()).get_next(datetime)
        now_utc = datetime.now(timezone.utc)
        next_run = croniter(schedule['cron_expression'], now_utc).get_next(datetime)

        # croniter returns a naive datetime; add the UTC timezone
        if next_run.tzinfo is None:
            next_run = next_run.replace(tzinfo=timezone.utc)

        schedule_service.update_run_times(
            schedule_id=schedule_id,
            last_run=now_utc,
            next_run=next_run
        )
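The Sunday-vs-Monday pitfall documented above is easy to see side by side. `CronTrigger.from_crontab()` and the `CronTrigger` constructor are both real APScheduler APIs; they interpret day-of-week numbers differently.

```python
# APScheduler's CronTrigger: from_crontab() takes standard crontab format
# (day_of_week 0 = Sunday), while the constructor's own day_of_week field
# uses 0 = Monday. Mixing the two shifts schedules by a day.
from apscheduler.triggers.cron import CronTrigger

sunday_2am = CronTrigger.from_crontab('0 2 * * 0')   # crontab: 0 = Sunday
monday_2am = CronTrigger(day_of_week=0, hour=2)      # constructor: 0 = Monday
print(sunday_2am)
print(monday_2am)
```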
@@ -228,6 +228,34 @@ class SiteService:

        return [self._site_to_dict(site) for site in sites]

    def get_global_ip_stats(self) -> Dict[str, int]:
        """
        Get global IP statistics across all sites.

        Returns:
            Dictionary with:
            - total_ips: Total count of IP entries (including duplicates)
            - unique_ips: Count of distinct IP addresses
            - duplicate_ips: Number of duplicate entries (total - unique)
        """
        # Total IP entries
        total_ips = (
            self.db.query(func.count(SiteIP.id))
            .scalar() or 0
        )

        # Unique IP addresses
        unique_ips = (
            self.db.query(func.count(func.distinct(SiteIP.ip_address)))
            .scalar() or 0
        )

        return {
            'total_ips': total_ips,
            'unique_ips': unique_ips,
            'duplicate_ips': total_ips - unique_ips
        }

    def bulk_add_ips_from_cidr(self, site_id: int, cidr: str,
                               expected_ping: Optional[bool] = None,
                               expected_tcp_ports: Optional[List[int]] = None,
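The duplicate count above is simply total entries minus distinct addresses. A pure-Python illustration of the same arithmetic, with made-up addresses:

```python
# Same total/unique/duplicate arithmetic as get_global_ip_stats(), in plain Python.
entries = ['10.0.0.1', '10.0.0.2', '10.0.0.1', '10.0.0.3', '10.0.0.1']
total_ips = len(entries)                 # 5
unique_ips = len(set(entries))           # 3
duplicate_ips = total_ips - unique_ips   # 2 extra copies of 10.0.0.1
print(total_ips, unique_ips, duplicate_ips)  # 5 3 2
```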
@@ -6,9 +6,14 @@
<div class="row mt-4">
    <div class="col-12 d-flex justify-content-between align-items-center mb-4">
        <h1>Alert History</h1>
        <a href="{{ url_for('main.alert_rules') }}" class="btn btn-primary">
            <i class="bi bi-gear"></i> Manage Alert Rules
        </a>
        <div>
            <button class="btn btn-success me-2" onclick="acknowledgeAllAlerts()">
                <i class="bi bi-check-all"></i> Ack All
            </button>
            <a href="{{ url_for('main.alert_rules') }}" class="btn btn-primary">
                <i class="bi bi-gear"></i> Manage Alert Rules
            </a>
        </div>
    </div>
</div>

@@ -265,5 +270,34 @@ function acknowledgeAlert(alertId) {
        alert('Failed to acknowledge alert');
    });
}

function acknowledgeAllAlerts() {
    if (!confirm('Acknowledge all unacknowledged alerts?')) {
        return;
    }

    fetch('/api/alerts/acknowledge-all', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'X-API-Key': localStorage.getItem('api_key') || ''
        },
        body: JSON.stringify({
            acknowledged_by: 'web_user'
        })
    })
    .then(response => response.json())
    .then(data => {
        if (data.status === 'success') {
            location.reload();
        } else {
            alert('Failed to acknowledge alerts: ' + (data.message || 'Unknown error'));
        }
    })
    .catch(error => {
        console.error('Error:', error);
        alert('Failed to acknowledge alerts');
    });
}
</script>
{% endblock %}

@@ -76,7 +76,20 @@
            </ul>
        </li>
    </ul>
    <form class="d-flex me-3" action="{{ url_for('main.search_ip') }}" method="GET">
        <input class="form-control form-control-sm me-2" type="search" name="ip"
               placeholder="Search IP..." aria-label="Search IP" style="width: 150px;">
        <button class="btn btn-outline-primary btn-sm" type="submit">
            <i class="bi bi-search"></i>
        </button>
    </form>
    <ul class="navbar-nav">
        <li class="nav-item">
            <a class="nav-link {% if request.endpoint == 'main.help' %}active{% endif %}"
               href="{{ url_for('main.help') }}">
                <i class="bi bi-question-circle"></i> Help
            </a>
        </li>
        <li class="nav-item">
            <a class="nav-link" href="{{ url_for('auth.logout') }}">Logout</a>
        </li>

@@ -108,7 +121,7 @@
</div>

<!-- Global notification container - always above modals -->
<div id="notification-container" style="position: fixed; top: 20px; right: 20px; z-index: 1100; min-width: 300px;"></div>
<div id="notification-container" style="position: fixed; top: 20px; right: 20px; z-index: 9999; min-width: 300px;"></div>

<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
{% block scripts %}{% endblock %}
375 app/web/templates/help.html Normal file
@@ -0,0 +1,375 @@
{% extends "base.html" %}

{% block title %}Help - SneakyScanner{% endblock %}

{% block content %}
<div class="row mt-4">
    <div class="col-12">
        <h1 class="mb-4"><i class="bi bi-question-circle"></i> Help & Documentation</h1>
        <p class="text-muted">Learn how to use SneakyScanner to manage your network scanning operations.</p>
    </div>
</div>

<!-- Quick Navigation -->
<div class="row mb-4">
    <div class="col-12">
        <div class="card">
            <div class="card-header">
                <h5 class="mb-0"><i class="bi bi-compass"></i> Quick Navigation</h5>
            </div>
            <div class="card-body">
                <div class="row g-2">
                    <div class="col-md-3 col-6">
                        <a href="#getting-started" class="btn btn-outline-primary w-100">Getting Started</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#sites" class="btn btn-outline-primary w-100">Sites</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#scan-configs" class="btn btn-outline-primary w-100">Scan Configs</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#running-scans" class="btn btn-outline-primary w-100">Running Scans</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#scheduling" class="btn btn-outline-primary w-100">Scheduling</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#comparisons" class="btn btn-outline-primary w-100">Comparisons</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#alerts" class="btn btn-outline-primary w-100">Alerts</a>
                    </div>
                    <div class="col-md-3 col-6">
                        <a href="#webhooks" class="btn btn-outline-primary w-100">Webhooks</a>
                    </div>
                </div>
            </div>
        </div>
    </div>
</div>

<!-- Getting Started -->
<div class="row mb-4" id="getting-started">
    <div class="col-12">
        <div class="card">
            <div class="card-header">
                <h5 class="mb-0"><i class="bi bi-rocket-takeoff"></i> Getting Started</h5>
            </div>
            <div class="card-body">
                <p>SneakyScanner helps you perform network vulnerability scans and track changes over time. Here's the typical workflow:</p>

                <div class="alert alert-info">
                    <strong>Basic Workflow:</strong>
                    <ol class="mb-0 mt-2">
                        <li><strong>Create a Site</strong> - Define a logical grouping for your targets</li>
                        <li><strong>Add IPs</strong> - Add IP addresses or ranges to your site</li>
                        <li><strong>Create a Scan Config</strong> - Configure how scans should run using your site</li>
                        <li><strong>Run a Scan</strong> - Execute scans manually or on a schedule</li>
                        <li><strong>Review Results</strong> - Analyze findings and compare scans over time</li>
                    </ol>
                </div>
            </div>
        </div>
    </div>
</div>

<!-- Sites -->
<div class="row mb-4" id="sites">
    <div class="col-12">
        <div class="card">
            <div class="card-header">
                <h5 class="mb-0"><i class="bi bi-globe"></i> Creating Sites & Adding IPs</h5>
            </div>
            <div class="card-body">
                <h6>What is a Site?</h6>
                <p>A Site is a logical grouping of IP addresses that you want to scan together. For example, you might create separate sites for "Production Servers", "Development Environment", or "Office Network".</p>

                <h6>Creating a Site</h6>
                <ol>
                    <li>Navigate to <strong>Configs → Sites</strong> in the navigation menu</li>
                    <li>Click the <strong>Create Site</strong> button</li>
                    <li>Enter a descriptive name for your site</li>
                    <li>Optionally add a description to help identify the site's purpose</li>
                    <li>Click <strong>Create</strong> to save the site</li>
                </ol>

                <h6>Adding IP Addresses</h6>
                <p>After creating a site, you need to add the IP addresses you want to scan:</p>
                <ol>
                    <li>Find your site in the Sites list</li>
                    <li>Click the <strong>Manage IPs</strong> button (or the site name)</li>
                    <li>Click <strong>Add IP</strong></li>
                    <li>Enter the IP address or CIDR range (e.g., <code>192.168.1.1</code> or <code>192.168.1.0/24</code>)</li>
                    <li>Click <strong>Add</strong> to save</li>
                </ol>

                <div class="alert alert-warning">
                    <i class="bi bi-exclamation-triangle"></i> <strong>Note:</strong> You can add individual IPs or CIDR notation ranges. Large ranges will result in longer scan times; a short sketch of how a CIDR range expands follows this section.
                </div>
            </div>
        </div>
    </div>
</div>
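For readers unfamiliar with CIDR notation, a minimal illustration of how a range expands, using Python's standard-library `ipaddress` module (this snippet is editorial, not part of the template above):

```python
# How a CIDR range expands into scannable host addresses.
import ipaddress

net = ipaddress.ip_network('192.168.1.0/24')
hosts = list(net.hosts())      # usable addresses; network/broadcast excluded
print(net.num_addresses)       # 256
print(len(hosts))              # 254
print(hosts[0], hosts[-1])     # 192.168.1.1 192.168.1.254
```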
<!-- Scan Configs -->
|
||||
<div class="row mb-4" id="scan-configs">
|
||||
<div class="col-12">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<h5 class="mb-0"><i class="bi bi-gear"></i> Creating Scan Configurations</h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<h6>What is a Scan Config?</h6>
|
||||
<p>A Scan Configuration defines how a scan should be performed. It links to a Site and specifies scanning parameters like ports to scan, timing options, and other settings.</p>
|
||||
|
||||
<h6>Creating a Scan Config</h6>
|
||||
<ol>
|
||||
<li>Navigate to <strong>Configs → Scan Configs</strong> in the navigation menu</li>
|
||||
<li>Click the <strong>Create Config</strong> button</li>
|
||||
<li>Enter a name for the configuration</li>
|
||||
<li>Select the <strong>Site</strong> to associate with this config</li>
|
||||
<li>Configure scan parameters:
|
||||
<ul>
|
||||
<li><strong>Ports</strong> - Specify ports to scan (e.g., <code>22,80,443</code> or <code>1-1000</code>)</li>
|
||||
<li><strong>Timing</strong> - Set scan speed/aggressiveness</li>
|
||||
<li><strong>Additional Options</strong> - Configure other nmap parameters as needed</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>Click <strong>Create</strong> to save the configuration</li>
|
||||
</ol>
|
||||
|
||||
<div class="alert alert-info">
|
||||
<i class="bi bi-info-circle"></i> <strong>Tip:</strong> Create different configs for different purposes - a quick config for daily checks and a thorough config for weekly deep scans.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Running Scans -->
|
||||
<div class="row mb-4" id="running-scans">
|
||||
<div class="col-12">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<h5 class="mb-0"><i class="bi bi-play-circle"></i> Running Scans</h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<h6>Starting a Manual Scan</h6>
|
||||
<ol>
|
||||
<li>Navigate to <strong>Scans</strong> in the navigation menu</li>
|
||||
<li>Click the <strong>New Scan</strong> button</li>
|
||||
<li>Select the <strong>Scan Config</strong> you want to use</li>
|
||||
<li>Click <strong>Start Scan</strong></li>
|
||||
</ol>
|
||||
|
||||
<h6>Monitoring Scan Progress</h6>
|
||||
<p>While a scan is running:</p>
|
||||
<ul>
|
||||
<li>The scan will appear in the Scans list with a <span class="badge badge-warning">Running</span> status</li>
|
||||
<li>You can view live progress by clicking on the scan</li>
|
||||
<li>The Dashboard also shows active scans</li>
|
||||
</ul>
<h6>Viewing Scan Results</h6>
<ol>
<li>Once complete, click on a scan in the Scans list</li>
<li>View discovered hosts, open ports, and services</li>
<li>Export results or compare with previous scans</li>
</ol>
</div>
</div>
</div>
</div>

<!-- Scheduling -->
<div class="row mb-4" id="scheduling">
<div class="col-12">
<div class="card">
<div class="card-header">
<h5 class="mb-0"><i class="bi bi-calendar-check"></i> Scheduling Scans</h5>
</div>
<div class="card-body">
<h6>Why Schedule Scans?</h6>
<p>Scheduled scans run automatically at regular intervals, giving you continuous monitoring of your network without manual intervention.</p>

<h6>Creating a Schedule</h6>
<ol>
<li>Navigate to <strong>Schedules</strong> in the navigation menu</li>
<li>Click the <strong>Create Schedule</strong> button</li>
<li>Enter a name for the schedule</li>
<li>Select the <strong>Scan Config</strong> to use</li>
<li>Configure the schedule (example cron expressions follow this list):
<ul>
<li><strong>Frequency</strong> - How often to run (daily, weekly, monthly, custom cron)</li>
<li><strong>Time</strong> - When to start the scan</li>
<li><strong>Days</strong> - Which days to run (for weekly schedules)</li>
</ul>
</li>
<li>Enable/disable the schedule as needed</li>
<li>Click <strong>Create</strong> to save</li>
</ol>
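<p>Under the hood, schedules are stored as standard five-field cron expressions (minute, hour, day of month, month, day of week). A few common patterns:</p>
<pre><code>0 2 * * *     # every day at 02:00
0 2 * * 1     # every Monday at 02:00
0 */6 * * *   # every 6 hours, on the hour
0 3 1 * *     # at 03:00 on the 1st of each month
</code></pre>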
<h6>Managing Schedules</h6>
<ul>
<li><strong>Enable/Disable</strong> - Toggle schedules on or off without deleting them</li>
<li><strong>Edit</strong> - Modify the schedule timing or associated config</li>
<li><strong>Delete</strong> - Remove schedules you no longer need</li>
<li><strong>View History</strong> - See past runs triggered by the schedule</li>
</ul>

<div class="alert alert-info">
<i class="bi bi-info-circle"></i> <strong>Tip:</strong> Schedule comprehensive scans during off-peak hours to minimize network impact.
</div>
</div>
</div>
</div>
</div>

<!-- Scan Comparisons -->
<div class="row mb-4" id="comparisons">
<div class="col-12">
<div class="card">
<div class="card-header">
<h5 class="mb-0"><i class="bi bi-arrow-left-right"></i> Scan Comparisons</h5>
</div>
<div class="card-body">
<h6>Why Compare Scans?</h6>
<p>Comparing scans helps you identify changes in your network over time - new hosts, closed ports, new services, or potential security issues.</p>

<h6>Comparing Two Scans</h6>
<ol>
<li>Navigate to <strong>Scans</strong> in the navigation menu</li>
<li>Find the scan you want to use as the baseline</li>
<li>Click on the scan to view its details</li>
<li>Click the <strong>Compare</strong> button</li>
<li>Select another scan to compare against</li>
<li>Review the comparison results</li>
</ol>

<h6>Understanding Comparison Results</h6>
<p>The comparison view shows:</p>
<ul>
<li><span class="badge badge-success">New</span> - Hosts or ports that appear in the newer scan but not the older one</li>
<li><span class="badge badge-danger">Removed</span> - Hosts or ports that were in the older scan but not the newer one</li>
<li><span class="badge badge-warning">Changed</span> - Services or states that differ between scans</li>
<li><span class="badge badge-info">Unchanged</span> - Items that remain the same</li>
</ul>

<div class="alert alert-warning">
<i class="bi bi-exclamation-triangle"></i> <strong>Security Note:</strong> Pay close attention to unexpected new open ports or services - these could indicate unauthorized changes or potential compromises.
</div>
</div>
</div>
</div>
</div>

<!-- Alerts -->
<div class="row mb-4" id="alerts">
<div class="col-12">
<div class="card">
<div class="card-header">
<h5 class="mb-0"><i class="bi bi-bell"></i> Alerts & Alert Rules</h5>
</div>
<div class="card-body">
<h6>Understanding Alerts</h6>
<p>Alerts notify you when scan results match conditions you define, so you stay informed about important changes without manually reviewing every scan.</p>

<h6>Viewing Alert History</h6>
<ol>
<li>Navigate to <strong>Alerts → Alert History</strong></li>
<li>View all triggered alerts with timestamps and details</li>
<li>Filter alerts by severity, date, or type</li>
<li>Click on an alert to see full details and the associated scan</li>
</ol>

<h6>Creating Alert Rules</h6>
<ol>
<li>Navigate to <strong>Alerts → Alert Rules</strong></li>
<li>Click <strong>Create Rule</strong></li>
<li>Configure the rule:
<ul>
<li><strong>Name</strong> - A descriptive name for the rule</li>
<li><strong>Condition</strong> - What triggers the alert (e.g., new open port, new host, specific service detected)</li>
<li><strong>Severity</strong> - How critical the alert is (Info, Warning, Critical)</li>
<li><strong>Scope</strong> - Which sites or configs this rule applies to</li>
</ul>
</li>
<li>Enable the rule</li>
<li>Click <strong>Create</strong> to save</li>
</ol>

<h6>Common Alert Rule Examples</h6>
<ul>
<li><strong>New Host Detected</strong> - Alert when a previously unknown host appears</li>
<li><strong>New Open Port</strong> - Alert when a new port opens on any host</li>
<li><strong>Critical Port Open</strong> - Alert for specific high-risk ports (e.g., 23/Telnet, 3389/RDP)</li>
<li><strong>Service Change</strong> - Alert when a service version changes</li>
<li><strong>Host Offline</strong> - Alert when an expected host stops responding</li>
</ul>

<div class="alert alert-info">
<i class="bi bi-info-circle"></i> <strong>Tip:</strong> Start with a few important rules and refine them over time to avoid alert fatigue.
</div>
</div>
</div>
</div>
</div>

<!-- Webhooks -->
<div class="row mb-4" id="webhooks">
<div class="col-12">
<div class="card">
<div class="card-header">
<h5 class="mb-0"><i class="bi bi-broadcast"></i> Webhooks</h5>
</div>
<div class="card-body">
<h6>What are Webhooks?</h6>
<p>Webhooks allow SneakyScanner to send notifications to external services when events occur, such as scan completion or alert triggers. This enables integration with tools like Slack, Discord, Microsoft Teams, or custom systems.</p>

<h6>Creating a Webhook</h6>
<ol>
<li>Navigate to <strong>Alerts → Webhooks</strong></li>
<li>Click <strong>Create Webhook</strong></li>
<li>Configure the webhook:
<ul>
<li><strong>Name</strong> - A descriptive name</li>
<li><strong>URL</strong> - The endpoint to send notifications to</li>
<li><strong>Events</strong> - Which events trigger this webhook</li>
<li><strong>Secret</strong> - Optional secret for request signing (see the verification sketch below)</li>
</ul>
</li>
<li>Test the webhook to verify it works</li>
<li>Click <strong>Create</strong> to save</li>
</ol>
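<p>If you set a secret, the receiving endpoint can use it to confirm a delivery really came from SneakyScanner. The sketch below assumes HMAC-SHA256 over the raw request body with the hex digest sent in a request header; the header name and exact signing scheme are illustrative assumptions, so verify them against your deployment before relying on this:</p>
<pre><code># Hedged sketch: verify an HMAC-SHA256 webhook signature (signing scheme is an assumption)
import hmac
import hashlib

def signature_is_valid(secret: str, raw_body: bytes, received_signature: str) -> bool:
    expected = hmac.new(secret.encode(), raw_body, hashlib.sha256).hexdigest()
    # compare_digest avoids leaking information through comparison timing
    return hmac.compare_digest(expected, received_signature)
</code></pre>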
<h6>Webhook Events</h6>
<ul>
<li><strong>Scan Started</strong> - When a scan begins</li>
<li><strong>Scan Completed</strong> - When a scan finishes</li>
<li><strong>Scan Failed</strong> - When a scan encounters an error</li>
<li><strong>Alert Triggered</strong> - When an alert rule matches</li>
</ul>

<h6>Integration Examples</h6>
<ul>
<li><strong>Slack</strong> - Use a Slack Incoming Webhook URL</li>
<li><strong>Discord</strong> - Use a Discord Webhook URL</li>
<li><strong>Microsoft Teams</strong> - Use a Teams Incoming Webhook</li>
<li><strong>Custom API</strong> - Send to your own endpoint for custom processing</li>
</ul>
</div>
</div>
</div>
</div>

<!-- Back to Top -->
<div class="row mb-4">
<div class="col-12 text-center">
<a href="#" class="btn btn-outline-secondary">
<i class="bi bi-arrow-up"></i> Back to Top
</a>
</div>
</div>

{% endblock %}

175
app/web/templates/ip_search_results.html
Normal file
@@ -0,0 +1,175 @@
{% extends "base.html" %}

{% block title %}Search Results for {{ ip_address }} - SneakyScanner{% endblock %}

{% block content %}
<div class="row mt-4">
<div class="col-12 d-flex justify-content-between align-items-center mb-4">
<h1>
<i class="bi bi-search"></i>
Search Results
{% if ip_address %}
<small class="text-muted">for {{ ip_address }}</small>
{% endif %}
</h1>
<a href="{{ url_for('main.scans') }}" class="btn btn-secondary">
<i class="bi bi-arrow-left"></i> Back to Scans
</a>
</div>
</div>

{% if not ip_address %}
<!-- No IP provided -->
<div class="row">
<div class="col-12">
<div class="card">
<div class="card-body text-center py-5">
<i class="bi bi-exclamation-circle text-warning" style="font-size: 3rem;"></i>
<h4 class="mt-3">No IP Address Provided</h4>
<p class="text-muted">Please enter an IP address in the search box to find related scans.</p>
</div>
</div>
</div>
</div>
{% else %}
<!-- Results Table -->
<div class="row">
<div class="col-12">
<div class="card">
<div class="card-header">
<h5 class="mb-0">Last 10 Scans Containing {{ ip_address }}</h5>
</div>
<div class="card-body">
<div id="results-loading" class="text-center py-5">
<div class="spinner-border" role="status">
<span class="visually-hidden">Loading...</span>
</div>
<p class="mt-3 text-muted">Searching for scans...</p>
</div>
<div id="results-error" class="alert alert-danger" style="display: none;"></div>
<div id="results-empty" class="text-center py-5 text-muted" style="display: none;">
<i class="bi bi-search" style="font-size: 3rem;"></i>
<h5 class="mt-3">No Scans Found</h5>
<p>No completed scans contain the IP address <strong>{{ ip_address }}</strong>.</p>
</div>
<div id="results-table-container" style="display: none;">
<div class="table-responsive">
<table class="table table-hover">
<thead>
<tr>
<th style="width: 80px;">ID</th>
<th>Title</th>
<th style="width: 200px;">Timestamp</th>
<th style="width: 100px;">Duration</th>
<th style="width: 120px;">Status</th>
<th style="width: 100px;">Actions</th>
</tr>
</thead>
<tbody id="results-tbody">
</tbody>
</table>
</div>
<div class="text-muted mt-3">
Found <span id="result-count">0</span> scan(s) containing this IP address.
</div>
</div>
</div>
</div>
</div>
</div>
{% endif %}
{% endblock %}

{% block scripts %}
<script>
const ipAddress = "{{ ip_address | e }}";

// Load results when page loads
document.addEventListener('DOMContentLoaded', function() {
    if (ipAddress) {
        loadResults();
    }
});

// Load search results from API
async function loadResults() {
    const loadingEl = document.getElementById('results-loading');
    const errorEl = document.getElementById('results-error');
    const emptyEl = document.getElementById('results-empty');
    const tableEl = document.getElementById('results-table-container');

    // Show loading state
    loadingEl.style.display = 'block';
    errorEl.style.display = 'none';
    emptyEl.style.display = 'none';
    tableEl.style.display = 'none';

    try {
        const response = await fetch(`/api/scans/by-ip/${encodeURIComponent(ipAddress)}`);
        if (!response.ok) {
            throw new Error('Failed to search for scans');
        }

        const data = await response.json();
        const scans = data.scans || [];

        loadingEl.style.display = 'none';

        if (scans.length === 0) {
            emptyEl.style.display = 'block';
        } else {
            tableEl.style.display = 'block';
            renderResultsTable(scans);
            document.getElementById('result-count').textContent = data.count;
        }
    } catch (error) {
        console.error('Error searching for scans:', error);
        loadingEl.style.display = 'none';
        errorEl.textContent = 'Failed to search for scans. Please try again.';
        errorEl.style.display = 'block';
    }
}

// Render results table
function renderResultsTable(scans) {
    const tbody = document.getElementById('results-tbody');
    tbody.innerHTML = '';

    scans.forEach(scan => {
        const row = document.createElement('tr');
        row.classList.add('scan-row');

        // Format timestamp
        const timestamp = new Date(scan.timestamp).toLocaleString();

        // Format duration
        const duration = scan.duration ? `${scan.duration.toFixed(1)}s` : '-';

        // Status badge
        let statusBadge = '';
        if (scan.status === 'completed') {
            statusBadge = '<span class="badge badge-success">Completed</span>';
        } else if (scan.status === 'running') {
            statusBadge = '<span class="badge badge-info">Running</span>';
        } else if (scan.status === 'failed') {
            statusBadge = '<span class="badge badge-danger">Failed</span>';
        } else {
            statusBadge = `<span class="badge badge-info">${scan.status}</span>`;
        }

        row.innerHTML = `
            <td class="mono">${scan.id}</td>
            <td>${scan.title || 'Untitled Scan'}</td>
            <td class="text-muted">${timestamp}</td>
            <td class="mono">${duration}</td>
            <td>${statusBadge}</td>
            <td>
                <a href="/scans/${scan.id}" class="btn btn-sm btn-secondary">View</a>
            </td>
        `;

        tbody.appendChild(row);
    });
}
</script>
{% endblock %}
@@ -20,6 +20,10 @@
<span id="refresh-text">Refresh</span>
<span id="refresh-spinner" class="spinner-border spinner-border-sm ms-1" style="display: none;"></span>
</button>
<button class="btn btn-warning ms-2" onclick="stopScan()" id="stop-btn" style="display: none;">
<span id="stop-text">Stop Scan</span>
<span id="stop-spinner" class="spinner-border spinner-border-sm ms-1" style="display: none;"></span>
</button>
<button class="btn btn-danger ms-2" onclick="deleteScan()" id="delete-btn">Delete Scan</button>
</div>
</div>
@@ -84,6 +88,50 @@
</div>
</div>

<!-- Progress Section (shown when scan is running) -->
<div class="row mb-4" id="progress-section" style="display: none;">
<div class="col-12">
<div class="card">
<div class="card-header">
<h5 class="mb-0" style="color: #60a5fa;">
<i class="bi bi-hourglass-split"></i> Scan Progress
</h5>
</div>
<div class="card-body">
<!-- Phase and Progress Bar -->
<div class="mb-3">
<div class="d-flex justify-content-between align-items-center mb-2">
<span>Current Phase: <strong id="current-phase">Initializing...</strong></span>
<span id="progress-count">0 / 0 IPs</span>
</div>
<div class="progress" style="height: 20px; background-color: #334155;">
<div id="progress-bar" class="progress-bar bg-info" role="progressbar" style="width: 0%"></div>
</div>
</div>

<!-- Per-IP Results Table -->
<div class="table-responsive" style="max-height: 400px; overflow-y: auto;">
<table class="table table-sm">
<thead style="position: sticky; top: 0; background-color: #1e293b;">
<tr>
<th>Site</th>
<th>IP Address</th>
<th>Ping</th>
<th>TCP Ports</th>
<th>UDP Ports</th>
<th>Services</th>
</tr>
</thead>
<tbody id="progress-table-body">
<tr><td colspan="6" class="text-center text-muted">Waiting for results...</td></tr>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>

<!-- Stats Row -->
<div class="row mb-4">
<div class="col-md-3">
@@ -154,6 +202,67 @@
</div>
</div>
</div>

<!-- Certificate Details Modal -->
<div class="modal fade" id="certificateModal" tabindex="-1">
<div class="modal-dialog modal-lg">
<div class="modal-content" style="background-color: #1e293b; border: 1px solid #334155;">
<div class="modal-header" style="border-bottom: 1px solid #334155;">
<h5 class="modal-title" style="color: #60a5fa;">
<i class="bi bi-shield-lock"></i> Certificate Details
</h5>
<button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal"></button>
</div>
<div class="modal-body">
<div class="row mb-3">
<div class="col-md-6">
<label class="form-label text-muted">Subject</label>
<div id="cert-subject" class="mono" style="word-break: break-all;">-</div>
</div>
<div class="col-md-6">
<label class="form-label text-muted">Issuer</label>
<div id="cert-issuer" class="mono" style="word-break: break-all;">-</div>
</div>
</div>
<div class="row mb-3">
<div class="col-md-4">
<label class="form-label text-muted">Valid From</label>
<div id="cert-valid-from" class="mono">-</div>
</div>
<div class="col-md-4">
<label class="form-label text-muted">Valid Until</label>
<div id="cert-valid-until" class="mono">-</div>
</div>
<div class="col-md-4">
<label class="form-label text-muted">Days Until Expiry</label>
<div id="cert-days-expiry">-</div>
</div>
</div>
<div class="row mb-3">
<div class="col-md-6">
<label class="form-label text-muted">Serial Number</label>
<div id="cert-serial" class="mono" style="word-break: break-all;">-</div>
</div>
<div class="col-md-6">
<label class="form-label text-muted">Self-Signed</label>
<div id="cert-self-signed">-</div>
</div>
</div>
<div class="mb-3">
<label class="form-label text-muted">Subject Alternative Names (SANs)</label>
<div id="cert-sans">-</div>
</div>
<div class="mb-3">
<label class="form-label text-muted">TLS Version Support</label>
<div id="cert-tls-versions">-</div>
</div>
</div>
<div class="modal-footer" style="border-top: 1px solid #334155;">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
{% endblock %}

{% block scripts %}
@@ -161,22 +270,162 @@
const scanId = {{ scan_id }};
let scanData = null;
let historyChart = null; // Store chart instance to prevent duplicates
let progressInterval = null; // Store progress polling interval

// Show alert notification
function showAlert(type, message) {
    const container = document.getElementById('notification-container');
    const notification = document.createElement('div');
    notification.className = `alert alert-${type} alert-dismissible fade show mb-2`;

    notification.innerHTML = `
        ${message}
        <button type="button" class="btn-close" data-bs-dismiss="alert"></button>
    `;

    container.appendChild(notification);

    // Auto-dismiss after 5 seconds
    setTimeout(() => {
        notification.remove();
    }, 5000);
}

// Load scan on page load
document.addEventListener('DOMContentLoaded', function() {
    loadScan().then(() => {
        findPreviousScan();
        loadHistoricalChart();

        // Start progress polling if scan is running
        if (scanData && scanData.status === 'running') {
            startProgressPolling();
        }
    });

    // Auto-refresh every 10 seconds if scan is running
    setInterval(function() {
        if (scanData && scanData.status === 'running') {
            loadScan();
        }
    }, 10000);
});

// Start polling for progress updates
function startProgressPolling() {
    // Show progress section
    document.getElementById('progress-section').style.display = 'block';

    // Initial load
    loadProgress();

    // Poll every 3 seconds
    progressInterval = setInterval(loadProgress, 3000);
}

// Stop polling for progress updates
function stopProgressPolling() {
    if (progressInterval) {
        clearInterval(progressInterval);
        progressInterval = null;
    }
    // Hide progress section when scan completes
    document.getElementById('progress-section').style.display = 'none';
}

// Load progress data
async function loadProgress() {
    try {
        const response = await fetch(`/api/scans/${scanId}/progress`);
        if (!response.ok) return;

        const progress = await response.json();

        // Check if scan is still running
        if (progress.status !== 'running') {
            stopProgressPolling();
            loadScan(); // Refresh full scan data
            return;
        }

        renderProgress(progress);
    } catch (error) {
        console.error('Error loading progress:', error);
    }
}

// Render progress data
function renderProgress(progress) {
    // Update phase display
    const phaseNames = {
        'pending': 'Initializing',
        'ping': 'Ping Scan',
        'tcp_scan': 'TCP Port Scan',
        'udp_scan': 'UDP Port Scan',
        'service_detection': 'Service Detection',
        'http_analysis': 'HTTP/HTTPS Analysis',
        'completed': 'Completing'
    };

    const phaseName = phaseNames[progress.current_phase] || progress.current_phase;
    document.getElementById('current-phase').textContent = phaseName;

    // Update progress count and bar
    const total = progress.total_ips || 0;
    const completed = progress.completed_ips || 0;
    const percent = total > 0 ? Math.round((completed / total) * 100) : 0;

    document.getElementById('progress-count').textContent = `${completed} / ${total} IPs`;
    document.getElementById('progress-bar').style.width = `${percent}%`;

    // Update progress table
    const tbody = document.getElementById('progress-table-body');
    const entries = progress.progress_entries || [];

    if (entries.length === 0) {
        tbody.innerHTML = '<tr><td colspan="6" class="text-center text-muted">Waiting for results...</td></tr>';
        return;
    }

    let html = '';
    entries.forEach(entry => {
        // Ping result
        let pingDisplay = '-';
        if (entry.ping_result !== null && entry.ping_result !== undefined) {
            pingDisplay = entry.ping_result
                ? '<span class="badge badge-success">Yes</span>'
                : '<span class="badge badge-danger">No</span>';
        }

        // TCP ports
        const tcpPorts = entry.tcp_ports || [];
        let tcpDisplay = tcpPorts.length > 0
            ? `<span class="badge bg-info">${tcpPorts.length}</span> <small class="text-muted">${tcpPorts.slice(0, 5).join(', ')}${tcpPorts.length > 5 ? '...' : ''}</small>`
            : '-';

        // UDP ports
        const udpPorts = entry.udp_ports || [];
        let udpDisplay = udpPorts.length > 0
            ? `<span class="badge bg-info">${udpPorts.length}</span>`
            : '-';

        // Services
        const services = entry.services || [];
        let svcDisplay = '-';
        if (services.length > 0) {
            const svcNames = services.map(s => s.service || 'unknown').slice(0, 3);
            svcDisplay = `<span class="badge bg-info">${services.length}</span> <small class="text-muted">${svcNames.join(', ')}${services.length > 3 ? '...' : ''}</small>`;
        }

        html += `
            <tr class="scan-row">
                <td>${entry.site_name || '-'}</td>
                <td class="mono">${entry.ip_address}</td>
                <td>${pingDisplay}</td>
                <td>${tcpDisplay}</td>
                <td>${udpDisplay}</td>
                <td>${svcDisplay}</td>
            </tr>
        `;
    });

    tbody.innerHTML = html;
}

// Load scan details
async function loadScan() {
    const loadingEl = document.getElementById('scan-loading');
@@ -218,7 +467,6 @@
document.getElementById('scan-timestamp').textContent = new Date(scan.timestamp).toLocaleString();
document.getElementById('scan-duration').textContent = scan.duration ? `${scan.duration.toFixed(1)}s` : '-';
document.getElementById('scan-triggered-by').textContent = scan.triggered_by || 'manual';
document.getElementById('scan-config-id').textContent = scan.config_id || '-';

// Status badge
let statusBadge = '';
@@ -227,8 +475,11 @@
} else if (scan.status === 'running') {
    statusBadge = '<span class="badge badge-info">Running</span>';
    document.getElementById('delete-btn').disabled = true;
    document.getElementById('stop-btn').style.display = 'inline-block';
} else if (scan.status === 'failed') {
    statusBadge = '<span class="badge badge-danger">Failed</span>';
} else if (scan.status === 'cancelled') {
    statusBadge = '<span class="badge badge-warning">Cancelled</span>';
} else {
    statusBadge = `<span class="badge badge-info">${scan.status}</span>`;
}
@@ -313,6 +564,8 @@
<th>Product</th>
<th>Version</th>
<th>Status</th>
<th>Screenshot</th>
<th>Certificate</th>
</tr>
</thead>
<tbody id="site-${siteIdx}-ip-${ipIdx}-ports"></tbody>
@@ -326,10 +579,25 @@
const ports = ip.ports || [];

if (ports.length === 0) {
    portsContainer.innerHTML = '<tr class="scan-row"><td colspan="7" class="text-center text-muted">No ports found</td></tr>';
    portsContainer.innerHTML = '<tr class="scan-row"><td colspan="9" class="text-center text-muted">No ports found</td></tr>';
} else {
    ports.forEach(port => {
        const service = port.services && port.services.length > 0 ? port.services[0] : null;
        const screenshotPath = service && service.screenshot_path ? service.screenshot_path : null;
        const certificate = service && service.certificates && service.certificates.length > 0 ? service.certificates[0] : null;

        // Build status cell with optional "Mark Expected" button
        let statusCell;
        if (port.expected) {
            statusCell = '<span class="badge badge-good">Expected</span>';
        } else {
            // Show "Unexpected" badge with "Mark Expected" button if site_id and site_ip_id are available
            const canMarkExpected = site.site_id && ip.site_ip_id;
            statusCell = `<span class="badge badge-warning">Unexpected</span>`;
            if (canMarkExpected) {
                statusCell += ` <button class="btn btn-sm btn-outline-success ms-1" onclick="markPortExpected(${site.site_id}, ${ip.site_ip_id}, ${port.port}, '${port.protocol}')" title="Add to expected ports"><i class="bi bi-plus-circle"></i></button>`;
            }
        }

        const row = document.createElement('tr');
        row.classList.add('scan-row'); // Fix white row bug
@@ -340,7 +608,9 @@
<td>${service ? service.service_name : '-'}</td>
<td>${service ? service.product || '-' : '-'}</td>
<td class="mono">${service ? service.version || '-' : '-'}</td>
<td>${port.expected ? '<span class="badge badge-good">Expected</span>' : '<span class="badge badge-warning">Unexpected</span>'}</td>
<td>${statusCell}</td>
<td>${screenshotPath ? `<a href="/output/${screenshotPath.replace(/^\/?(?:app\/)?output\/?/, '')}" target="_blank" class="btn btn-sm btn-outline-primary" title="View Screenshot"><i class="bi bi-image"></i></a>` : '-'}</td>
<td>${certificate ? `<button class="btn btn-sm btn-outline-info" onclick='showCertificateModal(${JSON.stringify(certificate).replace(/'/g, "&#39;")})' title="View Certificate"><i class="bi bi-shield-lock"></i></button>` : '-'}</td>
`;
portsContainer.appendChild(row);
});
@@ -439,7 +709,7 @@
window.location.href = '{{ url_for("main.scans") }}';
} catch (error) {
    console.error('Error deleting scan:', error);
    alert(`Failed to delete scan: ${error.message}`);
    showAlert('danger', `Failed to delete scan: ${error.message}`);

    // Re-enable button on error
    deleteBtn.disabled = false;
@@ -447,6 +717,127 @@
}
}

// Stop scan
async function stopScan() {
    if (!confirm(`Are you sure you want to stop scan ${scanId}?`)) {
        return;
    }

    const stopBtn = document.getElementById('stop-btn');
    const stopText = document.getElementById('stop-text');
    const stopSpinner = document.getElementById('stop-spinner');

    // Show loading state
    stopBtn.disabled = true;
    stopText.style.display = 'none';
    stopSpinner.style.display = 'inline-block';

    try {
        const response = await fetch(`/api/scans/${scanId}/stop`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json'
            }
        });

        if (!response.ok) {
            let errorMessage = `HTTP ${response.status}: Failed to stop scan`;
            try {
                const data = await response.json();
                errorMessage = data.message || errorMessage;
            } catch (e) {
                // Ignore JSON parse errors
            }
            throw new Error(errorMessage);
        }

        // Show success message
        showAlert('success', `Stop signal sent to scan ${scanId}.`);

        // Refresh scan data after a short delay
        setTimeout(() => {
            loadScan();
        }, 1000);

    } catch (error) {
        console.error('Error stopping scan:', error);
        showAlert('danger', `Failed to stop scan: ${error.message}`);

        // Re-enable button on error
        stopBtn.disabled = false;
        stopText.style.display = 'inline';
        stopSpinner.style.display = 'none';
    }
}

// Mark a port as expected in the site config
async function markPortExpected(siteId, ipId, portNumber, protocol) {
    try {
        // First, get the current IP settings - fetch all IPs with high per_page to find the one we need
        const getResponse = await fetch(`/api/sites/${siteId}/ips?per_page=200`);
        if (!getResponse.ok) {
            throw new Error('Failed to get site IPs');
        }
        const ipsData = await getResponse.json();

        // Find the IP in the site
        const ipData = ipsData.ips.find(ip => ip.id === ipId);
        if (!ipData) {
            throw new Error('IP not found in site');
        }

        // Get current expected ports
        let expectedTcpPorts = ipData.expected_tcp_ports || [];
        let expectedUdpPorts = ipData.expected_udp_ports || [];

        // Add the new port to the appropriate list
        if (protocol.toLowerCase() === 'tcp') {
            if (!expectedTcpPorts.includes(portNumber)) {
                expectedTcpPorts.push(portNumber);
                expectedTcpPorts.sort((a, b) => a - b);
            }
        } else if (protocol.toLowerCase() === 'udp') {
            if (!expectedUdpPorts.includes(portNumber)) {
                expectedUdpPorts.push(portNumber);
                expectedUdpPorts.sort((a, b) => a - b);
            }
        }

        // Update the IP settings
        const updateResponse = await fetch(`/api/sites/${siteId}/ips/${ipId}`, {
            method: 'PUT',
            headers: {
                'Content-Type': 'application/json'
            },
            body: JSON.stringify({
                expected_tcp_ports: expectedTcpPorts,
                expected_udp_ports: expectedUdpPorts
            })
        });

        if (!updateResponse.ok) {
            let errorMessage = 'Failed to update IP settings';
            try {
                const errorData = await updateResponse.json();
                errorMessage = errorData.message || errorMessage;
            } catch (e) {
                // Ignore JSON parse errors
            }
            throw new Error(errorMessage);
        }

        // Show success message
        showAlert('success', `Port ${portNumber}/${protocol.toUpperCase()} added to expected ports for this IP. Refresh the page to see updated status.`);

        // Optionally refresh the scan data to show the change
        // Note: The scan data itself won't change, but the user knows it's been updated

    } catch (error) {
        console.error('Error marking port as expected:', error);
        showAlert('danger', `Failed to mark port as expected: ${error.message}`);
    }
}

// Find previous scan and show compare button
let previousScanId = null;
let currentConfigId = null;
@@ -593,5 +984,97 @@
        console.error('Error loading historical chart:', error);
    }
}

// Show certificate details modal
function showCertificateModal(cert) {
    // Populate modal fields
    document.getElementById('cert-subject').textContent = cert.subject || '-';
    document.getElementById('cert-issuer').textContent = cert.issuer || '-';
    document.getElementById('cert-serial').textContent = cert.serial_number || '-';

    // Format dates
    document.getElementById('cert-valid-from').textContent = cert.not_valid_before
        ? new Date(cert.not_valid_before).toLocaleString()
        : '-';
    document.getElementById('cert-valid-until').textContent = cert.not_valid_after
        ? new Date(cert.not_valid_after).toLocaleString()
        : '-';

    // Days until expiry with color coding
    if (cert.days_until_expiry !== null && cert.days_until_expiry !== undefined) {
        let badgeClass = 'badge-success';
        if (cert.days_until_expiry < 0) {
            badgeClass = 'badge-danger';
        } else if (cert.days_until_expiry < 30) {
            badgeClass = 'badge-warning';
        }
        document.getElementById('cert-days-expiry').innerHTML =
            `<span class="badge ${badgeClass}">${cert.days_until_expiry} days</span>`;
    } else {
        document.getElementById('cert-days-expiry').textContent = '-';
    }

    // Self-signed indicator
    document.getElementById('cert-self-signed').innerHTML = cert.is_self_signed
        ? '<span class="badge badge-warning">Yes</span>'
        : '<span class="badge badge-success">No</span>';

    // SANs
    if (cert.sans && cert.sans.length > 0) {
        document.getElementById('cert-sans').innerHTML = cert.sans
            .map(san => `<span class="badge bg-secondary me-1 mb-1">${san}</span>`)
            .join('');
    } else {
        document.getElementById('cert-sans').textContent = 'None';
    }

    // TLS versions
    if (cert.tls_versions && cert.tls_versions.length > 0) {
        let tlsHtml = '<div class="table-responsive"><table class="table table-sm mb-0">';
        tlsHtml += '<thead><tr><th>Version</th><th>Status</th><th>Cipher Suites</th></tr></thead><tbody>';

        cert.tls_versions.forEach(tls => {
            const statusBadge = tls.supported
                ? '<span class="badge badge-success">Supported</span>'
                : '<span class="badge badge-danger">Not Supported</span>';

            let ciphers = '-';
            if (tls.cipher_suites && tls.cipher_suites.length > 0) {
                ciphers = `<small class="text-muted">${tls.cipher_suites.length} cipher(s)</small>
                    <button class="btn btn-sm btn-link p-0 ms-1" onclick="toggleCiphers(this, '${tls.tls_version}')" data-ciphers='${JSON.stringify(tls.cipher_suites).replace(/'/g, "&#39;")}'>
                        <i class="bi bi-chevron-down"></i>
                    </button>
                    <div class="cipher-list" style="display:none; font-size: 0.75rem; max-height: 100px; overflow-y: auto;"></div>`;
            }

            tlsHtml += `<tr class="scan-row"><td>${tls.tls_version}</td><td>${statusBadge}</td><td>${ciphers}</td></tr>`;
        });

        tlsHtml += '</tbody></table></div>';
        document.getElementById('cert-tls-versions').innerHTML = tlsHtml;
    } else {
        document.getElementById('cert-tls-versions').textContent = 'No TLS information available';
    }

    // Show modal
    const modal = new bootstrap.Modal(document.getElementById('certificateModal'));
    modal.show();
}

// Toggle cipher suites display
function toggleCiphers(btn, version) {
    const cipherList = btn.nextElementSibling;
    const icon = btn.querySelector('i');

    if (cipherList.style.display === 'none') {
        const ciphers = JSON.parse(btn.dataset.ciphers);
        cipherList.innerHTML = ciphers.map(c => `<div class="mono">${c}</div>`).join('');
        cipherList.style.display = 'block';
        icon.className = 'bi bi-chevron-up';
    } else {
        cipherList.style.display = 'none';
        icon.className = 'bi bi-chevron-down';
    }
}
</script>
{% endblock %}
@@ -26,6 +26,7 @@
<option value="running">Running</option>
<option value="completed">Completed</option>
<option value="failed">Failed</option>
<option value="cancelled">Cancelled</option>
</select>
</div>
<div class="col-md-4">
@@ -151,6 +152,25 @@
let statusFilter = '';
let totalCount = 0;

// Show alert notification
function showAlert(type, message) {
    const container = document.getElementById('notification-container');
    const notification = document.createElement('div');
    notification.className = `alert alert-${type} alert-dismissible fade show mb-2`;

    notification.innerHTML = `
        ${message}
        <button type="button" class="btn-close" data-bs-dismiss="alert"></button>
    `;

    container.appendChild(notification);

    // Auto-dismiss after 5 seconds
    setTimeout(() => {
        notification.remove();
    }, 5000);
}

// Load initial data when page loads
document.addEventListener('DOMContentLoaded', function() {
    loadScans();
@@ -229,20 +249,27 @@
    statusBadge = '<span class="badge badge-info">Running</span>';
} else if (scan.status === 'failed') {
    statusBadge = '<span class="badge badge-danger">Failed</span>';
} else if (scan.status === 'cancelled') {
    statusBadge = '<span class="badge badge-warning">Cancelled</span>';
} else {
    statusBadge = `<span class="badge badge-info">${scan.status}</span>`;
}

// Action buttons
let actionButtons = `<a href="/scans/${scan.id}" class="btn btn-sm btn-secondary">View</a>`;
if (scan.status === 'running') {
    actionButtons += `<button class="btn btn-sm btn-warning ms-1" onclick="stopScan(${scan.id})">Stop</button>`;
} else {
    actionButtons += `<button class="btn btn-sm btn-danger ms-1" onclick="deleteScan(${scan.id})">Delete</button>`;
}

row.innerHTML = `
    <td class="mono">${scan.id}</td>
    <td>${scan.title || 'Untitled Scan'}</td>
    <td class="text-muted">${timestamp}</td>
    <td class="mono">${duration}</td>
    <td>${statusBadge}</td>
    <td>
        <a href="/scans/${scan.id}" class="btn btn-sm btn-secondary">View</a>
        ${scan.status !== 'running' ? `<button class="btn btn-sm btn-danger ms-1" onclick="deleteScan(${scan.id})">Delete</button>` : ''}
    </td>
    <td>${actionButtons}</td>
`;

tbody.appendChild(row);
@@ -456,15 +483,7 @@
bootstrap.Modal.getInstance(document.getElementById('triggerScanModal')).hide();

// Show success message
const alertDiv = document.createElement('div');
alertDiv.className = 'alert alert-success alert-dismissible fade show mt-3';
alertDiv.innerHTML = `
    Scan triggered successfully! (ID: ${data.scan_id})
    <button type="button" class="btn-close" data-bs-dismiss="alert"></button>
`;
// Insert at the beginning of container-fluid
const container = document.querySelector('.container-fluid');
container.insertBefore(alertDiv, container.firstChild);
showAlert('success', `Scan triggered successfully! (ID: ${data.scan_id})`);

// Refresh scans
loadScans();
@@ -478,6 +497,33 @@
}
}

// Stop scan
async function stopScan(scanId) {
    if (!confirm(`Are you sure you want to stop scan ${scanId}?`)) {
        return;
    }

    try {
        const response = await fetch(`/api/scans/${scanId}/stop`, {
            method: 'POST'
        });

        if (!response.ok) {
            const data = await response.json();
            throw new Error(data.message || 'Failed to stop scan');
        }

        // Show success message
        showAlert('success', `Stop signal sent to scan ${scanId}.`);

        // Refresh scans after a short delay
        setTimeout(() => loadScans(), 1000);
    } catch (error) {
        console.error('Error stopping scan:', error);
        showAlert('danger', `Failed to stop scan: ${error.message}`);
    }
}

// Delete scan
async function deleteScan(scanId) {
    if (!confirm(`Are you sure you want to delete scan ${scanId}?`)) {
@@ -490,23 +536,18 @@
});

if (!response.ok) {
    throw new Error('Failed to delete scan');
    const data = await response.json();
    throw new Error(data.message || 'Failed to delete scan');
}

// Show success message
const alertDiv = document.createElement('div');
alertDiv.className = 'alert alert-success alert-dismissible fade show mt-3';
alertDiv.innerHTML = `
    Scan ${scanId} deleted successfully.
    <button type="button" class="btn-close" data-bs-dismiss="alert"></button>
`;
document.querySelector('.container-fluid').insertBefore(alertDiv, document.querySelector('.row'));
showAlert('success', `Scan ${scanId} deleted successfully.`);

// Refresh scans
loadScans();
} catch (error) {
    console.error('Error deleting scan:', error);
    alert('Failed to delete scan. Please try again.');
    showAlert('danger', `Failed to delete scan: ${error.message}`);
}
}

@@ -298,7 +298,11 @@ async function loadSchedule() {
function populateForm(schedule) {
    document.getElementById('schedule-id').value = schedule.id;
    document.getElementById('schedule-name').value = schedule.name;
    document.getElementById('config-id').value = schedule.config_id;
    // Display config name and ID in the readonly config-file field
    const configDisplay = schedule.config_name
        ? `${schedule.config_name} (ID: ${schedule.config_id})`
        : `Config ID: ${schedule.config_id}`;
    document.getElementById('config-file').value = configDisplay;
    document.getElementById('cron-expression').value = schedule.cron_expression;
    document.getElementById('schedule-enabled').checked = schedule.enabled;

@@ -26,8 +26,11 @@
</div>
<div class="col-md-4">
<div class="stat-card">
<div class="stat-value" id="total-ips">-</div>
<div class="stat-label">Total IPs</div>
<div class="stat-value" id="unique-ips">-</div>
<div class="stat-label">Unique IPs</div>
<div class="stat-sublabel" id="duplicate-ips-label" style="display: none; font-size: 0.75rem; color: #fbbf24;">
(<span id="duplicate-ips">0</span> duplicates)
</div>
</div>
</div>
<div class="col-md-4">
@@ -499,7 +502,7 @@ async function loadSites() {
const data = await response.json();
sitesData = data.sites || [];

updateStats();
updateStats(data.unique_ips, data.duplicate_ips);
renderSites(sitesData);

document.getElementById('sites-loading').style.display = 'none';
@@ -514,12 +517,20 @@ async function loadSites() {
}

// Update summary stats
function updateStats() {
function updateStats(uniqueIps, duplicateIps) {
    const totalSites = sitesData.length;
    const totalIps = sitesData.reduce((sum, site) => sum + (site.ip_count || 0), 0);

    document.getElementById('total-sites').textContent = totalSites;
    document.getElementById('total-ips').textContent = totalIps;
    document.getElementById('unique-ips').textContent = uniqueIps || 0;

    // Show duplicate count if there are any
    if (duplicateIps && duplicateIps > 0) {
        document.getElementById('duplicate-ips').textContent = duplicateIps;
        document.getElementById('duplicate-ips-label').style.display = 'block';
    } else {
        document.getElementById('duplicate-ips-label').style.display = 'none';
    }

    document.getElementById('sites-in-use').textContent = '-'; // Will be updated async

    // Count sites in use (async)
@@ -688,6 +699,18 @@ async function loadSiteIps(siteId) {
const data = await response.json();
const ips = data.ips || [];

// Sort IPs numerically by octet (assumes IPv4 dotted-quad addresses)
ips.sort((a, b) => {
    const partsA = a.ip_address.split('.').map(Number);
    const partsB = b.ip_address.split('.').map(Number);
    for (let i = 0; i < 4; i++) {
        if (partsA[i] !== partsB[i]) {
            return partsA[i] - partsB[i];
        }
    }
    return 0;
});

document.getElementById('ip-count').textContent = data.total || ips.length;

// Render flat IP table
@@ -23,7 +23,7 @@ def validate_scan_status(status: str) -> tuple[bool, Optional[str]]:
    >>> validate_scan_status('invalid')
    (False, 'Invalid status: invalid. Must be one of: running, finalizing, completed, failed, cancelled')
    """
    valid_statuses = ['running', 'completed', 'failed']
    valid_statuses = ['running', 'finalizing', 'completed', 'failed', 'cancelled']

    if status not in valid_statuses:
        return False, f'Invalid status: {status}. Must be one of: {", ".join(valid_statuses)}'

@@ -1,13 +0,0 @@
version: '3.8'

services:
  scanner:
    build: .
    image: sneakyscanner:latest
    container_name: sneakyscanner
    privileged: true  # Required for masscan raw socket access
    network_mode: host  # Required for network scanning
    volumes:
      - ./configs:/app/configs:ro
      - ./output:/app/output
    command: /app/configs/example-site.yaml
@@ -2,12 +2,10 @@ version: '3.8'

services:
  web:
    build: .
    image: sneakyscanner:latest
    image: sneakyscan
    container_name: sneakyscanner-web
    # Use entrypoint script that auto-initializes database on first run
    entrypoint: ["/docker-entrypoint.sh"]
    command: ["python3", "-u", "-m", "web.app"]
    working_dir: /app
    entrypoint: ["python3", "-u", "-m", "web.app"]
    # Note: Using host network mode for scanner capabilities, so no port mapping needed
    # The Flask app will be accessible at http://localhost:5000
    volumes:
@@ -41,6 +39,9 @@ services:
      # Scheduler configuration (APScheduler)
      - SCHEDULER_EXECUTORS=${SCHEDULER_EXECUTORS:-2}
      - SCHEDULER_JOB_DEFAULTS_MAX_INSTANCES=${SCHEDULER_JOB_DEFAULTS_MAX_INSTANCES:-3}
      # UDP scanning configuration
      - UDP_SCAN_ENABLED=${UDP_SCAN_ENABLED:-false}
      - UDP_PORTS=${UDP_PORTS:-53,67,68,69,123,161,500,514,1900}
    # Scanner functionality requires privileged mode and host network for masscan/nmap
    privileged: true
    network_mode: host
@@ -56,8 +57,7 @@ services:
  # Optional: Initialize database on first run
  # Run with: docker-compose -f docker-compose-web.yml run --rm init-db
  init-db:
    build: .
    image: sneakyscanner:latest
    image: sneakyscan
    container_name: sneakyscanner-init-db
    entrypoint: ["python3"]
    command: ["init_db.py", "--db-url", "sqlite:////app/data/sneakyscanner.db"]
@@ -65,3 +65,4 @@ services:
      - ./data:/app/data
    profiles:
      - tools
networks: []

@@ -117,7 +117,7 @@ Retrieve a paginated list of all sites.
| `per_page` | integer | No | 20 | Items per page (1-100) |
| `all` | string | No | - | Set to "true" to return all sites without pagination |

**Success Response (200 OK):**
**Success Response (200 OK) - Paginated:**
```json
{
  "sites": [
@@ -139,13 +139,40 @@ Retrieve a paginated list of all sites.
}
```

**Success Response (200 OK) - All Sites (all=true):**
```json
{
  "sites": [
    {
      "id": 1,
      "name": "Production DC",
      "description": "Production datacenter servers",
      "ip_count": 25,
      "created_at": "2025-11-19T10:30:00Z",
      "updated_at": "2025-11-19T10:30:00Z"
    }
  ],
  "total_ips": 100,
  "unique_ips": 85,
  "duplicate_ips": 15
}
```

**Response Fields (all=true):**

| Field | Type | Description |
|-------|------|-------------|
| `total_ips` | integer | Total count of IP entries across all sites (including duplicates) |
| `unique_ips` | integer | Count of distinct IP addresses |
| `duplicate_ips` | integer | Number of duplicate IP entries (total_ips - unique_ips) |

**Usage Example:**
```bash
# List first page
curl -X GET http://localhost:5000/api/sites \
  -b cookies.txt

# Get all sites (for dropdowns)
# Get all sites with global IP stats
curl -X GET "http://localhost:5000/api/sites?all=true" \
  -b cookies.txt
```

@@ -720,7 +747,7 @@ Retrieve a paginated list of scans with optional status filtering.
      "duration": 125.5,
      "status": "completed",
      "title": "Production Network Scan",
      "config_id": "/app/configs/production.yaml",
      "config_id": 1,
      "triggered_by": "manual",
      "started_at": "2025-11-14T10:30:00Z",
      "completed_at": "2025-11-14T10:32:05Z"
@@ -731,7 +758,7 @@ Retrieve a paginated list of scans with optional status filtering.
      "duration": 98.2,
      "status": "completed",
      "title": "Development Network Scan",
      "config_id": "/app/configs/dev.yaml",
      "config_id": 2,
      "triggered_by": "scheduled",
      "started_at": "2025-11-13T15:00:00Z",
      "completed_at": "2025-11-13T15:01:38Z"
@@ -793,7 +820,7 @@ Retrieve complete details for a specific scan, including all sites, IPs, ports,
  "duration": 125.5,
  "status": "completed",
  "title": "Production Network Scan",
  "config_id": "/app/configs/production.yaml",
  "config_id": 1,
  "json_path": "/app/output/scan_report_20251114_103000.json",
  "html_path": "/app/output/scan_report_20251114_103000.html",
  "zip_path": "/app/output/scan_report_20251114_103000.zip",
@@ -968,7 +995,8 @@ Delete a scan and all associated files (JSON, HTML, ZIP, screenshots).
**Success Response (200 OK):**
```json
{
  "message": "Scan 42 deleted successfully"
  "scan_id": 42,
  "message": "Scan deleted successfully"
}
```

@@ -988,6 +1016,56 @@ curl -X DELETE http://localhost:5000/api/scans/42 \
  -b cookies.txt
```

### Get Scans by IP

Get the last 10 scans containing a specific IP address.

**Endpoint:** `GET /api/scans/by-ip/{ip_address}`

**Authentication:** Required

**Path Parameters:**

| Parameter | Type | Required | Description |
|-----------|------|----------|-------------|
| `ip_address` | string | Yes | IP address to search for |

**Success Response (200 OK):**
```json
{
  "ip_address": "192.168.1.10",
  "scans": [
    {
      "id": 42,
      "timestamp": "2025-11-14T10:30:00Z",
      "duration": 125.5,
      "status": "completed",
      "title": "Production Network Scan",
      "config_id": 1,
      "triggered_by": "manual",
      "created_at": "2025-11-14T10:30:00Z"
    },
    {
      "id": 38,
      "timestamp": "2025-11-13T10:30:00Z",
      "duration": 98.2,
      "status": "completed",
      "title": "Production Network Scan",
      "config_id": 1,
      "triggered_by": "scheduled",
      "created_at": "2025-11-13T10:30:00Z"
    }
  ],
  "count": 2
}
```

**Usage Example:**
```bash
curl -X GET http://localhost:5000/api/scans/by-ip/192.168.1.10 \
  -b cookies.txt
```
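
The same query in Python (a sketch; it assumes you have already logged in and the session carries the auth cookie, mirroring the `-b cookies.txt` flag above):

```python
# Hedged sketch: query the by-IP endpoint with the requests library
import requests

session = requests.Session()
# session is assumed to hold a valid login cookie from a prior authentication step

resp = session.get("http://localhost:5000/api/scans/by-ip/192.168.1.10")
resp.raise_for_status()
data = resp.json()
print(f"{data['count']} scan(s) contain {data['ip_address']}")
for scan in data["scans"]:
    print(scan["id"], scan["status"], scan["timestamp"])
```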
|
||||
|
||||
### Compare Scans
|
||||
|
||||
Compare two scans to identify differences in ports, services, and certificates.
@@ -1111,7 +1189,7 @@ Retrieve a list of all schedules with pagination and filtering.
     {
       "id": 1,
       "name": "Daily Production Scan",
-      "config_id": "/app/configs/prod-scan.yaml",
+      "config_id": 1,
       "cron_expression": "0 2 * * *",
       "enabled": true,
       "created_at": "2025-11-01T10:00:00Z",
@@ -1157,7 +1235,7 @@ Retrieve details for a specific schedule including execution history.
   {
     "id": 1,
     "name": "Daily Production Scan",
-    "config_id": "/app/configs/prod-scan.yaml",
+    "config_id": 1,
     "cron_expression": "0 2 * * *",
     "enabled": true,
     "created_at": "2025-11-01T10:00:00Z",
@@ -1201,7 +1279,7 @@ Create a new scheduled scan.
 ```json
 {
   "name": "Daily Production Scan",
-  "config_id": "/app/configs/prod-scan.yaml",
+  "config_id": 1,
   "cron_expression": "0 2 * * *",
   "enabled": true
 }
@@ -1215,7 +1293,7 @@ Create a new scheduled scan.
   "schedule": {
     "id": 1,
     "name": "Daily Production Scan",
-    "config_id": "/app/configs/prod-scan.yaml",
+    "config_id": 1,
     "cron_expression": "0 2 * * *",
     "enabled": true,
     "created_at": "2025-11-01T10:00:00Z"
@@ -1236,7 +1314,7 @@ Create a new scheduled scan.
 ```bash
 curl -X POST http://localhost:5000/api/schedules \
   -H "Content-Type: application/json" \
-  -d '{"name":"Daily Scan","config_id":"/app/configs/prod.yaml","cron_expression":"0 2 * * *"}' \
+  -d '{"name":"Daily Scan","config_id":1,"cron_expression":"0 2 * * *"}' \
   -b cookies.txt
 ```
 
@@ -1270,7 +1348,7 @@ Update an existing schedule.
   "schedule": {
     "id": 1,
     "name": "Updated Schedule Name",
-    "config_id": "/app/configs/prod-scan.yaml",
+    "config_id": 1,
     "cron_expression": "0 3 * * *",
     "enabled": false,
     "updated_at": "2025-11-15T10:00:00Z"
@@ -1512,7 +1590,7 @@ Get historical trend data for scans with the same configuration.
   ],
   "labels": ["2025-11-10 12:00", "2025-11-15 12:00"],
   "port_counts": [25, 26],
-  "config_id": "/app/configs/prod-scan.yaml"
+  "config_id": 1
 }
 ```
 
@@ -1632,7 +1710,8 @@ Retrieve a specific setting by key.
 {
   "status": "success",
   "key": "smtp_server",
-  "value": "smtp.gmail.com"
+  "value": "smtp.gmail.com",
+  "read_only": false
 }
 ```
 
@@ -2342,6 +2421,9 @@ List all configured webhooks with pagination.
   "severity_filter": ["critical", "warning"],
   "timeout": 10,
   "retry_count": 3,
+  "template": null,
+  "template_format": "json",
+  "content_type_override": null,
   "created_at": "2025-11-18T10:00:00Z",
   "updated_at": "2025-11-18T10:00:00Z"
 }
@@ -2393,6 +2475,9 @@ Get details for a specific webhook.
   "severity_filter": ["critical"],
   "timeout": 10,
   "retry_count": 3,
+  "template": null,
+  "template_format": "json",
+  "content_type_override": null,
   "created_at": "2025-11-18T10:00:00Z",
   "updated_at": "2025-11-18T10:00:00Z"
 }
@@ -2475,6 +2560,9 @@ Create a new webhook configuration.
   "custom_headers": null,
   "timeout": 10,
   "retry_count": 3,
+  "template": null,
+  "template_format": "json",
+  "content_type_override": null,
   "created_at": "2025-11-18T10:00:00Z"
   }
 }
@@ -2577,6 +2665,9 @@ Update an existing webhook configuration.
   "custom_headers": null,
   "timeout": 15,
   "retry_count": 3,
+  "template": null,
+  "template_format": "json",
+  "content_type_override": null,
   "updated_at": "2025-11-18T11:00:00Z"
   }
 }
@@ -3310,9 +3401,9 @@ API versioning will be implemented in future releases. The API is considered sta
 - **Webhooks API** - Webhook management, delivery tracking, authentication support, retry logic
 
 ### Endpoint Count
-- Total endpoints: 80+
-- Authenticated endpoints: 75+
-- Public endpoints: 5 (login, setup, health checks)
+- Total endpoints: 65+
+- Authenticated endpoints: 60+
+- Public endpoints: 5 (login, setup, health checks for scans/schedules/settings/alerts/webhooks)
 
 ---
@@ -24,10 +24,10 @@ SneakyScanner is deployed as a Docker container running a Flask web application
 
 **Architecture:**
 - **Web Application**: Flask app on port 5000 with modern web UI
-- **Database**: SQLite (persisted to volume)
+- **Database**: SQLite (persisted to volume) - stores all configurations, scan results, and settings
 - **Background Jobs**: APScheduler for async scan execution
 - **Scanner**: masscan, nmap, sslyze, Playwright
-- **Config Creator**: Web-based CIDR-to-YAML configuration builder
+- **Config Management**: Database-backed configuration system managed entirely via web UI
 - **Scheduling**: Cron-based scheduled scans with dashboard management
 
 ---
@@ -143,6 +143,13 @@ docker compose -f docker-compose-standalone.yml up
 
 SneakyScanner is configured via environment variables. The recommended approach is to use a `.env` file.
 
+**UDP Port Scanning**
+
+- UDP port scanning is disabled by default.
+- You can enable it via a `.env` variable (sketch below).
+- By default, UDP scanning covers only the top 20 ports; for convenience, the Nmap top 100 UDP port list is included as well.
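The authoritative key names live in `.env.example`; as a rough sketch only (the variable names below are assumptions, not the real keys):

```bash
# Hypothetical .env entries - check .env.example for the actual key names
ENABLE_UDP_SCAN=true   # assumed name: turns UDP port scanning on
UDP_TOP_PORTS=100      # assumed name: 20 (default) or 100
```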
 
 #### Creating Your .env File
 
 ```bash
@@ -160,6 +167,7 @@ python3 -c "from cryptography.fernet import Fernet; print('SNEAKYSCANNER_ENCRYPT
 nano .env
 ```
 
+
 #### Key Configuration Options
 
 | Variable | Description | Default | Required |
@@ -190,54 +198,30 @@ The application needs these directories (created automatically by Docker):
 
 ```bash
 # Verify directories exist
-ls -la configs/ data/ output/ logs/
+ls -la data/ output/ logs/
 
 # If missing, create them
-mkdir -p configs data output logs
+mkdir -p data output logs
 ```
 
 ### Step 2: Configure Scan Targets
 
-You can create scan configurations in two ways:
+After starting the application, create scan configurations using the web UI:
 
-**Option A: Using the Web UI (Recommended - Phase 4 Feature)**
+**Creating Configurations via Web UI**
 
 1. Navigate to **Configs** in the web interface
 2. Click **"Create New Config"**
-3. Use the CIDR-based config creator for quick setup:
+3. Use the form-based config creator:
    - Enter site name
   - Enter CIDR range (e.g., `192.168.1.0/24`)
-   - Select expected ports from dropdowns
-   - Click **"Generate Config"**
-4. Or use the **YAML Editor** for advanced configurations
-5. Save and use immediately in scans or schedules
+   - Select expected TCP/UDP ports from dropdowns
+   - Optionally enable ping checks
+4. Click **"Save Configuration"**
+5. Configuration is saved to the database and immediately available for scans and schedules
 
-**Option B: Manual YAML Files**
+**Note**: All configurations are stored in the database, not as files. This provides better reliability, easier backup, and seamless management through the web interface.
 
-Create YAML configuration files manually in the `configs/` directory:
-
-```bash
-# Example configuration
-cat > configs/my-network.yaml <<EOF
-title: "My Network Infrastructure"
-sites:
-  - name: "Web Servers"
-    cidr: "192.168.1.0/24"  # Scan entire subnet
-    expected_ports:
-      - port: 80
-        protocol: tcp
-        service: "http"
-      - port: 443
-        protocol: tcp
-        service: "https"
-      - port: 22
-        protocol: tcp
-        service: "ssh"
-    ping_expected: true
-EOF
-```
-
-**Note**: Phase 4 introduced a powerful config creator in the web UI that makes it easy to generate configs from CIDR ranges without manually editing YAML.
 
 ### Step 3: Build Docker Image
 
@@ -389,38 +373,37 @@ The dashboard provides a central view of your scanning activity:
 - **Trend Charts**: Port count trends over time using Chart.js
 - **Quick Actions**: Buttons to run scans, create configs, manage schedules
 
-### Managing Scan Configurations (Phase 4)
+### Managing Scan Configurations
+
+All scan configurations are stored in the database and managed entirely through the web interface.
 
 **Creating Configs:**
 1. Navigate to **Configs** → **Create New Config**
-2. **CIDR Creator Mode**:
+2. Fill in the configuration form:
    - Enter site name (e.g., "Production Servers")
   - Enter CIDR range (e.g., `192.168.1.0/24`)
   - Select expected TCP/UDP ports from dropdowns
-   - Click **"Generate Config"** to create YAML
-3. **YAML Editor Mode**:
-   - Switch to editor tab for advanced configurations
-   - Syntax highlighting with line numbers
-   - Validate YAML before saving
+   - Enable/disable ping checks
+3. Click **"Save Configuration"**
+4. Configuration is immediately stored in the database and available for use
 
 **Editing Configs:**
-1. Navigate to **Configs** → Select config
+1. Navigate to **Configs** → Select config from list
 2. Click **"Edit"** button
-3. Make changes in YAML editor
-4. Save changes (validates YAML automatically)
+3. Modify any fields in the configuration form
+4. Click **"Save Changes"** to update the database
 
-**Uploading Configs:**
-1. Navigate to **Configs** → **Upload**
-2. Select YAML file from your computer
-3. File is validated and saved to `configs/` directory
-
-**Downloading Configs:**
-- Click **"Download"** button next to any config
-- Saves YAML file to your local machine
+**Viewing Configs:**
+- Navigate to **Configs** page to see all saved configurations
+- Each config shows site name, CIDR range, and expected ports
+- Click on any config to view full details
 
 **Deleting Configs:**
-- Click **"Delete"** button
+- Click **"Delete"** button next to any config
 - **Warning**: Cannot delete configs used by active schedules
+- Deletion removes the configuration from the database permanently
+
+**Note**: All configurations are database-backed, providing automatic backups when you backup the database file.
 
 ### Running Scans
 
@@ -477,12 +460,11 @@ SneakyScanner uses several mounted volumes for data persistence:
 
 | Volume | Container Path | Purpose | Important? |
 |--------|----------------|---------|------------|
-| `./configs` | `/app/configs` | Scan configuration files (managed via web UI) | Yes |
-| `./data` | `/app/data` | SQLite database (contains all scan history) | **Critical** |
+| `./data` | `/app/data` | SQLite database (contains configurations, scan history, settings) | **Critical** |
 | `./output` | `/app/output` | Scan results (JSON, HTML, ZIP, screenshots) | Yes |
 | `./logs` | `/app/logs` | Application logs (rotating file handler) | No |
 
-**Note**: As of Phase 4, the `./configs` volume is read-write to support the web-based config creator and editor. The web UI can now create, edit, and delete configuration files directly.
+**Note**: All scan configurations are stored in the SQLite database (`./data/sneakyscanner.db`). There is no separate configs directory or YAML files. Backing up the database file ensures all your configurations are preserved.
 
 ### Backing Up Data
 
@@ -490,23 +472,22 @@ SneakyScanner uses several mounted volumes for data persistence:
 # Create backup directory
 mkdir -p backups/$(date +%Y%m%d)
 
-# Backup database
+# Backup database (includes all configurations)
 cp data/sneakyscanner.db backups/$(date +%Y%m%d)/
 
 # Backup scan outputs
 tar -czf backups/$(date +%Y%m%d)/output.tar.gz output/
-
-# Backup configurations
-tar -czf backups/$(date +%Y%m%d)/configs.tar.gz configs/
 ```
 
+**Important**: The database backup includes all scan configurations, settings, schedules, and scan history. No separate configuration file backup is needed.
+
 ### Restoring Data
 
 ```bash
 # Stop application
 docker compose -f docker-compose.yml down
 
-# Restore database
+# Restore database (includes all configurations)
 cp backups/YYYYMMDD/sneakyscanner.db data/
 
 # Restore outputs
@@ -516,6 +497,8 @@ tar -xzf backups/YYYYMMDD/output.tar.gz
 docker compose -f docker-compose.yml up -d
 ```
 
+**Note**: Restoring the database file restores all configurations, settings, schedules, and scan history.
+
 ### Cleaning Up Old Scan Results
 
 **Option A: Using the Web UI (Recommended)**
@@ -564,50 +547,52 @@ curl -X POST http://localhost:5000/api/auth/logout \
   -b cookies.txt
 ```
 
-### Config Management (Phase 4)
+### Config Management
 
 ```bash
 # List all configs
 curl http://localhost:5000/api/configs \
   -b cookies.txt
 
-# Get specific config
-curl http://localhost:5000/api/configs/prod-network.yaml \
+# Get specific config by ID
+curl http://localhost:5000/api/configs/1 \
   -b cookies.txt
 
 # Create new config
 curl -X POST http://localhost:5000/api/configs \
   -H "Content-Type: application/json" \
   -d '{
-    "filename": "test-network.yaml",
-    "content": "title: Test Network\nsites:\n  - name: Test\n    cidr: 10.0.0.0/24"
+    "name": "Test Network",
+    "cidr": "10.0.0.0/24",
+    "expected_ports": [
+      {"port": 80, "protocol": "tcp", "service": "http"},
+      {"port": 443, "protocol": "tcp", "service": "https"}
+    ],
+    "ping_expected": true
   }' \
   -b cookies.txt
 
 # Update config
-curl -X PUT http://localhost:5000/api/configs/test-network.yaml \
+curl -X PUT http://localhost:5000/api/configs/1 \
   -H "Content-Type: application/json" \
   -d '{
-    "content": "title: Updated Test Network\nsites:\n  - name: Test Site\n    cidr: 10.0.0.0/24"
+    "name": "Updated Test Network",
+    "cidr": "10.0.1.0/24"
   }' \
   -b cookies.txt
 
-# Download config
-curl http://localhost:5000/api/configs/test-network.yaml/download \
-  -b cookies.txt -o test-network.yaml
-
 # Delete config
-curl -X DELETE http://localhost:5000/api/configs/test-network.yaml \
+curl -X DELETE http://localhost:5000/api/configs/1 \
   -b cookies.txt
 ```
 
 ### Scan Management
 
 ```bash
-# Trigger a scan
+# Trigger a scan (using config ID from database)
 curl -X POST http://localhost:5000/api/scans \
   -H "Content-Type: application/json" \
-  -d '{"config_id": "/app/configs/prod-network.yaml"}' \
+  -d '{"config_id": 1}' \
   -b cookies.txt
 
 # List all scans
@@ -634,12 +619,12 @@ curl -X DELETE http://localhost:5000/api/scans/123 \
 curl http://localhost:5000/api/schedules \
   -b cookies.txt
 
-# Create schedule
+# Create schedule (using config ID from database)
 curl -X POST http://localhost:5000/api/schedules \
   -H "Content-Type: application/json" \
   -d '{
     "name": "Daily Production Scan",
-    "config_id": "/app/configs/prod-network.yaml",
+    "config_id": 1,
     "cron_expression": "0 2 * * *",
     "enabled": true
   }' \
@@ -875,24 +860,25 @@ docker compose -f docker-compose.yml logs web | grep -E "(ERROR|Exception|Traceb
 docker compose -f docker-compose.yml exec web which masscan nmap
 ```
 
-### Config Files Not Appearing in Web UI
+### Configs Not Appearing in Web UI
 
-**Problem**: Manually created configs don't show up in web interface
+**Problem**: Created configs don't show up in web interface
 
 ```bash
-# Check file permissions (must be readable by web container)
-ls -la configs/
+# Check database connectivity
+docker compose -f docker-compose.yml logs web | grep -i "database"
 
-# Fix permissions if needed
-sudo chown -R 1000:1000 configs/
-chmod 644 configs/*.yaml
+# Verify database file exists and is readable
+ls -lh data/sneakyscanner.db
 
-# Verify YAML syntax is valid
-docker compose -f docker-compose.yml exec web python3 -c \
-  "import yaml; yaml.safe_load(open('/app/configs/your-config.yaml'))"
-
-# Check web logs for parsing errors
+# Check for errors when creating configs
 docker compose -f docker-compose.yml logs web | grep -i "config"
+
+# Try accessing configs via API
+curl http://localhost:5000/api/configs -b cookies.txt
+
+# If database is corrupted, check integrity
+docker compose -f docker-compose.yml exec web sqlite3 /app/data/sneakyscanner.db "PRAGMA integrity_check;"
 ```
 
 ### Health Check Failing
@@ -979,11 +965,11 @@ server {
 # Ensure proper ownership of data directories
 sudo chown -R $USER:$USER data/ output/ logs/
 
-# Restrict database file permissions
+# Restrict database file permissions (contains configurations and sensitive data)
 chmod 600 data/sneakyscanner.db
 
-# Configs should be read-only
-chmod 444 configs/*.yaml
+# Ensure database directory is writable
+chmod 700 data/
 ```
 
 ---
@@ -1051,19 +1037,17 @@ mkdir -p "$BACKUP_DIR"
 # Stop application for consistent backup
 docker compose -f docker-compose.yml stop web
 
-# Backup database
+# Backup database (includes all configurations)
 cp data/sneakyscanner.db "$BACKUP_DIR/"
 
 # Backup outputs (last 30 days only)
 find output/ -type f -mtime -30 -exec cp --parents {} "$BACKUP_DIR/" \;
 
-# Backup configs
-cp -r configs/ "$BACKUP_DIR/"
-
 # Restart application
 docker compose -f docker-compose.yml start web
 
 echo "Backup complete: $BACKUP_DIR"
+echo "Database backup includes all configurations, settings, and scan history"
 ```
 
 Make executable and schedule with cron:
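The exact commands are elided in this hunk; a minimal sketch of what they would look like (the script path is a placeholder):

```bash
chmod +x backup.sh
# crontab entry: run the backup daily at 01:00
0 1 * * * /opt/sneakyscanner/backup.sh >> /var/log/sneakyscanner-backup.log 2>&1
```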
@@ -1083,15 +1067,18 @@ crontab -e
 # Stop application
 docker compose -f docker-compose.yml down
 
-# Restore files
+# Restore database (includes all configurations)
 cp backups/YYYYMMDD_HHMMSS/sneakyscanner.db data/
-cp -r backups/YYYYMMDD_HHMMSS/configs/* configs/
 
 # Restore output files
 cp -r backups/YYYYMMDD_HHMMSS/output/* output/
 
 # Start application
 docker compose -f docker-compose.yml up -d
 ```
 
+**Note**: Restoring the database file will restore all configurations, settings, schedules, and scan history from the backup.
+
 ---
 
 ## Support and Further Reading
@@ -1105,13 +1092,13 @@ docker compose -f docker-compose.yml up -d
 
 ## What's New
 
-### Phase 4 (2025-11-17) - Config Creator ✅
-- **CIDR-based Config Creator**: Web UI for generating scan configs from CIDR ranges
-- **YAML Editor**: Built-in editor with syntax highlighting (CodeMirror)
-- **Config Management UI**: List, view, edit, download, and delete configs via web interface
-- **Config Upload**: Direct YAML file upload for advanced users
-- **REST API**: 7 new config management endpoints
+### Phase 4+ (2025-11-17) - Database-Backed Configuration System ✅
+- **Database-Backed Configs**: All configurations stored in SQLite database (no YAML files)
+- **Web-Based Config Creator**: Form-based UI for creating scan configs from CIDR ranges
+- **Config Management UI**: List, view, edit, and delete configs via web interface
+- **REST API**: Full config management via RESTful API with database storage
+- **Schedule Protection**: Prevents deleting configs used by active schedules
+- **Automatic Backups**: Configurations included in database backups
 
 ### Phase 3 (2025-11-14) - Dashboard & Scheduling ✅
 - **Dashboard**: Summary stats, recent scans, trend charts
@@ -1133,5 +1120,5 @@ docker compose -f docker-compose.yml up -d
 
 ---
 
-**Last Updated**: 2025-11-17
-**Version**: Phase 4 - Config Creator Complete
+**Last Updated**: 2025-11-24
+**Version**: Phase 4+ - Database-Backed Configuration System

0 docs/KNOWN_ISSUES.md (Normal file)

700 docs/ROADMAP.md

@@ -4,677 +4,115 @@
 
 SneakyScanner is a comprehensive **Flask web application** for infrastructure monitoring and security auditing. The primary interface is the web GUI, with a CLI API client planned for scripting and automation needs.
 
-**Current Phase:** Phase 5 Complete ✅ | Phase 6 Next Up
+## Version 1.0.0 - Complete ✅
 
-## Progress Overview
+**Note:** For detailed architecture and technology stack information, see [README.md](../README.md)
 
-- ✅ **Phase 1: Foundation** - Complete (2025-11-13)
-  - Database schema, SQLAlchemy models, settings system, Flask app structure
-- ✅ **Phase 2: Flask Web App Core** - Complete (2025-11-14)
-  - REST API, background jobs, authentication, web UI, testing (100 tests)
-- ✅ **Phase 3: Dashboard & Scheduling** - Complete (2025-11-14)
-  - Dashboard, scan history, scheduled scans, trend charts
-- ✅ **Phase 4: Config Creator** - Complete (2025-11-17)
-  - CIDR-based config creation, YAML editor, config management UI
-- ✅ **Phase 5: Webhooks & Alerting** - Complete (2025-11-19)
-  - Webhook notifications, alert rules, notification templates
-- 📋 **Phase 6: CLI as API Client** - Planned
-  - CLI for scripting and automation via API
-- 📋 **Phase 7: Advanced Features** - Future
-  - Email notifications, scan comparison, CVE integration, timeline view, PDF export
-
-## Target Users
-
-- **Infrastructure teams** monitoring on-premises networks
-- **Security professionals** performing periodic security audits
-- **DevOps engineers** tracking infrastructure drift
-- **Single users or small teams** (not enterprise multi-tenant)
-
-## Database Schema Design
-
-### Core Tables
-
-#### `scans`
-Stores metadata about each scan execution.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique scan ID |
-| `timestamp` | DATETIME | Scan start time (UTC) |
-| `duration` | FLOAT | Total scan duration (seconds) |
-| `status` | VARCHAR(20) | `running`, `completed`, `failed` |
-| `config_id` | INTEGER | FK to scan_configs table |
-| `title` | TEXT | Scan title from config |
-| `json_path` | TEXT | Path to JSON report |
-| `html_path` | TEXT | Path to HTML report |
-| `zip_path` | TEXT | Path to ZIP archive |
-| `screenshot_dir` | TEXT | Path to screenshot directory |
-| `created_at` | DATETIME | Record creation time |
-| `triggered_by` | VARCHAR(50) | `manual`, `scheduled`, `api` |
-| `schedule_id` | INTEGER | FK to schedules (if triggered by schedule) |
-
-#### `scan_sites`
-Logical grouping of IPs by site.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique site record ID |
-| `scan_id` | INTEGER | FK to scans |
-| `site_name` | VARCHAR(255) | Site name from config |
-
-#### `scan_ips`
-IP addresses scanned in each scan.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique IP record ID |
-| `scan_id` | INTEGER | FK to scans |
-| `site_id` | INTEGER | FK to scan_sites |
-| `ip_address` | VARCHAR(45) | IPv4 or IPv6 address |
-| `ping_expected` | BOOLEAN | Expected ping response |
-| `ping_actual` | BOOLEAN | Actual ping response |
-
-#### `scan_ports`
-Discovered TCP/UDP ports.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique port record ID |
-| `scan_id` | INTEGER | FK to scans |
-| `ip_id` | INTEGER | FK to scan_ips |
-| `port` | INTEGER | Port number (1-65535) |
-| `protocol` | VARCHAR(10) | `tcp` or `udp` |
-| `expected` | BOOLEAN | Was this port expected? |
-| `state` | VARCHAR(20) | `open`, `closed`, `filtered` |
-
-#### `scan_services`
-Detected services on open ports.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique service record ID |
-| `scan_id` | INTEGER | FK to scans |
-| `port_id` | INTEGER | FK to scan_ports |
-| `service_name` | VARCHAR(100) | Service name (e.g., `ssh`, `http`) |
-| `product` | VARCHAR(255) | Product name (e.g., `OpenSSH`) |
-| `version` | VARCHAR(100) | Version string |
-| `extrainfo` | TEXT | Additional nmap info |
-| `ostype` | VARCHAR(100) | OS type if detected |
-| `http_protocol` | VARCHAR(10) | `http` or `https` (if web service) |
-| `screenshot_path` | TEXT | Relative path to screenshot |
-
-#### `scan_certificates`
-SSL/TLS certificates discovered on HTTPS services.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique certificate record ID |
-| `scan_id` | INTEGER | FK to scans |
-| `service_id` | INTEGER | FK to scan_services |
-| `subject` | TEXT | Certificate subject (CN) |
-| `issuer` | TEXT | Certificate issuer |
-| `serial_number` | TEXT | Serial number |
-| `not_valid_before` | DATETIME | Validity start date |
-| `not_valid_after` | DATETIME | Validity end date |
-| `days_until_expiry` | INTEGER | Days until expiration |
-| `sans` | TEXT | JSON array of SANs |
-| `is_self_signed` | BOOLEAN | Self-signed certificate flag |
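Since these columns also exist in the live database, upcoming expiries can be checked directly; an illustrative query against the table documented above:

```bash
# List certificates expiring within 30 days
sqlite3 data/sneakyscanner.db \
  "SELECT subject, not_valid_after, days_until_expiry
   FROM scan_certificates
   WHERE days_until_expiry < 30
   ORDER BY days_until_expiry;"
```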
-
-#### `scan_tls_versions`
-TLS version support and cipher suites.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique TLS version record ID |
-| `scan_id` | INTEGER | FK to scans |
-| `certificate_id` | INTEGER | FK to scan_certificates |
-| `tls_version` | VARCHAR(20) | `TLS 1.0`, `TLS 1.1`, `TLS 1.2`, `TLS 1.3` |
-| `supported` | BOOLEAN | Is this version supported? |
-| `cipher_suites` | TEXT | JSON array of cipher suites |
-
-### Scheduling & Notifications Tables
-
-#### `schedules`
-Scheduled scan configurations.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique schedule ID |
-| `name` | VARCHAR(255) | Schedule name (e.g., "Daily prod scan") |
-| `config_id` | INTEGER | FK to scan_configs table |
-| `cron_expression` | VARCHAR(100) | Cron-like schedule (e.g., `0 2 * * *`) |
-| `enabled` | BOOLEAN | Is schedule active? |
-| `last_run` | DATETIME | Last execution time |
-| `next_run` | DATETIME | Next scheduled execution |
-| `created_at` | DATETIME | Schedule creation time |
-| `updated_at` | DATETIME | Last modification time |
-
-#### `alerts`
-Alert history and notifications sent.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique alert ID |
-| `scan_id` | INTEGER | FK to scans |
-| `alert_type` | VARCHAR(50) | `new_port`, `cert_expiry`, `service_change`, `ping_failed` |
-| `severity` | VARCHAR(20) | `info`, `warning`, `critical` |
-| `message` | TEXT | Human-readable alert message |
-| `ip_address` | VARCHAR(45) | Related IP (optional) |
-| `port` | INTEGER | Related port (optional) |
-| `email_sent` | BOOLEAN | Was email notification sent? |
-| `email_sent_at` | DATETIME | Email send timestamp |
-| `created_at` | DATETIME | Alert creation time |
-
-#### `alert_rules`
-User-defined alert rules.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique rule ID |
-| `rule_type` | VARCHAR(50) | `unexpected_port`, `cert_expiry`, `service_down`, etc. |
-| `enabled` | BOOLEAN | Is rule active? |
-| `threshold` | INTEGER | Threshold value (e.g., days for cert expiry) |
-| `email_enabled` | BOOLEAN | Send email for this rule? |
-| `created_at` | DATETIME | Rule creation time |
-
-### Settings Table
-
-#### `settings`
-Application configuration key-value store.
-
-| Column | Type | Description |
-|--------|------|-------------|
-| `id` | INTEGER PRIMARY KEY | Unique setting ID |
-| `key` | VARCHAR(255) UNIQUE | Setting key (e.g., `smtp_server`) |
-| `value` | TEXT | Setting value (JSON for complex values) |
-| `updated_at` | DATETIME | Last modification time |
-
-**Example Settings:**
-- `smtp_server` - SMTP server hostname
-- `smtp_port` - SMTP port (587, 465, 25)
-- `smtp_username` - SMTP username
-- `smtp_password` - SMTP password (encrypted)
-- `smtp_from_email` - From email address
-- `smtp_to_emails` - JSON array of recipient emails
-- `app_password` - Single-user password hash (bcrypt)
-- `retention_days` - How long to keep old scans (0 = forever)
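A quick way to inspect a stored setting (a sketch against the schema above; encrypted values such as `smtp_password` will not be readable this way):

```bash
# Read one key/value pair from the settings table
sqlite3 data/sneakyscanner.db \
  "SELECT key, value, updated_at FROM settings WHERE key = 'smtp_server';"
```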
-
-## API Design
-
-### REST API Endpoints
-
-All API endpoints return JSON and follow RESTful conventions.
-
-#### Scans
-
-| Method | Endpoint | Description | Request Body | Response |
-|--------|----------|-------------|--------------|----------|
-| `GET` | `/api/scans` | List all scans (paginated) | - | `{ "scans": [...], "total": N, "page": 1 }` |
-| `GET` | `/api/scans/{id}` | Get scan details | - | `{ "scan": {...} }` |
-| `POST` | `/api/scans` | Trigger new scan | `{ "config_id": "path" }` | `{ "scan_id": N, "status": "running" }` |
-| `DELETE` | `/api/scans/{id}` | Delete scan and files | - | `{ "status": "deleted" }` |
-| `GET` | `/api/scans/{id}/status` | Get scan status | - | `{ "status": "running", "progress": "45%" }` |
-| `GET` | `/api/scans/{id1}/compare/{id2}` | Compare two scans | - | `{ "diff": {...} }` |
-
-#### Schedules
-
-| Method | Endpoint | Description | Request Body | Response |
-|--------|----------|-------------|--------------|----------|
-| `GET` | `/api/schedules` | List all schedules | - | `{ "schedules": [...] }` |
-| `GET` | `/api/schedules/{id}` | Get schedule details | - | `{ "schedule": {...} }` |
-| `POST` | `/api/schedules` | Create new schedule | `{ "name": "...", "config_id": "...", "cron_expression": "..." }` | `{ "schedule_id": N }` |
-| `PUT` | `/api/schedules/{id}` | Update schedule | `{ "enabled": true, "cron_expression": "..." }` | `{ "status": "updated" }` |
-| `DELETE` | `/api/schedules/{id}` | Delete schedule | - | `{ "status": "deleted" }` |
-| `POST` | `/api/schedules/{id}/trigger` | Manually trigger scheduled scan | - | `{ "scan_id": N }` |
-
-#### Alerts
-
-| Method | Endpoint | Description | Request Body | Response |
-|--------|----------|-------------|--------------|----------|
-| `GET` | `/api/alerts` | List recent alerts | - | `{ "alerts": [...] }` |
-| `GET` | `/api/alerts/rules` | List alert rules | - | `{ "rules": [...] }` |
-| `POST` | `/api/alerts/rules` | Create alert rule | `{ "rule_type": "...", "threshold": N }` | `{ "rule_id": N }` |
-| `PUT` | `/api/alerts/rules/{id}` | Update alert rule | `{ "enabled": false }` | `{ "status": "updated" }` |
-| `DELETE` | `/api/alerts/rules/{id}` | Delete alert rule | - | `{ "status": "deleted" }` |
-
-#### Settings
-
-| Method | Endpoint | Description | Request Body | Response |
-|--------|----------|-------------|--------------|----------|
-| `GET` | `/api/settings` | Get all settings (sanitized) | - | `{ "settings": {...} }` |
-| `PUT` | `/api/settings` | Update settings | `{ "smtp_server": "...", ... }` | `{ "status": "updated" }` |
-| `POST` | `/api/settings/test-email` | Test email configuration | - | `{ "status": "sent" }` |
-
-#### Statistics & Trends
-
-| Method | Endpoint | Description | Request Body | Response |
-|--------|----------|-------------|--------------|----------|
-| `GET` | `/api/stats/summary` | Dashboard summary stats | - | `{ "total_scans": N, "last_scan": "...", ... }` |
-| `GET` | `/api/stats/trends` | Trend data for charts | `?days=30&metric=port_count` | `{ "data": [...] }` |
-| `GET` | `/api/stats/certificates` | Certificate expiry overview | - | `{ "expiring_soon": [...], "expired": [...] }` |
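For example, pulling 30 days of port-count trend data the way the dashboard charts do (query parameters as documented in the table above):

```bash
curl "http://localhost:5000/api/stats/trends?days=30&metric=port_count" -b cookies.txt
```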
-
-### Authentication
-
-**Phase 2-3:** Simple session-based authentication (single-user)
-- Login endpoint: `POST /api/auth/login` (username/password)
-- Logout endpoint: `POST /api/auth/logout`
-- Session cookies with Flask-Login
-- Password stored as bcrypt hash in settings table
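A minimal login flow, saving the session cookie for later calls (the JSON field names are an assumption for this single-user setup):

```bash
# Log in and store the session cookie; subsequent calls pass -b cookies.txt
curl -X POST http://localhost:5000/api/auth/login \
  -H "Content-Type: application/json" \
  -d '{"username":"admin","password":"your-password"}' \
  -c cookies.txt
```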
-
-**Phase 6:** API token authentication for CLI client
-- Generate API token: `POST /api/auth/token`
-- Revoke token: `DELETE /api/auth/token`
-- CLI sends token in `Authorization: Bearer <token>` header
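If Phase 6 lands as described, usage might look like this (the endpoints are the planned ones above; the `.token` response field is an assumption, and `jq` is required):

```bash
# Generate a token over the authenticated session, then call the API with it
TOKEN=$(curl -s -X POST http://localhost:5000/api/auth/token -b cookies.txt | jq -r '.token')
curl http://localhost:5000/api/scans -H "Authorization: Bearer $TOKEN"
```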
-
-## Phased Roadmap
-
-### Phase 1: Foundation ✅ COMPLETE
+### Phase 1: Foundation ✅
 **Completed:** 2025-11-13
 
 **Deliverables:**
-- SQLite database with 11 tables (scans, sites, IPs, ports, services, certificates, TLS versions, schedules, alerts, alert_rules, settings)
-- SQLAlchemy ORM models with relationships
-- Alembic migration system
-- Settings system with encryption (bcrypt for passwords, Fernet for sensitive data)
+- Database schema with 11 tables (SQLAlchemy ORM, Alembic migrations)
+- Settings system with encryption (bcrypt, Fernet)
 - Flask app structure with API blueprints
-- Docker Compose deployment configuration
-- Validation script for verification
+- Docker Compose deployment
 
 ---
 
-### Phase 2: Flask Web App Core ✅ COMPLETE
+### Phase 2: Flask Web App Core ✅
 **Completed:** 2025-11-14
 
 **Deliverables:**
-- REST API with 8 endpoints (scans: trigger, list, get, status, delete; settings: get, update, test-email)
-- Background job queue using APScheduler (up to 3 concurrent scans)
-- Session-based authentication with Flask-Login
-- Database integration for scan results (full normalized schema population)
-- Web UI templates (dashboard, scan list/detail, login, error pages)
-- Error handling with content negotiation (JSON/HTML) and request IDs
-- Logging system with rotating file handlers
-- Production Docker Compose deployment
-- Comprehensive test suite (100 tests, all passing)
-- Documentation (API_REFERENCE.md, DEPLOYMENT.md)
+- REST API (8 endpoints for scans, settings)
+- Background job queue (APScheduler, 3 concurrent scans)
+- Session-based authentication (Flask-Login)
+- Web UI templates (dashboard, scan list/detail, login)
+- Comprehensive test suite (100 tests)
 
 ---
 
-### Phase 3: Dashboard & Scheduling ✅ COMPLETE
+### Phase 3: Dashboard & Scheduling ✅
 **Completed:** 2025-11-14
 
 **Deliverables:**
-- Dashboard with summary stats (total scans, IPs, ports, services)
-- Recent scans table with clickable details
-- Scan detail page with full results display
-- Historical trend charts using Chart.js (port counts over time)
-- Scheduled scan management UI (create, edit, delete, enable/disable)
-- Schedule execution with APScheduler and cron expressions
-- Manual scan trigger from web UI
-- Navigation menu (Dashboard, Scans, Schedules, Configs, Settings)
-- Download buttons for scan reports (JSON, HTML, ZIP)
+- Dashboard with summary stats and trend charts (Chart.js)
+- Scan detail pages with full results display
+- Scheduled scan management (cron expressions)
+- Download buttons for reports (JSON, HTML, ZIP)
 
 ---
 
-### Phase 4: Config Creator ✅ COMPLETE
+### Phase 4: Config Creator ✅
 **Completed:** 2025-11-17
 
 **Deliverables:**
-- CIDR-based config creation UI (simplified workflow for quick config generation)
-- YAML editor with CodeMirror (syntax highlighting, line numbers)
-- Config management UI (list, view, edit, download, delete)
-- Direct YAML upload for advanced users
-- REST API for config operations (7 endpoints: list, get, create, update, delete, upload, download)
-- Schedule dependency protection (prevents deleting configs used by schedules)
-- Comprehensive testing (25+ unit and integration tests)
+- CIDR-based config creation UI
+- YAML editor with CodeMirror
+- Config management (list, view, edit, download, delete)
+- REST API for config operations (7 endpoints)
 
 ---
 
-### Phase 5: Webhooks & Alerting ✅ COMPLETE
+### Phase 5: Webhooks & Alerting ✅
 **Completed:** 2025-11-19
 
-**Goals:**
-- ✅ Implement webhook notification system for real-time alerting
-- ✅ Add alert rule configuration for unexpected exposure detection
-- ✅ Create notification template system for flexible alerting
-
-**Core Use Case:**
-Monitor infrastructure for misconfigurations that expose unexpected ports/services to the world. When a scan detects an open port that wasn't defined in the YAML config's `expected_ports` list, trigger immediate notifications via webhooks.
-
-**Implemented Features:**
-
-#### 1. Alert Rule Engine ✅
-**Purpose:** Automatically detect and classify infrastructure anomalies after each scan.
-
-**Alert Types:**
-- `unexpected_port` - Port open but not in config's `expected_ports` list
-- `unexpected_service` - Service detected that doesn't match expected service name
-- `cert_expiry` - SSL/TLS certificate expiring soon (configurable threshold)
-- `ping_failed` - Expected host not responding to ping
-- `service_down` - Previously detected service no longer responding
-- `service_change` - Service version/product changed between scans
-- `weak_tls` - TLS 1.0/1.1 detected or weak cipher suites
-- `new_host` - New IP address responding in CIDR range
-- `host_disappeared` - Previously seen IP no longer responding
-
-**Alert Severity Levels:**
-- `critical` - Unexpected internet-facing service (ports 80/443/22/3389/etc.)
-- `warning` - Minor configuration drift or upcoming cert expiry
-- `info` - Informational alerts (new host discovered, service version change)
-
-**Alert Rule Configuration:**
-```yaml
-# Example alert rule configuration (stored in DB)
-alert_rules:
-  - id: 1
-    rule_type: unexpected_port
-    enabled: true
-    severity: critical
-    webhook_enabled: true
-    filter_conditions:
-      ports: [22, 80, 443, 3389, 3306, 5432, 27017]  # High-risk ports
-
-  - id: 2
-    rule_type: cert_expiry
-    enabled: true
-    severity: warning
-    threshold: 30  # Days before expiry
-    webhook_enabled: true
-```
-
-**Implementation:**
-- ✅ Evaluate alert rules after each scan completes
-- ✅ Compare current scan results to expected configuration
-- ✅ Generate alerts and store in `alerts` table
-- ✅ Trigger notifications based on rule configuration
-- ✅ Alert deduplication (don't spam for same issue)
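The deduplication idea, sketched as a direct query against the `alerts` table documented earlier (the real engine does this in application code; the values are examples):

```bash
# Skip raising a new alert if an identical one was already recorded
sqlite3 data/sneakyscanner.db \
  "SELECT COUNT(*) FROM alerts
   WHERE alert_type = 'unexpected_port'
     AND ip_address = '192.168.1.100'
     AND port = 3306;"
```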
-
-#### 2. Webhook Notifications ✅
-**Purpose:** Real-time HTTP POST notifications for integration with external systems (Slack, PagerDuty, custom dashboards, SIEM tools).
-
-**Webhook Configuration (via Settings API):**
-```json
-{
-  "webhook_enabled": true,
-  "webhook_urls": [
-    {
-      "id": 1,
-      "name": "Slack Security Channel",
-      "url": "https://hooks.slack.com/services/XXX/YYY/ZZZ",
-      "enabled": true,
-      "auth_type": "none",
-      "custom_headers": {},
-      "alert_types": ["unexpected_port", "unexpected_service", "weak_tls"],
-      "severity_filter": ["critical", "warning"]
-    },
-    {
-      "id": 2,
-      "name": "PagerDuty",
-      "url": "https://events.pagerduty.com/v2/enqueue",
-      "enabled": true,
-      "auth_type": "bearer",
-      "auth_token": "encrypted_token",
-      "custom_headers": {
-        "Content-Type": "application/json"
-      },
-      "alert_types": ["unexpected_port"],
-      "severity_filter": ["critical"]
-    }
-  ]
-}
-```
-
-**Webhook Payload Format (JSON):**
-```json
-{
-  "event_type": "scan_alert",
-  "alert_id": 42,
-  "alert_type": "unexpected_port",
-  "severity": "critical",
-  "timestamp": "2025-11-17T14:23:45Z",
-  "scan": {
-    "scan_id": 123,
-    "title": "Production Network Scan",
-    "timestamp": "2025-11-17T14:15:00Z",
-    "config_id": "prod_config.yaml",
-    "triggered_by": "scheduled"
-  },
-  "alert_details": {
-    "message": "Unexpected port 3306 (MySQL) exposed on 192.168.1.100",
-    "ip_address": "192.168.1.100",
-    "port": 3306,
-    "protocol": "tcp",
-    "state": "open",
-    "service": {
-      "name": "mysql",
-      "product": "MySQL",
-      "version": "8.0.32"
-    },
-    "expected": false,
-    "site_name": "Production Servers"
-  },
-  "recommended_actions": [
-    "Verify if MySQL should be exposed externally",
-    "Check firewall rules for 192.168.1.100",
-    "Review MySQL bind-address configuration"
-  ],
-  "web_url": "https://sneakyscanner.local/scans/123"
-}
-```
-
-**Webhook Features:**
-- ✅ Multiple webhook URLs with independent configuration
-- ✅ Per-webhook filtering by alert type and severity
-- ✅ Custom headers support (for API keys, auth tokens)
-- ✅ Authentication methods:
-  - `none` - No authentication
-  - `bearer` - Bearer token in Authorization header
-  - `basic` - Basic authentication
-  - `custom` - Custom header-based auth
-- ✅ Retry logic with exponential backoff (3 attempts)
-- ✅ Webhook delivery tracking (webhook_sent, webhook_sent_at, webhook_response_code)
-- ✅ Test webhook functionality in Settings UI
-- ✅ Timeout configuration (default 10 seconds)
-- ✅ Webhook delivery history and logs
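The documented retry behavior (3 attempts, exponential backoff, 10-second timeout), sketched in shell form; the URL and payload are placeholders, and the app implements this internally in Python:

```bash
# 3 attempts, 10s timeout each, backing off 1s then 2s between tries
URL="https://hooks.example.com/notify"
PAYLOAD='{"event_type":"scan_alert","severity":"critical"}'
for attempt in 1 2 3; do
  curl -fsS -m 10 -X POST -H "Content-Type: application/json" \
       -d "$PAYLOAD" "$URL" && break
  [ "$attempt" -lt 3 ] && sleep $((2 ** (attempt - 1)))
done
```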
-
-**Webhook API Endpoints:**
-- ✅ `POST /api/webhooks` - Create webhook configuration
-- ✅ `GET /api/webhooks` - List all webhooks
-- ✅ `PUT /api/webhooks/{id}` - Update webhook configuration
-- ✅ `DELETE /api/webhooks/{id}` - Delete webhook
-- ✅ `POST /api/webhooks/{id}/test` - Send test webhook
-- ✅ `GET /api/webhooks/{id}/history` - Get delivery history
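The last two endpoints combine into a quick smoke test from the shell:

```bash
# Fire a test delivery through webhook 1, then inspect its delivery history
curl -X POST http://localhost:5000/api/webhooks/1/test -b cookies.txt
curl http://localhost:5000/api/webhooks/1/history -b cookies.txt
```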
-
-**Notification Templates:**
-Flexible template system supporting multiple platforms (Slack, Discord, PagerDuty, etc.):
-```json
-{
-  "text": "SneakyScanner Alert: Unexpected Port Detected",
-  "attachments": [
-    {
-      "color": "danger",
-      "fields": [
-        {"title": "IP Address", "value": "192.168.1.100", "short": true},
-        {"title": "Port", "value": "3306/tcp", "short": true},
-        {"title": "Service", "value": "MySQL 8.0.32", "short": true},
-        {"title": "Severity", "value": "CRITICAL", "short": true}
-      ],
-      "footer": "SneakyScanner",
-      "ts": 1700234625
-    }
-  ]
-}
-```
+- Alert Rule Engine (9 alert types: unexpected_port, cert_expiry, ping_failed, etc.)
+- Webhook notifications with retry logic
+- Multiple webhook URLs with independent filtering
+- Notification templates (Slack, Discord, PagerDuty support)
+- Alert deduplication
 
 ---
 
-**Deliverables:**
-- ✅ Alert Rule Engine with 9 alert types (unexpected_port, unexpected_service, cert_expiry, ping_failed, service_down, service_change, weak_tls, new_host, host_disappeared)
-- ✅ Alert severity classification (critical, warning, info)
-- ✅ Alert rule configuration API (CRUD operations)
-- ✅ Webhook notification system with retry logic
-- ✅ Multiple webhook URL support with independent configuration
-- ✅ Notification template system for flexible platform integration (Slack, Discord, PagerDuty, custom)
-- ✅ Webhook API endpoints (create, list, update, delete, test, history)
-- ✅ Custom headers and authentication support (none, bearer, basic, custom)
-- ✅ Webhook delivery tracking and logging
-- ✅ Alert deduplication to prevent notification spam
-- ✅ Integration with scan completion workflow
+## Planned Features
 
-**Success Criteria Met:**
-- ✅ Alerts triggered within 30 seconds of scan completion
-- ✅ Webhook POST delivered with retry on failure
-- ✅ Zero false positives for expected ports/services
-- ✅ Alert deduplication prevents notification spam
+### Version 1.1.0 - Communication & Automation
 
+#### CLI as API Client
+- CLI tool for scripting and automation via REST API
+- API token authentication (Bearer tokens)
+- Commands for scan management, schedules, alerts
 
+#### Email Notifications
+- SMTP integration with Flask-Mail
+- Jinja2 email templates (HTML + plain text)
+- Configurable recipients and rate limiting
 
+#### Site CSV Export/Import
+- Bulk site management via CSV files
 
 ---
 
-### Phase 6: CLI as API Client
-**Status:** Planned
-**Priority:** MEDIUM
+### Version 1.2.0 - Reporting & Analysis
 
-**Goals:**
-- Create CLI API client for scripting and automation
-- Maintain standalone mode for testing
-- API token authentication
+#### Scan Comparison
+- Compare two scans API endpoint
+- Side-by-side comparison view with color-coded differences
+- Export comparison report to PDF/HTML
 
-**Planned Features:**
-1. **API Client Mode:**
-   - `--api-mode` flag to enable API client mode
-   - `--api-url` and `--api-token` arguments
-   - Trigger scans via API, poll for status, download results
-   - Scans stored centrally in database
-   - Standalone mode still available for testing
-
-2. **API Token System:**
-   - Token generation UI in settings page
-   - Secure token storage (hashed in database)
-   - Token authentication middleware
-   - Token expiration and revocation
-
-3. **Benefits:**
-   - Centralized scan history accessible via web dashboard
-   - No need to mount volumes for output
-   - Scheduled scans managed through web UI
-   - Scriptable automation while leveraging web features
+#### Enhanced Reports
+- Sortable/filterable tables (DataTables.js)
+- PDF export (WeasyPrint)
 
 ---
 
-### Phase 7: Advanced Features
-**Status:** Future/Deferred
-**Priority:** LOW
+### Version 1.3.0 - Visualization
 
-**Planned Features:**
+#### Timeline View
+- Visual scan history timeline
+- Filter by site/IP
+- Event annotations
 
-1. **Email Notifications:**
-   - SMTP integration with Flask-Mail
-   - Jinja2 email templates (HTML + plain text)
-   - Settings API for email configuration
-   - Test email functionality
-   - Email delivery tracking
-   - Rate limiting to prevent email flood
-   - Configurable recipients (multiple emails)
-
-2. **Scan Comparison:**
-   - Compare two scans API endpoint
-   - Side-by-side comparison view
-   - Color-coded differences (green=new, red=removed, yellow=changed)
-   - Filter by change type
-   - Export comparison report to PDF/HTML
-   - "Compare with previous scan" button on scan detail page
-
-3. **Enhanced Reports:**
-   - Sortable/filterable tables (DataTables.js)
-   - Inline screenshot thumbnails with lightbox
-   - PDF export (WeasyPrint)
-
-4. **Vulnerability Detection:**
-   - CVE database integration (NVD API)
-   - Service version matching to known CVEs
-   - CVSS severity scores
-   - Alert rules for critical CVEs
-
-5. **Timeline View:**
-   - Visual scan history timeline
-   - Filter by site/IP
-   - Event annotations
-
-6. **Advanced Charts:**
-   - Port activity heatmap
-   - Service version tracking
-   - Certificate expiration forecast
-
-7. **Additional Integrations:**
-   - Prometheus metrics export
-   - CSV export/import
-   - Advanced reporting dashboards
+#### Advanced Charts
+- Port activity heatmap
+- Certificate expiration forecast
 
 ---
 
-## Development Workflow
+### Version 2.0.0 - Security Intelligence
 
-### Iteration Cycle
-1. **Plan** - Define features for phase
-2. **Implement** - Code backend + frontend
-3. **Test** - Unit tests + manual testing
-4. **Deploy** - Update Docker Compose
-5. **Document** - Update README.md, ROADMAP.md
-6. **Review** - Get user feedback
-7. **Iterate** - Adjust priorities based on feedback
+#### Vulnerability Detection
+- CVE database integration (NVD API)
+- Service version matching to known CVEs
+- CVSS severity scores
 
-### Git Workflow
-- **main branch** - Stable releases
-- **develop branch** - Active development
-- **feature branches** - Individual features (`feature/dashboard`, `feature/scheduler`)
-- **Pull requests** - Review before merge
-
-### Testing Strategy
-- **Unit tests** - pytest for models, API endpoints
-- **Integration tests** - Full scan → DB → API workflow
-- **Manual testing** - UI/UX testing in browser
-- **Performance tests** - Large scans, database queries
-
-### Documentation
-- **README.md** - User-facing documentation (updated each phase)
-- **ROADMAP.md** - This file (updated as priorities shift)
-- **CLAUDE.md** - Developer documentation (architecture, code references)
-- **API.md** - API documentation (OpenAPI/Swagger in Phase 4)
-
-## Resources & References
-
-### Documentation
-- [Flask Documentation](https://flask.palletsprojects.com/)
-- [SQLAlchemy ORM](https://docs.sqlalchemy.org/)
-- [APScheduler](https://apscheduler.readthedocs.io/)
-- [Chart.js](https://www.chartjs.org/docs/)
-- [Bootstrap 5](https://getbootstrap.com/docs/5.3/)
-
-### Tutorials
-- [Flask Mega-Tutorial](https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-i-hello-world)
-- [SQLAlchemy Tutorial](https://docs.sqlalchemy.org/en/20/tutorial/)
-- [APScheduler with Flask](https://github.com/viniciuschiele/flask-apscheduler)
-
-### Similar Projects (Inspiration)
-- [OpenVAS](https://www.openvas.org/) - Vulnerability scanner with web UI
-- [Nessus](https://www.tenable.com/products/nessus) - Commercial scanner (inspiration for UI/UX)
-- [OWASP ZAP](https://www.zaproxy.org/) - Web app scanner (comparison reports, alerts)
 ---
 
 ## Changelog
 
 | Date | Version | Changes |
 |------|---------|---------|
-| 2025-11-14 | 1.0 | Initial roadmap created based on user requirements |
-| 2025-11-13 | 1.1 | **Phase 1 COMPLETE** - Database schema, SQLAlchemy models, Flask app structure, settings system with encryption, Alembic migrations, API blueprints, Docker support, validation script |
-| 2025-11-14 | 1.2 | **Phase 2 COMPLETE** - REST API (5 scan endpoints, 3 settings endpoints), background jobs (APScheduler), authentication (Flask-Login), web UI (dashboard, scans, login, errors), error handling (content negotiation, request IDs, logging), 100 tests passing, comprehensive documentation (API_REFERENCE.md, DEPLOYMENT.md, PHASE2_COMPLETE.md) |
-| 2025-11-17 | 1.3 | **Bug Fix** - Fixed Chart.js infinite canvas growth issue in scan detail page (duplicate initialization, missing chart.destroy(), missing fixed-height container) |
-| 2025-11-17 | 1.4 | **Phase 4 COMPLETE** - Config Creator with CIDR-based creation, YAML editor (CodeMirror), config management UI (list/edit/delete), REST API (7 endpoints), Docker volume permissions fix, comprehensive testing and documentation |
-| 2025-11-17 | 1.5 | **Roadmap Compression** - Condensed completed phases (1-4) into concise summaries, updated project scope to emphasize web GUI frontend with CLI as API client coming soon (Phase 6), reorganized phases for clarity |
-| 2025-11-19 | 1.6 | **Phase 5 Progress** - Completed webhooks, notification templates, and alerting rules. Alert Rule Engine and Webhook System implemented. |
-| 2025-11-19 | 1.7 | **Phase 5 COMPLETE** - Webhooks & Alerting phase completed. Moved Email Notifications and Scan Comparison to Phase 7. Alert rules, webhook notifications, and notification templates fully implemented and tested. |
+| 2025-11-13 | 1.0.0-alpha | Phase 1 complete - Foundation |
+| 2025-11-14 | 1.0.0-beta | Phases 2-3 complete - Web App Core, Dashboard & Scheduling |
+| 2025-11-17 | 1.0.0-rc1 | Phase 4 complete - Config Creator |
+| 2025-11-19 | 1.0.0 | Phase 5 complete - Webhooks & Alerting |
 
 ---
 
-**Last Updated:** 2025-11-19
-**Next Review:** Before Phase 6 kickoff (CLI as API Client)
+**Last Updated:** 2025-11-20
BIN docs/alerts.png (Binary file not shown; before: 103 KiB)
BIN (Binary file not shown; before: 60 KiB)
BIN docs/configs.png (Binary file not shown; before: 56 KiB)
BIN docs/scans.png (Binary file not shown; before: 61 KiB)

99 scripts/release.sh (Executable file)
@@ -0,0 +1,99 @@
#!/bin/bash

# SneakyScan Release Script
# Handles version bumping, branch merging, tagging, and pushing

set -e

CONFIG_FILE="app/web/config.py"
DEVELOP_BRANCH="nightly"
STAGING_BRANCH="beta"
MAIN_BRANCH="master"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

echo -e "${GREEN}=== SneakyScan Release Script ===${NC}\n"

# Ensure we're in the repo root
if [ ! -f "$CONFIG_FILE" ]; then
    echo -e "${RED}Error: Must run from repository root${NC}"
    exit 1
fi

# Check for uncommitted changes
if [ -n "$(git status --porcelain)" ]; then
    echo -e "${RED}Error: You have uncommitted changes. Please commit or stash them first.${NC}"
    exit 1
fi

# Prompt for version
read -p "Enter version (e.g., 1.0.0): " VERSION

# Validate version format (semver)
if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9]+)?$ ]]; then
    echo -e "${RED}Error: Invalid version format. Use semver (e.g., 1.0.0 or 1.0.0-beta)${NC}"
    exit 1
fi

TAG_NAME="v$VERSION"

# Check if tag already exists
if git rev-parse "$TAG_NAME" >/dev/null 2>&1; then
    echo -e "${RED}Error: Tag $TAG_NAME already exists${NC}"
    exit 1
fi

echo -e "\n${YELLOW}Release version: $VERSION${NC}"
echo -e "${YELLOW}Tag name: $TAG_NAME${NC}\n"

read -p "Proceed with release? (y/n): " CONFIRM
if [ "$CONFIRM" != "y" ]; then
    echo "Release cancelled."
    exit 0
fi

# Fetch latest from remote
echo -e "\n${GREEN}Fetching latest from remote...${NC}"
git fetch origin

# Update version in config.py
echo -e "\n${GREEN}Updating version in $CONFIG_FILE...${NC}"
sed -i "s/APP_VERSION = .*/APP_VERSION = '$VERSION'/" "$CONFIG_FILE"

# Checkout develop and commit version change
echo -e "\n${GREEN}Committing version change on $DEVELOP_BRANCH...${NC}"
git checkout "$DEVELOP_BRANCH"
git add "$CONFIG_FILE"
git commit -m "Bump version to $VERSION"

# Merge develop into staging
echo -e "\n${GREEN}Merging $DEVELOP_BRANCH into $STAGING_BRANCH...${NC}"
git checkout "$STAGING_BRANCH"
git merge "$DEVELOP_BRANCH" -m "Merge $DEVELOP_BRANCH into $STAGING_BRANCH for release $VERSION"

# Merge staging into main
echo -e "\n${GREEN}Merging $STAGING_BRANCH into $MAIN_BRANCH...${NC}"
git checkout "$MAIN_BRANCH"
git merge "$STAGING_BRANCH" -m "Merge $STAGING_BRANCH into $MAIN_BRANCH for release $VERSION"

# Create tag
echo -e "\n${GREEN}Creating tag $TAG_NAME...${NC}"
git tag -a "$TAG_NAME" -m "Release $VERSION"

# Push everything
echo -e "\n${GREEN}Pushing branches and tag to remote...${NC}"
git push origin "$DEVELOP_BRANCH"
git push origin "$STAGING_BRANCH"
git push origin "$MAIN_BRANCH"
git push origin "$TAG_NAME"

# Return to develop branch
git checkout "$DEVELOP_BRANCH"

echo -e "\n${GREEN}=== Release $VERSION complete! ===${NC}"
echo -e "Tag: $TAG_NAME"
echo -e "All branches and tags have been pushed to origin."
43 setup.sh

@@ -91,27 +91,40 @@ echo "Creating required directories..."
 mkdir -p data logs output configs
 echo "✓ Directories created"
 
-# Check if Docker is running
+# Check if Podman is running
 echo ""
-echo "Checking Docker..."
-if ! docker info > /dev/null 2>&1; then
-    echo "✗ Docker is not running or not installed"
-    echo "Please install Docker and start the Docker daemon"
+echo "Checking Podman..."
+if ! podman info > /dev/null 2>&1; then
+    echo "✗ Podman is not running or not installed"
+    echo "Please install Podman"
     exit 1
 fi
-echo "✓ Docker is running"
+echo "✓ Podman is available"
 
 # Build and start
 echo ""
-echo "Building and starting SneakyScanner..."
+echo "Starting SneakyScanner..."
 echo "This may take a few minutes on first run..."
 echo ""
 
-docker compose build
+podman build --network=host -t sneakyscan .
 
+# Initialize database if it doesn't exist or is empty
 echo ""
-echo "Starting SneakyScanner..."
-docker compose up -d
+echo "Initializing database..."
+
+# Build init command with optional password
+INIT_CMD="init_db.py --db-url sqlite:////app/data/sneakyscanner.db --force"
+if [ -n "$INITIAL_PASSWORD" ]; then
+    INIT_CMD="$INIT_CMD --password $INITIAL_PASSWORD"
+fi
+
+podman run --rm --entrypoint python3 -w /app \
+    -v "$(pwd)/data:/app/data" \
+    sneakyscan $INIT_CMD
+echo "✓ Database initialized"
+
+podman-compose up -d
 
 # Wait for service to be healthy
 echo ""
@@ -119,7 +132,7 @@ echo "Waiting for application to start..."
 sleep 5
 
 # Check if container is running
-if docker ps | grep -q sneakyscanner-web; then
+if podman ps | grep -q sneakyscanner-web; then
     echo ""
     echo "================================================"
     echo " ✓ SneakyScanner is Running!"
@@ -140,15 +153,15 @@ if docker ps | grep -q sneakyscanner-web; then
     fi
     echo ""
     echo "Useful commands:"
-    echo "  docker compose logs -f      # View logs"
-    echo "  docker compose stop         # Stop the service"
-    echo "  docker compose restart      # Restart the service"
+    echo "  podman-compose logs -f      # View logs"
+    echo "  podman-compose stop         # Stop the service"
+    echo "  podman-compose restart      # Restart the service"
    echo ""
    echo "⚠ IMPORTANT: Change your password after first login!"
    echo "================================================"
 else
    echo ""
    echo "✗ Container failed to start. Check logs with:"
-    echo "  docker compose logs"
+    echo "  podman-compose logs"
    exit 1
 fi