Compare commits

9e2fc348b7...nightly

11 Commits

| SHA1 |
|---|
| 52378eaaf4 |
| 7667d80d2f |
| 9a0b7c7920 |
| d02a065bde |
| 4c22948ea2 |
| 51fa4caaf5 |
| 8c34f8b2eb |
| 136276497d |
| 6bc733fefd |
| 4b197e0b3d |
| 30f0987a99 |
```diff
@@ -39,13 +39,12 @@ COPY app/web/ ./web/
 COPY app/migrations/ ./migrations/
 COPY app/alembic.ini .
 COPY app/init_db.py .
-COPY app/docker-entrypoint.sh /docker-entrypoint.sh
 
 # Create required directories
 RUN mkdir -p /app/output /app/logs
 
 # Make scripts executable
-RUN chmod +x /app/src/scanner.py /app/init_db.py /docker-entrypoint.sh
+RUN chmod +x /app/src/scanner.py /app/init_db.py
 
 # Force Python unbuffered output
 ENV PYTHONUNBUFFERED=1
```
```diff
@@ -69,8 +69,12 @@ def run_migrations_online() -> None:
     )
 
     with connectable.connect() as connection:
+        # Enable batch mode for SQLite to support ALTER TABLE operations
+        # like DROP COLUMN which SQLite doesn't natively support
         context.configure(
-            connection=connection, target_metadata=target_metadata
+            connection=connection,
+            target_metadata=target_metadata,
+            render_as_batch=True
         )
 
         with context.begin_transaction():
```
```diff
@@ -1,125 +1,214 @@
-"""Initial database schema for SneakyScanner
+"""Initial schema for SneakyScanner
 
 Revision ID: 001
-Revises:
-Create Date: 2025-11-13 18:00:00.000000
+Revises: None
+Create Date: 2025-12-24
 
+This is the complete initial schema for SneakyScanner. All tables are created
+in the correct order to satisfy foreign key constraints.
 """
 from alembic import op
 import sqlalchemy as sa
 
 
-# revision identifiers, used by Alembic.
+# revision identifiers, used by Alembic
 revision = '001'
 down_revision = None
 branch_labels = None
 depends_on = None
 
 
-def upgrade() -> None:
-    """Create all initial tables for SneakyScanner."""
+def upgrade():
+    """Create all tables for SneakyScanner."""
 
-    # Create schedules table first (referenced by scans)
-    op.create_table('schedules',
+    # =========================================================================
+    # Settings Table (no dependencies)
+    # =========================================================================
+    op.create_table(
+        'settings',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('name', sa.String(length=255), nullable=False, comment='Schedule name (e.g., \'Daily prod scan\')'),
-        sa.Column('config_file', sa.Text(), nullable=False, comment='Path to YAML config'),
-        sa.Column('cron_expression', sa.String(length=100), nullable=False, comment='Cron-like schedule (e.g., \'0 2 * * *\')'),
-        sa.Column('enabled', sa.Boolean(), nullable=False, comment='Is schedule active?'),
-        sa.Column('last_run', sa.DateTime(), nullable=True, comment='Last execution time'),
-        sa.Column('next_run', sa.DateTime(), nullable=True, comment='Next scheduled execution'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Schedule creation time'),
+        sa.Column('key', sa.String(length=255), nullable=False, comment='Setting key'),
+        sa.Column('value', sa.Text(), nullable=True, comment='Setting value (JSON for complex values)'),
+        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('key')
+    )
+    op.create_index('ix_settings_key', 'settings', ['key'], unique=True)
+
+    # =========================================================================
+    # Reusable Site Definition Tables
+    # =========================================================================
+    op.create_table(
+        'sites',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False, comment='Unique site name'),
+        sa.Column('description', sa.Text(), nullable=True, comment='Site description'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Site creation time'),
+        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('name')
+    )
+    op.create_index('ix_sites_name', 'sites', ['name'], unique=True)
+
+    op.create_table(
+        'site_ips',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
+        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
+        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Expected ping response'),
+        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports'),
+        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='IP creation time'),
+        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('site_id', 'ip_address', name='uix_site_ip_address')
+    )
+    op.create_index('ix_site_ips_site_id', 'site_ips', ['site_id'])
+
+    # =========================================================================
+    # Scan Configuration Tables
+    # =========================================================================
+    op.create_table(
+        'scan_configs',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('title', sa.String(length=255), nullable=False, comment='Configuration title'),
+        sa.Column('description', sa.Text(), nullable=True, comment='Configuration description'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Config creation time'),
         sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
         sa.PrimaryKeyConstraint('id')
     )
 
-    # Create scans table
-    op.create_table('scans',
+    op.create_table(
+        'scan_config_sites',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('config_id', sa.Integer(), nullable=False),
+        sa.Column('site_id', sa.Integer(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
+        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
+        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('config_id', 'site_id', name='uix_config_site')
+    )
+    op.create_index('ix_scan_config_sites_config_id', 'scan_config_sites', ['config_id'])
+    op.create_index('ix_scan_config_sites_site_id', 'scan_config_sites', ['site_id'])
+
+    # =========================================================================
+    # Scheduling Tables
+    # =========================================================================
+    op.create_table(
+        'schedules',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False, comment='Schedule name'),
+        sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'),
+        sa.Column('cron_expression', sa.String(length=100), nullable=False, comment='Cron-like schedule'),
+        sa.Column('enabled', sa.Boolean(), nullable=False, default=True, comment='Is schedule active?'),
+        sa.Column('last_run', sa.DateTime(), nullable=True, comment='Last execution time'),
+        sa.Column('next_run', sa.DateTime(), nullable=True, comment='Next scheduled execution'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Schedule creation time'),
+        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
+        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index('ix_schedules_config_id', 'schedules', ['config_id'])
+
+    # =========================================================================
+    # Core Scan Tables
+    # =========================================================================
+    op.create_table(
+        'scans',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
         sa.Column('timestamp', sa.DateTime(), nullable=False, comment='Scan start time (UTC)'),
         sa.Column('duration', sa.Float(), nullable=True, comment='Total scan duration in seconds'),
-        sa.Column('status', sa.String(length=20), nullable=False, comment='running, completed, failed'),
-        sa.Column('config_file', sa.Text(), nullable=True, comment='Path to YAML config used'),
+        sa.Column('status', sa.String(length=20), nullable=False, default='running', comment='running, finalizing, completed, failed, cancelled'),
+        sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'),
         sa.Column('title', sa.Text(), nullable=True, comment='Scan title from config'),
         sa.Column('json_path', sa.Text(), nullable=True, comment='Path to JSON report'),
         sa.Column('html_path', sa.Text(), nullable=True, comment='Path to HTML report'),
         sa.Column('zip_path', sa.Text(), nullable=True, comment='Path to ZIP archive'),
         sa.Column('screenshot_dir', sa.Text(), nullable=True, comment='Path to screenshot directory'),
         sa.Column('created_at', sa.DateTime(), nullable=False, comment='Record creation time'),
-        sa.Column('triggered_by', sa.String(length=50), nullable=False, comment='manual, scheduled, api'),
+        sa.Column('triggered_by', sa.String(length=50), nullable=False, default='manual', comment='manual, scheduled, api'),
         sa.Column('schedule_id', sa.Integer(), nullable=True, comment='FK to schedules if triggered by schedule'),
-        sa.ForeignKeyConstraint(['schedule_id'], ['schedules.id'], ),
+        sa.Column('started_at', sa.DateTime(), nullable=True, comment='Scan execution start time'),
+        sa.Column('completed_at', sa.DateTime(), nullable=True, comment='Scan execution completion time'),
+        sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if scan failed'),
+        sa.Column('current_phase', sa.String(length=50), nullable=True, comment='Current scan phase'),
+        sa.Column('total_ips', sa.Integer(), nullable=True, comment='Total number of IPs to scan'),
+        sa.Column('completed_ips', sa.Integer(), nullable=True, default=0, comment='Number of IPs completed'),
+        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
+        sa.ForeignKeyConstraint(['schedule_id'], ['schedules.id']),
         sa.PrimaryKeyConstraint('id')
     )
-    op.create_index(op.f('ix_scans_timestamp'), 'scans', ['timestamp'], unique=False)
+    op.create_index('ix_scans_timestamp', 'scans', ['timestamp'])
+    op.create_index('ix_scans_config_id', 'scans', ['config_id'])
 
-    # Create scan_sites table
-    op.create_table('scan_sites',
+    op.create_table(
+        'scan_sites',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
         sa.Column('site_name', sa.String(length=255), nullable=False, comment='Site name from config'),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
         sa.PrimaryKeyConstraint('id')
     )
-    op.create_index(op.f('ix_scan_sites_scan_id'), 'scan_sites', ['scan_id'], unique=False)
+    op.create_index('ix_scan_sites_scan_id', 'scan_sites', ['scan_id'])
 
-    # Create scan_ips table
-    op.create_table('scan_ips',
+    op.create_table(
+        'scan_ips',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to scan_sites'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('site_id', sa.Integer(), nullable=False),
         sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
         sa.Column('ping_expected', sa.Boolean(), nullable=True, comment='Expected ping response'),
         sa.Column('ping_actual', sa.Boolean(), nullable=True, comment='Actual ping response'),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
-        sa.ForeignKeyConstraint(['site_id'], ['scan_sites.id'], ),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['site_id'], ['scan_sites.id']),
         sa.PrimaryKeyConstraint('id'),
         sa.UniqueConstraint('scan_id', 'ip_address', name='uix_scan_ip')
     )
-    op.create_index(op.f('ix_scan_ips_scan_id'), 'scan_ips', ['scan_id'], unique=False)
-    op.create_index(op.f('ix_scan_ips_site_id'), 'scan_ips', ['site_id'], unique=False)
+    op.create_index('ix_scan_ips_scan_id', 'scan_ips', ['scan_id'])
+    op.create_index('ix_scan_ips_site_id', 'scan_ips', ['site_id'])
 
-    # Create scan_ports table
-    op.create_table('scan_ports',
+    op.create_table(
+        'scan_ports',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('ip_id', sa.Integer(), nullable=False, comment='FK to scan_ips'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('ip_id', sa.Integer(), nullable=False),
         sa.Column('port', sa.Integer(), nullable=False, comment='Port number (1-65535)'),
         sa.Column('protocol', sa.String(length=10), nullable=False, comment='tcp or udp'),
         sa.Column('expected', sa.Boolean(), nullable=True, comment='Was this port expected?'),
-        sa.Column('state', sa.String(length=20), nullable=False, comment='open, closed, filtered'),
-        sa.ForeignKeyConstraint(['ip_id'], ['scan_ips.id'], ),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
+        sa.Column('state', sa.String(length=20), nullable=False, default='open', comment='open, closed, filtered'),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['ip_id'], ['scan_ips.id']),
         sa.PrimaryKeyConstraint('id'),
         sa.UniqueConstraint('scan_id', 'ip_id', 'port', 'protocol', name='uix_scan_ip_port')
     )
-    op.create_index(op.f('ix_scan_ports_ip_id'), 'scan_ports', ['ip_id'], unique=False)
-    op.create_index(op.f('ix_scan_ports_scan_id'), 'scan_ports', ['scan_id'], unique=False)
+    op.create_index('ix_scan_ports_scan_id', 'scan_ports', ['scan_id'])
+    op.create_index('ix_scan_ports_ip_id', 'scan_ports', ['ip_id'])
 
-    # Create scan_services table
-    op.create_table('scan_services',
+    op.create_table(
+        'scan_services',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('port_id', sa.Integer(), nullable=False, comment='FK to scan_ports'),
-        sa.Column('service_name', sa.String(length=100), nullable=True, comment='Service name (e.g., ssh, http)'),
-        sa.Column('product', sa.String(length=255), nullable=True, comment='Product name (e.g., OpenSSH)'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('port_id', sa.Integer(), nullable=False),
+        sa.Column('service_name', sa.String(length=100), nullable=True, comment='Service name'),
+        sa.Column('product', sa.String(length=255), nullable=True, comment='Product name'),
         sa.Column('version', sa.String(length=100), nullable=True, comment='Version string'),
         sa.Column('extrainfo', sa.Text(), nullable=True, comment='Additional nmap info'),
         sa.Column('ostype', sa.String(length=100), nullable=True, comment='OS type if detected'),
-        sa.Column('http_protocol', sa.String(length=10), nullable=True, comment='http or https (if web service)'),
+        sa.Column('http_protocol', sa.String(length=10), nullable=True, comment='http or https'),
         sa.Column('screenshot_path', sa.Text(), nullable=True, comment='Relative path to screenshot'),
-        sa.ForeignKeyConstraint(['port_id'], ['scan_ports.id'], ),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['port_id'], ['scan_ports.id']),
         sa.PrimaryKeyConstraint('id')
     )
-    op.create_index(op.f('ix_scan_services_port_id'), 'scan_services', ['port_id'], unique=False)
-    op.create_index(op.f('ix_scan_services_scan_id'), 'scan_services', ['scan_id'], unique=False)
+    op.create_index('ix_scan_services_scan_id', 'scan_services', ['scan_id'])
+    op.create_index('ix_scan_services_port_id', 'scan_services', ['port_id'])
 
-    # Create scan_certificates table
-    op.create_table('scan_certificates',
+    op.create_table(
+        'scan_certificates',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('service_id', sa.Integer(), nullable=False, comment='FK to scan_services'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('service_id', sa.Integer(), nullable=False),
         sa.Column('subject', sa.Text(), nullable=True, comment='Certificate subject (CN)'),
         sa.Column('issuer', sa.Text(), nullable=True, comment='Certificate issuer'),
         sa.Column('serial_number', sa.Text(), nullable=True, comment='Serial number'),
@@ -127,95 +216,177 @@ def upgrade() -> None:
         sa.Column('not_valid_after', sa.DateTime(), nullable=True, comment='Validity end date'),
         sa.Column('days_until_expiry', sa.Integer(), nullable=True, comment='Days until expiration'),
         sa.Column('sans', sa.Text(), nullable=True, comment='JSON array of SANs'),
-        sa.Column('is_self_signed', sa.Boolean(), nullable=True, comment='Self-signed certificate flag'),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
-        sa.ForeignKeyConstraint(['service_id'], ['scan_services.id'], ),
-        sa.PrimaryKeyConstraint('id'),
-        comment='Index on expiration date for alert queries'
+        sa.Column('is_self_signed', sa.Boolean(), nullable=True, default=False, comment='Self-signed flag'),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['service_id'], ['scan_services.id']),
+        sa.PrimaryKeyConstraint('id')
     )
-    op.create_index(op.f('ix_scan_certificates_scan_id'), 'scan_certificates', ['scan_id'], unique=False)
-    op.create_index(op.f('ix_scan_certificates_service_id'), 'scan_certificates', ['service_id'], unique=False)
+    op.create_index('ix_scan_certificates_scan_id', 'scan_certificates', ['scan_id'])
+    op.create_index('ix_scan_certificates_service_id', 'scan_certificates', ['service_id'])
 
-    # Create scan_tls_versions table
-    op.create_table('scan_tls_versions',
+    op.create_table(
+        'scan_tls_versions',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('certificate_id', sa.Integer(), nullable=False, comment='FK to scan_certificates'),
-        sa.Column('tls_version', sa.String(length=20), nullable=False, comment='TLS 1.0, TLS 1.1, TLS 1.2, TLS 1.3'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('certificate_id', sa.Integer(), nullable=False),
+        sa.Column('tls_version', sa.String(length=20), nullable=False, comment='TLS 1.0, 1.1, 1.2, 1.3'),
         sa.Column('supported', sa.Boolean(), nullable=False, comment='Is this version supported?'),
         sa.Column('cipher_suites', sa.Text(), nullable=True, comment='JSON array of cipher suites'),
-        sa.ForeignKeyConstraint(['certificate_id'], ['scan_certificates.id'], ),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['certificate_id'], ['scan_certificates.id']),
         sa.PrimaryKeyConstraint('id')
     )
-    op.create_index(op.f('ix_scan_tls_versions_certificate_id'), 'scan_tls_versions', ['certificate_id'], unique=False)
-    op.create_index(op.f('ix_scan_tls_versions_scan_id'), 'scan_tls_versions', ['scan_id'], unique=False)
+    op.create_index('ix_scan_tls_versions_scan_id', 'scan_tls_versions', ['scan_id'])
+    op.create_index('ix_scan_tls_versions_certificate_id', 'scan_tls_versions', ['certificate_id'])
 
-    # Create alerts table
-    op.create_table('alerts',
+    op.create_table(
+        'scan_progress',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('alert_type', sa.String(length=50), nullable=False, comment='new_port, cert_expiry, service_change, ping_failed'),
-        sa.Column('severity', sa.String(length=20), nullable=False, comment='info, warning, critical'),
-        sa.Column('message', sa.Text(), nullable=False, comment='Human-readable alert message'),
-        sa.Column('ip_address', sa.String(length=45), nullable=True, comment='Related IP (optional)'),
-        sa.Column('port', sa.Integer(), nullable=True, comment='Related port (optional)'),
-        sa.Column('email_sent', sa.Boolean(), nullable=False, comment='Was email notification sent?'),
-        sa.Column('email_sent_at', sa.DateTime(), nullable=True, comment='Email send timestamp'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Alert creation time'),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IP address being scanned'),
+        sa.Column('site_name', sa.String(length=255), nullable=True, comment='Site name'),
+        sa.Column('phase', sa.String(length=50), nullable=False, comment='Phase: ping, tcp_scan, etc.'),
+        sa.Column('status', sa.String(length=20), nullable=False, default='pending', comment='pending, in_progress, completed, failed'),
+        sa.Column('ping_result', sa.Boolean(), nullable=True, comment='Ping response result'),
+        sa.Column('tcp_ports', sa.Text(), nullable=True, comment='JSON array of TCP ports'),
+        sa.Column('udp_ports', sa.Text(), nullable=True, comment='JSON array of UDP ports'),
+        sa.Column('services', sa.Text(), nullable=True, comment='JSON array of services'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Entry creation time'),
+        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last update time'),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
         sa.PrimaryKeyConstraint('id'),
-        comment='Indexes for alert filtering'
+        sa.UniqueConstraint('scan_id', 'ip_address', name='uix_scan_progress_ip')
     )
-    op.create_index(op.f('ix_alerts_scan_id'), 'alerts', ['scan_id'], unique=False)
+    op.create_index('ix_scan_progress_scan_id', 'scan_progress', ['scan_id'])
 
-    # Create alert_rules table
-    op.create_table('alert_rules',
+    op.create_table(
+        'scan_site_associations',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('rule_type', sa.String(length=50), nullable=False, comment='unexpected_port, cert_expiry, service_down, etc.'),
-        sa.Column('enabled', sa.Boolean(), nullable=False, comment='Is rule active?'),
-        sa.Column('threshold', sa.Integer(), nullable=True, comment='Threshold value (e.g., days for cert expiry)'),
-        sa.Column('email_enabled', sa.Boolean(), nullable=False, comment='Send email for this rule?'),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('site_id', sa.Integer(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('scan_id', 'site_id', name='uix_scan_site')
+    )
+    op.create_index('ix_scan_site_associations_scan_id', 'scan_site_associations', ['scan_id'])
+    op.create_index('ix_scan_site_associations_site_id', 'scan_site_associations', ['site_id'])
+
+    # =========================================================================
+    # Alert Tables
+    # =========================================================================
+    op.create_table(
+        'alert_rules',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=True, comment='User-friendly rule name'),
+        sa.Column('rule_type', sa.String(length=50), nullable=False, comment='unexpected_port, cert_expiry, etc.'),
+        sa.Column('enabled', sa.Boolean(), nullable=False, default=True, comment='Is rule active?'),
+        sa.Column('threshold', sa.Integer(), nullable=True, comment='Threshold value'),
+        sa.Column('email_enabled', sa.Boolean(), nullable=False, default=False, comment='Send email?'),
+        sa.Column('webhook_enabled', sa.Boolean(), nullable=False, default=False, comment='Send webhook?'),
+        sa.Column('severity', sa.String(length=20), nullable=True, comment='critical, warning, info'),
+        sa.Column('filter_conditions', sa.Text(), nullable=True, comment='JSON filter conditions'),
+        sa.Column('config_id', sa.Integer(), nullable=True, comment='Optional: specific config this rule applies to'),
         sa.Column('created_at', sa.DateTime(), nullable=False, comment='Rule creation time'),
+        sa.Column('updated_at', sa.DateTime(), nullable=True, comment='Last update time'),
+        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id']),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index('ix_alert_rules_config_id', 'alert_rules', ['config_id'])
+
+    op.create_table(
+        'alerts',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('scan_id', sa.Integer(), nullable=False),
+        sa.Column('rule_id', sa.Integer(), nullable=True, comment='Associated alert rule'),
+        sa.Column('alert_type', sa.String(length=50), nullable=False, comment='Alert type'),
+        sa.Column('severity', sa.String(length=20), nullable=False, comment='info, warning, critical'),
+        sa.Column('message', sa.Text(), nullable=False, comment='Human-readable message'),
+        sa.Column('ip_address', sa.String(length=45), nullable=True, comment='Related IP'),
+        sa.Column('port', sa.Integer(), nullable=True, comment='Related port'),
+        sa.Column('email_sent', sa.Boolean(), nullable=False, default=False, comment='Was email sent?'),
+        sa.Column('email_sent_at', sa.DateTime(), nullable=True, comment='Email send timestamp'),
+        sa.Column('webhook_sent', sa.Boolean(), nullable=False, default=False, comment='Was webhook sent?'),
+        sa.Column('webhook_sent_at', sa.DateTime(), nullable=True, comment='Webhook send timestamp'),
+        sa.Column('acknowledged', sa.Boolean(), nullable=False, default=False, comment='Was alert acknowledged?'),
+        sa.Column('acknowledged_at', sa.DateTime(), nullable=True, comment='Acknowledgment timestamp'),
+        sa.Column('acknowledged_by', sa.String(length=255), nullable=True, comment='User who acknowledged'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Alert creation time'),
+        sa.ForeignKeyConstraint(['scan_id'], ['scans.id']),
+        sa.ForeignKeyConstraint(['rule_id'], ['alert_rules.id']),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index('ix_alerts_scan_id', 'alerts', ['scan_id'])
+    op.create_index('ix_alerts_rule_id', 'alerts', ['rule_id'])
+    op.create_index('ix_alerts_acknowledged', 'alerts', ['acknowledged'])
+
+    # =========================================================================
+    # Webhook Tables
+    # =========================================================================
+    op.create_table(
+        'webhooks',
+        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False, comment='Webhook name'),
+        sa.Column('url', sa.Text(), nullable=False, comment='Webhook URL'),
+        sa.Column('enabled', sa.Boolean(), nullable=False, default=True, comment='Is webhook enabled?'),
+        sa.Column('auth_type', sa.String(length=20), nullable=True, comment='none, bearer, basic, custom'),
+        sa.Column('auth_token', sa.Text(), nullable=True, comment='Encrypted auth token'),
+        sa.Column('custom_headers', sa.Text(), nullable=True, comment='JSON custom headers'),
+        sa.Column('alert_types', sa.Text(), nullable=True, comment='JSON array of alert types'),
+        sa.Column('severity_filter', sa.Text(), nullable=True, comment='JSON array of severities'),
+        sa.Column('timeout', sa.Integer(), nullable=True, default=10, comment='Request timeout'),
+        sa.Column('retry_count', sa.Integer(), nullable=True, default=3, comment='Retry attempts'),
+        sa.Column('template', sa.Text(), nullable=True, comment='Jinja2 template for payload'),
+        sa.Column('template_format', sa.String(length=20), nullable=True, default='json', comment='json, text'),
+        sa.Column('content_type_override', sa.String(length=100), nullable=True, comment='Custom Content-Type'),
+        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Creation time'),
+        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last update time'),
         sa.PrimaryKeyConstraint('id')
     )
 
-    # Create settings table
-    op.create_table('settings',
+    op.create_table(
+        'webhook_delivery_log',
         sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('key', sa.String(length=255), nullable=False, comment='Setting key (e.g., smtp_server)'),
-        sa.Column('value', sa.Text(), nullable=True, comment='Setting value (JSON for complex values)'),
-        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
-        sa.PrimaryKeyConstraint('id'),
-        sa.UniqueConstraint('key')
+        sa.Column('webhook_id', sa.Integer(), nullable=False, comment='Associated webhook'),
+        sa.Column('alert_id', sa.Integer(), nullable=False, comment='Associated alert'),
+        sa.Column('status', sa.String(length=20), nullable=True, comment='success, failed, retrying'),
+        sa.Column('response_code', sa.Integer(), nullable=True, comment='HTTP response code'),
+        sa.Column('response_body', sa.Text(), nullable=True, comment='Response body'),
+        sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if failed'),
+        sa.Column('attempt_number', sa.Integer(), nullable=True, comment='Which attempt'),
+        sa.Column('delivered_at', sa.DateTime(), nullable=False, comment='Delivery timestamp'),
+        sa.ForeignKeyConstraint(['webhook_id'], ['webhooks.id']),
+        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id']),
+        sa.PrimaryKeyConstraint('id')
     )
-    op.create_index(op.f('ix_settings_key'), 'settings', ['key'], unique=True)
+    op.create_index('ix_webhook_delivery_log_webhook_id', 'webhook_delivery_log', ['webhook_id'])
+    op.create_index('ix_webhook_delivery_log_alert_id', 'webhook_delivery_log', ['alert_id'])
+    op.create_index('ix_webhook_delivery_log_status', 'webhook_delivery_log', ['status'])
 
+    print("\n✓ Initial schema created successfully")
 
 
-def downgrade() -> None:
-    """Drop all tables."""
-    op.drop_index(op.f('ix_settings_key'), table_name='settings')
-    op.drop_table('settings')
-    op.drop_table('alert_rules')
-    op.drop_index(op.f('ix_alerts_scan_id'), table_name='alerts')
+def downgrade():
+    """Drop all tables in reverse order."""
+    op.drop_table('webhook_delivery_log')
+    op.drop_table('webhooks')
     op.drop_table('alerts')
-    op.drop_index(op.f('ix_scan_tls_versions_scan_id'), table_name='scan_tls_versions')
-    op.drop_index(op.f('ix_scan_tls_versions_certificate_id'), table_name='scan_tls_versions')
+    op.drop_table('alert_rules')
+    op.drop_table('scan_site_associations')
+    op.drop_table('scan_progress')
     op.drop_table('scan_tls_versions')
-    op.drop_index(op.f('ix_scan_certificates_service_id'), table_name='scan_certificates')
-    op.drop_index(op.f('ix_scan_certificates_scan_id'), table_name='scan_certificates')
     op.drop_table('scan_certificates')
-    op.drop_index(op.f('ix_scan_services_scan_id'), table_name='scan_services')
-    op.drop_index(op.f('ix_scan_services_port_id'), table_name='scan_services')
     op.drop_table('scan_services')
-    op.drop_index(op.f('ix_scan_ports_scan_id'), table_name='scan_ports')
-    op.drop_index(op.f('ix_scan_ports_ip_id'), table_name='scan_ports')
     op.drop_table('scan_ports')
-    op.drop_index(op.f('ix_scan_ips_site_id'), table_name='scan_ips')
-    op.drop_index(op.f('ix_scan_ips_scan_id'), table_name='scan_ips')
     op.drop_table('scan_ips')
-    op.drop_index(op.f('ix_scan_sites_scan_id'), table_name='scan_sites')
     op.drop_table('scan_sites')
-    op.drop_index(op.f('ix_scans_timestamp'), table_name='scans')
     op.drop_table('scans')
     op.drop_table('schedules')
+    op.drop_table('scan_config_sites')
+    op.drop_table('scan_configs')
+    op.drop_table('site_ips')
+    op.drop_table('sites')
+    op.drop_table('settings')
+
+    print("\n✓ All tables dropped")
```
```diff
@@ -1,28 +0,0 @@
-"""Add indexes for scan queries
-
-Revision ID: 002
-Revises: 001
-Create Date: 2025-11-14 00:30:00.000000
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '002'
-down_revision = '001'
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
-    """Add database indexes for better query performance."""
-    # Add index on scans.status for filtering
-    # Note: index on scans.timestamp already exists from migration 001
-    op.create_index('ix_scans_status', 'scans', ['status'], unique=False)
-
-
-def downgrade() -> None:
-    """Remove indexes."""
-    op.drop_index('ix_scans_status', table_name='scans')
```
```diff
@@ -1,39 +0,0 @@
-"""Add timing and error fields to scans table
-
-Revision ID: 003
-Revises: 002
-Create Date: 2025-11-14
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic
-revision = '003'
-down_revision = '002'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    """
-    Add fields for tracking scan execution timing and errors.
-
-    New fields:
-    - started_at: When scan execution actually started
-    - completed_at: When scan execution finished (success or failure)
-    - error_message: Error message if scan failed
-    """
-    with op.batch_alter_table('scans') as batch_op:
-        batch_op.add_column(sa.Column('started_at', sa.DateTime(), nullable=True, comment='Scan execution start time'))
-        batch_op.add_column(sa.Column('completed_at', sa.DateTime(), nullable=True, comment='Scan execution completion time'))
-        batch_op.add_column(sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if scan failed'))
-
-
-def downgrade():
-    """Remove the timing and error fields."""
-    with op.batch_alter_table('scans') as batch_op:
-        batch_op.drop_column('error_message')
-        batch_op.drop_column('completed_at')
-        batch_op.drop_column('started_at')
```
```diff
@@ -1,120 +0,0 @@
-"""Add enhanced alert features for Phase 5
-
-Revision ID: 004
-Revises: 003
-Create Date: 2025-11-18
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic
-revision = '004'
-down_revision = '003'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    """
-    Add enhancements for Phase 5 Alert Rule Engine:
-    - Enhanced alert_rules fields
-    - Enhanced alerts fields
-    - New webhooks table
-    - New webhook_delivery_log table
-    """
-
-    # Enhance alert_rules table
-    with op.batch_alter_table('alert_rules') as batch_op:
-        batch_op.add_column(sa.Column('name', sa.String(255), nullable=True, comment='User-friendly rule name'))
-        batch_op.add_column(sa.Column('webhook_enabled', sa.Boolean(), nullable=False, server_default='0', comment='Whether to send webhooks for this rule'))
-        batch_op.add_column(sa.Column('severity', sa.String(20), nullable=True, comment='Alert severity level (critical, warning, info)'))
-        batch_op.add_column(sa.Column('filter_conditions', sa.Text(), nullable=True, comment='JSON filter conditions for the rule'))
-        batch_op.add_column(sa.Column('config_file', sa.String(255), nullable=True, comment='Optional: specific config file this rule applies to'))
-        batch_op.add_column(sa.Column('updated_at', sa.DateTime(), nullable=True, comment='Last update timestamp'))
-
-    # Enhance alerts table
-    with op.batch_alter_table('alerts') as batch_op:
-        batch_op.add_column(sa.Column('rule_id', sa.Integer(), nullable=True, comment='Associated alert rule'))
-        batch_op.add_column(sa.Column('webhook_sent', sa.Boolean(), nullable=False, server_default='0', comment='Whether webhook was sent'))
-        batch_op.add_column(sa.Column('webhook_sent_at', sa.DateTime(), nullable=True, comment='When webhook was sent'))
-        batch_op.add_column(sa.Column('acknowledged', sa.Boolean(), nullable=False, server_default='0', comment='Whether alert was acknowledged'))
-        batch_op.add_column(sa.Column('acknowledged_at', sa.DateTime(), nullable=True, comment='When alert was acknowledged'))
-        batch_op.add_column(sa.Column('acknowledged_by', sa.String(255), nullable=True, comment='User who acknowledged the alert'))
-        batch_op.create_foreign_key('fk_alerts_rule_id', 'alert_rules', ['rule_id'], ['id'])
-        batch_op.create_index('idx_alerts_rule_id', ['rule_id'])
-        batch_op.create_index('idx_alerts_acknowledged', ['acknowledged'])
-
-    # Create webhooks table
-    op.create_table('webhooks',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(255), nullable=False, comment='Webhook name'),
-        sa.Column('url', sa.Text(), nullable=False, comment='Webhook URL'),
-        sa.Column('enabled', sa.Boolean(), nullable=False, server_default='1', comment='Whether webhook is enabled'),
-        sa.Column('auth_type', sa.String(20), nullable=True, comment='Authentication type: none, bearer, basic, custom'),
-        sa.Column('auth_token', sa.Text(), nullable=True, comment='Encrypted authentication token'),
-        sa.Column('custom_headers', sa.Text(), nullable=True, comment='JSON custom headers'),
-        sa.Column('alert_types', sa.Text(), nullable=True, comment='JSON array of alert types to trigger on'),
-        sa.Column('severity_filter', sa.Text(), nullable=True, comment='JSON array of severities to trigger on'),
-        sa.Column('timeout', sa.Integer(), nullable=True, server_default='10', comment='Request timeout in seconds'),
-        sa.Column('retry_count', sa.Integer(), nullable=True, server_default='3', comment='Number of retry attempts'),
-        sa.Column('created_at', sa.DateTime(), nullable=False),
-        sa.Column('updated_at', sa.DateTime(), nullable=False),
-        sa.PrimaryKeyConstraint('id')
-    )
-
-    # Create webhook_delivery_log table
-    op.create_table('webhook_delivery_log',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('webhook_id', sa.Integer(), nullable=False, comment='Associated webhook'),
-        sa.Column('alert_id', sa.Integer(), nullable=False, comment='Associated alert'),
-        sa.Column('status', sa.String(20), nullable=True, comment='Delivery status: success, failed, retrying'),
-        sa.Column('response_code', sa.Integer(), nullable=True, comment='HTTP response code'),
-        sa.Column('response_body', sa.Text(), nullable=True, comment='Response body from webhook'),
-        sa.Column('error_message', sa.Text(), nullable=True, comment='Error message if failed'),
-        sa.Column('attempt_number', sa.Integer(), nullable=True, comment='Which attempt this was'),
-        sa.Column('delivered_at', sa.DateTime(), nullable=False, comment='Delivery timestamp'),
-        sa.ForeignKeyConstraint(['webhook_id'], ['webhooks.id'], ),
-        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id'], ),
-        sa.PrimaryKeyConstraint('id')
-    )
-
-    # Create indexes for webhook_delivery_log
-    op.create_index('idx_webhook_delivery_alert_id', 'webhook_delivery_log', ['alert_id'])
-    op.create_index('idx_webhook_delivery_webhook_id', 'webhook_delivery_log', ['webhook_id'])
-    op.create_index('idx_webhook_delivery_status', 'webhook_delivery_log', ['status'])
-
-
-def downgrade():
-    """Remove Phase 5 alert enhancements."""
-
-    # Drop webhook_delivery_log table and its indexes
-    op.drop_index('idx_webhook_delivery_status', table_name='webhook_delivery_log')
-    op.drop_index('idx_webhook_delivery_webhook_id', table_name='webhook_delivery_log')
-    op.drop_index('idx_webhook_delivery_alert_id', table_name='webhook_delivery_log')
-    op.drop_table('webhook_delivery_log')
-
-    # Drop webhooks table
-    op.drop_table('webhooks')
-
-    # Remove enhancements from alerts table
-    with op.batch_alter_table('alerts') as batch_op:
-        batch_op.drop_index('idx_alerts_acknowledged')
-        batch_op.drop_index('idx_alerts_rule_id')
-        batch_op.drop_constraint('fk_alerts_rule_id', type_='foreignkey')
-        batch_op.drop_column('acknowledged_by')
-        batch_op.drop_column('acknowledged_at')
-        batch_op.drop_column('acknowledged')
-        batch_op.drop_column('webhook_sent_at')
-        batch_op.drop_column('webhook_sent')
-        batch_op.drop_column('rule_id')
-
-    # Remove enhancements from alert_rules table
-    with op.batch_alter_table('alert_rules') as batch_op:
-        batch_op.drop_column('updated_at')
-        batch_op.drop_column('config_file')
-        batch_op.drop_column('filter_conditions')
-        batch_op.drop_column('severity')
-        batch_op.drop_column('webhook_enabled')
-        batch_op.drop_column('name')
```
```diff
@@ -1,83 +0,0 @@
-"""Add webhook template support
-
-Revision ID: 005
-Revises: 004
-Create Date: 2025-11-18
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import json
-
-
-# revision identifiers, used by Alembic
-revision = '005'
-down_revision = '004'
-branch_labels = None
-depends_on = None
-
-
-# Default template that matches the current JSON payload structure
-DEFAULT_TEMPLATE = """{
-    "event": "alert.created",
-    "alert": {
-        "id": {{ alert.id }},
-        "type": "{{ alert.type }}",
-        "severity": "{{ alert.severity }}",
-        "message": "{{ alert.message }}",
-        {% if alert.ip_address %}"ip_address": "{{ alert.ip_address }}",{% endif %}
-        {% if alert.port %}"port": {{ alert.port }},{% endif %}
-        "acknowledged": {{ alert.acknowledged|lower }},
-        "created_at": "{{ alert.created_at.isoformat() }}"
-    },
-    "scan": {
-        "id": {{ scan.id }},
-        "title": "{{ scan.title }}",
-        "timestamp": "{{ scan.timestamp.isoformat() }}",
-        "status": "{{ scan.status }}"
-    },
-    "rule": {
-        "id": {{ rule.id }},
-        "name": "{{ rule.name }}",
-        "type": "{{ rule.type }}",
-        "threshold": {{ rule.threshold if rule.threshold else 'null' }}
-    }
-}"""
-
-
-def upgrade():
-    """
-    Add webhook template fields:
-    - template: Jinja2 template for payload
-    - template_format: Output format (json, text)
-    - content_type_override: Optional custom Content-Type
-    """
-
-    # Add new columns to webhooks table
-    with op.batch_alter_table('webhooks') as batch_op:
-        batch_op.add_column(sa.Column('template', sa.Text(), nullable=True, comment='Jinja2 template for webhook payload'))
-        batch_op.add_column(sa.Column('template_format', sa.String(20), nullable=True, server_default='json', comment='Template output format: json, text'))
-        batch_op.add_column(sa.Column('content_type_override', sa.String(100), nullable=True, comment='Optional custom Content-Type header'))
-
-    # Populate existing webhooks with default template
-    # This ensures backward compatibility by converting existing webhooks to use the
-    # same JSON structure they're currently sending
-    connection = op.get_bind()
-    connection.execute(
-        sa.text("""
-            UPDATE webhooks
-            SET template = :template,
-                template_format = 'json'
-            WHERE template IS NULL
-        """),
-        {"template": DEFAULT_TEMPLATE}
-    )
-
-
-def downgrade():
-    """Remove webhook template fields."""
-
-    with op.batch_alter_table('webhooks') as batch_op:
-        batch_op.drop_column('content_type_override')
-        batch_op.drop_column('template_format')
-        batch_op.drop_column('template')
```
```diff
@@ -1,161 +0,0 @@
-"""Add reusable site definitions
-
-Revision ID: 006
-Revises: 005
-Create Date: 2025-11-19
-
-This migration introduces reusable site definitions that can be shared across
-multiple scans. Sites are defined once with CIDR ranges and can be referenced
-in multiple scan configurations.
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy import text
-
-
-# revision identifiers, used by Alembic
-revision = '006'
-down_revision = '005'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    """
-    Create new site tables and migrate existing scan_sites data to the new structure.
-    """
-
-    # Create sites table (master site definitions)
-    op.create_table('sites',
-        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('name', sa.String(length=255), nullable=False, comment='Unique site name'),
-        sa.Column('description', sa.Text(), nullable=True, comment='Site description'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Site creation time'),
-        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
-        sa.PrimaryKeyConstraint('id'),
-        sa.UniqueConstraint('name', name='uix_site_name')
-    )
-    op.create_index(op.f('ix_sites_name'), 'sites', ['name'], unique=True)
-
-    # Create site_cidrs table (CIDR ranges for each site)
-    op.create_table('site_cidrs',
-        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
-        sa.Column('cidr', sa.String(length=45), nullable=False, comment='CIDR notation (e.g., 10.0.0.0/24)'),
-        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Expected ping response for this CIDR'),
-        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports'),
-        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, comment='CIDR creation time'),
-        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
-        sa.PrimaryKeyConstraint('id'),
-        sa.UniqueConstraint('site_id', 'cidr', name='uix_site_cidr')
-    )
-    op.create_index(op.f('ix_site_cidrs_site_id'), 'site_cidrs', ['site_id'], unique=False)
-
-    # Create site_ips table (IP-level overrides within CIDRs)
-    op.create_table('site_ips',
-        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('site_cidr_id', sa.Integer(), nullable=False, comment='FK to site_cidrs'),
-        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
-        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Override ping expectation for this IP'),
-        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports (overrides CIDR)'),
-        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports (overrides CIDR)'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, comment='IP override creation time'),
-        sa.ForeignKeyConstraint(['site_cidr_id'], ['site_cidrs.id'], ),
-        sa.PrimaryKeyConstraint('id'),
-        sa.UniqueConstraint('site_cidr_id', 'ip_address', name='uix_site_cidr_ip')
-    )
-    op.create_index(op.f('ix_site_ips_site_cidr_id'), 'site_ips', ['site_cidr_id'], unique=False)
-
-    # Create scan_site_associations table (many-to-many between scans and sites)
-    op.create_table('scan_site_associations',
-        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
-        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
-        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
-        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
-        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
-        sa.PrimaryKeyConstraint('id'),
-        sa.UniqueConstraint('scan_id', 'site_id', name='uix_scan_site')
-    )
-    op.create_index(op.f('ix_scan_site_associations_scan_id'), 'scan_site_associations', ['scan_id'], unique=False)
-    op.create_index(op.f('ix_scan_site_associations_site_id'), 'scan_site_associations', ['site_id'], unique=False)
-
-    # Migrate existing data
-    connection = op.get_bind()
-
-    # 1. Extract unique site names from existing scan_sites and create master Site records
-    # This groups all historical scan sites by name and creates one master site per unique name
-    connection.execute(text("""
-        INSERT INTO sites (name, description, created_at, updated_at)
-        SELECT DISTINCT
-            site_name,
-            'Migrated from scan_sites' as description,
-            datetime('now') as created_at,
-            datetime('now') as updated_at
-        FROM scan_sites
-        WHERE site_name NOT IN (SELECT name FROM sites)
-    """))
-
-    # 2. Create scan_site_associations linking scans to their sites
-    # This maintains the historical relationship between scans and the sites they used
-    connection.execute(text("""
-        INSERT INTO scan_site_associations (scan_id, site_id, created_at)
-        SELECT DISTINCT
-            ss.scan_id,
-            s.id as site_id,
-            datetime('now') as created_at
-        FROM scan_sites ss
-        INNER JOIN sites s ON s.name = ss.site_name
-        WHERE NOT EXISTS (
-            SELECT 1 FROM scan_site_associations ssa
-            WHERE ssa.scan_id = ss.scan_id AND ssa.site_id = s.id
-        )
-    """))
-
-    # 3. For each migrated site, create a CIDR entry from the IPs in scan_ips
-    # Since historical data has individual IPs, we'll create /32 CIDRs for each unique IP
-    # This preserves the exact IP addresses while fitting them into the new CIDR-based model
-    connection.execute(text("""
-        INSERT INTO site_cidrs (site_id, cidr, expected_ping, expected_tcp_ports, expected_udp_ports, created_at)
-        SELECT DISTINCT
-            s.id as site_id,
-            si.ip_address || '/32' as cidr,
-            si.ping_expected,
-            '[]' as expected_tcp_ports,
-            '[]' as expected_udp_ports,
-            datetime('now') as created_at
-        FROM scan_ips si
-        INNER JOIN scan_sites ss ON ss.id = si.site_id
```
|
|
||||||
INNER JOIN sites s ON s.name = ss.site_name
|
|
||||||
WHERE NOT EXISTS (
|
|
||||||
SELECT 1 FROM site_cidrs sc
|
|
||||||
WHERE sc.site_id = s.id AND sc.cidr = si.ip_address || '/32'
|
|
||||||
)
|
|
||||||
GROUP BY s.id, si.ip_address, si.ping_expected
|
|
||||||
"""))
|
|
||||||
|
|
||||||
print("✓ Migration complete: Reusable sites created from historical scan data")
|
|
||||||
print(f" - Created {connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()} master site(s)")
|
|
||||||
print(f" - Created {connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()} CIDR range(s)")
|
|
||||||
print(f" - Created {connection.execute(text('SELECT COUNT(*) FROM scan_site_associations')).scalar()} scan-site association(s)")
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
"""Remove reusable site tables."""
|
|
||||||
|
|
||||||
# Drop tables in reverse order of creation (respecting foreign keys)
|
|
||||||
op.drop_index(op.f('ix_scan_site_associations_site_id'), table_name='scan_site_associations')
|
|
||||||
op.drop_index(op.f('ix_scan_site_associations_scan_id'), table_name='scan_site_associations')
|
|
||||||
op.drop_table('scan_site_associations')
|
|
||||||
|
|
||||||
op.drop_index(op.f('ix_site_ips_site_cidr_id'), table_name='site_ips')
|
|
||||||
op.drop_table('site_ips')
|
|
||||||
|
|
||||||
op.drop_index(op.f('ix_site_cidrs_site_id'), table_name='site_cidrs')
|
|
||||||
op.drop_table('site_cidrs')
|
|
||||||
|
|
||||||
op.drop_index(op.f('ix_sites_name'), table_name='sites')
|
|
||||||
op.drop_table('sites')
|
|
||||||
|
|
||||||
print("✓ Downgrade complete: Reusable site tables removed")
|
|
||||||
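
Note: for readers tracing the data model that this (now removed) revision introduced, the following is a minimal sketch of how a scan's reusable sites could be looked up through the association table. It assumes a local SQLite file at data/sneakyscanner.db and a scan with id 1; neither value comes from this diff.

import sqlite3

# Hypothetical lookup against the schema created by revision 006:
# list the reusable sites (and their CIDR ranges) linked to one scan.
conn = sqlite3.connect("data/sneakyscanner.db")  # assumed path
rows = conn.execute(
    """
    SELECT s.name, c.cidr
    FROM scan_site_associations AS ssa
    JOIN sites AS s ON s.id = ssa.site_id
    LEFT JOIN site_cidrs AS c ON c.site_id = s.id
    WHERE ssa.scan_id = ?
    """,
    (1,),  # assumed scan id
).fetchall()
for name, cidr in rows:
    print(name, cidr)
conn.close()
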
@@ -1,102 +0,0 @@
"""Add database-stored scan configurations

Revision ID: 007
Revises: 006
Create Date: 2025-11-19

This migration introduces database-stored scan configurations to replace YAML
config files. Configs reference sites from the sites table, enabling visual
config builder and better data management.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '007'
down_revision = '006'
branch_labels = None
depends_on = None


def upgrade():
    """
    Create scan_configs and scan_config_sites tables.
    Add config_id foreign keys to scans and schedules tables.
    """

    # Create scan_configs table
    op.create_table('scan_configs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False, comment='Configuration title'),
        sa.Column('description', sa.Text(), nullable=True, comment='Configuration description'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Config creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create scan_config_sites table (many-to-many between configs and sites)
    op.create_table('scan_config_sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('config_id', sa.Integer(), nullable=False, comment='FK to scan_configs'),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id'], ),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('config_id', 'site_id', name='uix_config_site')
    )
    op.create_index(op.f('ix_scan_config_sites_config_id'), 'scan_config_sites', ['config_id'], unique=False)
    op.create_index(op.f('ix_scan_config_sites_site_id'), 'scan_config_sites', ['site_id'], unique=False)

    # Add config_id to scans table
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_scans_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_scans_config_id', 'scan_configs', ['config_id'], ['id'])
        # Mark config_file as deprecated in comment (already has nullable=True)

    # Add config_id to schedules table and make config_file nullable
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_schedules_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_schedules_config_id', 'scan_configs', ['config_id'], ['id'])
        # Make config_file nullable (it was required before)
        batch_op.alter_column('config_file', existing_type=sa.Text(), nullable=True)

    connection = op.get_bind()

    print("✓ Migration complete: Scan configs tables created")
    print(" - Created scan_configs table for database-stored configurations")
    print(" - Created scan_config_sites association table")
    print(" - Added config_id to scans table")
    print(" - Added config_id to schedules table")
    print(" - Existing YAML configs remain in config_file column for backward compatibility")


def downgrade():
    """Remove scan config tables and columns."""

    # Remove foreign keys and columns from schedules
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.drop_constraint('fk_schedules_config_id', type_='foreignkey')
        batch_op.drop_index('ix_schedules_config_id')
        batch_op.drop_column('config_id')
        # Restore config_file as required
        batch_op.alter_column('config_file', existing_type=sa.Text(), nullable=False)

    # Remove foreign keys and columns from scans
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.drop_constraint('fk_scans_config_id', type_='foreignkey')
        batch_op.drop_index('ix_scans_config_id')
        batch_op.drop_column('config_id')

    # Drop tables in reverse order
    op.drop_index(op.f('ix_scan_config_sites_site_id'), table_name='scan_config_sites')
    op.drop_index(op.f('ix_scan_config_sites_config_id'), table_name='scan_config_sites')
    op.drop_table('scan_config_sites')

    op.drop_table('scan_configs')

    print("✓ Downgrade complete: Scan config tables and columns removed")
@@ -1,270 +0,0 @@
"""Expand CIDRs to individual IPs with per-IP settings

Revision ID: 008
Revises: 007
Create Date: 2025-11-19

This migration changes the site architecture to automatically expand CIDRs into
individual IPs in the database. Each IP has its own port and ping settings.

Changes:
- Add site_id to site_ips (direct link to sites, support standalone IPs)
- Make site_cidr_id nullable (IPs can exist without a CIDR parent)
- Remove settings from site_cidrs (settings now only at IP level)
- Add unique constraint: no duplicate IPs within a site
- Expand existing CIDRs to individual IPs
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import ipaddress


# revision identifiers, used by Alembic
revision = '008'
down_revision = '007'
branch_labels = None
depends_on = None


def upgrade():
    """
    Modify schema to support per-IP settings and auto-expand CIDRs.
    """

    connection = op.get_bind()

    # Check if site_id column already exists
    inspector = sa.inspect(connection)
    site_ips_columns = [col['name'] for col in inspector.get_columns('site_ips')]
    site_cidrs_columns = [col['name'] for col in inspector.get_columns('site_cidrs')]

    # Step 1: Add site_id column to site_ips (will be populated from site_cidr_id)
    if 'site_id' not in site_ips_columns:
        print("Adding site_id column to site_ips...")
        op.add_column('site_ips', sa.Column('site_id', sa.Integer(), nullable=True, comment='FK to sites (direct link)'))
    else:
        print("site_id column already exists in site_ips, skipping...")

    # Step 2: Populate site_id from site_cidr_id (before we make it nullable)
    print("Populating site_id from existing site_cidr relationships...")
    connection.execute(text("""
        UPDATE site_ips
        SET site_id = (
            SELECT site_id
            FROM site_cidrs
            WHERE site_cidrs.id = site_ips.site_cidr_id
        )
        WHERE site_cidr_id IS NOT NULL
    """))

    # Step 3: Make site_id NOT NULL and add foreign key
    # Check if foreign key exists before creating
    try:
        op.alter_column('site_ips', 'site_id', nullable=False)
        print("Made site_id NOT NULL")
    except Exception as e:
        print(f"site_id already NOT NULL or error: {e}")

    # Check if foreign key exists
    try:
        op.create_foreign_key('fk_site_ips_site_id', 'site_ips', 'sites', ['site_id'], ['id'])
        print("Created foreign key fk_site_ips_site_id")
    except Exception as e:
        print(f"Foreign key already exists or error: {e}")

    # Check if index exists
    try:
        op.create_index(op.f('ix_site_ips_site_id'), 'site_ips', ['site_id'], unique=False)
        print("Created index ix_site_ips_site_id")
    except Exception as e:
        print(f"Index already exists or error: {e}")

    # Step 4: Make site_cidr_id nullable (for standalone IPs)
    try:
        op.alter_column('site_ips', 'site_cidr_id', nullable=True)
        print("Made site_cidr_id nullable")
    except Exception as e:
        print(f"site_cidr_id already nullable or error: {e}")

    # Step 5: Drop old unique constraint and create new one (site_id, ip_address)
    # This prevents duplicate IPs within a site (across all CIDRs and standalone)
    try:
        op.drop_constraint('uix_site_cidr_ip', 'site_ips', type_='unique')
        print("Dropped old constraint uix_site_cidr_ip")
    except Exception as e:
        print(f"Constraint already dropped or doesn't exist: {e}")

    try:
        op.create_unique_constraint('uix_site_ip_address', 'site_ips', ['site_id', 'ip_address'])
        print("Created new constraint uix_site_ip_address")
    except Exception as e:
        print(f"Constraint already exists or error: {e}")

    # Step 6: Expand existing CIDRs to individual IPs
    print("Expanding existing CIDRs to individual IPs...")

    # Get all existing CIDRs
    cidrs = connection.execute(text("""
        SELECT id, site_id, cidr, expected_ping, expected_tcp_ports, expected_udp_ports
        FROM site_cidrs
    """)).fetchall()

    expanded_count = 0
    skipped_count = 0

    for cidr_row in cidrs:
        cidr_id, site_id, cidr_str, expected_ping, expected_tcp_ports, expected_udp_ports = cidr_row

        try:
            # Parse CIDR
            network = ipaddress.ip_network(cidr_str, strict=False)

            # Check size - skip if too large (> /24 for IPv4, > /64 for IPv6)
            if isinstance(network, ipaddress.IPv4Network) and network.prefixlen < 24:
                print(f" ⚠ Skipping large CIDR {cidr_str} (>{network.num_addresses} IPs)")
                skipped_count += 1
                continue
            elif isinstance(network, ipaddress.IPv6Network) and network.prefixlen < 64:
                print(f" ⚠ Skipping large CIDR {cidr_str} (>{network.num_addresses} IPs)")
                skipped_count += 1
                continue

            # Expand to individual IPs
            for ip in network.hosts() if network.num_addresses > 2 else [network.network_address]:
                ip_str = str(ip)

                # Check if this IP already exists (from previous IP overrides)
                existing = connection.execute(text("""
                    SELECT id FROM site_ips
                    WHERE site_cidr_id = :cidr_id AND ip_address = :ip_address
                """), {'cidr_id': cidr_id, 'ip_address': ip_str}).fetchone()

                if not existing:
                    # Insert new IP with settings from CIDR
                    connection.execute(text("""
                        INSERT INTO site_ips (
                            site_id, site_cidr_id, ip_address,
                            expected_ping, expected_tcp_ports, expected_udp_ports,
                            created_at
                        )
                        VALUES (
                            :site_id, :cidr_id, :ip_address,
                            :expected_ping, :expected_tcp_ports, :expected_udp_ports,
                            datetime('now')
                        )
                    """), {
                        'site_id': site_id,
                        'cidr_id': cidr_id,
                        'ip_address': ip_str,
                        'expected_ping': expected_ping,
                        'expected_tcp_ports': expected_tcp_ports,
                        'expected_udp_ports': expected_udp_ports
                    })
                    expanded_count += 1

        except Exception as e:
            print(f" ✗ Error expanding CIDR {cidr_str}: {e}")
            skipped_count += 1
            continue

    print(f" ✓ Expanded {expanded_count} IPs from CIDRs")
    if skipped_count > 0:
        print(f" ⚠ Skipped {skipped_count} CIDRs (too large or errors)")

    # Step 7: Remove settings columns from site_cidrs (now only at IP level)
    print("Removing settings columns from site_cidrs...")
    # Re-inspect to get current columns
    site_cidrs_columns = [col['name'] for col in inspector.get_columns('site_cidrs')]

    if 'expected_ping' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_ping')
            print("Dropped expected_ping from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_ping: {e}")
    else:
        print("expected_ping already dropped from site_cidrs")

    if 'expected_tcp_ports' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_tcp_ports')
            print("Dropped expected_tcp_ports from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_tcp_ports: {e}")
    else:
        print("expected_tcp_ports already dropped from site_cidrs")

    if 'expected_udp_ports' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_udp_ports')
            print("Dropped expected_udp_ports from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_udp_ports: {e}")
    else:
        print("expected_udp_ports already dropped from site_cidrs")

    # Print summary
    total_sites = connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()
    total_cidrs = connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()
    total_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()

    print("\n✓ Migration 008 complete: CIDRs expanded to individual IPs")
    print(f" - Total sites: {total_sites}")
    print(f" - Total CIDRs: {total_cidrs}")
    print(f" - Total IPs: {total_ips}")


def downgrade():
    """
    Revert schema changes (restore CIDR-level settings).
    Note: This will lose per-IP granularity!
    """

    connection = op.get_bind()

    print("Rolling back to CIDR-level settings...")

    # Step 1: Add settings columns back to site_cidrs
    op.add_column('site_cidrs', sa.Column('expected_ping', sa.Boolean(), nullable=True))
    op.add_column('site_cidrs', sa.Column('expected_tcp_ports', sa.Text(), nullable=True))
    op.add_column('site_cidrs', sa.Column('expected_udp_ports', sa.Text(), nullable=True))

    # Step 2: Populate CIDR settings from first IP in each CIDR (approximation)
    connection.execute(text("""
        UPDATE site_cidrs
        SET
            expected_ping = (
                SELECT expected_ping FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            ),
            expected_tcp_ports = (
                SELECT expected_tcp_ports FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            ),
            expected_udp_ports = (
                SELECT expected_udp_ports FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            )
    """))

    # Step 3: Delete auto-expanded IPs (keep only original overrides)
    # In practice, this is difficult to determine, so we'll keep all IPs
    # and just remove the schema changes

    # Step 4: Drop new unique constraint and restore old one
    op.drop_constraint('uix_site_ip_address', 'site_ips', type_='unique')
    op.create_unique_constraint('uix_site_cidr_ip', 'site_ips', ['site_cidr_id', 'ip_address'])

    # Step 5: Make site_cidr_id NOT NULL again
    op.alter_column('site_ips', 'site_cidr_id', nullable=False)

    # Step 6: Drop site_id column and related constraints
    op.drop_index(op.f('ix_site_ips_site_id'), table_name='site_ips')
    op.drop_constraint('fk_site_ips_site_id', 'site_ips', type_='foreignkey')
    op.drop_column('site_ips', 'site_id')

    print("✓ Downgrade complete: Reverted to CIDR-level settings")
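
Note: the expansion step above leans on Python's ipaddress module. The following is a small standalone sketch of the same rule, showing why the migration falls back to the network address for /31 and /32 ranges (where hosts() can yield nothing on older Python versions). The function name is illustrative, not part of the codebase.

import ipaddress

def expand_cidr(cidr):
    """Yield host IPs for a CIDR, mirroring the guard used in revision 008."""
    network = ipaddress.ip_network(cidr, strict=False)
    # /31 and /32 have num_addresses <= 2; fall back to the network address
    # so single-host ranges still produce one row.
    candidates = network.hosts() if network.num_addresses > 2 else [network.network_address]
    for ip in candidates:
        yield str(ip)

print(list(expand_cidr("192.0.2.0/30")))   # ['192.0.2.1', '192.0.2.2']
print(list(expand_cidr("192.0.2.7/32")))   # ['192.0.2.7']
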
@@ -1,210 +0,0 @@
"""Remove CIDR table - make sites IP-only

Revision ID: 009
Revises: 008
Create Date: 2025-11-19

This migration removes the SiteCIDR table entirely, making sites purely
IP-based. CIDRs are now only used as a convenience for bulk IP addition,
not stored as permanent entities.

Changes:
- Set all site_ips.site_cidr_id to NULL (preserve all IPs)
- Drop foreign key from site_ips to site_cidrs
- Drop site_cidrs table
- Remove site_cidr_id column from site_ips

All existing IPs are preserved. They become "standalone" IPs without
a CIDR parent.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '009'
down_revision = '008'
branch_labels = None
depends_on = None


def upgrade():
    """
    Remove CIDR table and make all IPs standalone.
    """

    connection = op.get_bind()
    inspector = sa.inspect(connection)

    print("\n=== Migration 009: Remove CIDR Table ===\n")

    # Get counts before migration
    try:
        total_cidrs = connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()
        total_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()
        ips_with_cidr = connection.execute(text(
            'SELECT COUNT(*) FROM site_ips WHERE site_cidr_id IS NOT NULL'
        )).scalar()

        print(f"Before migration:")
        print(f" - Total CIDRs: {total_cidrs}")
        print(f" - Total IPs: {total_ips}")
        print(f" - IPs linked to CIDRs: {ips_with_cidr}")
        print(f" - Standalone IPs: {total_ips - ips_with_cidr}\n")
    except Exception as e:
        print(f"Could not get pre-migration stats: {e}\n")

    # Step 1: Set all site_cidr_id to NULL (preserve all IPs as standalone)
    print("Step 1: Converting all IPs to standalone (nulling CIDR associations)...")
    try:
        result = connection.execute(text("""
            UPDATE site_ips
            SET site_cidr_id = NULL
            WHERE site_cidr_id IS NOT NULL
        """))
        print(f" ✓ Converted {result.rowcount} IPs to standalone\n")
    except Exception as e:
        print(f" ⚠ Error or already done: {e}\n")

    # Step 2: Drop foreign key constraint from site_ips to site_cidrs
    print("Step 2: Dropping foreign key constraint from site_ips to site_cidrs...")
    foreign_keys = inspector.get_foreign_keys('site_ips')
    fk_to_drop = None

    for fk in foreign_keys:
        if fk['referred_table'] == 'site_cidrs':
            fk_to_drop = fk['name']
            break

    if fk_to_drop:
        try:
            op.drop_constraint(fk_to_drop, 'site_ips', type_='foreignkey')
            print(f" ✓ Dropped foreign key constraint: {fk_to_drop}\n")
        except Exception as e:
            print(f" ⚠ Could not drop foreign key: {e}\n")
    else:
        print(" ⚠ Foreign key constraint not found or already dropped\n")

    # Step 3: Drop index on site_cidr_id (if exists)
    print("Step 3: Dropping index on site_cidr_id...")
    indexes = inspector.get_indexes('site_ips')
    index_to_drop = None

    for idx in indexes:
        if 'site_cidr_id' in idx['column_names']:
            index_to_drop = idx['name']
            break

    if index_to_drop:
        try:
            op.drop_index(index_to_drop, table_name='site_ips')
            print(f" ✓ Dropped index: {index_to_drop}\n")
        except Exception as e:
            print(f" ⚠ Could not drop index: {e}\n")
    else:
        print(" ⚠ Index not found or already dropped\n")

    # Step 4: Drop site_cidrs table
    print("Step 4: Dropping site_cidrs table...")
    tables = inspector.get_table_names()

    if 'site_cidrs' in tables:
        try:
            op.drop_table('site_cidrs')
            print(" ✓ Dropped site_cidrs table\n")
        except Exception as e:
            print(f" ⚠ Could not drop table: {e}\n")
    else:
        print(" ⚠ Table site_cidrs not found or already dropped\n")

    # Step 5: Drop site_cidr_id column from site_ips
    print("Step 5: Dropping site_cidr_id column from site_ips...")
    site_ips_columns = [col['name'] for col in inspector.get_columns('site_ips')]

    if 'site_cidr_id' in site_ips_columns:
        try:
            op.drop_column('site_ips', 'site_cidr_id')
            print(" ✓ Dropped site_cidr_id column from site_ips\n")
        except Exception as e:
            print(f" ⚠ Could not drop column: {e}\n")
    else:
        print(" ⚠ Column site_cidr_id not found or already dropped\n")

    # Get counts after migration
    try:
        final_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()
        total_sites = connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()

        print("After migration:")
        print(f" - Total sites: {total_sites}")
        print(f" - Total IPs (all standalone): {final_ips}")
        print(f" - CIDRs: N/A (table removed)")
    except Exception as e:
        print(f"Could not get post-migration stats: {e}")

    print("\n✓ Migration 009 complete: Sites are now IP-only")
    print(" All IPs preserved as standalone. CIDRs can still be used")
    print(" via the API/UI for bulk IP creation, but are not stored.\n")


def downgrade():
    """
    Recreate site_cidrs table (CANNOT restore original CIDR associations).

    WARNING: This downgrade creates an empty site_cidrs table structure but
    cannot restore the original CIDR-to-IP associations since that data was
    deleted. All IPs will remain standalone.
    """

    connection = op.get_bind()

    print("\n=== Downgrade 009: Recreate CIDR Table Structure ===\n")
    print("⚠ WARNING: Cannot restore original CIDR associations!")
    print(" The site_cidrs table structure will be recreated but will be empty.")
    print(" All IPs will remain standalone. This is a PARTIAL downgrade.\n")

    # Step 1: Recreate site_cidrs table (empty)
    print("Step 1: Recreating site_cidrs table structure...")
    try:
        op.create_table(
            'site_cidrs',
            sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
            sa.Column('site_id', sa.Integer(), nullable=False),
            sa.Column('cidr', sa.String(length=45), nullable=False, comment='CIDR notation (e.g., 10.0.0.0/24)'),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
            sa.UniqueConstraint('site_id', 'cidr', name='uix_site_cidr')
        )
        print(" ✓ Recreated site_cidrs table (empty)\n")
    except Exception as e:
        print(f" ⚠ Could not create table: {e}\n")

    # Step 2: Add site_cidr_id column back to site_ips (nullable)
    print("Step 2: Adding site_cidr_id column back to site_ips...")
    try:
        op.add_column('site_ips', sa.Column('site_cidr_id', sa.Integer(), nullable=True, comment='FK to site_cidrs (optional, for grouping)'))
        print(" ✓ Added site_cidr_id column (nullable)\n")
    except Exception as e:
        print(f" ⚠ Could not add column: {e}\n")

    # Step 3: Add foreign key constraint
    print("Step 3: Adding foreign key constraint...")
    try:
        op.create_foreign_key('fk_site_ips_site_cidr_id', 'site_ips', 'site_cidrs', ['site_cidr_id'], ['id'])
        print(" ✓ Created foreign key constraint\n")
    except Exception as e:
        print(f" ⚠ Could not create foreign key: {e}\n")

    # Step 4: Add index on site_cidr_id
    print("Step 4: Adding index on site_cidr_id...")
    try:
        op.create_index('ix_site_ips_site_cidr_id', 'site_ips', ['site_cidr_id'], unique=False)
        print(" ✓ Created index on site_cidr_id\n")
    except Exception as e:
        print(f" ⚠ Could not create index: {e}\n")

    print("✓ Downgrade complete: CIDR table structure restored (but empty)")
    print(" All IPs remain standalone. You would need to manually recreate")
    print(" CIDR records and associate IPs with them.\n")
@@ -1,53 +0,0 @@
"""Add config_id to alert_rules table

Revision ID: 010
Revises: 009
Create Date: 2025-11-19

This migration adds config_id foreign key to alert_rules table to replace
the config_file column, completing the migration from file-based to
database-based configurations.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '010'
down_revision = '009'
branch_labels = None
depends_on = None


def upgrade():
    """
    Add config_id to alert_rules table and remove config_file.
    """

    with op.batch_alter_table('alert_rules', schema=None) as batch_op:
        # Add config_id column with foreign key
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_alert_rules_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_alert_rules_config_id', 'scan_configs', ['config_id'], ['id'])

        # Remove the old config_file column
        batch_op.drop_column('config_file')

    print("✓ Migration complete: AlertRule now uses config_id")
    print(" - Added config_id foreign key to alert_rules table")
    print(" - Removed deprecated config_file column")


def downgrade():
    """Remove config_id and restore config_file on alert_rules."""

    with op.batch_alter_table('alert_rules', schema=None) as batch_op:
        # Remove foreign key and config_id column
        batch_op.drop_constraint('fk_alert_rules_config_id', type_='foreignkey')
        batch_op.drop_index('ix_alert_rules_config_id')
        batch_op.drop_column('config_id')

        # Restore config_file column
        batch_op.add_column(sa.Column('config_file', sa.String(255), nullable=True, comment='Optional: specific config file this rule applies to'))

    print("✓ Downgrade complete: AlertRule config_id removed, config_file restored")
@@ -1,86 +0,0 @@
"""Drop deprecated config_file columns

Revision ID: 011
Revises: 010
Create Date: 2025-11-19

This migration removes the deprecated config_file columns from scans and schedules
tables. All functionality now uses config_id to reference database-stored configs.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '011'
down_revision = '010'
branch_labels = None
depends_on = None


def upgrade():
    """
    Drop config_file columns from scans and schedules tables.

    Prerequisites:
    - All scans must have config_id set
    - All schedules must have config_id set
    - Code must be updated to no longer reference config_file
    """

    connection = op.get_bind()

    # Check for any records missing config_id
    result = connection.execute(sa.text(
        "SELECT COUNT(*) FROM scans WHERE config_id IS NULL"
    ))
    scans_without_config = result.scalar()

    result = connection.execute(sa.text(
        "SELECT COUNT(*) FROM schedules WHERE config_id IS NULL"
    ))
    schedules_without_config = result.scalar()

    if scans_without_config > 0:
        print(f"WARNING: {scans_without_config} scans have NULL config_id")
        print(" These scans will lose their config reference after migration")

    if schedules_without_config > 0:
        raise Exception(
            f"Cannot proceed: {schedules_without_config} schedules have NULL config_id. "
            "Please set config_id for all schedules before running this migration."
        )

    # Drop config_file from scans table
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.drop_column('config_file')

    # Drop config_file from schedules table
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.drop_column('config_file')

    print("✓ Migration complete: Dropped config_file columns")
    print(" - Removed config_file from scans table")
    print(" - Removed config_file from schedules table")
    print(" - All references should now use config_id")


def downgrade():
    """Re-add config_file columns (data will be lost)."""

    # Add config_file back to scans
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.add_column(
            sa.Column('config_file', sa.Text(), nullable=True,
                      comment='Path to YAML config used (deprecated)')
        )

    # Add config_file back to schedules
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.add_column(
            sa.Column('config_file', sa.Text(), nullable=True,
                      comment='Path to YAML config (deprecated)')
        )

    print("✓ Downgrade complete: Re-added config_file columns")
    print(" WARNING: config_file values are lost and will be NULL")
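
Note: because this (removed) revision aborts when any schedule still lacks a config_id, a quick pre-flight check can be run before upgrading. A sketch using the standard sqlite3 module, assuming the data/sneakyscanner.db path that the rest of this diff mounts into the container:

import sqlite3

# Hypothetical pre-flight check for revision 011: every schedule (and ideally
# every scan) should already reference a database-stored config.
conn = sqlite3.connect("data/sneakyscanner.db")  # assumed path
for table in ("scans", "schedules"):
    missing = conn.execute(
        f"SELECT COUNT(*) FROM {table} WHERE config_id IS NULL"
    ).fetchone()[0]
    print(f"{table} without config_id: {missing}")
conn.close()
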
@@ -1,58 +0,0 @@
"""Add scan progress tracking

Revision ID: 012
Revises: 011
Create Date: 2024-01-01 00:00:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '012'
down_revision = '011'
branch_labels = None
depends_on = None


def upgrade():
    # Add progress tracking columns to scans table
    op.add_column('scans', sa.Column('current_phase', sa.String(50), nullable=True,
                                     comment='Current scan phase: ping, tcp_scan, udp_scan, service_detection, http_analysis'))
    op.add_column('scans', sa.Column('total_ips', sa.Integer(), nullable=True,
                                     comment='Total number of IPs to scan'))
    op.add_column('scans', sa.Column('completed_ips', sa.Integer(), nullable=True, default=0,
                                     comment='Number of IPs completed in current phase'))

    # Create scan_progress table for per-IP progress tracking
    op.create_table(
        'scan_progress',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('scan_id', sa.Integer(), sa.ForeignKey('scans.id'), nullable=False, index=True),
        sa.Column('ip_address', sa.String(45), nullable=False, comment='IP address being scanned'),
        sa.Column('site_name', sa.String(255), nullable=True, comment='Site name this IP belongs to'),
        sa.Column('phase', sa.String(50), nullable=False,
                  comment='Phase: ping, tcp_scan, udp_scan, service_detection, http_analysis'),
        sa.Column('status', sa.String(20), nullable=False, default='pending',
                  comment='pending, in_progress, completed, failed'),
        sa.Column('ping_result', sa.Boolean(), nullable=True, comment='Ping response result'),
        sa.Column('tcp_ports', sa.Text(), nullable=True, comment='JSON array of discovered TCP ports'),
        sa.Column('udp_ports', sa.Text(), nullable=True, comment='JSON array of discovered UDP ports'),
        sa.Column('services', sa.Text(), nullable=True, comment='JSON array of detected services'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now(),
                  comment='Entry creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.func.now(),
                  onupdate=sa.func.now(), comment='Last update time'),
        sa.UniqueConstraint('scan_id', 'ip_address', name='uix_scan_progress_ip')
    )


def downgrade():
    # Drop scan_progress table
    op.drop_table('scan_progress')

    # Remove progress tracking columns from scans table
    op.drop_column('scans', 'completed_ips')
    op.drop_column('scans', 'total_ips')
    op.drop_column('scans', 'current_phase')
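
Note: the scan_progress table above enforces one row per (scan_id, ip_address) via uix_scan_progress_ip, which makes an upsert the natural write pattern. A hedged sketch of what that could look like with SQLite's ON CONFLICT clause; the values and database path are illustrative only, not taken from the scanner code.

import sqlite3

# Hypothetical per-IP progress writer for the scan_progress table:
# insert the row once, then update it in place on later phases.
conn = sqlite3.connect("data/sneakyscanner.db")  # assumed path
conn.execute(
    """
    INSERT INTO scan_progress (scan_id, ip_address, phase, status, created_at, updated_at)
    VALUES (?, ?, ?, ?, datetime('now'), datetime('now'))
    ON CONFLICT(scan_id, ip_address) DO UPDATE SET
        phase = excluded.phase,
        status = excluded.status,
        updated_at = datetime('now')
    """,
    (1, "192.0.2.10", "tcp_scan", "in_progress"),  # illustrative values
)
conn.commit()
conn.close()
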
@@ -2,12 +2,10 @@ version: '3.8'
 
 services:
   web:
-    build: .
-    image: sneakyscanner:latest
+    image: sneakyscan
     container_name: sneakyscanner-web
-    # Use entrypoint script that auto-initializes database on first run
-    entrypoint: ["/docker-entrypoint.sh"]
-    command: ["python3", "-u", "-m", "web.app"]
+    working_dir: /app
+    entrypoint: ["python3", "-u", "-m", "web.app"]
     # Note: Using host network mode for scanner capabilities, so no port mapping needed
     # The Flask app will be accessible at http://localhost:5000
     volumes:
@@ -59,8 +57,7 @@ services:
   # Optional: Initialize database on first run
   # Run with: docker-compose -f docker-compose-web.yml run --rm init-db
   init-db:
-    build: .
-    image: sneakyscanner:latest
+    image: sneakyscan
    container_name: sneakyscanner-init-db
     entrypoint: ["python3"]
     command: ["init_db.py", "--db-url", "sqlite:////app/data/sneakyscanner.db"]
@@ -68,3 +65,4 @@ services:
       - ./data:/app/data
     profiles:
       - tools
+networks: []

43 setup.sh
@@ -91,27 +91,40 @@ echo "Creating required directories..."
 mkdir -p data logs output configs
 echo "✓ Directories created"
 
-# Check if Docker is running
+# Check if Podman is running
 echo ""
-echo "Checking Docker..."
-if ! docker info > /dev/null 2>&1; then
-    echo "✗ Docker is not running or not installed"
-    echo "Please install Docker and start the Docker daemon"
+echo "Checking Podman..."
+if ! podman info > /dev/null 2>&1; then
+    echo "✗ Podman is not running or not installed"
+    echo "Please install Podman"
     exit 1
 fi
-echo "✓ Docker is running"
+echo "✓ Podman is available"
 
 # Build and start
 echo ""
-echo "Building and starting SneakyScanner..."
+echo "Starting SneakyScanner..."
 echo "This may take a few minutes on first run..."
 echo ""
 
-docker compose build
+podman build --network=host -t sneakyscan .
 
+# Initialize database if it doesn't exist or is empty
 echo ""
-echo "Starting SneakyScanner..."
-docker compose up -d
+echo "Initializing database..."
+
+# Build init command with optional password
+INIT_CMD="init_db.py --db-url sqlite:////app/data/sneakyscanner.db --force"
+if [ -n "$INITIAL_PASSWORD" ]; then
+    INIT_CMD="$INIT_CMD --password $INITIAL_PASSWORD"
+fi
+
+podman run --rm --entrypoint python3 -w /app \
+    -v "$(pwd)/data:/app/data" \
+    sneakyscan $INIT_CMD
+echo "✓ Database initialized"
+
+podman-compose up -d
 
 # Wait for service to be healthy
 echo ""
@@ -119,7 +132,7 @@ echo "Waiting for application to start..."
 sleep 5
 
 # Check if container is running
-if docker ps | grep -q sneakyscanner-web; then
+if podman ps | grep -q sneakyscanner-web; then
 echo ""
 echo "================================================"
 echo " ✓ SneakyScanner is Running!"
@@ -140,15 +153,15 @@ if docker ps | grep -q sneakyscanner-web; then
 fi
 echo ""
 echo "Useful commands:"
-echo " docker compose logs -f # View logs"
-echo " docker compose stop # Stop the service"
-echo " docker compose restart # Restart the service"
+echo " podman-compose logs -f # View logs"
+echo " podman-compose stop # Stop the service"
+echo " podman-compose restart # Restart the service"
 echo ""
 echo "⚠ IMPORTANT: Change your password after first login!"
 echo "================================================"
 else
 echo ""
 echo "✗ Container failed to start. Check logs with:"
-echo " docker compose logs"
+echo " podman-compose logs"
 exit 1
 fi