Compare commits

2 Commits
4a4c33a10b...0ec338e252

| Author | SHA1 | Date |
|---|---|---|
| | 0ec338e252 | |
| | 034f146fa1 | |
app/migrations/versions/006_add_reusable_sites.py (new file, 161 lines)
@@ -0,0 +1,161 @@
```python
"""Add reusable site definitions

Revision ID: 006
Revises: 005
Create Date: 2025-11-19

This migration introduces reusable site definitions that can be shared across
multiple scans. Sites are defined once with CIDR ranges and can be referenced
in multiple scan configurations.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '006'
down_revision = '005'
branch_labels = None
depends_on = None


def upgrade():
    """
    Create new site tables and migrate existing scan_sites data to the new structure.
    """

    # Create sites table (master site definitions)
    op.create_table('sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False, comment='Unique site name'),
        sa.Column('description', sa.Text(), nullable=True, comment='Site description'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Site creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name', name='uix_site_name')
    )
    op.create_index(op.f('ix_sites_name'), 'sites', ['name'], unique=True)

    # Create site_cidrs table (CIDR ranges for each site)
    op.create_table('site_cidrs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('cidr', sa.String(length=45), nullable=False, comment='CIDR notation (e.g., 10.0.0.0/24)'),
        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Expected ping response for this CIDR'),
        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports'),
        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='CIDR creation time'),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('site_id', 'cidr', name='uix_site_cidr')
    )
    op.create_index(op.f('ix_site_cidrs_site_id'), 'site_cidrs', ['site_id'], unique=False)

    # Create site_ips table (IP-level overrides within CIDRs)
    op.create_table('site_ips',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('site_cidr_id', sa.Integer(), nullable=False, comment='FK to site_cidrs'),
        sa.Column('ip_address', sa.String(length=45), nullable=False, comment='IPv4 or IPv6 address'),
        sa.Column('expected_ping', sa.Boolean(), nullable=True, comment='Override ping expectation for this IP'),
        sa.Column('expected_tcp_ports', sa.Text(), nullable=True, comment='JSON array of expected TCP ports (overrides CIDR)'),
        sa.Column('expected_udp_ports', sa.Text(), nullable=True, comment='JSON array of expected UDP ports (overrides CIDR)'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='IP override creation time'),
        sa.ForeignKeyConstraint(['site_cidr_id'], ['site_cidrs.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('site_cidr_id', 'ip_address', name='uix_site_cidr_ip')
    )
    op.create_index(op.f('ix_site_ips_site_cidr_id'), 'site_ips', ['site_cidr_id'], unique=False)

    # Create scan_site_associations table (many-to-many between scans and sites)
    op.create_table('scan_site_associations',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('scan_id', sa.Integer(), nullable=False, comment='FK to scans'),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('scan_id', 'site_id', name='uix_scan_site')
    )
    op.create_index(op.f('ix_scan_site_associations_scan_id'), 'scan_site_associations', ['scan_id'], unique=False)
    op.create_index(op.f('ix_scan_site_associations_site_id'), 'scan_site_associations', ['site_id'], unique=False)

    # Migrate existing data
    connection = op.get_bind()

    # 1. Extract unique site names from existing scan_sites and create master Site records
    # This groups all historical scan sites by name and creates one master site per unique name
    connection.execute(text("""
        INSERT INTO sites (name, description, created_at, updated_at)
        SELECT DISTINCT
            site_name,
            'Migrated from scan_sites' as description,
            datetime('now') as created_at,
            datetime('now') as updated_at
        FROM scan_sites
        WHERE site_name NOT IN (SELECT name FROM sites)
    """))

    # 2. Create scan_site_associations linking scans to their sites
    # This maintains the historical relationship between scans and the sites they used
    connection.execute(text("""
        INSERT INTO scan_site_associations (scan_id, site_id, created_at)
        SELECT DISTINCT
            ss.scan_id,
            s.id as site_id,
            datetime('now') as created_at
        FROM scan_sites ss
        INNER JOIN sites s ON s.name = ss.site_name
        WHERE NOT EXISTS (
            SELECT 1 FROM scan_site_associations ssa
            WHERE ssa.scan_id = ss.scan_id AND ssa.site_id = s.id
        )
    """))

    # 3. For each migrated site, create a CIDR entry from the IPs in scan_ips
    # Since historical data has individual IPs, we'll create /32 CIDRs for each unique IP
    # This preserves the exact IP addresses while fitting them into the new CIDR-based model
    connection.execute(text("""
        INSERT INTO site_cidrs (site_id, cidr, expected_ping, expected_tcp_ports, expected_udp_ports, created_at)
        SELECT DISTINCT
            s.id as site_id,
            si.ip_address || '/32' as cidr,
            si.ping_expected,
            '[]' as expected_tcp_ports,
            '[]' as expected_udp_ports,
            datetime('now') as created_at
        FROM scan_ips si
        INNER JOIN scan_sites ss ON ss.id = si.site_id
        INNER JOIN sites s ON s.name = ss.site_name
        WHERE NOT EXISTS (
            SELECT 1 FROM site_cidrs sc
            WHERE sc.site_id = s.id AND sc.cidr = si.ip_address || '/32'
        )
        GROUP BY s.id, si.ip_address, si.ping_expected
    """))

    print("✓ Migration complete: Reusable sites created from historical scan data")
    print(f"  - Created {connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()} master site(s)")
    print(f"  - Created {connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()} CIDR range(s)")
    print(f"  - Created {connection.execute(text('SELECT COUNT(*) FROM scan_site_associations')).scalar()} scan-site association(s)")


def downgrade():
    """Remove reusable site tables."""

    # Drop tables in reverse order of creation (respecting foreign keys)
    op.drop_index(op.f('ix_scan_site_associations_site_id'), table_name='scan_site_associations')
    op.drop_index(op.f('ix_scan_site_associations_scan_id'), table_name='scan_site_associations')
    op.drop_table('scan_site_associations')

    op.drop_index(op.f('ix_site_ips_site_cidr_id'), table_name='site_ips')
    op.drop_table('site_ips')

    op.drop_index(op.f('ix_site_cidrs_site_id'), table_name='site_cidrs')
    op.drop_table('site_cidrs')

    op.drop_index(op.f('ix_sites_name'), table_name='sites')
    op.drop_table('sites')

    print("✓ Downgrade complete: Reusable site tables removed")
```
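The three `INSERT ... SELECT` statements above are written to be re-runnable: each filters out rows that already exist. A standalone sketch of the dedup pattern from step 1, against an in-memory SQLite database with invented sample rows:

```python
import sqlite3

# Minimal sketch of the dedup pattern used in migration 006, with toy data.
conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE scan_sites (id INTEGER PRIMARY KEY, scan_id INTEGER, site_name TEXT);
    CREATE TABLE sites (id INTEGER PRIMARY KEY, name TEXT UNIQUE);
    INSERT INTO scan_sites (scan_id, site_name) VALUES (1, 'DMZ'), (2, 'DMZ'), (2, 'Lab');
""")

# One master site per unique historical name; NOT IN makes this re-runnable.
conn.execute("""
    INSERT INTO sites (name)
    SELECT DISTINCT site_name FROM scan_sites
    WHERE site_name NOT IN (SELECT name FROM sites)
""")
print(conn.execute("SELECT name FROM sites ORDER BY name").fetchall())
# [('DMZ',), ('Lab',)] -- running the INSERT again adds nothing
```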
app/migrations/versions/007_configs_to_database.py (new file, 102 lines)
@@ -0,0 +1,102 @@
```python
"""Add database-stored scan configurations

Revision ID: 007
Revises: 006
Create Date: 2025-11-19

This migration introduces database-stored scan configurations to replace YAML
config files. Configs reference sites from the sites table, enabling visual
config builder and better data management.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '007'
down_revision = '006'
branch_labels = None
depends_on = None


def upgrade():
    """
    Create scan_configs and scan_config_sites tables.
    Add config_id foreign keys to scans and schedules tables.
    """

    # Create scan_configs table
    op.create_table('scan_configs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False, comment='Configuration title'),
        sa.Column('description', sa.Text(), nullable=True, comment='Configuration description'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Config creation time'),
        sa.Column('updated_at', sa.DateTime(), nullable=False, comment='Last modification time'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create scan_config_sites table (many-to-many between configs and sites)
    op.create_table('scan_config_sites',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('config_id', sa.Integer(), nullable=False, comment='FK to scan_configs'),
        sa.Column('site_id', sa.Integer(), nullable=False, comment='FK to sites'),
        sa.Column('created_at', sa.DateTime(), nullable=False, comment='Association creation time'),
        sa.ForeignKeyConstraint(['config_id'], ['scan_configs.id'], ),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('config_id', 'site_id', name='uix_config_site')
    )
    op.create_index(op.f('ix_scan_config_sites_config_id'), 'scan_config_sites', ['config_id'], unique=False)
    op.create_index(op.f('ix_scan_config_sites_site_id'), 'scan_config_sites', ['site_id'], unique=False)

    # Add config_id to scans table
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_scans_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_scans_config_id', 'scan_configs', ['config_id'], ['id'])
        # Mark config_file as deprecated in comment (already has nullable=True)

    # Add config_id to schedules table and make config_file nullable
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_schedules_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_schedules_config_id', 'scan_configs', ['config_id'], ['id'])
        # Make config_file nullable (it was required before)
        batch_op.alter_column('config_file', existing_type=sa.Text(), nullable=True)

    connection = op.get_bind()

    print("✓ Migration complete: Scan configs tables created")
    print("  - Created scan_configs table for database-stored configurations")
    print("  - Created scan_config_sites association table")
    print("  - Added config_id to scans table")
    print("  - Added config_id to schedules table")
    print("  - Existing YAML configs remain in config_file column for backward compatibility")


def downgrade():
    """Remove scan config tables and columns."""

    # Remove foreign keys and columns from schedules
    with op.batch_alter_table('schedules', schema=None) as batch_op:
        batch_op.drop_constraint('fk_schedules_config_id', type_='foreignkey')
        batch_op.drop_index('ix_schedules_config_id')
        batch_op.drop_column('config_id')
        # Restore config_file as required
        batch_op.alter_column('config_file', existing_type=sa.Text(), nullable=False)

    # Remove foreign keys and columns from scans
    with op.batch_alter_table('scans', schema=None) as batch_op:
        batch_op.drop_constraint('fk_scans_config_id', type_='foreignkey')
        batch_op.drop_index('ix_scans_config_id')
        batch_op.drop_column('config_id')

    # Drop tables in reverse order
    op.drop_index(op.f('ix_scan_config_sites_site_id'), table_name='scan_config_sites')
    op.drop_index(op.f('ix_scan_config_sites_config_id'), table_name='scan_config_sites')
    op.drop_table('scan_config_sites')

    op.drop_table('scan_configs')

    print("✓ Downgrade complete: Scan config tables and columns removed")
```
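Migration 007 wraps its column changes in `op.batch_alter_table` because SQLite's ALTER TABLE cannot change an existing column's nullability or add most constraints; batch mode recreates the table behind the scenes. A hand-written sqlite3 sketch of that "move and copy" sequence, with an invented minimal schema (real migrations let Alembic generate this):

```python
import sqlite3

# Sketch of what Alembic batch mode does on SQLite: build a new table with
# the desired shape, copy rows across, then swap it in. Schema is invented.
conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE schedules (id INTEGER PRIMARY KEY, config_file TEXT NOT NULL);
    INSERT INTO schedules (config_file) VALUES ('prod.yaml');
""")
conn.executescript("""
    CREATE TABLE schedules_new (
        id INTEGER PRIMARY KEY,
        config_file TEXT,                          -- now nullable
        config_id INTEGER REFERENCES scan_configs(id)  -- new FK column
    );
    INSERT INTO schedules_new (id, config_file) SELECT id, config_file FROM schedules;
    DROP TABLE schedules;
    ALTER TABLE schedules_new RENAME TO schedules;
""")
print(conn.execute("PRAGMA table_info(schedules)").fetchall())
```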
app/migrations/versions/008_expand_cidrs_to_ips.py (new file, 270 lines)
@@ -0,0 +1,270 @@
```python
"""Expand CIDRs to individual IPs with per-IP settings

Revision ID: 008
Revises: 007
Create Date: 2025-11-19

This migration changes the site architecture to automatically expand CIDRs into
individual IPs in the database. Each IP has its own port and ping settings.

Changes:
- Add site_id to site_ips (direct link to sites, support standalone IPs)
- Make site_cidr_id nullable (IPs can exist without a CIDR parent)
- Remove settings from site_cidrs (settings now only at IP level)
- Add unique constraint: no duplicate IPs within a site
- Expand existing CIDRs to individual IPs
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import ipaddress


# revision identifiers, used by Alembic
revision = '008'
down_revision = '007'
branch_labels = None
depends_on = None


def upgrade():
    """
    Modify schema to support per-IP settings and auto-expand CIDRs.
    """

    connection = op.get_bind()

    # Check if site_id column already exists
    inspector = sa.inspect(connection)
    site_ips_columns = [col['name'] for col in inspector.get_columns('site_ips')]
    site_cidrs_columns = [col['name'] for col in inspector.get_columns('site_cidrs')]

    # Step 1: Add site_id column to site_ips (will be populated from site_cidr_id)
    if 'site_id' not in site_ips_columns:
        print("Adding site_id column to site_ips...")
        op.add_column('site_ips', sa.Column('site_id', sa.Integer(), nullable=True, comment='FK to sites (direct link)'))
    else:
        print("site_id column already exists in site_ips, skipping...")

    # Step 2: Populate site_id from site_cidr_id (before we make it nullable)
    print("Populating site_id from existing site_cidr relationships...")
    connection.execute(text("""
        UPDATE site_ips
        SET site_id = (
            SELECT site_id
            FROM site_cidrs
            WHERE site_cidrs.id = site_ips.site_cidr_id
        )
        WHERE site_cidr_id IS NOT NULL
    """))

    # Step 3: Make site_id NOT NULL and add foreign key
    # Check if foreign key exists before creating
    try:
        op.alter_column('site_ips', 'site_id', nullable=False)
        print("Made site_id NOT NULL")
    except Exception as e:
        print(f"site_id already NOT NULL or error: {e}")

    # Check if foreign key exists
    try:
        op.create_foreign_key('fk_site_ips_site_id', 'site_ips', 'sites', ['site_id'], ['id'])
        print("Created foreign key fk_site_ips_site_id")
    except Exception as e:
        print(f"Foreign key already exists or error: {e}")

    # Check if index exists
    try:
        op.create_index(op.f('ix_site_ips_site_id'), 'site_ips', ['site_id'], unique=False)
        print("Created index ix_site_ips_site_id")
    except Exception as e:
        print(f"Index already exists or error: {e}")

    # Step 4: Make site_cidr_id nullable (for standalone IPs)
    try:
        op.alter_column('site_ips', 'site_cidr_id', nullable=True)
        print("Made site_cidr_id nullable")
    except Exception as e:
        print(f"site_cidr_id already nullable or error: {e}")

    # Step 5: Drop old unique constraint and create new one (site_id, ip_address)
    # This prevents duplicate IPs within a site (across all CIDRs and standalone)
    try:
        op.drop_constraint('uix_site_cidr_ip', 'site_ips', type_='unique')
        print("Dropped old constraint uix_site_cidr_ip")
    except Exception as e:
        print(f"Constraint already dropped or doesn't exist: {e}")

    try:
        op.create_unique_constraint('uix_site_ip_address', 'site_ips', ['site_id', 'ip_address'])
        print("Created new constraint uix_site_ip_address")
    except Exception as e:
        print(f"Constraint already exists or error: {e}")

    # Step 6: Expand existing CIDRs to individual IPs
    print("Expanding existing CIDRs to individual IPs...")

    # Get all existing CIDRs
    cidrs = connection.execute(text("""
        SELECT id, site_id, cidr, expected_ping, expected_tcp_ports, expected_udp_ports
        FROM site_cidrs
    """)).fetchall()

    expanded_count = 0
    skipped_count = 0

    for cidr_row in cidrs:
        cidr_id, site_id, cidr_str, expected_ping, expected_tcp_ports, expected_udp_ports = cidr_row

        try:
            # Parse CIDR
            network = ipaddress.ip_network(cidr_str, strict=False)

            # Check size - skip if too large (> /24 for IPv4, > /64 for IPv6)
            if isinstance(network, ipaddress.IPv4Network) and network.prefixlen < 24:
                print(f"  ⚠ Skipping large CIDR {cidr_str} (>{network.num_addresses} IPs)")
                skipped_count += 1
                continue
            elif isinstance(network, ipaddress.IPv6Network) and network.prefixlen < 64:
                print(f"  ⚠ Skipping large CIDR {cidr_str} (>{network.num_addresses} IPs)")
                skipped_count += 1
                continue

            # Expand to individual IPs
            for ip in network.hosts() if network.num_addresses > 2 else [network.network_address]:
                ip_str = str(ip)

                # Check if this IP already exists (from previous IP overrides)
                existing = connection.execute(text("""
                    SELECT id FROM site_ips
                    WHERE site_cidr_id = :cidr_id AND ip_address = :ip_address
                """), {'cidr_id': cidr_id, 'ip_address': ip_str}).fetchone()

                if not existing:
                    # Insert new IP with settings from CIDR
                    connection.execute(text("""
                        INSERT INTO site_ips (
                            site_id, site_cidr_id, ip_address,
                            expected_ping, expected_tcp_ports, expected_udp_ports,
                            created_at
                        )
                        VALUES (
                            :site_id, :cidr_id, :ip_address,
                            :expected_ping, :expected_tcp_ports, :expected_udp_ports,
                            datetime('now')
                        )
                    """), {
                        'site_id': site_id,
                        'cidr_id': cidr_id,
                        'ip_address': ip_str,
                        'expected_ping': expected_ping,
                        'expected_tcp_ports': expected_tcp_ports,
                        'expected_udp_ports': expected_udp_ports
                    })
                    expanded_count += 1

        except Exception as e:
            print(f"  ✗ Error expanding CIDR {cidr_str}: {e}")
            skipped_count += 1
            continue

    print(f"  ✓ Expanded {expanded_count} IPs from CIDRs")
    if skipped_count > 0:
        print(f"  ⚠ Skipped {skipped_count} CIDRs (too large or errors)")

    # Step 7: Remove settings columns from site_cidrs (now only at IP level)
    print("Removing settings columns from site_cidrs...")
    # Re-inspect to get current columns
    site_cidrs_columns = [col['name'] for col in inspector.get_columns('site_cidrs')]

    if 'expected_ping' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_ping')
            print("Dropped expected_ping from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_ping: {e}")
    else:
        print("expected_ping already dropped from site_cidrs")

    if 'expected_tcp_ports' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_tcp_ports')
            print("Dropped expected_tcp_ports from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_tcp_ports: {e}")
    else:
        print("expected_tcp_ports already dropped from site_cidrs")

    if 'expected_udp_ports' in site_cidrs_columns:
        try:
            op.drop_column('site_cidrs', 'expected_udp_ports')
            print("Dropped expected_udp_ports from site_cidrs")
        except Exception as e:
            print(f"Error dropping expected_udp_ports: {e}")
    else:
        print("expected_udp_ports already dropped from site_cidrs")

    # Print summary
    total_sites = connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()
    total_cidrs = connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()
    total_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()

    print("\n✓ Migration 008 complete: CIDRs expanded to individual IPs")
    print(f"  - Total sites: {total_sites}")
    print(f"  - Total CIDRs: {total_cidrs}")
    print(f"  - Total IPs: {total_ips}")


def downgrade():
    """
    Revert schema changes (restore CIDR-level settings).
    Note: This will lose per-IP granularity!
    """

    connection = op.get_bind()

    print("Rolling back to CIDR-level settings...")

    # Step 1: Add settings columns back to site_cidrs
    op.add_column('site_cidrs', sa.Column('expected_ping', sa.Boolean(), nullable=True))
    op.add_column('site_cidrs', sa.Column('expected_tcp_ports', sa.Text(), nullable=True))
    op.add_column('site_cidrs', sa.Column('expected_udp_ports', sa.Text(), nullable=True))

    # Step 2: Populate CIDR settings from first IP in each CIDR (approximation)
    connection.execute(text("""
        UPDATE site_cidrs
        SET
            expected_ping = (
                SELECT expected_ping FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            ),
            expected_tcp_ports = (
                SELECT expected_tcp_ports FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            ),
            expected_udp_ports = (
                SELECT expected_udp_ports FROM site_ips
                WHERE site_ips.site_cidr_id = site_cidrs.id
                LIMIT 1
            )
    """))

    # Step 3: Delete auto-expanded IPs (keep only original overrides)
    # In practice, this is difficult to determine, so we'll keep all IPs
    # and just remove the schema changes

    # Step 4: Drop new unique constraint and restore old one
    op.drop_constraint('uix_site_ip_address', 'site_ips', type_='unique')
    op.create_unique_constraint('uix_site_cidr_ip', 'site_ips', ['site_cidr_id', 'ip_address'])

    # Step 5: Make site_cidr_id NOT NULL again
    op.alter_column('site_ips', 'site_cidr_id', nullable=False)

    # Step 6: Drop site_id column and related constraints
    op.drop_index(op.f('ix_site_ips_site_id'), table_name='site_ips')
    op.drop_constraint('fk_site_ips_site_id', 'site_ips', type_='foreignkey')
    op.drop_column('site_ips', 'site_id')

    print("✓ Downgrade complete: Reverted to CIDR-level settings")
```
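The size guard above refuses anything larger than a /24 (IPv4) or /64 (IPv6) before expanding, and falls back to the bare network address for tiny networks where `hosts()` would be empty. A self-contained sketch of the same guard logic:

```python
import ipaddress

def expand_cidr(cidr_str, max_v4_prefix=24, max_v6_prefix=64):
    """Expand a CIDR to host IPs, refusing networks larger than the caps."""
    network = ipaddress.ip_network(cidr_str, strict=False)
    # A smaller prefix length means a bigger network, so "< 24" means "larger than /24".
    if isinstance(network, ipaddress.IPv4Network) and network.prefixlen < max_v4_prefix:
        return None  # too large; the migration skips these
    if isinstance(network, ipaddress.IPv6Network) and network.prefixlen < max_v6_prefix:
        return None
    # hosts() excludes network/broadcast addresses; fall back for /31 and /32
    hosts = list(network.hosts()) if network.num_addresses > 2 else [network.network_address]
    return [str(ip) for ip in hosts]

print(len(expand_cidr("10.0.0.0/24")))  # 254
print(expand_cidr("10.0.0.1/32"))       # ['10.0.0.1']
print(expand_cidr("10.0.0.0/16"))       # None (65536 addresses, over the cap)
```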
app/migrations/versions/009_remove_cidrs.py (new file, 210 lines)
@@ -0,0 +1,210 @@
```python
"""Remove CIDR table - make sites IP-only

Revision ID: 009
Revises: 008
Create Date: 2025-11-19

This migration removes the SiteCIDR table entirely, making sites purely
IP-based. CIDRs are now only used as a convenience for bulk IP addition,
not stored as permanent entities.

Changes:
- Set all site_ips.site_cidr_id to NULL (preserve all IPs)
- Drop foreign key from site_ips to site_cidrs
- Drop site_cidrs table
- Remove site_cidr_id column from site_ips

All existing IPs are preserved. They become "standalone" IPs without
a CIDR parent.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text


# revision identifiers, used by Alembic
revision = '009'
down_revision = '008'
branch_labels = None
depends_on = None


def upgrade():
    """
    Remove CIDR table and make all IPs standalone.
    """

    connection = op.get_bind()
    inspector = sa.inspect(connection)

    print("\n=== Migration 009: Remove CIDR Table ===\n")

    # Get counts before migration
    try:
        total_cidrs = connection.execute(text('SELECT COUNT(*) FROM site_cidrs')).scalar()
        total_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()
        ips_with_cidr = connection.execute(text(
            'SELECT COUNT(*) FROM site_ips WHERE site_cidr_id IS NOT NULL'
        )).scalar()

        print(f"Before migration:")
        print(f"  - Total CIDRs: {total_cidrs}")
        print(f"  - Total IPs: {total_ips}")
        print(f"  - IPs linked to CIDRs: {ips_with_cidr}")
        print(f"  - Standalone IPs: {total_ips - ips_with_cidr}\n")
    except Exception as e:
        print(f"Could not get pre-migration stats: {e}\n")

    # Step 1: Set all site_cidr_id to NULL (preserve all IPs as standalone)
    print("Step 1: Converting all IPs to standalone (nulling CIDR associations)...")
    try:
        result = connection.execute(text("""
            UPDATE site_ips
            SET site_cidr_id = NULL
            WHERE site_cidr_id IS NOT NULL
        """))
        print(f"  ✓ Converted {result.rowcount} IPs to standalone\n")
    except Exception as e:
        print(f"  ⚠ Error or already done: {e}\n")

    # Step 2: Drop foreign key constraint from site_ips to site_cidrs
    print("Step 2: Dropping foreign key constraint from site_ips to site_cidrs...")
    foreign_keys = inspector.get_foreign_keys('site_ips')
    fk_to_drop = None

    for fk in foreign_keys:
        if fk['referred_table'] == 'site_cidrs':
            fk_to_drop = fk['name']
            break

    if fk_to_drop:
        try:
            op.drop_constraint(fk_to_drop, 'site_ips', type_='foreignkey')
            print(f"  ✓ Dropped foreign key constraint: {fk_to_drop}\n")
        except Exception as e:
            print(f"  ⚠ Could not drop foreign key: {e}\n")
    else:
        print("  ⚠ Foreign key constraint not found or already dropped\n")

    # Step 3: Drop index on site_cidr_id (if exists)
    print("Step 3: Dropping index on site_cidr_id...")
    indexes = inspector.get_indexes('site_ips')
    index_to_drop = None

    for idx in indexes:
        if 'site_cidr_id' in idx['column_names']:
            index_to_drop = idx['name']
            break

    if index_to_drop:
        try:
            op.drop_index(index_to_drop, table_name='site_ips')
            print(f"  ✓ Dropped index: {index_to_drop}\n")
        except Exception as e:
            print(f"  ⚠ Could not drop index: {e}\n")
    else:
        print("  ⚠ Index not found or already dropped\n")

    # Step 4: Drop site_cidrs table
    print("Step 4: Dropping site_cidrs table...")
    tables = inspector.get_table_names()

    if 'site_cidrs' in tables:
        try:
            op.drop_table('site_cidrs')
            print("  ✓ Dropped site_cidrs table\n")
        except Exception as e:
            print(f"  ⚠ Could not drop table: {e}\n")
    else:
        print("  ⚠ Table site_cidrs not found or already dropped\n")

    # Step 5: Drop site_cidr_id column from site_ips
    print("Step 5: Dropping site_cidr_id column from site_ips...")
    site_ips_columns = [col['name'] for col in inspector.get_columns('site_ips')]

    if 'site_cidr_id' in site_ips_columns:
        try:
            op.drop_column('site_ips', 'site_cidr_id')
            print("  ✓ Dropped site_cidr_id column from site_ips\n")
        except Exception as e:
            print(f"  ⚠ Could not drop column: {e}\n")
    else:
        print("  ⚠ Column site_cidr_id not found or already dropped\n")

    # Get counts after migration
    try:
        final_ips = connection.execute(text('SELECT COUNT(*) FROM site_ips')).scalar()
        total_sites = connection.execute(text('SELECT COUNT(*) FROM sites')).scalar()

        print("After migration:")
        print(f"  - Total sites: {total_sites}")
        print(f"  - Total IPs (all standalone): {final_ips}")
        print(f"  - CIDRs: N/A (table removed)")
    except Exception as e:
        print(f"Could not get post-migration stats: {e}")

    print("\n✓ Migration 009 complete: Sites are now IP-only")
    print("  All IPs preserved as standalone. CIDRs can still be used")
    print("  via the API/UI for bulk IP creation, but are not stored.\n")


def downgrade():
    """
    Recreate site_cidrs table (CANNOT restore original CIDR associations).

    WARNING: This downgrade creates an empty site_cidrs table structure but
    cannot restore the original CIDR-to-IP associations since that data was
    deleted. All IPs will remain standalone.
    """

    connection = op.get_bind()

    print("\n=== Downgrade 009: Recreate CIDR Table Structure ===\n")
    print("⚠ WARNING: Cannot restore original CIDR associations!")
    print("  The site_cidrs table structure will be recreated but will be empty.")
    print("  All IPs will remain standalone. This is a PARTIAL downgrade.\n")

    # Step 1: Recreate site_cidrs table (empty)
    print("Step 1: Recreating site_cidrs table structure...")
    try:
        op.create_table(
            'site_cidrs',
            sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
            sa.Column('site_id', sa.Integer(), nullable=False),
            sa.Column('cidr', sa.String(length=45), nullable=False, comment='CIDR notation (e.g., 10.0.0.0/24)'),
            sa.Column('created_at', sa.DateTime(), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.ForeignKeyConstraint(['site_id'], ['sites.id'], ),
            sa.UniqueConstraint('site_id', 'cidr', name='uix_site_cidr')
        )
        print("  ✓ Recreated site_cidrs table (empty)\n")
    except Exception as e:
        print(f"  ⚠ Could not create table: {e}\n")

    # Step 2: Add site_cidr_id column back to site_ips (nullable)
    print("Step 2: Adding site_cidr_id column back to site_ips...")
    try:
        op.add_column('site_ips', sa.Column('site_cidr_id', sa.Integer(), nullable=True, comment='FK to site_cidrs (optional, for grouping)'))
        print("  ✓ Added site_cidr_id column (nullable)\n")
    except Exception as e:
        print(f"  ⚠ Could not add column: {e}\n")

    # Step 3: Add foreign key constraint
    print("Step 3: Adding foreign key constraint...")
    try:
        op.create_foreign_key('fk_site_ips_site_cidr_id', 'site_ips', 'site_cidrs', ['site_cidr_id'], ['id'])
        print("  ✓ Created foreign key constraint\n")
    except Exception as e:
        print(f"  ⚠ Could not create foreign key: {e}\n")

    # Step 4: Add index on site_cidr_id
    print("Step 4: Adding index on site_cidr_id...")
    try:
        op.create_index('ix_site_ips_site_cidr_id', 'site_ips', ['site_cidr_id'], unique=False)
        print("  ✓ Created index on site_cidr_id\n")
    except Exception as e:
        print(f"  ⚠ Could not create index: {e}\n")

    print("✓ Downgrade complete: CIDR table structure restored (but empty)")
    print("  All IPs remain standalone. You would need to manually recreate")
    print("  CIDR records and associate IPs with them.\n")
```
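Rather than hard-coding constraint names, migration 009 discovers them through SQLAlchemy's inspector, which is what lets each step skip cleanly when re-run against a partially-migrated database. A minimal sketch of that lookup pattern (the database URL here is a placeholder):

```python
import sqlalchemy as sa

# Sketch: find the FK and index on site_ips that point at site_cidrs,
# as migration 009 does, then a caller would drop them only if present.
engine = sa.create_engine("sqlite:////app/data/sneakyscanner.db")  # placeholder URL
with engine.connect() as connection:
    inspector = sa.inspect(connection)

    fk_to_drop = next(
        (fk["name"] for fk in inspector.get_foreign_keys("site_ips")
         if fk["referred_table"] == "site_cidrs"),
        None,
    )
    index_to_drop = next(
        (idx["name"] for idx in inspector.get_indexes("site_ips")
         if "site_cidr_id" in idx["column_names"]),
        None,
    )
    print(fk_to_drop, index_to_drop)  # both None once the migration has run
```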
app/migrations/versions/010_alert_rules_config_id.py (new file, 53 lines)
@@ -0,0 +1,53 @@
```python
"""Add config_id to alert_rules table

Revision ID: 010
Revises: 009
Create Date: 2025-11-19

This migration adds config_id foreign key to alert_rules table to replace
the config_file column, completing the migration from file-based to
database-based configurations.
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic
revision = '010'
down_revision = '009'
branch_labels = None
depends_on = None


def upgrade():
    """
    Add config_id to alert_rules table and remove config_file.
    """

    with op.batch_alter_table('alert_rules', schema=None) as batch_op:
        # Add config_id column with foreign key
        batch_op.add_column(sa.Column('config_id', sa.Integer(), nullable=True, comment='FK to scan_configs table'))
        batch_op.create_index('ix_alert_rules_config_id', ['config_id'], unique=False)
        batch_op.create_foreign_key('fk_alert_rules_config_id', 'scan_configs', ['config_id'], ['id'])

        # Remove the old config_file column
        batch_op.drop_column('config_file')

    print("✓ Migration complete: AlertRule now uses config_id")
    print("  - Added config_id foreign key to alert_rules table")
    print("  - Removed deprecated config_file column")


def downgrade():
    """Remove config_id and restore config_file on alert_rules."""

    with op.batch_alter_table('alert_rules', schema=None) as batch_op:
        # Remove foreign key and config_id column
        batch_op.drop_constraint('fk_alert_rules_config_id', type_='foreignkey')
        batch_op.drop_index('ix_alert_rules_config_id')
        batch_op.drop_column('config_id')

        # Restore config_file column
        batch_op.add_column(sa.Column('config_file', sa.String(255), nullable=True, comment='Optional: specific config file this rule applies to'))

    print("✓ Downgrade complete: AlertRule config_id removed, config_file restored")
```
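Revisions 006 through 010 form a linear chain, so a single upgrade applies them in order. One way to drive that from Python rather than the CLI, assuming an `alembic.ini` at the project root (an assumption about this repo's layout):

```python
from alembic import command
from alembic.config import Config

# Sketch: apply migrations 006-010 in order, then roll the last one back.
# "alembic.ini" and its script location are assumptions about this repo.
cfg = Config("alembic.ini")
command.upgrade(cfg, "010")    # runs 006, 007, 008, 009, 010 as needed
command.downgrade(cfg, "009")  # undoes 010 only
```

Note that 010's downgrade restores the `config_file` column but not its contents, since the upgrade drops the column and its data outright.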
```diff
@@ -29,17 +29,52 @@ sys.stderr.reconfigure(line_buffering=True)
 
 
 class SneakyScanner:
-    """Wrapper for masscan to perform network scans based on YAML config"""
+    """Wrapper for masscan to perform network scans based on YAML config or database config"""
 
-    def __init__(self, config_path: str, output_dir: str = "/app/output"):
-        self.config_path = Path(config_path)
+    def __init__(self, config_path: str = None, config_id: int = None, config_dict: Dict = None, output_dir: str = "/app/output"):
+        """
+        Initialize scanner with configuration.
+
+        Args:
+            config_path: Path to YAML config file (legacy)
+            config_id: Database config ID (preferred)
+            config_dict: Config dictionary (for direct use)
+            output_dir: Output directory for scan results
+
+        Note: Provide exactly one of config_path, config_id, or config_dict
+        """
+        if sum([config_path is not None, config_id is not None, config_dict is not None]) != 1:
+            raise ValueError("Must provide exactly one of: config_path, config_id, or config_dict")
+
+        self.config_path = Path(config_path) if config_path else None
+        self.config_id = config_id
         self.output_dir = Path(output_dir)
         self.output_dir.mkdir(parents=True, exist_ok=True)
-        self.config = self._load_config()
+
+        if config_dict:
+            self.config = config_dict
+            # Process sites: resolve references and expand CIDRs
+            if 'sites' in self.config:
+                self.config['sites'] = self._resolve_sites(self.config['sites'])
+        else:
+            self.config = self._load_config()
+
         self.screenshot_capture = None
 
     def _load_config(self) -> Dict[str, Any]:
-        """Load and validate YAML configuration"""
+        """
+        Load and validate configuration from file or database.
+
+        Supports three formats:
+        1. Legacy: Sites with explicit IP lists
+        2. Site references: Sites referencing database-stored sites
+        3. Inline CIDRs: Sites with CIDR ranges
+        """
+        # Load from database if config_id provided
+        if self.config_id:
+            return self._load_config_from_database(self.config_id)
+
+        # Load from YAML file
         if not self.config_path.exists():
             raise FileNotFoundError(f"Config file not found: {self.config_path}")
```
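The constructor now enforces a mutually exclusive choice of configuration source. A hypothetical usage sketch; the import path is an assumption, since the diff does not show the scanner module's file name:

```python
from scanner import SneakyScanner  # assumed module path

# Exactly one configuration source may be given:
#   SneakyScanner(config_path="/app/configs/prod-scan.yaml")     # legacy YAML file
#   SneakyScanner(config_id=1)                                   # database config (preferred)
#   SneakyScanner(config_dict={"title": "Ad hoc", "sites": []})  # pre-built dict

# Passing more than one (or none) fails fast:
try:
    SneakyScanner(config_path="a.yaml", config_id=1)
except ValueError as e:
    print(e)  # Must provide exactly one of: config_path, config_id, or config_dict
```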
```diff
@@ -51,8 +86,256 @@ class SneakyScanner:
         if not config.get('sites'):
             raise ValueError("Config must include 'sites' field")
 
+        # Process sites: resolve references and expand CIDRs
+        config['sites'] = self._resolve_sites(config['sites'])
+
         return config
 
+    def _load_config_from_database(self, config_id: int) -> Dict[str, Any]:
+        """
+        Load configuration from database by ID.
+
+        Args:
+            config_id: Database config ID
+
+        Returns:
+            Config dictionary with expanded sites
+
+        Raises:
+            ValueError: If config not found or invalid
+        """
+        try:
+            # Import here to avoid circular dependencies and allow scanner to work standalone
+            import os
+            import sys
+
+            # Add parent directory to path for imports
+            sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+            from sqlalchemy import create_engine
+            from sqlalchemy.orm import sessionmaker
+            from web.models import ScanConfig
+
+            # Create database session
+            db_url = os.environ.get('DATABASE_URL', 'sqlite:////app/data/sneakyscanner.db')
+            engine = create_engine(db_url)
+            Session = sessionmaker(bind=engine)
+            session = Session()
+
+            try:
+                # Load config from database
+                db_config = session.query(ScanConfig).filter_by(id=config_id).first()
+
+                if not db_config:
+                    raise ValueError(f"Config with ID {config_id} not found in database")
+
+                # Build config dict with site references
+                config = {
+                    'title': db_config.title,
+                    'sites': []
+                }
+
+                # Add each site as a site_ref
+                for assoc in db_config.site_associations:
+                    site = assoc.site
+                    config['sites'].append({
+                        'site_ref': site.name
+                    })
+
+                # Process sites: resolve references and expand CIDRs
+                config['sites'] = self._resolve_sites(config['sites'])
+
+                return config
+
+            finally:
+                session.close()
+
+        except ImportError as e:
+            raise ValueError(f"Failed to load config from database (import error): {str(e)}")
+        except Exception as e:
+            raise ValueError(f"Failed to load config from database: {str(e)}")
+
+    def _resolve_sites(self, sites: List[Dict]) -> List[Dict]:
+        """
+        Resolve site references and expand CIDRs to IP lists.
+
+        Converts all site formats into the legacy format (with explicit IPs)
+        for compatibility with the existing scan logic.
+
+        Args:
+            sites: List of site definitions from config
+
+        Returns:
+            List of sites with expanded IP lists
+        """
+        import ipaddress
+
+        resolved_sites = []
+
+        for site_def in sites:
+            # Handle site references
+            if 'site_ref' in site_def:
+                site_ref = site_def['site_ref']
+                # Load site from database
+                site_data = self._load_site_from_database(site_ref)
+                if site_data:
+                    resolved_sites.append(site_data)
+                else:
+                    print(f"WARNING: Site reference '{site_ref}' not found in database", file=sys.stderr)
+                continue
+
+            # Handle inline CIDR definitions
+            if 'cidrs' in site_def:
+                site_name = site_def.get('name', 'Unknown Site')
+                expanded_ips = []
+
+                for cidr_def in site_def['cidrs']:
+                    cidr = cidr_def['cidr']
+                    expected_ping = cidr_def.get('expected_ping', False)
+                    expected_tcp_ports = cidr_def.get('expected_tcp_ports', [])
+                    expected_udp_ports = cidr_def.get('expected_udp_ports', [])
+
+                    # Check if there are IP-level overrides (from database sites)
+                    ip_overrides = cidr_def.get('ip_overrides', [])
+                    override_map = {
+                        override['ip_address']: override
+                        for override in ip_overrides
+                    }
+
+                    # Expand CIDR to IP list
+                    try:
+                        network = ipaddress.ip_network(cidr, strict=False)
+                        ip_list = [str(ip) for ip in network.hosts()]
+
+                        # If network has only 1 address (like /32), hosts() returns empty
+                        if not ip_list:
+                            ip_list = [str(network.network_address)]
+
+                        # Create IP config for each IP in the CIDR
+                        for ip_address in ip_list:
+                            # Check if this IP has an override
+                            if ip_address in override_map:
+                                override = override_map[ip_address]
+                                ip_config = {
+                                    'address': ip_address,
+                                    'expected': {
+                                        'ping': override.get('expected_ping', expected_ping),
+                                        'tcp_ports': override.get('expected_tcp_ports', expected_tcp_ports),
+                                        'udp_ports': override.get('expected_udp_ports', expected_udp_ports)
+                                    }
+                                }
+                            else:
+                                # Use CIDR-level defaults
+                                ip_config = {
+                                    'address': ip_address,
+                                    'expected': {
+                                        'ping': expected_ping,
+                                        'tcp_ports': expected_tcp_ports,
+                                        'udp_ports': expected_udp_ports
+                                    }
+                                }
+
+                            expanded_ips.append(ip_config)
+
+                    except ValueError as e:
+                        print(f"WARNING: Invalid CIDR '{cidr}': {e}", file=sys.stderr)
+                        continue
+
+                # Add expanded site
+                resolved_sites.append({
+                    'name': site_name,
+                    'ips': expanded_ips
+                })
+                continue
+
+            # Legacy format: already has 'ips' list
+            if 'ips' in site_def:
+                resolved_sites.append(site_def)
+                continue
+
+            print(f"WARNING: Site definition missing required fields: {site_def}", file=sys.stderr)
+
+        return resolved_sites
+
+    def _load_site_from_database(self, site_name: str) -> Dict[str, Any]:
+        """
+        Load a site definition from the database.
+
+        IPs are pre-expanded in the database, so we just load them directly.
+
+        Args:
+            site_name: Name of the site to load
+
+        Returns:
+            Site definition dict with IPs, or None if not found
+        """
+        try:
+            # Import database modules
+            import os
+            import sys
+
+            # Add parent directory to path if needed
+            parent_dir = str(Path(__file__).parent.parent)
+            if parent_dir not in sys.path:
+                sys.path.insert(0, parent_dir)
+
+            from sqlalchemy import create_engine
+            from sqlalchemy.orm import sessionmaker, joinedload
+            from web.models import Site
+
+            # Get database URL from environment
+            database_url = os.environ.get('DATABASE_URL', 'sqlite:///./sneakyscanner.db')
+
+            # Create engine and session
+            engine = create_engine(database_url)
+            Session = sessionmaker(bind=engine)
+            session = Session()
+
+            # Query site with all IPs (CIDRs are already expanded)
+            site = (
+                session.query(Site)
+                .options(joinedload(Site.ips))
+                .filter(Site.name == site_name)
+                .first()
+            )
+
+            if not site:
+                session.close()
+                return None
+
+            # Load all IPs directly from database (already expanded)
+            expanded_ips = []
+
+            for ip_obj in site.ips:
+                # Get settings from IP (no need to merge with CIDR defaults)
+                expected_ping = ip_obj.expected_ping if ip_obj.expected_ping is not None else False
+                expected_tcp_ports = json.loads(ip_obj.expected_tcp_ports) if ip_obj.expected_tcp_ports else []
+                expected_udp_ports = json.loads(ip_obj.expected_udp_ports) if ip_obj.expected_udp_ports else []
+
+                ip_config = {
+                    'address': ip_obj.ip_address,
+                    'expected': {
+                        'ping': expected_ping,
+                        'tcp_ports': expected_tcp_ports,
+                        'udp_ports': expected_udp_ports
+                    }
+                }
+
+                expanded_ips.append(ip_config)
+
+            session.close()
+
+            return {
+                'name': site.name,
+                'ips': expanded_ips
+            }
+
+        except Exception as e:
+            print(f"ERROR: Failed to load site '{site_name}' from database: {e}", file=sys.stderr)
+            import traceback
+            traceback.print_exc()
+            return None
+
     def _run_masscan(self, targets: List[str], ports: str, protocol: str) -> List[Dict]:
         """
         Run masscan and return parsed results
```
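Taken together, `_resolve_sites` accepts three site shapes and normalizes them all to the legacy explicit-IP form. A sketch of the three input shapes; the field names come from the code above, while the concrete values are invented:

```python
# Illustrative config shapes accepted by _resolve_sites (values are invented).
config_sites = [
    # 1. Legacy: explicit IP list, passed through unchanged
    {'name': 'Legacy Site',
     'ips': [{'address': '10.0.0.5',
              'expected': {'ping': True, 'tcp_ports': [22], 'udp_ports': []}}]},
    # 2. Site reference: resolved from the database by name
    {'site_ref': 'Production DC'},
    # 3. Inline CIDR: expanded to one entry per host address, with optional overrides
    {'name': 'DMZ',
     'cidrs': [{'cidr': '192.0.2.0/30',
                'expected_ping': False,
                'expected_tcp_ports': [443],
                'ip_overrides': [{'ip_address': '192.0.2.1', 'expected_ping': True}]}]},
]
```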
```diff
@@ -557,7 +840,10 @@ class SneakyScanner:
             Dictionary containing scan results
         """
         print(f"Starting scan: {self.config['title']}", flush=True)
-        print(f"Config: {self.config_path}", flush=True)
+        if self.config_id:
+            print(f"Config ID: {self.config_id}", flush=True)
+        elif self.config_path:
+            print(f"Config: {self.config_path}", flush=True)
 
         # Record start time
         start_time = time.time()
```
```diff
@@ -662,7 +948,8 @@ class SneakyScanner:
             'title': self.config['title'],
             'scan_time': datetime.utcnow().isoformat() + 'Z',
             'scan_duration': scan_duration,
-            'config_file': str(self.config_path),
+            'config_file': str(self.config_path) if self.config_path else None,
+            'config_id': self.config_id,
             'sites': []
         }
 
```
```diff
@@ -169,7 +169,8 @@ def list_alert_rules():
             'webhook_enabled': rule.webhook_enabled,
             'severity': rule.severity,
             'filter_conditions': json.loads(rule.filter_conditions) if rule.filter_conditions else None,
-            'config_file': rule.config_file,
+            'config_id': rule.config_id,
+            'config_title': rule.config.title if rule.config else None,
             'created_at': rule.created_at.isoformat(),
             'updated_at': rule.updated_at.isoformat() if rule.updated_at else None
         })
```
```diff
@@ -195,7 +196,7 @@ def create_alert_rule():
         webhook_enabled: Send webhook for this rule (default: false)
         severity: Alert severity (critical, warning, info)
         filter_conditions: JSON object with filter conditions
-        config_file: Optional config file to apply rule to
+        config_id: Optional config ID to apply rule to
 
     Returns:
         JSON response with created rule
```
```diff
@@ -226,6 +227,17 @@ def create_alert_rule():
        }), 400
 
     try:
+        # Validate config_id if provided
+        config_id = data.get('config_id')
+        if config_id:
+            from web.models import ScanConfig
+            config = current_app.db_session.query(ScanConfig).filter_by(id=config_id).first()
+            if not config:
+                return jsonify({
+                    'status': 'error',
+                    'message': f'Config with ID {config_id} not found'
+                }), 400
+
         # Create new rule
         rule = AlertRule(
             name=data.get('name', f"{data['rule_type']} rule"),
```
```diff
@@ -236,7 +248,7 @@ def create_alert_rule():
             webhook_enabled=data.get('webhook_enabled', False),
             severity=data.get('severity', 'warning'),
             filter_conditions=json.dumps(data['filter_conditions']) if data.get('filter_conditions') else None,
-            config_file=data.get('config_file'),
+            config_id=config_id,
             created_at=datetime.now(timezone.utc),
             updated_at=datetime.now(timezone.utc)
         )
```
```diff
@@ -257,7 +269,8 @@ def create_alert_rule():
             'webhook_enabled': rule.webhook_enabled,
             'severity': rule.severity,
             'filter_conditions': json.loads(rule.filter_conditions) if rule.filter_conditions else None,
-            'config_file': rule.config_file,
+            'config_id': rule.config_id,
+            'config_title': rule.config.title if rule.config else None,
             'created_at': rule.created_at.isoformat(),
             'updated_at': rule.updated_at.isoformat()
         }
```
```diff
@@ -288,7 +301,7 @@ def update_alert_rule(rule_id):
         webhook_enabled: Send webhook for this rule (optional)
         severity: Alert severity (optional)
         filter_conditions: JSON object with filter conditions (optional)
-        config_file: Config file to apply rule to (optional)
+        config_id: Config ID to apply rule to (optional)
 
     Returns:
         JSON response with update status
```
```diff
@@ -312,6 +325,18 @@ def update_alert_rule(rule_id):
        }), 400
 
     try:
+        # Validate config_id if provided
+        if 'config_id' in data:
+            config_id = data['config_id']
+            if config_id:
+                from web.models import ScanConfig
+                config = current_app.db_session.query(ScanConfig).filter_by(id=config_id).first()
+                if not config:
+                    return jsonify({
+                        'status': 'error',
+                        'message': f'Config with ID {config_id} not found'
+                    }), 400
+
         # Update fields if provided
         if 'name' in data:
             rule.name = data['name']
```
```diff
@@ -327,8 +352,8 @@ def update_alert_rule(rule_id):
             rule.severity = data['severity']
         if 'filter_conditions' in data:
             rule.filter_conditions = json.dumps(data['filter_conditions']) if data['filter_conditions'] else None
-        if 'config_file' in data:
-            rule.config_file = data['config_file']
+        if 'config_id' in data:
+            rule.config_id = data['config_id']
 
         rule.updated_at = datetime.now(timezone.utc)
 
```
```diff
@@ -347,7 +372,8 @@ def update_alert_rule(rule_id):
             'webhook_enabled': rule.webhook_enabled,
             'severity': rule.severity,
             'filter_conditions': json.loads(rule.filter_conditions) if rule.filter_conditions else None,
-            'config_file': rule.config_file,
+            'config_id': rule.config_id,
+            'config_title': rule.config.title if rule.config else None,
             'created_at': rule.created_at.isoformat(),
             'updated_at': rule.updated_at.isoformat()
         }
```
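For reference, a hypothetical client call against the updated create endpoint; the mount point (`/api/alert-rules` here), host, and response nesting are assumptions, while the payload and response fields come from the handlers above:

```python
import requests

# Hypothetical: base URL, mount point, and auth handling are assumptions.
resp = requests.post(
    "http://localhost:5000/api/alert-rules",
    json={
        "rule_type": "new_port",  # required by the handler above
        "severity": "warning",
        "config_id": 1,           # validated server-side; unknown IDs return 400
    },
)
print(resp.status_code)
print(resp.json().get("config_title"))  # title of the linked ScanConfig, or None
```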
```diff
@@ -1,14 +1,12 @@
 """
 Configs API blueprint.
 
-Handles endpoints for managing scan configuration files, including CSV/YAML upload,
-template download, and config management.
+Handles endpoints for managing scan configurations stored in the database.
+Provides REST API for creating, updating, and deleting configs that reference sites.
 """
 
 import logging
-import io
 
-from flask import Blueprint, jsonify, request, send_file
-from werkzeug.utils import secure_filename
+from flask import Blueprint, jsonify, request, current_app
 
 from web.auth.decorators import api_auth_required
 from web.services.config_service import ConfigService
```
```diff
@@ -17,32 +15,40 @@ bp = Blueprint('configs', __name__)
 logger = logging.getLogger(__name__)
 
 
+# ============================================================================
+# Database-based Config Endpoints (Primary)
+# ============================================================================
+
 @bp.route('', methods=['GET'])
 @api_auth_required
 def list_configs():
     """
-    List all config files with metadata.
+    List all scan configurations from database.
 
     Returns:
         JSON response with list of configs:
         {
             "configs": [
                 {
-                    "filename": "prod-scan.yaml",
-                    "title": "Prod Scan",
-                    "path": "/app/configs/prod-scan.yaml",
-                    "created_at": "2025-11-15T10:30:00Z",
-                    "size_bytes": 1234,
-                    "used_by_schedules": ["Daily Scan"]
+                    "id": 1,
+                    "title": "Production Scan",
+                    "description": "Weekly production scan",
+                    "site_count": 3,
+                    "sites": [
+                        {"id": 1, "name": "Production DC"},
+                        {"id": 2, "name": "DMZ"}
+                    ],
+                    "created_at": "2025-11-19T10:30:00Z",
+                    "updated_at": "2025-11-19T10:30:00Z"
                 }
             ]
         }
     """
     try:
-        config_service = ConfigService()
-        configs = config_service.list_configs()
+        config_service = ConfigService(db_session=current_app.db_session)
+        configs = config_service.list_configs_db()
 
-        logger.info(f"Listed {len(configs)} config files")
+        logger.info(f"Listed {len(configs)} configs from database")
 
         return jsonify({
             'configs': configs
```
```diff
@@ -56,78 +62,38 @@ def list_configs():
         }), 500
 
 
-@bp.route('/<filename>', methods=['GET'])
+@bp.route('', methods=['POST'])
 @api_auth_required
-def get_config(filename: str):
+def create_config():
     """
-    Get config file content and parsed data.
-
-    Args:
-        filename: Config filename
-
-    Returns:
-        JSON response with config content:
-        {
-            "filename": "prod-scan.yaml",
-            "content": "title: Prod Scan\n...",
-            "parsed": {"title": "Prod Scan", "sites": [...]}
-        }
-    """
-    try:
-        # Sanitize filename
-        filename = secure_filename(filename)
-
-        config_service = ConfigService()
-        config_data = config_service.get_config(filename)
-
-        logger.info(f"Retrieved config file: {filename}")
-
-        return jsonify(config_data)
-
-    except FileNotFoundError as e:
-        logger.warning(f"Config file not found: {filename}")
-        return jsonify({
-            'error': 'Not found',
-            'message': str(e)
-        }), 404
-
-    except ValueError as e:
-        logger.warning(f"Invalid config file: {filename} - {str(e)}")
-        return jsonify({
-            'error': 'Invalid config',
-            'message': str(e)
-        }), 400
-
-    except Exception as e:
-        logger.error(f"Unexpected error getting config {filename}: {str(e)}", exc_info=True)
-        return jsonify({
-            'error': 'Internal server error',
-            'message': 'An unexpected error occurred'
-        }), 500
-
-
-@bp.route('/create-from-cidr', methods=['POST'])
-@api_auth_required
-def create_from_cidr():
-    """
-    Create config from CIDR range.
+    Create a new scan configuration in the database.
 
     Request:
         JSON with:
         {
-            "title": "My Scan",
-            "cidr": "10.0.0.0/24",
-            "site_name": "Production" (optional),
-            "ping_default": false (optional)
+            "title": "Production Scan",
+            "description": "Weekly production scan (optional)",
+            "site_ids": [1, 2, 3]
         }
 
     Returns:
-        JSON response with created config info:
+        JSON response with created config:
         {
-            "success": true,
-            "filename": "my-scan.yaml",
-            "preview": "title: My Scan\n..."
+            "config": {
+                "id": 1,
+                "title": "Production Scan",
+                "description": "...",
+                "site_count": 3,
+                "sites": [...],
+                "created_at": "2025-11-19T10:30:00Z",
+                "updated_at": "2025-11-19T10:30:00Z"
+            }
         }
+
+    Error responses:
+        - 400: Validation error or missing fields
+        - 500: Internal server error
     """
     try:
         data = request.get_json()
```
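From a client's perspective, the new create endpoint takes site IDs instead of a CIDR. A hypothetical call, assuming the blueprint is mounted at `/api/configs` and authentication is already handled (both assumptions; the mount point is not shown in this diff):

```python
import requests

# Hypothetical: base URL and mount point are assumptions.
resp = requests.post(
    "http://localhost:5000/api/configs",
    json={"title": "Production Scan", "description": "Weekly", "site_ids": [1, 2]},
)
assert resp.status_code == 201  # created
print(resp.json()["config"]["site_count"])
```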
@@ -145,272 +111,192 @@ def create_from_cidr():
|
||||
'message': 'Missing required field: title'
|
||||
}), 400
|
||||
|
||||
if 'cidr' not in data:
|
||||
if 'site_ids' not in data:
|
||||
return jsonify({
|
||||
'error': 'Bad request',
|
||||
'message': 'Missing required field: cidr'
|
||||
'message': 'Missing required field: site_ids'
|
||||
}), 400
|
||||
|
||||
title = data['title']
|
||||
cidr = data['cidr']
|
||||
site_name = data.get('site_name', None)
|
||||
ping_default = data.get('ping_default', False)
|
||||
description = data.get('description', None)
|
||||
site_ids = data['site_ids']
|
||||
|
||||
# Validate title
|
||||
if not title or not title.strip():
|
||||
if not isinstance(site_ids, list):
|
||||
return jsonify({
|
||||
'error': 'Validation error',
|
||||
'message': 'Title cannot be empty'
|
||||
'error': 'Bad request',
|
||||
'message': 'Field site_ids must be an array'
|
||||
}), 400
|
||||
|
||||
# Create config from CIDR
|
||||
config_service = ConfigService()
|
||||
filename, yaml_preview = config_service.create_from_cidr(
|
||||
title=title,
|
||||
cidr=cidr,
|
||||
site_name=site_name,
|
||||
ping_default=ping_default
|
||||
)
|
||||
# Create config
|
||||
config_service = ConfigService(db_session=current_app.db_session)
|
||||
config = config_service.create_config(title, description, site_ids)
|
||||
|
||||
logger.info(f"Created config from CIDR {cidr}: {filename}")
|
||||
logger.info(f"Created config: {config['title']} (ID: {config['id']})")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'filename': filename,
|
||||
'preview': yaml_preview
|
||||
})
|
||||
'config': config
|
||||
}), 201
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning(f"CIDR validation failed: {str(e)}")
|
||||
logger.warning(f"Config validation failed: {str(e)}")
|
||||
return jsonify({
|
||||
'error': 'Validation error',
|
||||
'message': str(e)
|
||||
}), 400
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error creating config from CIDR: {str(e)}", exc_info=True)
|
||||
logger.error(f"Unexpected error creating config: {str(e)}", exc_info=True)
|
||||
return jsonify({
|
||||
'error': 'Internal server error',
|
||||
'message': 'An unexpected error occurred'
|
||||
}), 500
|
||||
|
||||
|
||||
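For orientation, a minimal client-side sketch of the new database-backed create flow. The /api/configs prefix matches the blueprint registration later in this diff; the host and the X-API-Key header are hypothetical stand-ins for whatever api_auth_required actually checks:

import requests

# Create a config that groups three existing sites (the IDs must already exist).
resp = requests.post(
    'http://localhost:5000/api/configs',
    json={
        'title': 'Production Scan',
        'description': 'Weekly production scan',
        'site_ids': [1, 2, 3],
    },
    headers={'X-API-Key': 'changeme'},  # hypothetical auth header
)
assert resp.status_code == 201
config = resp.json()['config']
print(config['id'], config['site_count'])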
@bp.route('/upload-yaml', methods=['POST'])
@bp.route('/<int:config_id>', methods=['GET'])
@api_auth_required
def upload_yaml():
def get_config(config_id: int):
    """
    Upload YAML config file directly.

    Request:
        multipart/form-data with 'file' field containing YAML file
        Optional 'filename' field for custom filename

    Returns:
        JSON response with created config info:
        {
            "success": true,
            "filename": "prod-scan.yaml"
        }
    """
    try:
        # Check if file is present
        if 'file' not in request.files:
            return jsonify({
                'error': 'Bad request',
                'message': 'No file provided'
            }), 400

        file = request.files['file']

        # Check if file is selected
        if file.filename == '':
            return jsonify({
                'error': 'Bad request',
                'message': 'No file selected'
            }), 400

        # Check file extension
        if not (file.filename.endswith('.yaml') or file.filename.endswith('.yml')):
            return jsonify({
                'error': 'Bad request',
                'message': 'File must be a YAML file (.yaml or .yml extension)'
            }), 400

        # Read YAML content
        yaml_content = file.read().decode('utf-8')

        # Get filename (use uploaded filename or custom)
        filename = request.form.get('filename', file.filename)
        filename = secure_filename(filename)

        # Create config from YAML
        config_service = ConfigService()
        final_filename = config_service.create_from_yaml(filename, yaml_content)

        logger.info(f"Created config from YAML upload: {final_filename}")

        return jsonify({
            'success': True,
            'filename': final_filename
        })

    except ValueError as e:
        logger.warning(f"YAML validation failed: {str(e)}")
        return jsonify({
            'error': 'Validation error',
            'message': str(e)
        }), 400

    except UnicodeDecodeError:
        logger.warning("YAML file encoding error")
        return jsonify({
            'error': 'Encoding error',
            'message': 'YAML file must be UTF-8 encoded'
        }), 400

    except Exception as e:
        logger.error(f"Unexpected error uploading YAML: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>/download', methods=['GET'])
@api_auth_required
def download_config(filename: str):
    """
    Download existing config file.
    Get a scan configuration by ID.

    Args:
        filename: Config filename
        config_id: Configuration ID

    Returns:
        YAML file download
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)

        config_service = ConfigService()
        config_data = config_service.get_config(filename)

        # Create file-like object
        yaml_file = io.BytesIO(config_data['content'].encode('utf-8'))
        yaml_file.seek(0)

        logger.info(f"Config file downloaded: {filename}")

        # Send file
        return send_file(
            yaml_file,
            mimetype='application/x-yaml',
            as_attachment=True,
            download_name=filename
        )

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except Exception as e:
        logger.error(f"Unexpected error downloading config {filename}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>', methods=['PUT'])
@api_auth_required
def update_config(filename: str):
    """
    Update existing config file with new YAML content.

    Args:
        filename: Config filename

    Request:
        JSON with:
        JSON response with config details:
        {
            "content": "title: My Scan\nsites: ..."
        }

    Returns:
        JSON response with success status:
        {
            "success": true,
            "message": "Config updated successfully"
            "id": 1,
            "title": "Production Scan",
            "description": "...",
            "site_count": 3,
            "sites": [
                {
                    "id": 1,
                    "name": "Production DC",
                    "description": "...",
                    "ip_count": 5
                }
            ],
            "created_at": "2025-11-19T10:30:00Z",
            "updated_at": "2025-11-19T10:30:00Z"
        }

    Error responses:
        - 400: Invalid YAML or config structure
        - 404: Config file not found
        - 404: Config not found
        - 500: Internal server error
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)
        config_service = ConfigService(db_session=current_app.db_session)
        config = config_service.get_config_by_id(config_id)

        data = request.get_json()
        logger.info(f"Retrieved config: {config['title']} (ID: {config_id})")

        if not data or 'content' not in data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Missing required field: content'
            }), 400
        return jsonify(config)

        yaml_content = data['content']

        # Update config
        config_service = ConfigService()
        config_service.update_config(filename, yaml_content)

        logger.info(f"Updated config file: {filename}")

        return jsonify({
            'success': True,
            'message': 'Config updated successfully'
        })

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
    except ValueError as e:
        logger.warning(f"Config not found: {config_id}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except ValueError as e:
        logger.warning(f"Invalid config content for {filename}: {str(e)}")
        return jsonify({
            'error': 'Validation error',
            'message': str(e)
        }), 400

    except Exception as e:
        logger.error(f"Unexpected error updating config {filename}: {str(e)}", exc_info=True)
        logger.error(f"Unexpected error getting config {config_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>', methods=['DELETE'])
@bp.route('/<int:config_id>', methods=['PUT'])
@api_auth_required
def delete_config(filename: str):
def update_config(config_id: int):
    """
    Delete config file and cascade delete associated schedules.

    When a config is deleted, all schedules using that config (both enabled
    and disabled) are automatically deleted as well.
    Update an existing scan configuration.

    Args:
        filename: Config filename
        config_id: Configuration ID

    Request:
        JSON with (all fields optional):
        {
            "title": "New Title",
            "description": "New Description",
            "site_ids": [1, 2, 3]
        }

    Returns:
        JSON response with updated config:
        {
            "success": true,
            "config": {...}
        }

    Error responses:
        - 400: Validation error
        - 404: Config not found
        - 500: Internal server error
    """
    try:
        data = request.get_json()

        if not data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Request body must be JSON'
            }), 400

        title = data.get('title', None)
        description = data.get('description', None)
        site_ids = data.get('site_ids', None)

        if site_ids is not None and not isinstance(site_ids, list):
            return jsonify({
                'error': 'Bad request',
                'message': 'Field site_ids must be an array'
            }), 400

        # Update config
        config_service = ConfigService(db_session=current_app.db_session)
        config = config_service.update_config(config_id, title, description, site_ids)

        logger.info(f"Updated config: {config['title']} (ID: {config_id})")

        return jsonify({
            'success': True,
            'config': config
        })

    except ValueError as e:
        if 'not found' in str(e).lower():
            logger.warning(f"Config not found: {config_id}")
            return jsonify({
                'error': 'Not found',
                'message': str(e)
            }), 404
        else:
            logger.warning(f"Config validation failed: {str(e)}")
            return jsonify({
                'error': 'Validation error',
                'message': str(e)
            }), 400

    except Exception as e:
        logger.error(f"Unexpected error updating config {config_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500

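A sketch of a partial update against the PUT endpoint above — only the fields present in the body change, the rest are left untouched (same hypothetical host and auth header as the earlier sketch):

import requests

resp = requests.put(
    'http://localhost:5000/api/configs/1',
    json={'title': 'Production Scan (v2)'},  # description and site_ids unchanged
    headers={'X-API-Key': 'changeme'},  # hypothetical auth header
)
print(resp.json()['config']['updated_at'])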
@bp.route('/<int:config_id>', methods=['DELETE'])
@api_auth_required
def delete_config(config_id: int):
    """
    Delete a scan configuration.

    Args:
        config_id: Configuration ID

    Returns:
        JSON response with success status:
@@ -420,32 +306,155 @@ def delete_config(filename: str):
        }

    Error responses:
        - 404: Config file not found
        - 404: Config not found
        - 500: Internal server error
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)
        config_service = ConfigService(db_session=current_app.db_session)
        config_service.delete_config(config_id)

        config_service = ConfigService()
        config_service.delete_config(filename)

        logger.info(f"Deleted config file: {filename}")
        logger.info(f"Deleted config (ID: {config_id})")

        return jsonify({
            'success': True,
            'message': 'Config deleted successfully'
        })

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
    except ValueError as e:
        logger.warning(f"Config not found: {config_id}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except Exception as e:
        logger.error(f"Unexpected error deleting config {filename}: {str(e)}", exc_info=True)
        logger.error(f"Unexpected error deleting config {config_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:config_id>/sites', methods=['POST'])
@api_auth_required
def add_site_to_config(config_id: int):
    """
    Add a site to an existing config.

    Args:
        config_id: Configuration ID

    Request:
        JSON with:
        {
            "site_id": 5
        }

    Returns:
        JSON response with updated config:
        {
            "success": true,
            "config": {...}
        }

    Error responses:
        - 400: Validation error or site already in config
        - 404: Config or site not found
        - 500: Internal server error
    """
    try:
        data = request.get_json()

        if not data or 'site_id' not in data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Missing required field: site_id'
            }), 400

        site_id = data['site_id']

        # Add site to config
        config_service = ConfigService(db_session=current_app.db_session)
        config = config_service.add_site_to_config(config_id, site_id)

        logger.info(f"Added site {site_id} to config {config_id}")

        return jsonify({
            'success': True,
            'config': config
        })

    except ValueError as e:
        if 'not found' in str(e).lower():
            logger.warning(f"Config or site not found: {str(e)}")
            return jsonify({
                'error': 'Not found',
                'message': str(e)
            }), 404
        else:
            logger.warning(f"Validation error: {str(e)}")
            return jsonify({
                'error': 'Validation error',
                'message': str(e)
            }), 400

    except Exception as e:
        logger.error(f"Unexpected error adding site to config: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:config_id>/sites/<int:site_id>', methods=['DELETE'])
@api_auth_required
def remove_site_from_config(config_id: int, site_id: int):
    """
    Remove a site from a config.

    Args:
        config_id: Configuration ID
        site_id: Site ID to remove

    Returns:
        JSON response with updated config:
        {
            "success": true,
            "config": {...}
        }

    Error responses:
        - 400: Validation error (e.g., last site cannot be removed)
        - 404: Config not found or site not in config
        - 500: Internal server error
    """
    try:
        config_service = ConfigService(db_session=current_app.db_session)
        config = config_service.remove_site_from_config(config_id, site_id)

        logger.info(f"Removed site {site_id} from config {config_id}")

        return jsonify({
            'success': True,
            'config': config
        })

    except ValueError as e:
        if 'not found' in str(e).lower() or 'not in this config' in str(e).lower():
            logger.warning(f"Config or site not found: {str(e)}")
            return jsonify({
                'error': 'Not found',
                'message': str(e)
            }), 404
        else:
            logger.warning(f"Validation error: {str(e)}")
            return jsonify({
                'error': 'Validation error',
                'message': str(e)
            }), 400

    except Exception as e:
        logger.error(f"Unexpected error removing site from config: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'

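The two membership endpoints above compose naturally; a sketch swapping site 5 in for site 2 on config 1 (hypothetical host and auth header as in the earlier sketches):

import requests

headers = {'X-API-Key': 'changeme'}  # hypothetical auth header
base = 'http://localhost:5000/api/configs/1'

# Attach site 5, then detach site 2; each call returns the refreshed config.
requests.post(f'{base}/sites', json={'site_id': 5}, headers=headers).raise_for_status()
resp = requests.delete(f'{base}/sites/2', headers=headers)
print(resp.json()['config']['site_count'])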
@@ -129,7 +129,7 @@ def trigger_scan():
    Trigger a new scan.

    Request body:
        config_file: Path to YAML config file
        config_id: Database config ID (required)

    Returns:
        JSON response with scan_id and status
@@ -137,25 +137,35 @@ def trigger_scan():
    try:
        # Get request data
        data = request.get_json() or {}
        config_file = data.get('config_file')
        config_id = data.get('config_id')

        # Validate required fields
        if not config_file:
            logger.warning("Scan trigger request missing config_file")
        if not config_id:
            logger.warning("Scan trigger request missing config_id")
            return jsonify({
                'error': 'Invalid request',
                'message': 'config_file is required'
                'message': 'config_id is required'
            }), 400

        # Validate config_id is an integer
        try:
            config_id = int(config_id)
        except (TypeError, ValueError):
            logger.warning(f"Invalid config_id type: {config_id}")
            return jsonify({
                'error': 'Invalid request',
                'message': 'config_id must be an integer'
            }), 400

        # Trigger scan via service
        scan_service = ScanService(current_app.db_session)
        scan_id = scan_service.trigger_scan(
            config_file=config_file,
            config_id=config_id,
            triggered_by='api',
            scheduler=current_app.scheduler
        )

        logger.info(f"Scan {scan_id} triggered via API: config={config_file}")
        logger.info(f"Scan {scan_id} triggered via API: config_id={config_id}")

        return jsonify({
            'scan_id': scan_id,
@@ -164,10 +174,10 @@ def trigger_scan():
        }), 201

    except ValueError as e:
        # Config file validation error
        # Config validation error
        error_message = str(e)
        logger.warning(f"Invalid config file: {error_message}")
        logger.warning(f"Request data: config_file='{config_file}'")
        logger.warning(f"Invalid config: {error_message}")
        logger.warning(f"Request data: config_id='{config_id}'")
        return jsonify({
            'error': 'Invalid request',
            'message': error_message

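A sketch of triggering a scan against the reworked endpoint. The scans blueprint's URL prefix is not shown in this diff, so /api/scans is an assumption, as are the host and auth header:

import requests

resp = requests.post(
    'http://localhost:5000/api/scans',  # assumed prefix; not visible in this diff
    json={'config_id': 1},  # config_file is no longer accepted
    headers={'X-API-Key': 'changeme'},  # hypothetical auth header
)
print(resp.status_code, resp.json()['scan_id'])  # 201 on success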
655
app/web/api/sites.py
Normal file
@@ -0,0 +1,655 @@
"""
Sites API blueprint.

Handles endpoints for managing reusable site definitions, including CIDR ranges
and IP-level overrides.
"""

import logging
from flask import Blueprint, current_app, jsonify, request
from sqlalchemy.exc import SQLAlchemyError

from web.auth.decorators import api_auth_required
from web.services.site_service import SiteService
from web.utils.pagination import validate_page_params

bp = Blueprint('sites', __name__)
logger = logging.getLogger(__name__)


@bp.route('', methods=['GET'])
@api_auth_required
def list_sites():
    """
    List all sites with pagination.

    Query params:
        page: Page number (default: 1)
        per_page: Items per page (default: 20, max: 100)
        all: If 'true', returns all sites without pagination (for dropdowns)

    Returns:
        JSON response with sites list and pagination info
    """
    try:
        # Check if requesting all sites (no pagination)
        if request.args.get('all', '').lower() == 'true':
            site_service = SiteService(current_app.db_session)
            sites = site_service.list_all_sites()

            logger.info(f"Listed all sites (count={len(sites)})")
            return jsonify({'sites': sites})

        # Get and validate query parameters
        page = request.args.get('page', 1, type=int)
        per_page = request.args.get('per_page', 20, type=int)

        # Validate pagination params
        page, per_page = validate_page_params(page, per_page)

        # Get sites from service
        site_service = SiteService(current_app.db_session)
        paginated_result = site_service.list_sites(page=page, per_page=per_page)

        logger.info(f"Listed sites: page={page}, per_page={per_page}, total={paginated_result.total}")

        return jsonify({
            'sites': paginated_result.items,
            'total': paginated_result.total,
            'page': paginated_result.page,
            'per_page': paginated_result.per_page,
            'total_pages': paginated_result.pages,
            'has_prev': paginated_result.has_prev,
            'has_next': paginated_result.has_next
        })

    except ValueError as e:
        logger.warning(f"Invalid request parameters: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error listing sites: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve sites'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error listing sites: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500

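A sketch of both read modes of the listing endpoint above — paginated for tables, all=true for dropdowns (hypothetical host and auth header):

import requests

headers = {'X-API-Key': 'changeme'}  # hypothetical auth header

# Paginated listing for a table view.
page1 = requests.get('http://localhost:5000/api/sites',
                     params={'page': 1, 'per_page': 20}, headers=headers).json()
print(page1['total'], page1['has_next'])

# Unpaginated listing for a dropdown.
everything = requests.get('http://localhost:5000/api/sites',
                          params={'all': 'true'}, headers=headers).json()
print(len(everything['sites']))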
@bp.route('/<int:site_id>', methods=['GET'])
@api_auth_required
def get_site(site_id):
    """
    Get details for a specific site.

    Args:
        site_id: Site ID

    Returns:
        JSON response with site details including CIDRs and IP overrides
    """
    try:
        site_service = SiteService(current_app.db_session)
        site = site_service.get_site(site_id)

        if not site:
            logger.warning(f"Site not found: {site_id}")
            return jsonify({
                'error': 'Not found',
                'message': f'Site with ID {site_id} not found'
            }), 404

        logger.info(f"Retrieved site details: {site_id}")
        return jsonify(site)

    except SQLAlchemyError as e:
        logger.error(f"Database error retrieving site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve site'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error retrieving site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('', methods=['POST'])
@api_auth_required
def create_site():
    """
    Create a new site.

    Request body:
        name: Site name (required, must be unique)
        description: Site description (optional)
        cidrs: List of CIDR definitions (optional, but recommended)
            [
                {
                    "cidr": "10.0.0.0/24",
                    "expected_ping": true,
                    "expected_tcp_ports": [22, 80, 443],
                    "expected_udp_ports": [53]
                }
            ]

    Returns:
        JSON response with created site data
    """
    try:
        data = request.get_json() or {}

        # Validate required fields
        name = data.get('name')
        if not name:
            logger.warning("Site creation request missing name")
            return jsonify({
                'error': 'Invalid request',
                'message': 'name is required'
            }), 400

        description = data.get('description')

        # Create site (empty initially)
        site_service = SiteService(current_app.db_session)
        site = site_service.create_site(
            name=name,
            description=description
        )

        logger.info(f"Created site '{name}' (id={site['id']})")
        return jsonify(site), 201

    except ValueError as e:
        logger.warning(f"Invalid site creation request: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error creating site: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to create site'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error creating site: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500

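A sketch of creating a site via the endpoint above. Note that the handler as written persists only name and description, so the cidrs key documented in the docstring would be ignored here and IPs added afterwards via the bulk endpoint below (hypothetical host and auth header):

import requests

resp = requests.post(
    'http://localhost:5000/api/sites',
    json={'name': 'Production DC', 'description': 'Primary datacenter'},
    headers={'X-API-Key': 'changeme'},  # hypothetical auth header
)
site = resp.json()
print(resp.status_code, site['id'])  # 201 on success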
@bp.route('/<int:site_id>', methods=['PUT'])
@api_auth_required
def update_site(site_id):
    """
    Update site metadata (name and/or description).

    Args:
        site_id: Site ID

    Request body:
        name: New site name (optional, must be unique)
        description: New description (optional)

    Returns:
        JSON response with updated site data
    """
    try:
        data = request.get_json() or {}

        name = data.get('name')
        description = data.get('description')

        # Update site
        site_service = SiteService(current_app.db_session)
        site = site_service.update_site(
            site_id=site_id,
            name=name,
            description=description
        )

        logger.info(f"Updated site {site_id}")
        return jsonify(site)

    except ValueError as e:
        logger.warning(f"Invalid site update request: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error updating site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to update site'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error updating site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:site_id>', methods=['DELETE'])
@api_auth_required
def delete_site(site_id):
    """
    Delete a site.

    Prevents deletion if site is used in any scan.

    Args:
        site_id: Site ID

    Returns:
        JSON response with success message
    """
    try:
        site_service = SiteService(current_app.db_session)
        site_service.delete_site(site_id)

        logger.info(f"Deleted site {site_id}")
        return jsonify({
            'message': f'Site {site_id} deleted successfully'
        })

    except ValueError as e:
        logger.warning(f"Cannot delete site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error deleting site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to delete site'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error deleting site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:site_id>/ips/bulk', methods=['POST'])
@api_auth_required
def bulk_add_ips(site_id):
    """
    Bulk add IPs to a site from CIDR or list.

    Args:
        site_id: Site ID

    Request body:
        source_type: "cidr" or "list" (required)
        cidr: CIDR notation if source_type="cidr" (e.g., "10.0.0.0/24")
        ips: List of IP addresses if source_type="list" (e.g., ["10.0.0.1", "10.0.0.2"])
        expected_ping: Expected ping response for all IPs (optional)
        expected_tcp_ports: List of expected TCP ports for all IPs (optional)
        expected_udp_ports: List of expected UDP ports for all IPs (optional)

    Returns:
        JSON response with count of IPs added and any errors
    """
    try:
        data = request.get_json() or {}

        source_type = data.get('source_type')
        if source_type not in ['cidr', 'list']:
            return jsonify({
                'error': 'Invalid request',
                'message': 'source_type must be "cidr" or "list"'
            }), 400

        expected_ping = data.get('expected_ping')
        expected_tcp_ports = data.get('expected_tcp_ports', [])
        expected_udp_ports = data.get('expected_udp_ports', [])

        site_service = SiteService(current_app.db_session)

        if source_type == 'cidr':
            cidr = data.get('cidr')
            if not cidr:
                return jsonify({
                    'error': 'Invalid request',
                    'message': 'cidr is required when source_type="cidr"'
                }), 400

            result = site_service.bulk_add_ips_from_cidr(
                site_id=site_id,
                cidr=cidr,
                expected_ping=expected_ping,
                expected_tcp_ports=expected_tcp_ports,
                expected_udp_ports=expected_udp_ports
            )

            logger.info(f"Bulk added {result['ip_count']} IPs from CIDR '{cidr}' to site {site_id}")
            return jsonify(result), 201

        else:  # source_type == 'list'
            ip_list = data.get('ips', [])
            if not isinstance(ip_list, list):
                return jsonify({
                    'error': 'Invalid request',
                    'message': 'ips must be a list when source_type="list"'
                }), 400

            result = site_service.bulk_add_ips_from_list(
                site_id=site_id,
                ip_list=ip_list,
                expected_ping=expected_ping,
                expected_tcp_ports=expected_tcp_ports,
                expected_udp_ports=expected_udp_ports
            )

            logger.info(f"Bulk added {result['ip_count']} IPs from list to site {site_id}")
            return jsonify(result), 201

    except ValueError as e:
        logger.warning(f"Invalid bulk IP request: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error bulk adding IPs to site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to add IPs'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error bulk adding IPs to site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500

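A sketch exercising both source_type branches of the bulk endpoint above (hypothetical host and auth header):

import requests

headers = {'X-API-Key': 'changeme'}  # hypothetical auth header
url = 'http://localhost:5000/api/sites/1/ips/bulk'

# Expand a /29 into individual SiteIP rows with shared expectations.
from_cidr = requests.post(url, headers=headers, json={
    'source_type': 'cidr',
    'cidr': '10.0.0.0/29',
    'expected_ping': True,
    'expected_tcp_ports': [22, 443],
}).json()
print(from_cidr['ip_count'])

# Or add an explicit list of addresses.
from_list = requests.post(url, headers=headers, json={
    'source_type': 'list',
    'ips': ['10.0.1.5', '10.0.1.6'],
}).json()
print(from_list['ip_count'])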
@bp.route('/<int:site_id>/ips', methods=['GET'])
@api_auth_required
def list_ips(site_id):
    """
    List IPs in a site with pagination.

    Query params:
        page: Page number (default: 1)
        per_page: Items per page (default: 50, max: 200)

    Returns:
        JSON response with IPs list and pagination info
    """
    try:
        # Get and validate query parameters
        page = request.args.get('page', 1, type=int)
        per_page = request.args.get('per_page', 50, type=int)

        # Validate pagination params
        page, per_page = validate_page_params(page, per_page, max_per_page=200)

        # Get IPs from service
        site_service = SiteService(current_app.db_session)
        paginated_result = site_service.list_ips(
            site_id=site_id,
            page=page,
            per_page=per_page
        )

        logger.info(f"Listed IPs for site {site_id}: page={page}, per_page={per_page}, total={paginated_result.total}")

        return jsonify({
            'ips': paginated_result.items,
            'total': paginated_result.total,
            'page': paginated_result.page,
            'per_page': paginated_result.per_page,
            'total_pages': paginated_result.pages,
            'has_prev': paginated_result.has_prev,
            'has_next': paginated_result.has_next
        })

    except ValueError as e:
        logger.warning(f"Invalid request parameters: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error listing IPs for site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve IPs'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error listing IPs for site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:site_id>/ips', methods=['POST'])
@api_auth_required
def add_standalone_ip(site_id):
    """
    Add a standalone IP (without CIDR parent) to a site.

    Args:
        site_id: Site ID

    Request body:
        ip_address: IP address (required)
        expected_ping: Expected ping response (optional)
        expected_tcp_ports: List of expected TCP ports (optional)
        expected_udp_ports: List of expected UDP ports (optional)

    Returns:
        JSON response with created IP data
    """
    try:
        data = request.get_json() or {}

        # Validate required fields
        ip_address = data.get('ip_address')
        if not ip_address:
            logger.warning("Standalone IP creation request missing ip_address")
            return jsonify({
                'error': 'Invalid request',
                'message': 'ip_address is required'
            }), 400

        expected_ping = data.get('expected_ping')
        expected_tcp_ports = data.get('expected_tcp_ports', [])
        expected_udp_ports = data.get('expected_udp_ports', [])

        # Add standalone IP
        site_service = SiteService(current_app.db_session)
        ip_data = site_service.add_standalone_ip(
            site_id=site_id,
            ip_address=ip_address,
            expected_ping=expected_ping,
            expected_tcp_ports=expected_tcp_ports,
            expected_udp_ports=expected_udp_ports
        )

        logger.info(f"Added standalone IP '{ip_address}' to site {site_id}")
        return jsonify(ip_data), 201

    except ValueError as e:
        logger.warning(f"Invalid standalone IP creation request: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error adding standalone IP to site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to add IP'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error adding standalone IP to site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:site_id>/ips/<int:ip_id>', methods=['PUT'])
@api_auth_required
def update_ip_settings(site_id, ip_id):
    """
    Update settings for an individual IP.

    Args:
        site_id: Site ID
        ip_id: IP ID

    Request body:
        expected_ping: New ping expectation (optional)
        expected_tcp_ports: New TCP ports expectation (optional)
        expected_udp_ports: New UDP ports expectation (optional)

    Returns:
        JSON response with updated IP data
    """
    try:
        data = request.get_json() or {}

        expected_ping = data.get('expected_ping')
        expected_tcp_ports = data.get('expected_tcp_ports')
        expected_udp_ports = data.get('expected_udp_ports')

        # Update IP settings
        site_service = SiteService(current_app.db_session)
        ip_data = site_service.update_ip_settings(
            site_id=site_id,
            ip_id=ip_id,
            expected_ping=expected_ping,
            expected_tcp_ports=expected_tcp_ports,
            expected_udp_ports=expected_udp_ports
        )

        logger.info(f"Updated IP {ip_id} in site {site_id}")
        return jsonify(ip_data)

    except ValueError as e:
        logger.warning(f"Invalid IP update request: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error updating IP {ip_id} in site {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to update IP'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error updating IP {ip_id} in site {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500

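A sketch of tightening the expectations on one IP via the PUT endpoint above (hypothetical host, auth header, and IDs):

import requests

resp = requests.put(
    'http://localhost:5000/api/sites/1/ips/42',
    json={'expected_ping': False, 'expected_tcp_ports': [443]},
    headers={'X-API-Key': 'changeme'},  # hypothetical auth header
)
print(resp.json())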
@bp.route('/<int:site_id>/ips/<int:ip_id>', methods=['DELETE'])
@api_auth_required
def remove_ip(site_id, ip_id):
    """
    Remove an IP from a site.

    Args:
        site_id: Site ID
        ip_id: IP ID

    Returns:
        JSON response with success message
    """
    try:
        site_service = SiteService(current_app.db_session)
        site_service.remove_ip(site_id, ip_id)

        logger.info(f"Removed IP {ip_id} from site {site_id}")
        return jsonify({
            'message': f'IP {ip_id} removed successfully'
        })

    except ValueError as e:
        logger.warning(f"Cannot remove IP {ip_id}: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error removing IP {ip_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to remove IP'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error removing IP {ip_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:site_id>/usage', methods=['GET'])
@api_auth_required
def get_site_usage(site_id):
    """
    Get list of scans that use this site.

    Args:
        site_id: Site ID

    Returns:
        JSON response with list of scans
    """
    try:
        site_service = SiteService(current_app.db_session)

        # First check if site exists
        site = site_service.get_site(site_id)
        if not site:
            logger.warning(f"Site not found: {site_id}")
            return jsonify({
                'error': 'Not found',
                'message': f'Site with ID {site_id} not found'
            }), 404

        scans = site_service.get_scan_usage(site_id)

        logger.info(f"Retrieved usage for site {site_id} (count={len(scans)})")
        return jsonify({
            'site_id': site_id,
            'site_name': site['name'],
            'scans': scans,
            'count': len(scans)
        })

    except SQLAlchemyError as e:
        logger.error(f"Database error retrieving site usage {site_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve site usage'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error retrieving site usage {site_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500
@@ -335,6 +335,7 @@ def register_blueprints(app: Flask) -> None:
    from web.api.settings import bp as settings_bp
    from web.api.stats import bp as stats_bp
    from web.api.configs import bp as configs_bp
    from web.api.sites import bp as sites_bp
    from web.auth.routes import bp as auth_bp
    from web.routes.main import bp as main_bp
    from web.routes.webhooks import bp as webhooks_bp
@@ -356,6 +357,7 @@ def register_blueprints(app: Flask) -> None:
    app.register_blueprint(settings_bp, url_prefix='/api/settings')
    app.register_blueprint(stats_bp, url_prefix='/api/stats')
    app.register_blueprint(configs_bp, url_prefix='/api/configs')
    app.register_blueprint(sites_bp, url_prefix='/api/sites')

    app.logger.info("Blueprints registered")


@@ -7,7 +7,7 @@ that are managed by developers, not stored in the database.

# Application metadata
APP_NAME = 'SneakyScanner'
APP_VERSION = '1.0.0-phase5'
APP_VERSION = '1.0.0-alpha'

# Repository URL
REPO_URL = 'https://git.sneakygeek.net/sneakygeek/SneakyScan'

@@ -21,7 +21,7 @@ from web.services.alert_service import AlertService
logger = logging.getLogger(__name__)


def execute_scan(scan_id: int, config_file: str, db_url: str):
def execute_scan(scan_id: int, config_file: str = None, config_id: int = None, db_url: str = None):
    """
    Execute a scan in the background.

@@ -31,9 +31,12 @@ def execute_scan(scan_id: int, config_file: str, db_url: str):

    Args:
        scan_id: ID of the scan record in database
        config_file: Path to YAML configuration file
        config_file: Path to YAML configuration file (legacy, optional)
        config_id: Database config ID (preferred, optional)
        db_url: Database connection URL

    Note: Provide exactly one of config_file or config_id

    Workflow:
        1. Create new database session for this thread
        2. Update scan status to 'running'
@@ -42,7 +45,8 @@ def execute_scan(scan_id: int, config_file: str, db_url: str):
        5. Save results to database
        6. Update status to 'completed' or 'failed'
    """
    logger.info(f"Starting background scan execution: scan_id={scan_id}, config={config_file}")
    config_desc = f"config_id={config_id}" if config_id else f"config_file={config_file}"
    logger.info(f"Starting background scan execution: scan_id={scan_id}, {config_desc}")

    # Create new database session for this thread
    engine = create_engine(db_url, echo=False)
@@ -61,16 +65,21 @@ def execute_scan(scan_id: int, config_file: str, db_url: str):
        scan.started_at = datetime.utcnow()
        session.commit()

        logger.info(f"Scan {scan_id}: Initializing scanner with config {config_file}")
        logger.info(f"Scan {scan_id}: Initializing scanner with {config_desc}")

        # Convert config_file to full path if it's just a filename
        if not config_file.startswith('/'):
            config_path = f'/app/configs/{config_file}'
        # Initialize scanner based on config type
        if config_id:
            # Use database config
            scanner = SneakyScanner(config_id=config_id)
        else:
            config_path = config_file
            # Use YAML config file
            # Convert config_file to full path if it's just a filename
            if not config_file.startswith('/'):
                config_path = f'/app/configs/{config_file}'
            else:
                config_path = config_file

        # Initialize scanner
        scanner = SneakyScanner(config_path)
            scanner = SneakyScanner(config_path=config_path)

        # Execute scan
        logger.info(f"Scan {scan_id}: Running scanner...")

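A sketch of how a caller might hand execute_scan its new keyword arguments from a background thread, mirroring the either/or contract in the docstring. The thread handling and db_url value here are illustrative assumptions, not lifted from this diff:

import threading

# Database-backed config: pass config_id and leave config_file as None.
threading.Thread(
    target=execute_scan,
    kwargs={'scan_id': 7, 'config_id': 1,
            'db_url': 'sqlite:////app/data/scanner.db'},  # hypothetical URL
    daemon=True,
).start()

# Legacy YAML config: pass config_file instead.
threading.Thread(
    target=execute_scan,
    kwargs={'scan_id': 8, 'config_file': 'prod-scan.yaml',
            'db_url': 'sqlite:////app/data/scanner.db'},  # hypothetical URL
    daemon=True,
).start()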
@@ -46,7 +46,8 @@ class Scan(Base):
    timestamp = Column(DateTime, nullable=False, index=True, comment="Scan start time (UTC)")
    duration = Column(Float, nullable=True, comment="Total scan duration in seconds")
    status = Column(String(20), nullable=False, default='running', comment="running, completed, failed")
    config_file = Column(Text, nullable=True, comment="Path to YAML config used")
    config_file = Column(Text, nullable=True, comment="Path to YAML config used (deprecated)")
    config_id = Column(Integer, ForeignKey('scan_configs.id'), nullable=True, index=True, comment="FK to scan_configs table")
    title = Column(Text, nullable=True, comment="Scan title from config")
    json_path = Column(Text, nullable=True, comment="Path to JSON report")
    html_path = Column(Text, nullable=True, comment="Path to HTML report")
@@ -68,6 +69,8 @@ class Scan(Base):
    tls_versions = relationship('ScanTLSVersion', back_populates='scan', cascade='all, delete-orphan')
    alerts = relationship('Alert', back_populates='scan', cascade='all, delete-orphan')
    schedule = relationship('Schedule', back_populates='scans')
    config = relationship('ScanConfig', back_populates='scans')
    site_associations = relationship('ScanSiteAssociation', back_populates='scan', cascade='all, delete-orphan')

    def __repr__(self):
        return f"<Scan(id={self.id}, title='{self.title}', status='{self.status}')>"
@@ -242,6 +245,148 @@ class ScanTLSVersion(Base):
        return f"<ScanTLSVersion(id={self.id}, tls_version='{self.tls_version}', supported={self.supported})>"


# ============================================================================
# Reusable Site Definition Tables
# ============================================================================


class Site(Base):
    """
    Master site definition (reusable across scans).

    Sites represent logical network segments (e.g., "Production DC", "DMZ",
    "Branch Office") that can be reused across multiple scans. Each site
    contains one or more CIDR ranges.
    """
    __tablename__ = 'sites'

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(255), nullable=False, unique=True, index=True, comment="Unique site name")
    description = Column(Text, nullable=True, comment="Site description")
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="Site creation time")
    updated_at = Column(DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow, comment="Last modification time")

    # Relationships
    ips = relationship('SiteIP', back_populates='site', cascade='all, delete-orphan')
    scan_associations = relationship('ScanSiteAssociation', back_populates='site')
    config_associations = relationship('ScanConfigSite', back_populates='site')

    def __repr__(self):
        return f"<Site(id={self.id}, name='{self.name}')>"


class SiteIP(Base):
    """
    Individual IP addresses with their own settings.

    Each IP is directly associated with a site and has its own port and ping settings.
    IPs are standalone entities - CIDRs are only used as a convenience for bulk creation.
    """
    __tablename__ = 'site_ips'

    id = Column(Integer, primary_key=True, autoincrement=True)
    site_id = Column(Integer, ForeignKey('sites.id'), nullable=False, index=True, comment="FK to sites")
    ip_address = Column(String(45), nullable=False, comment="IPv4 or IPv6 address")
    expected_ping = Column(Boolean, nullable=True, comment="Expected ping response for this IP")
    expected_tcp_ports = Column(Text, nullable=True, comment="JSON array of expected TCP ports")
    expected_udp_ports = Column(Text, nullable=True, comment="JSON array of expected UDP ports")
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="IP creation time")

    # Relationships
    site = relationship('Site', back_populates='ips')

    # Index for efficient IP lookups - prevent duplicate IPs within a site
    __table_args__ = (
        UniqueConstraint('site_id', 'ip_address', name='uix_site_ip_address'),
    )

    def __repr__(self):
        return f"<SiteIP(id={self.id}, ip_address='{self.ip_address}')>"


class ScanSiteAssociation(Base):
    """
    Many-to-many relationship between scans and sites.

    Tracks which sites were included in which scans. This allows sites
    to be reused across multiple scans.
    """
    __tablename__ = 'scan_site_associations'

    id = Column(Integer, primary_key=True, autoincrement=True)
    scan_id = Column(Integer, ForeignKey('scans.id'), nullable=False, index=True)
    site_id = Column(Integer, ForeignKey('sites.id'), nullable=False, index=True)
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="Association creation time")

    # Relationships
    scan = relationship('Scan', back_populates='site_associations')
    site = relationship('Site', back_populates='scan_associations')

    # Index to prevent duplicate associations
    __table_args__ = (
        UniqueConstraint('scan_id', 'site_id', name='uix_scan_site'),
    )

    def __repr__(self):
        return f"<ScanSiteAssociation(scan_id={self.scan_id}, site_id={self.site_id})>"

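To make the new schema concrete, a minimal ORM sketch that builds a site, attaches an IP, and records its use by a scan. Session setup is omitted; it assumes a SQLAlchemy Session bound to the migrated database and an existing scan with ID 1:

from datetime import datetime

site = Site(name='DMZ', description='Edge segment')
site.ips.append(SiteIP(
    ip_address='192.0.2.10',
    expected_ping=True,
    expected_tcp_ports='[80, 443]',  # stored as a JSON string per the column comments
))
session.add(site)
session.flush()  # assign site.id before linking

# Link the site to an existing scan; uix_scan_site blocks duplicate pairs.
session.add(ScanSiteAssociation(scan_id=1, site_id=site.id, created_at=datetime.utcnow()))
session.commit()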
# ============================================================================
|
||||
# Scan Configuration Tables
|
||||
# ============================================================================
|
||||
|
||||
|
||||
class ScanConfig(Base):
|
||||
"""
|
||||
Scan configurations stored in database (replaces YAML files).
|
||||
|
||||
Stores reusable scan configurations that reference sites from the
|
||||
sites table. Configs define what sites to scan together.
|
||||
"""
|
||||
__tablename__ = 'scan_configs'
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
title = Column(String(255), nullable=False, comment="Configuration title")
|
||||
description = Column(Text, nullable=True, comment="Configuration description")
|
||||
created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="Config creation time")
|
||||
updated_at = Column(DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow, comment="Last modification time")
|
||||
|
||||
# Relationships
|
||||
site_associations = relationship('ScanConfigSite', back_populates='config', cascade='all, delete-orphan')
|
||||
scans = relationship('Scan', back_populates='config')
|
||||
schedules = relationship('Schedule', back_populates='config')
|
||||
|
||||
def __repr__(self):
|
||||
return f"<ScanConfig(id={self.id}, title='{self.title}')>"
|
||||
|
||||
|
||||
class ScanConfigSite(Base):
|
||||
"""
|
||||
Many-to-many relationship between scan configs and sites.
|
||||
|
||||
Links scan configurations to the sites they should scan. A config
|
||||
can reference multiple sites, and sites can be used in multiple configs.
|
||||
"""
|
||||
__tablename__ = 'scan_config_sites'
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
config_id = Column(Integer, ForeignKey('scan_configs.id'), nullable=False, index=True)
|
||||
site_id = Column(Integer, ForeignKey('sites.id'), nullable=False, index=True)
|
||||
created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="Association creation time")
|
||||
|
||||
# Relationships
|
||||
config = relationship('ScanConfig', back_populates='site_associations')
|
||||
site = relationship('Site', back_populates='config_associations')
|
||||
|
||||
# Index to prevent duplicate associations
|
||||
__table_args__ = (
|
||||
UniqueConstraint('config_id', 'site_id', name='uix_config_site'),
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<ScanConfigSite(config_id={self.config_id}, site_id={self.site_id})>"
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Scheduling & Notifications Tables
|
||||
# ============================================================================
|
||||
@@ -258,7 +403,8 @@ class Schedule(Base):
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
name = Column(String(255), nullable=False, comment="Schedule name (e.g., 'Daily prod scan')")
|
||||
config_file = Column(Text, nullable=False, comment="Path to YAML config")
|
||||
config_file = Column(Text, nullable=True, comment="Path to YAML config (deprecated)")
|
||||
config_id = Column(Integer, ForeignKey('scan_configs.id'), nullable=True, index=True, comment="FK to scan_configs table")
|
||||
cron_expression = Column(String(100), nullable=False, comment="Cron-like schedule (e.g., '0 2 * * *')")
|
||||
enabled = Column(Boolean, nullable=False, default=True, comment="Is schedule active?")
|
||||
last_run = Column(DateTime, nullable=True, comment="Last execution time")
|
||||
@@ -268,6 +414,7 @@ class Schedule(Base):
|
||||
|
||||
# Relationships
|
||||
scans = relationship('Scan', back_populates='schedule')
|
||||
config = relationship('ScanConfig', back_populates='schedules')
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Schedule(id={self.id}, name='{self.name}', enabled={self.enabled})>"
|
||||
@@ -330,12 +477,13 @@ class AlertRule(Base):
|
||||
webhook_enabled = Column(Boolean, nullable=False, default=False, comment="Send webhook for this rule?")
|
||||
severity = Column(String(20), nullable=True, comment="Alert severity: critical, warning, info")
|
||||
filter_conditions = Column(Text, nullable=True, comment="JSON filter conditions for the rule")
|
||||
config_file = Column(String(255), nullable=True, comment="Optional: specific config file this rule applies to")
|
||||
config_id = Column(Integer, ForeignKey('scan_configs.id'), nullable=True, index=True, comment="Optional: specific config this rule applies to")
|
||||
created_at = Column(DateTime, nullable=False, default=datetime.utcnow, comment="Rule creation time")
|
||||
updated_at = Column(DateTime, nullable=True, comment="Last update time")
|
||||
|
||||
# Relationships
|
||||
alerts = relationship("Alert", back_populates="rule", cascade="all, delete-orphan")
|
||||
config = relationship("ScanConfig", backref="alert_rules")
|
||||
|
||||
def __repr__(self):
|
||||
return f"<AlertRule(id={self.id}, name='{self.name}', rule_type='{self.rule_type}', enabled={self.enabled})>"
|
||||
|
||||
@@ -35,20 +35,7 @@ def dashboard():
|
||||
Returns:
|
||||
Rendered dashboard template
|
||||
"""
|
||||
import os
|
||||
|
||||
# Get list of available config files
|
||||
configs_dir = '/app/configs'
|
||||
config_files = []
|
||||
|
||||
try:
|
||||
if os.path.exists(configs_dir):
|
||||
config_files = [f for f in os.listdir(configs_dir) if f.endswith(('.yaml', '.yml'))]
|
||||
config_files.sort()
|
||||
except Exception as e:
|
||||
logger.error(f"Error listing config files: {e}")
|
||||
|
||||
return render_template('dashboard.html', config_files=config_files)
|
||||
return render_template('dashboard.html')
|
||||
|
||||
|
||||
@bp.route('/scans')
|
||||
@@ -60,20 +47,7 @@ def scans():
|
||||
Returns:
|
||||
Rendered scans list template
|
||||
"""
|
||||
import os
|
||||
|
||||
# Get list of available config files
|
||||
configs_dir = '/app/configs'
|
||||
config_files = []
|
||||
|
||||
try:
|
||||
if os.path.exists(configs_dir):
|
||||
config_files = [f for f in os.listdir(configs_dir) if f.endswith(('.yaml', '.yml'))]
|
||||
config_files.sort()
|
||||
except Exception as e:
|
||||
logger.error(f"Error listing config files: {e}")
|
||||
|
||||
return render_template('scans.html', config_files=config_files)
|
||||
return render_template('scans.html')
|
||||
|
||||
|
||||
@bp.route('/scans/<int:scan_id>')
|
||||
@@ -164,6 +138,18 @@ def edit_schedule(schedule_id):
|
||||
return render_template('schedule_edit.html', schedule_id=schedule_id)
|
||||
|
||||
|
||||
@bp.route('/sites')
|
||||
@login_required
|
||||
def sites():
|
||||
"""
|
||||
Sites management page - manage reusable site definitions.
|
||||
|
||||
Returns:
|
||||
Rendered sites template
|
||||
"""
|
||||
return render_template('sites.html')
|
||||
|
||||
|
||||
@bp.route('/configs')
|
||||
@login_required
|
||||
def configs():
|
||||
@@ -287,7 +273,6 @@ def alert_rules():
|
||||
Returns:
|
||||
Rendered alert rules template
|
||||
"""
|
||||
import os
|
||||
from flask import current_app
|
||||
from web.models import AlertRule
|
||||
|
||||
@@ -305,19 +290,7 @@ def alert_rules():
|
||||
if rules is None:
|
||||
rules = []
|
||||
|
||||
# Get list of available config files
|
||||
configs_dir = '/app/configs'
|
||||
config_files = []
|
||||
|
||||
try:
|
||||
if os.path.exists(configs_dir):
|
||||
config_files = [f for f in os.listdir(configs_dir) if f.endswith(('.yaml', '.yml'))]
|
||||
config_files.sort()
|
||||
except Exception as e:
|
||||
logger.error(f"Error listing config files: {e}")
|
||||
|
||||
return render_template(
|
||||
'alert_rules.html',
|
||||
rules=rules,
|
||||
config_files=config_files
|
||||
rules=rules
|
||||
)
|
||||
|
||||
@@ -1,8 +1,8 @@
"""
Config Service - Business logic for config file management
Config Service - Business logic for config management

This service handles all operations related to scan configuration files,
including creation, validation, listing, and deletion.
This service handles all operations related to scan configurations,
both database-stored (primary) and file-based (deprecated).
"""

import os
@@ -13,26 +13,343 @@ from typing import Dict, List, Tuple, Any, Optional
from datetime import datetime
from pathlib import Path
from werkzeug.utils import secure_filename
from sqlalchemy.orm import Session


class ConfigService:
    """Business logic for config management"""

    def __init__(self, configs_dir: str = '/app/configs'):
    def __init__(self, db_session: Session = None, configs_dir: str = '/app/configs'):
        """
        Initialize the config service.

        Args:
            configs_dir: Directory where config files are stored
            db_session: SQLAlchemy database session (for database operations)
            configs_dir: Directory where legacy config files are stored
        """
        self.db = db_session
        self.configs_dir = configs_dir

        # Ensure configs directory exists
        # Ensure configs directory exists (for legacy YAML configs)
        os.makedirs(self.configs_dir, exist_ok=True)

    def list_configs(self) -> List[Dict[str, Any]]:
    # ============================================================================
    # Database-based Config Operations (Primary)
    # ============================================================================

    def create_config(self, title: str, description: Optional[str], site_ids: List[int]) -> Dict[str, Any]:
        """
        List all config files with metadata.
        Create a new scan configuration in the database.

        Args:
            title: Configuration title
            description: Optional configuration description
            site_ids: List of site IDs to include in this config

        Returns:
            Created config as dictionary:
            {
                "id": 1,
                "title": "Production Scan",
                "description": "...",
                "site_count": 3,
                "sites": [...],
                "created_at": "2025-11-19T10:30:00Z",
                "updated_at": "2025-11-19T10:30:00Z"
            }

        Raises:
            ValueError: If validation fails or sites don't exist
        """
        if not title or not title.strip():
            raise ValueError("Title is required")

        if not site_ids or len(site_ids) == 0:
            raise ValueError("At least one site must be selected")

        # Import models here to avoid circular imports
        from web.models import ScanConfig, ScanConfigSite, Site

        # Verify all sites exist
        existing_sites = self.db.query(Site).filter(Site.id.in_(site_ids)).all()
        if len(existing_sites) != len(site_ids):
            found_ids = {s.id for s in existing_sites}
            missing_ids = set(site_ids) - found_ids
            raise ValueError(f"Sites not found: {missing_ids}")

        # Create config
        config = ScanConfig(
            title=title.strip(),
            description=description.strip() if description else None,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow()
        )

        self.db.add(config)
        self.db.flush()  # Get the config ID

        # Create associations
        for site_id in site_ids:
            assoc = ScanConfigSite(
                config_id=config.id,
                site_id=site_id,
                created_at=datetime.utcnow()
            )
            self.db.add(assoc)

        self.db.commit()

        return self.get_config_by_id(config.id)

    def get_config_by_id(self, config_id: int) -> Dict[str, Any]:
        """
        Get a scan configuration by ID.

        Args:
            config_id: Configuration ID

        Returns:
            Config as dictionary with sites

        Raises:
            ValueError: If config not found
        """
        from web.models import ScanConfig

        config = self.db.query(ScanConfig).filter_by(id=config_id).first()

        if not config:
            raise ValueError(f"Config with ID {config_id} not found")

        # Get associated sites
        sites = []
        for assoc in config.site_associations:
            site = assoc.site
            sites.append({
                'id': site.id,
                'name': site.name,
                'description': site.description,
                'ip_count': len(site.ips)
            })

        return {
            'id': config.id,
            'title': config.title,
            'description': config.description,
            'site_count': len(sites),
            'sites': sites,
            'created_at': config.created_at.isoformat() + 'Z' if config.created_at else None,
            'updated_at': config.updated_at.isoformat() + 'Z' if config.updated_at else None
        }

    def list_configs_db(self) -> List[Dict[str, Any]]:
        """
        List all scan configurations from database.

        Returns:
            List of config dictionaries with metadata
        """
        from web.models import ScanConfig

        configs = self.db.query(ScanConfig).order_by(ScanConfig.updated_at.desc()).all()

        result = []
        for config in configs:
            sites = []
            for assoc in config.site_associations:
                site = assoc.site
                sites.append({
                    'id': site.id,
                    'name': site.name
                })

            result.append({
                'id': config.id,
                'title': config.title,
                'description': config.description,
                'site_count': len(sites),
                'sites': sites,
                'created_at': config.created_at.isoformat() + 'Z' if config.created_at else None,
                'updated_at': config.updated_at.isoformat() + 'Z' if config.updated_at else None
            })

        return result

    def update_config(self, config_id: int, title: Optional[str], description: Optional[str], site_ids: Optional[List[int]]) -> Dict[str, Any]:
        """
        Update a scan configuration.

        Args:
            config_id: Configuration ID to update
            title: New title (optional)
            description: New description (optional)
            site_ids: New list of site IDs (optional, replaces existing)

        Returns:
            Updated config dictionary

        Raises:
            ValueError: If config not found or validation fails
        """
        from web.models import ScanConfig, ScanConfigSite, Site

        config = self.db.query(ScanConfig).filter_by(id=config_id).first()

        if not config:
            raise ValueError(f"Config with ID {config_id} not found")

        # Update fields if provided
        if title is not None:
            if not title.strip():
                raise ValueError("Title cannot be empty")
            config.title = title.strip()

        if description is not None:
            config.description = description.strip() if description.strip() else None

        # Update sites if provided
        if site_ids is not None:
            if len(site_ids) == 0:
                raise ValueError("At least one site must be selected")

            # Verify all sites exist
            existing_sites = self.db.query(Site).filter(Site.id.in_(site_ids)).all()
            if len(existing_sites) != len(site_ids):
                found_ids = {s.id for s in existing_sites}
                missing_ids = set(site_ids) - found_ids
                raise ValueError(f"Sites not found: {missing_ids}")

            # Remove existing associations
            self.db.query(ScanConfigSite).filter_by(config_id=config_id).delete()

            # Create new associations
            for site_id in site_ids:
                assoc = ScanConfigSite(
                    config_id=config_id,
                    site_id=site_id,
                    created_at=datetime.utcnow()
                )
                self.db.add(assoc)

        config.updated_at = datetime.utcnow()
        self.db.commit()

        return self.get_config_by_id(config_id)

    def delete_config(self, config_id: int) -> None:
        """
        Delete a scan configuration from database.

        This will cascade delete associated ScanConfigSite records.
        Schedules and scans referencing this config will have their
        config_id set to NULL.

        Args:
            config_id: Configuration ID to delete

        Raises:
            ValueError: If config not found
        """
        from web.models import ScanConfig

        config = self.db.query(ScanConfig).filter_by(id=config_id).first()

        if not config:
            raise ValueError(f"Config with ID {config_id} not found")

        self.db.delete(config)
        self.db.commit()

    def add_site_to_config(self, config_id: int, site_id: int) -> Dict[str, Any]:
        """
        Add a site to an existing config.

        Args:
            config_id: Configuration ID
            site_id: Site ID to add

        Returns:
            Updated config dictionary

        Raises:
            ValueError: If config or site not found, or association already exists
        """
        from web.models import ScanConfig, Site, ScanConfigSite

        config = self.db.query(ScanConfig).filter_by(id=config_id).first()
        if not config:
            raise ValueError(f"Config with ID {config_id} not found")

        site = self.db.query(Site).filter_by(id=site_id).first()
        if not site:
            raise ValueError(f"Site with ID {site_id} not found")

        # Check if association already exists
        existing = self.db.query(ScanConfigSite).filter_by(
            config_id=config_id, site_id=site_id
        ).first()

        if existing:
            raise ValueError(f"Site '{site.name}' is already in this config")

        # Create association
        assoc = ScanConfigSite(
            config_id=config_id,
            site_id=site_id,
            created_at=datetime.utcnow()
        )
        self.db.add(assoc)

        config.updated_at = datetime.utcnow()
        self.db.commit()

        return self.get_config_by_id(config_id)

    def remove_site_from_config(self, config_id: int, site_id: int) -> Dict[str, Any]:
        """
        Remove a site from a config.

        Args:
            config_id: Configuration ID
            site_id: Site ID to remove

        Returns:
            Updated config dictionary

        Raises:
            ValueError: If config not found, or removing would leave config empty
        """
        from web.models import ScanConfig, ScanConfigSite

        config = self.db.query(ScanConfig).filter_by(id=config_id).first()
        if not config:
            raise ValueError(f"Config with ID {config_id} not found")

        # Check if this would leave the config empty
        current_site_count = len(config.site_associations)
        if current_site_count <= 1:
            raise ValueError("Cannot remove last site from config. Delete the config instead.")

        # Remove association
        deleted = self.db.query(ScanConfigSite).filter_by(
            config_id=config_id, site_id=site_id
        ).delete()

        if deleted == 0:
            raise ValueError(f"Site with ID {site_id} is not in this config")

        config.updated_at = datetime.utcnow()
        self.db.commit()

        return self.get_config_by_id(config_id)

    # ============================================================================
    # Legacy YAML File Operations (Deprecated)
    # ============================================================================

    def list_configs_file(self) -> List[Dict[str, Any]]:
        """
        [DEPRECATED] List all config files with metadata.

        Returns:
            List of config metadata dictionaries:
@@ -175,6 +492,9 @@ class ConfigService:
        if not is_valid:
            raise ValueError(f"Invalid config structure: {error_msg}")

        # Create inline sites in database (if any)
        self.create_inline_sites(parsed)

        # Write file
        with open(filepath, 'w') as f:
            f.write(content)
@@ -266,9 +586,9 @@ class ConfigService:

        return filename, yaml_content

    def update_config(self, filename: str, yaml_content: str) -> None:
    def update_config_file(self, filename: str, yaml_content: str) -> None:
        """
        Update existing config file with new YAML content.
        [DEPRECATED] Update existing config file with new YAML content.

        Args:
            filename: Config filename to update
@@ -299,9 +619,9 @@ class ConfigService:
        with open(filepath, 'w') as f:
            f.write(yaml_content)

    def delete_config(self, filename: str) -> None:
    def delete_config_file(self, filename: str) -> None:
        """
        Delete config file and cascade delete any associated schedules.
        [DEPRECATED] Delete config file and cascade delete any associated schedules.

        When a config is deleted, all schedules using that config (both enabled
        and disabled) are automatically deleted as well, since they would be
@@ -371,12 +691,15 @@ class ConfigService:
        # Delete file
        os.remove(filepath)

    def validate_config_content(self, content: Dict) -> Tuple[bool, str]:
    def validate_config_content(self, content: Dict, check_site_refs: bool = True) -> Tuple[bool, str]:
        """
        Validate parsed YAML config structure.

        Supports both legacy format (inline IPs) and new format (site references or CIDRs).

        Args:
            content: Parsed YAML config as dict
            check_site_refs: If True, validates that referenced sites exist in database

        Returns:
            Tuple of (is_valid, error_message)
@@ -408,11 +731,65 @@ class ConfigService:
            if not isinstance(site, dict):
                return False, f"Site {i+1} must be a dictionary/object"

            # Check if this is a site reference (new format)
            if 'site_ref' in site:
                # Site reference format
                site_ref = site.get('site_ref')
                if not isinstance(site_ref, str) or not site_ref.strip():
                    return False, f"Site {i+1} field 'site_ref' must be a non-empty string"

                # Validate site reference exists (if check enabled)
                if check_site_refs:
                    try:
                        from web.services.site_service import SiteService
                        from flask import current_app

                        site_service = SiteService(current_app.db_session)
                        referenced_site = site_service.get_site_by_name(site_ref)
                        if not referenced_site:
                            return False, f"Site {i+1}: Referenced site '{site_ref}' does not exist"
                    except Exception as e:
                        # If we can't check (e.g., outside app context), skip validation
                        pass

                continue  # Site reference is valid

            # Check if this is inline site creation with CIDRs (new format)
            if 'cidrs' in site:
                # Inline site creation with CIDR format
                if 'name' not in site:
                    return False, f"Site {i+1} with inline CIDRs missing required field: 'name'"

                cidrs = site.get('cidrs')
                if not isinstance(cidrs, list):
                    return False, f"Site {i+1} field 'cidrs' must be a list"

                if len(cidrs) == 0:
                    return False, f"Site {i+1} must have at least one CIDR"

                # Validate each CIDR
                for j, cidr_config in enumerate(cidrs):
                    if not isinstance(cidr_config, dict):
                        return False, f"Site {i+1} CIDR {j+1} must be a dictionary/object"

                    if 'cidr' not in cidr_config:
                        return False, f"Site {i+1} CIDR {j+1} missing required field: 'cidr'"

                    # Validate CIDR format
                    cidr_str = cidr_config.get('cidr')
                    try:
                        ipaddress.ip_network(cidr_str, strict=False)
                    except ValueError:
                        return False, f"Site {i+1} CIDR {j+1}: Invalid CIDR notation '{cidr_str}'"

                continue  # Inline CIDR site is valid

            # Legacy format: inline IPs
            if 'name' not in site:
                return False, f"Site {i+1} missing required field: 'name'"

            if 'ips' not in site:
                return False, f"Site {i+1} missing required field: 'ips'"
                return False, f"Site {i+1} missing required field: 'ips' (or use 'site_ref' or 'cidrs')"

            if not isinstance(site['ips'], list):
                return False, f"Site {i+1} field 'ips' must be a list"
@@ -550,3 +927,60 @@ class ConfigService:
        """
        filepath = os.path.join(self.configs_dir, filename)
        return os.path.exists(filepath) and os.path.isfile(filepath)

    def create_inline_sites(self, config_content: Dict) -> None:
        """
        Create sites in the database for inline site definitions in a config.

        This method scans the config for inline site definitions (with CIDRs)
        and creates them as reusable sites in the database if they don't already exist.

        Args:
            config_content: Parsed YAML config dictionary

        Raises:
            ValueError: If site creation fails
        """
        try:
            from web.services.site_service import SiteService
            from flask import current_app

            site_service = SiteService(current_app.db_session)

            sites = config_content.get('sites', [])

            for site_def in sites:
                # Skip site references (they already exist)
                if 'site_ref' in site_def:
                    continue

                # Skip legacy IP-based sites (not creating those as reusable sites)
                if 'ips' in site_def and 'cidrs' not in site_def:
                    continue

                # Process inline CIDR-based sites
                if 'cidrs' in site_def:
                    site_name = site_def.get('name')

                    # Check if site already exists
                    existing_site = site_service.get_site_by_name(site_name)
                    if existing_site:
                        # Site already exists, skip creation
                        continue

                    # Create new site
                    cidrs = site_def.get('cidrs', [])
                    description = f"Auto-created from config '{config_content.get('title', 'Unknown')}'"

                    site_service.create_site(
                        name=site_name,
                        description=description,
                        cidrs=cidrs
                    )

        except Exception as e:
            # If site creation fails, log but don't block config creation
            import logging
            logging.getLogger(__name__).warning(
                f"Failed to create inline sites from config: {str(e)}"
            )

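For orientation, a minimal usage sketch of the new database-backed API; the session wiring here is assumed, not part of this change:

    from web.services.config_service import ConfigService

    service = ConfigService(db_session=session)  # `session` is an assumed SQLAlchemy session

    # Create a config from existing site IDs; raises ValueError on a blank
    # title or unknown site IDs, as validated above.
    cfg = service.create_config(title="Prod Scan", description=None, site_ids=[1, 2])

    # List all configs, most recently updated first.
    for c in service.list_configs_db():
        print(c['id'], c['title'], c['site_count'])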
@@ -16,7 +16,7 @@ from sqlalchemy.orm import Session, joinedload

from web.models import (
    Scan, ScanSite, ScanIP, ScanPort, ScanService as ScanServiceModel,
    ScanCertificate, ScanTLSVersion
    ScanCertificate, ScanTLSVersion, Site, ScanSiteAssociation
)
from web.utils.pagination import paginate, PaginatedResult
from web.utils.validators import validate_config_file, validate_scan_status
@@ -41,8 +41,9 @@ class ScanService:
        """
        self.db = db_session

    def trigger_scan(self, config_file: str, triggered_by: str = 'manual',
                     schedule_id: Optional[int] = None, scheduler=None) -> int:
    def trigger_scan(self, config_file: str = None, config_id: int = None,
                     triggered_by: str = 'manual', schedule_id: Optional[int] = None,
                     scheduler=None) -> int:
        """
        Trigger a new scan.

@@ -50,7 +51,8 @@ class ScanService:
        queues the scan for background execution.

        Args:
            config_file: Path to YAML configuration file
            config_file: Path to YAML configuration file (legacy, optional)
            config_id: Database config ID (preferred, optional)
            triggered_by: Source that triggered scan (manual, scheduled, api)
            schedule_id: Optional schedule ID if triggered by schedule
            scheduler: Optional SchedulerService instance for queuing background jobs
@@ -59,57 +61,106 @@ class ScanService:
            Scan ID of the created scan

        Raises:
            ValueError: If config file is invalid
            ValueError: If config is invalid or both/neither config_file and config_id provided
        """
        # Validate config file
        is_valid, error_msg = validate_config_file(config_file)
        if not is_valid:
            raise ValueError(f"Invalid config file: {error_msg}")
        # Validate that exactly one config source is provided
        if not (bool(config_file) ^ bool(config_id)):
            raise ValueError("Must provide exactly one of config_file or config_id")

        # Convert config_file to full path if it's just a filename
        if not config_file.startswith('/'):
            config_path = f'/app/configs/{config_file}'
        # Handle database config
        if config_id:
            from web.models import ScanConfig

            # Validate config exists
            db_config = self.db.query(ScanConfig).filter_by(id=config_id).first()
            if not db_config:
                raise ValueError(f"Config with ID {config_id} not found")

            # Create scan record with config_id
            scan = Scan(
                timestamp=datetime.utcnow(),
                status='running',
                config_id=config_id,
                title=db_config.title,
                triggered_by=triggered_by,
                schedule_id=schedule_id,
                created_at=datetime.utcnow()
            )

            self.db.add(scan)
            self.db.commit()
            self.db.refresh(scan)

            logger.info(f"Scan {scan.id} triggered via {triggered_by} with config_id={config_id}")

            # Queue background job if scheduler provided
            if scheduler:
                try:
                    job_id = scheduler.queue_scan(scan.id, config_id=config_id)
                    logger.info(f"Scan {scan.id} queued for background execution (job_id={job_id})")
                except Exception as e:
                    logger.error(f"Failed to queue scan {scan.id}: {str(e)}")
                    # Mark scan as failed if job queuing fails
                    scan.status = 'failed'
                    scan.error_message = f"Failed to queue background job: {str(e)}"
                    self.db.commit()
                    raise
            else:
                logger.warning(f"Scan {scan.id} created but not queued (no scheduler provided)")

            return scan.id

        # Handle legacy YAML config file
        else:
            config_path = config_file
            # Validate config file
            is_valid, error_msg = validate_config_file(config_file)
            if not is_valid:
                raise ValueError(f"Invalid config file: {error_msg}")

            # Load config to get title
            import yaml
            with open(config_path, 'r') as f:
                config = yaml.safe_load(f)
            # Convert config_file to full path if it's just a filename
            if not config_file.startswith('/'):
                config_path = f'/app/configs/{config_file}'
            else:
                config_path = config_file

            # Create scan record
            scan = Scan(
                timestamp=datetime.utcnow(),
                status='running',
                config_file=config_file,
                title=config.get('title', 'Untitled Scan'),
                triggered_by=triggered_by,
                schedule_id=schedule_id,
                created_at=datetime.utcnow()
            )
            # Load config to get title
            import yaml
            with open(config_path, 'r') as f:
                config = yaml.safe_load(f)

            self.db.add(scan)
            self.db.commit()
            self.db.refresh(scan)
            # Create scan record
            scan = Scan(
                timestamp=datetime.utcnow(),
                status='running',
                config_file=config_file,
                title=config.get('title', 'Untitled Scan'),
                triggered_by=triggered_by,
                schedule_id=schedule_id,
                created_at=datetime.utcnow()
            )

            logger.info(f"Scan {scan.id} triggered via {triggered_by}")
            self.db.add(scan)
            self.db.commit()
            self.db.refresh(scan)

            # Queue background job if scheduler provided
            if scheduler:
                try:
                    job_id = scheduler.queue_scan(scan.id, config_file)
                    logger.info(f"Scan {scan.id} queued for background execution (job_id={job_id})")
                except Exception as e:
                    logger.error(f"Failed to queue scan {scan.id}: {str(e)}")
                    # Mark scan as failed if job queuing fails
                    scan.status = 'failed'
                    scan.error_message = f"Failed to queue background job: {str(e)}"
                    self.db.commit()
                    raise
            else:
                logger.warning(f"Scan {scan.id} created but not queued (no scheduler provided)")
            logger.info(f"Scan {scan.id} triggered via {triggered_by}")

            return scan.id
            # Queue background job if scheduler provided
            if scheduler:
                try:
                    job_id = scheduler.queue_scan(scan.id, config_file=config_file)
                    logger.info(f"Scan {scan.id} queued for background execution (job_id={job_id})")
                except Exception as e:
                    logger.error(f"Failed to queue scan {scan.id}: {str(e)}")
                    # Mark scan as failed if job queuing fails
                    scan.status = 'failed'
                    scan.error_message = f"Failed to queue background job: {str(e)}"
                    self.db.commit()
                    raise
            else:
                logger.warning(f"Scan {scan.id} created but not queued (no scheduler provided)")

            return scan.id

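The exactly-one-source guard above relies on coercing each argument to bool before XOR; a quick sketch of why that works:

    # bool(x) ^ bool(y) is True only when exactly one of x, y is truthy.
    assert (bool('prod.yaml') ^ bool(None)) is True   # file only: accepted
    assert (bool(None) ^ bool(42)) is True            # config_id only: accepted
    assert (bool('prod.yaml') ^ bool(42)) is False    # both: rejected
    assert (bool(None) ^ bool(None)) is False         # neither: rejected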
    def get_scan(self, scan_id: int) -> Optional[Dict[str, Any]]:
        """
@@ -366,6 +417,34 @@ class ScanService:
            self.db.add(site)
            self.db.flush()  # Get site.id for foreign key

            # Create ScanSiteAssociation if this site exists in the database
            # This links the scan to reusable site definitions
            master_site = (
                self.db.query(Site)
                .filter(Site.name == site_data['name'])
                .first()
            )

            if master_site:
                # Check if association already exists (avoid duplicates)
                existing_assoc = (
                    self.db.query(ScanSiteAssociation)
                    .filter(
                        ScanSiteAssociation.scan_id == scan_obj.id,
                        ScanSiteAssociation.site_id == master_site.id
                    )
                    .first()
                )

                if not existing_assoc:
                    assoc = ScanSiteAssociation(
                        scan_id=scan_obj.id,
                        site_id=master_site.id,
                        created_at=datetime.utcnow()
                    )
                    self.db.add(assoc)
                    logger.debug(f"Created association between scan {scan_obj.id} and site '{master_site.name}' (id={master_site.id})")

            # Process each IP in this site
            for ip_data in site_data.get('ips', []):
                # Create ScanIP record

@@ -149,13 +149,16 @@ class SchedulerService:
        except Exception as e:
            logger.error(f"Error loading schedules on startup: {str(e)}", exc_info=True)

    def queue_scan(self, scan_id: int, config_file: str) -> str:
    def queue_scan(self, scan_id: int, config_file: str = None, config_id: int = None) -> str:
        """
        Queue a scan for immediate background execution.

        Args:
            scan_id: Database ID of the scan
            config_file: Path to YAML configuration file
            config_file: Path to YAML configuration file (legacy, optional)
            config_id: Database config ID (preferred, optional)

        Note: Provide exactly one of config_file or config_id

        Returns:
            Job ID from APScheduler
@@ -169,7 +172,7 @@ class SchedulerService:
        # Add job to run immediately
        job = self.scheduler.add_job(
            func=execute_scan,
            args=[scan_id, config_file, self.db_url],
            kwargs={'scan_id': scan_id, 'config_file': config_file, 'config_id': config_id, 'db_url': self.db_url},
            id=f'scan_{scan_id}',
            name=f'Scan {scan_id}',
            replace_existing=True,

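With the move from positional args to kwargs, either config source reaches execute_scan under its own name; the two call shapes look roughly like this (the scheduler instance is assumed):

    # Database-backed config (preferred)
    job_id = scheduler.queue_scan(scan_id=scan.id, config_id=42)

    # Legacy YAML config file
    job_id = scheduler.queue_scan(scan_id=scan.id, config_file='prod.yaml')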
656
app/web/services/site_service.py
Normal file
@@ -0,0 +1,656 @@
"""
|
||||
Site service for managing reusable site definitions.
|
||||
|
||||
This service handles the business logic for creating, updating, and managing
|
||||
sites with their associated CIDR ranges and IP-level overrides.
|
||||
"""
|
||||
|
||||
import ipaddress
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.orm import Session, joinedload
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from web.models import (
|
||||
Site, SiteIP, ScanSiteAssociation
|
||||
)
|
||||
from web.utils.pagination import paginate, PaginatedResult
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SiteService:
|
||||
"""
|
||||
Service for managing reusable site definitions.
|
||||
|
||||
Handles site lifecycle: creation, updates, deletion (with safety checks),
|
||||
CIDR management, and IP-level overrides.
|
||||
"""
|
||||
|
||||
def __init__(self, db_session: Session):
|
||||
"""
|
||||
Initialize site service.
|
||||
|
||||
Args:
|
||||
db_session: SQLAlchemy database session
|
||||
"""
|
||||
self.db = db_session
|
||||
|
||||
    def create_site(self, name: str, description: Optional[str] = None) -> Dict[str, Any]:
        """
        Create a new site.

        Args:
            name: Unique site name
            description: Optional site description

        Returns:
            Dictionary with created site data

        Raises:
            ValueError: If site name already exists
        """
        # Validate site name is unique
        existing = self.db.query(Site).filter(Site.name == name).first()
        if existing:
            raise ValueError(f"Site with name '{name}' already exists")

        # Create site (can be empty, IPs added separately)
        site = Site(
            name=name,
            description=description,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow()
        )

        self.db.add(site)
        self.db.commit()
        self.db.refresh(site)

        logger.info(f"Created site '{name}' (id={site.id})")

        return self._site_to_dict(site)

    def update_site(self, site_id: int, name: Optional[str] = None,
                    description: Optional[str] = None) -> Dict[str, Any]:
        """
        Update site metadata (name and/or description).

        Args:
            site_id: Site ID to update
            name: New site name (must be unique)
            description: New description

        Returns:
            Dictionary with updated site data

        Raises:
            ValueError: If site not found or name already exists
        """
        site = self.db.query(Site).filter(Site.id == site_id).first()
        if not site:
            raise ValueError(f"Site with id {site_id} not found")

        # Update name if provided
        if name is not None and name != site.name:
            # Check uniqueness
            existing = self.db.query(Site).filter(
                Site.name == name,
                Site.id != site_id
            ).first()
            if existing:
                raise ValueError(f"Site with name '{name}' already exists")
            site.name = name

        # Update description if provided
        if description is not None:
            site.description = description

        site.updated_at = datetime.utcnow()

        self.db.commit()
        self.db.refresh(site)

        logger.info(f"Updated site {site_id} ('{site.name}')")

        return self._site_to_dict(site)

    def delete_site(self, site_id: int) -> None:
        """
        Delete a site.

        Prevents deletion if the site is used in any scan (per user requirement).

        Args:
            site_id: Site ID to delete

        Raises:
            ValueError: If site not found or is used in scans
        """
        site = self.db.query(Site).filter(Site.id == site_id).first()
        if not site:
            raise ValueError(f"Site with id {site_id} not found")

        # Check if site is used in any scans
        usage_count = (
            self.db.query(func.count(ScanSiteAssociation.id))
            .filter(ScanSiteAssociation.site_id == site_id)
            .scalar()
        )

        if usage_count > 0:
            raise ValueError(
                f"Cannot delete site '{site.name}': it is used in {usage_count} scan(s). "
                f"Sites that have been used in scans cannot be deleted."
            )

        # Safe to delete
        self.db.delete(site)
        self.db.commit()

        logger.info(f"Deleted site {site_id} ('{site.name}')")

    def get_site(self, site_id: int) -> Optional[Dict[str, Any]]:
        """
        Get site details.

        Args:
            site_id: Site ID to retrieve

        Returns:
            Dictionary with site data, or None if not found
        """
        site = (
            self.db.query(Site)
            .filter(Site.id == site_id)
            .first()
        )

        if not site:
            return None

        return self._site_to_dict(site)

    def get_site_by_name(self, name: str) -> Optional[Dict[str, Any]]:
        """
        Get site details by name.

        Args:
            name: Site name to retrieve

        Returns:
            Dictionary with site data, or None if not found
        """
        site = (
            self.db.query(Site)
            .filter(Site.name == name)
            .first()
        )

        if not site:
            return None

        return self._site_to_dict(site)

    def list_sites(self, page: int = 1, per_page: int = 20) -> PaginatedResult:
        """
        List all sites with pagination.

        Args:
            page: Page number (1-indexed)
            per_page: Number of items per page

        Returns:
            PaginatedResult with site data
        """
        query = (
            self.db.query(Site)
            .order_by(Site.name)
        )

        return paginate(query, page, per_page, self._site_to_dict)

    def list_all_sites(self) -> List[Dict[str, Any]]:
        """
        List all sites without pagination (for dropdowns, etc.).

        Returns:
            List of site dictionaries
        """
        sites = (
            self.db.query(Site)
            .order_by(Site.name)
            .all()
        )

        return [self._site_to_dict(site) for site in sites]

    def bulk_add_ips_from_cidr(self, site_id: int, cidr: str,
                               expected_ping: Optional[bool] = None,
                               expected_tcp_ports: Optional[List[int]] = None,
                               expected_udp_ports: Optional[List[int]] = None) -> Dict[str, Any]:
        """
        Expand a CIDR range and add all IPs to a site.

        CIDRs are NOT stored - they are just used to generate IP records.

        Args:
            site_id: Site ID
            cidr: CIDR notation (e.g., "10.0.0.0/24")
            expected_ping: Expected ping response for all IPs
            expected_tcp_ports: List of expected TCP ports for all IPs
            expected_udp_ports: List of expected UDP ports for all IPs

        Returns:
            Dictionary with:
                - cidr: The CIDR that was expanded
                - ip_count: Number of IPs created
                - ips_added: List of IP addresses created
                - ips_skipped: List of IPs that already existed

        Raises:
            ValueError: If site not found or CIDR is invalid/too large
        """
        site = self.db.query(Site).filter(Site.id == site_id).first()
        if not site:
            raise ValueError(f"Site with id {site_id} not found")

        # Validate CIDR format and size
        try:
            network = ipaddress.ip_network(cidr, strict=False)
        except ValueError as e:
            raise ValueError(f"Invalid CIDR notation '{cidr}': {str(e)}")

        # Enforce CIDR size limits (max /24 for IPv4, /64 for IPv6)
        if isinstance(network, ipaddress.IPv4Network) and network.prefixlen < 24:
            raise ValueError(
                f"CIDR '{cidr}' is too large ({network.num_addresses} IPs). "
                f"Maximum allowed is /24 (256 IPs) for IPv4."
            )
        elif isinstance(network, ipaddress.IPv6Network) and network.prefixlen < 64:
            raise ValueError(
                f"CIDR '{cidr}' is too large. "
                f"Maximum allowed is /64 for IPv6."
            )

        # Expand CIDR to individual IPs (no cidr_id since we're not storing CIDR)
        ip_count, ips_added, ips_skipped = self._expand_cidr_to_ips(
            site_id=site_id,
            network=network,
            expected_ping=expected_ping,
            expected_tcp_ports=expected_tcp_ports or [],
            expected_udp_ports=expected_udp_ports or []
        )

        site.updated_at = datetime.utcnow()
        self.db.commit()

        logger.info(
            f"Expanded CIDR '{cidr}' for site {site_id} ('{site.name}'): "
            f"added {ip_count} IPs, skipped {len(ips_skipped)} duplicates"
        )

        return {
            'cidr': cidr,
            'ip_count': ip_count,
            'ips_added': ips_added,
            'ips_skipped': ips_skipped
        }

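A hedged usage sketch of the expansion (the site ID and ports are illustrative): a /24 holds 256 addresses, of which hosts() yields the 254 usable ones, so re-running the same CIDR reports everything as skipped:

    result = site_service.bulk_add_ips_from_cidr(
        site_id=1,
        cidr='10.0.0.0/24',
        expected_ping=True,
        expected_tcp_ports=[22, 443],
    )
    print(result['ip_count'])     # 254 on the first run
    print(result['ips_skipped'])  # all 254 on a repeat run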
    def bulk_add_ips_from_list(self, site_id: int, ip_list: List[str],
                               expected_ping: Optional[bool] = None,
                               expected_tcp_ports: Optional[List[int]] = None,
                               expected_udp_ports: Optional[List[int]] = None) -> Dict[str, Any]:
        """
        Add multiple IPs from a list (e.g., from CSV/text import).

        Args:
            site_id: Site ID
            ip_list: List of IP addresses as strings
            expected_ping: Expected ping response for all IPs
            expected_tcp_ports: List of expected TCP ports for all IPs
            expected_udp_ports: List of expected UDP ports for all IPs

        Returns:
            Dictionary with:
                - ip_count: Number of IPs successfully created
                - ips_added: List of IP addresses created
                - ips_skipped: List of IPs that already existed
                - errors: List of validation errors {ip: error_message}

        Raises:
            ValueError: If site not found
        """
        site = self.db.query(Site).filter(Site.id == site_id).first()
        if not site:
            raise ValueError(f"Site with id {site_id} not found")

        ips_added = []
        ips_skipped = []
        errors = []

        for ip_str in ip_list:
            ip_str = ip_str.strip()
            if not ip_str:
                continue  # Skip empty lines

            # Validate IP format
            try:
                ipaddress.ip_address(ip_str)
            except ValueError as e:
                errors.append({'ip': ip_str, 'error': f"Invalid IP address: {str(e)}"})
                continue

            # Check for duplicate (across all IPs in the site)
            existing = (
                self.db.query(SiteIP)
                .filter(SiteIP.site_id == site_id, SiteIP.ip_address == ip_str)
                .first()
            )
            if existing:
                ips_skipped.append(ip_str)
                continue

            # Create IP record
            try:
                ip_obj = SiteIP(
                    site_id=site_id,
                    ip_address=ip_str,
                    expected_ping=expected_ping,
                    expected_tcp_ports=json.dumps(expected_tcp_ports or []),
                    expected_udp_ports=json.dumps(expected_udp_ports or []),
                    created_at=datetime.utcnow()
                )

                self.db.add(ip_obj)
                ips_added.append(ip_str)
            except Exception as e:
                errors.append({'ip': ip_str, 'error': f"Database error: {str(e)}"})

        site.updated_at = datetime.utcnow()
        self.db.commit()

        logger.info(
            f"Bulk added {len(ips_added)} IPs to site {site_id} ('{site.name}'), "
            f"skipped {len(ips_skipped)} duplicates, {len(errors)} errors"
        )

        return {
            'ip_count': len(ips_added),
            'ips_added': ips_added,
            'ips_skipped': ips_skipped,
            'errors': errors
        }

    def add_standalone_ip(self, site_id: int, ip_address: str,
                          expected_ping: Optional[bool] = None,
                          expected_tcp_ports: Optional[List[int]] = None,
                          expected_udp_ports: Optional[List[int]] = None) -> Dict[str, Any]:
        """
        Add a standalone IP (without a CIDR parent) to a site.

        Args:
            site_id: Site ID
            ip_address: IP address to add
            expected_ping: Expected ping response
            expected_tcp_ports: List of expected TCP ports
            expected_udp_ports: List of expected UDP ports

        Returns:
            Dictionary with IP data

        Raises:
            ValueError: If site not found, IP is invalid, or already exists
        """
        site = self.db.query(Site).filter(Site.id == site_id).first()
        if not site:
            raise ValueError(f"Site with id {site_id} not found")

        # Validate IP format
        try:
            ipaddress.ip_address(ip_address)
        except ValueError as e:
            raise ValueError(f"Invalid IP address '{ip_address}': {str(e)}")

        # Check for duplicate (across all IPs in the site)
        existing = (
            self.db.query(SiteIP)
            .filter(SiteIP.site_id == site_id, SiteIP.ip_address == ip_address)
            .first()
        )
        if existing:
            raise ValueError(f"IP '{ip_address}' already exists in this site")

        # Create IP
        ip_obj = SiteIP(
            site_id=site_id,
            ip_address=ip_address,
            expected_ping=expected_ping,
            expected_tcp_ports=json.dumps(expected_tcp_ports or []),
            expected_udp_ports=json.dumps(expected_udp_ports or []),
            created_at=datetime.utcnow()
        )

        self.db.add(ip_obj)
        site.updated_at = datetime.utcnow()
        self.db.commit()
        self.db.refresh(ip_obj)

        logger.info(f"Added IP '{ip_address}' to site {site_id} ('{site.name}')")

        return self._ip_to_dict(ip_obj)

    def update_ip_settings(self, site_id: int, ip_id: int,
                           expected_ping: Optional[bool] = None,
                           expected_tcp_ports: Optional[List[int]] = None,
                           expected_udp_ports: Optional[List[int]] = None) -> Dict[str, Any]:
        """
        Update settings for an individual IP.

        Args:
            site_id: Site ID
            ip_id: IP ID to update
            expected_ping: New ping expectation (if provided)
            expected_tcp_ports: New TCP ports expectation (if provided)
            expected_udp_ports: New UDP ports expectation (if provided)

        Returns:
            Dictionary with updated IP data

        Raises:
            ValueError: If IP not found
        """
        ip_obj = (
            self.db.query(SiteIP)
            .filter(SiteIP.id == ip_id, SiteIP.site_id == site_id)
            .first()
        )
        if not ip_obj:
            raise ValueError(f"IP with id {ip_id} not found for site {site_id}")

        # Update settings if provided
        if expected_ping is not None:
            ip_obj.expected_ping = expected_ping
        if expected_tcp_ports is not None:
            ip_obj.expected_tcp_ports = json.dumps(expected_tcp_ports)
        if expected_udp_ports is not None:
            ip_obj.expected_udp_ports = json.dumps(expected_udp_ports)

        self.db.commit()
        self.db.refresh(ip_obj)

        logger.info(f"Updated settings for IP '{ip_obj.ip_address}' in site {site_id}")

        return self._ip_to_dict(ip_obj)

    def remove_ip(self, site_id: int, ip_id: int) -> None:
        """
        Remove an IP from a site.

        Args:
            site_id: Site ID
            ip_id: IP ID to remove

        Raises:
            ValueError: If IP not found
        """
        ip_obj = (
            self.db.query(SiteIP)
            .filter(SiteIP.id == ip_id, SiteIP.site_id == site_id)
            .first()
        )
        if not ip_obj:
            raise ValueError(f"IP with id {ip_id} not found for site {site_id}")

        ip_address = ip_obj.ip_address
        self.db.delete(ip_obj)
        self.db.commit()

        logger.info(f"Removed IP '{ip_address}' from site {site_id}")

    def list_ips(self, site_id: int, page: int = 1, per_page: int = 50) -> PaginatedResult:
        """
        List IPs in a site with pagination.

        Args:
            site_id: Site ID
            page: Page number (1-indexed)
            per_page: Number of items per page

        Returns:
            PaginatedResult with IP data
        """
        query = (
            self.db.query(SiteIP)
            .filter(SiteIP.site_id == site_id)
            .order_by(SiteIP.ip_address)
        )

        return paginate(query, page, per_page, self._ip_to_dict)

    def get_scan_usage(self, site_id: int) -> List[Dict[str, Any]]:
        """
        Get list of scans that use this site.

        Args:
            site_id: Site ID

        Returns:
            List of scan dictionaries
        """
        from web.models import Scan  # Import here to avoid circular dependency

        associations = (
            self.db.query(ScanSiteAssociation)
            .options(joinedload(ScanSiteAssociation.scan))
            .filter(ScanSiteAssociation.site_id == site_id)
            .all()
        )

        return [
            {
                'id': assoc.scan.id,
                'title': assoc.scan.title,
                'timestamp': assoc.scan.timestamp.isoformat() if assoc.scan.timestamp else None,
                'status': assoc.scan.status
            }
            for assoc in associations
        ]

    # Private helper methods

    def _expand_cidr_to_ips(self, site_id: int,
                            network: ipaddress.IPv4Network | ipaddress.IPv6Network,
                            expected_ping: Optional[bool],
                            expected_tcp_ports: List[int],
                            expected_udp_ports: List[int]) -> tuple[int, List[str], List[str]]:
        """
        Expand a CIDR to individual IP addresses.

        Args:
            site_id: Site ID
            network: ipaddress network object
            expected_ping: Default ping setting for all IPs
            expected_tcp_ports: Default TCP ports for all IPs
            expected_udp_ports: Default UDP ports for all IPs

        Returns:
            Tuple of (count of IPs created, list of IPs added, list of IPs skipped)
        """
        ip_count = 0
        ips_added = []
        ips_skipped = []

        # For /32 or /128 (single host), use the network address
        # For larger ranges, use hosts() to exclude network/broadcast addresses
        if network.num_addresses == 1:
            ip_list = [network.network_address]
        elif network.num_addresses == 2:
            # For /31 networks (point-to-point), both addresses are usable
            ip_list = [network.network_address, network.broadcast_address]
        else:
            # Use hosts() to get usable IPs (excludes network and broadcast)
            ip_list = list(network.hosts())

        for ip in ip_list:
            ip_str = str(ip)

            # Check for duplicate
            existing = (
                self.db.query(SiteIP)
                .filter(SiteIP.site_id == site_id, SiteIP.ip_address == ip_str)
                .first()
            )
            if existing:
                ips_skipped.append(ip_str)
                continue

            # Create SiteIP entry
            ip_obj = SiteIP(
                site_id=site_id,
                ip_address=ip_str,
                expected_ping=expected_ping,
                expected_tcp_ports=json.dumps(expected_tcp_ports),
                expected_udp_ports=json.dumps(expected_udp_ports),
                created_at=datetime.utcnow()
            )

            self.db.add(ip_obj)
            ips_added.append(ip_str)
            ip_count += 1

        return ip_count, ips_added, ips_skipped

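The three branches mirror stdlib behavior, which can be checked directly (a sketch, not part of the diff):

    import ipaddress

    ipaddress.ip_network('10.0.0.5/32').num_addresses        # 1 -> single host
    list(ipaddress.ip_network('10.0.0.0/31'))                # 2 addresses, both usable (RFC 3021)
    len(list(ipaddress.ip_network('10.0.0.0/24').hosts()))   # 254, network/broadcast excluded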
    def _site_to_dict(self, site: Site) -> Dict[str, Any]:
        """Convert Site model to dictionary."""
        # Count IPs for this site
        ip_count = (
            self.db.query(func.count(SiteIP.id))
            .filter(SiteIP.site_id == site.id)
            .scalar() or 0
        )

        return {
            'id': site.id,
            'name': site.name,
            'description': site.description,
            'created_at': site.created_at.isoformat() if site.created_at else None,
            'updated_at': site.updated_at.isoformat() if site.updated_at else None,
            'ip_count': ip_count
        }

    def _ip_to_dict(self, ip: SiteIP) -> Dict[str, Any]:
        """Convert SiteIP model to dictionary."""
        return {
            'id': ip.id,
            'site_id': ip.site_id,
            'ip_address': ip.ip_address,
            'expected_ping': ip.expected_ping,
            'expected_tcp_ports': json.loads(ip.expected_tcp_ports) if ip.expected_tcp_ports else [],
            'expected_udp_ports': json.loads(ip.expected_udp_ports) if ip.expected_udp_ports else [],
            'created_at': ip.created_at.isoformat() if ip.created_at else None
        }
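The port lists round-trip through JSON text columns; a minimal sketch of what _ip_to_dict undoes:

    import json

    stored = json.dumps([22, 443])                 # written to expected_tcp_ports
    ports = json.loads(stored) if stored else []   # read back as [22, 443]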
@@ -96,8 +96,8 @@
                        {% endif %}
                    </td>
                    <td>
                        {% if rule.config_file %}
                            <small class="text-muted">{{ rule.config_file }}</small>
                        {% if rule.config %}
                            <small class="text-muted">{{ rule.config.title }}</small>
                        {% else %}
                            <span class="badge bg-primary">All Configs</span>
                        {% endif %}
@@ -209,20 +209,9 @@
                <label for="rule-config" class="form-label">Apply to Config (optional)</label>
                <select class="form-select" id="rule-config">
                    <option value="">All Configs (Apply to all scans)</option>
                    {% if config_files %}
                        {% for config_file in config_files %}
                            <option value="{{ config_file }}">{{ config_file }}</option>
                        {% endfor %}
                    {% else %}
                        <option value="" disabled>No config files found</option>
                    {% endif %}
                </select>
                <small class="form-text text-muted">
                    {% if config_files %}
                        Select a specific config file to limit this rule, or leave as "All Configs" to apply to all scans
                    {% else %}
                        No config files found. Upload a config in the Configs section to see available options.
                    {% endif %}
                <small class="form-text text-muted" id="config-help-text">
                    Select a specific config to limit this rule, or leave as "All Configs" to apply to all scans
                </small>
            </div>
        </div>
@@ -272,12 +261,51 @@
<script>
let editingRuleId = null;

// Load available configs for the dropdown
async function loadConfigsForRule() {
    const selectEl = document.getElementById('rule-config');

    try {
        const response = await fetch('/api/configs');
        if (!response.ok) {
            throw new Error('Failed to load configurations');
        }

        const data = await response.json();
        const configs = data.configs || [];

        // Preserve the "All Configs" option and current selection
        const currentValue = selectEl.value;
        selectEl.innerHTML = '<option value="">All Configs (Apply to all scans)</option>';

        configs.forEach(config => {
            const option = document.createElement('option');
            // Store the config ID as the value
            option.value = config.id;
            const siteText = config.site_count === 1 ? 'site' : 'sites';
            option.textContent = `${config.title} (${config.site_count} ${siteText})`;
            selectEl.appendChild(option);
        });

        // Restore selection if it was set
        if (currentValue) {
            selectEl.value = currentValue;
        }
    } catch (error) {
        console.error('Error loading configs:', error);
    }
}

function showCreateRuleModal() {
    editingRuleId = null;
    document.getElementById('ruleModalTitle').textContent = 'Create Alert Rule';
    document.getElementById('save-rule-text').textContent = 'Create Rule';
    document.getElementById('ruleForm').reset();
    document.getElementById('rule-enabled').checked = true;

    // Load configs when modal is shown
    loadConfigsForRule();

    new bootstrap.Modal(document.getElementById('ruleModal')).show();
}

@@ -286,33 +314,36 @@ function editRule(ruleId) {
    document.getElementById('ruleModalTitle').textContent = 'Edit Alert Rule';
    document.getElementById('save-rule-text').textContent = 'Update Rule';

    // Fetch rule details
    fetch(`/api/alerts/rules`, {
        headers: {
            'X-API-Key': localStorage.getItem('api_key') || ''
        }
    })
    .then(response => response.json())
    .then(data => {
        const rule = data.rules.find(r => r.id === ruleId);
        if (rule) {
            document.getElementById('rule-id').value = rule.id;
            document.getElementById('rule-name').value = rule.name || '';
            document.getElementById('rule-type').value = rule.rule_type;
            document.getElementById('rule-severity').value = rule.severity || 'warning';
            document.getElementById('rule-threshold').value = rule.threshold || '';
            document.getElementById('rule-config').value = rule.config_file || '';
            document.getElementById('rule-email').checked = rule.email_enabled;
            document.getElementById('rule-webhook').checked = rule.webhook_enabled;
            document.getElementById('rule-enabled').checked = rule.enabled;
    // Load configs first, then fetch rule details
    loadConfigsForRule().then(() => {
        // Fetch rule details
        fetch(`/api/alerts/rules`, {
            headers: {
                'X-API-Key': localStorage.getItem('api_key') || ''
            }
        })
        .then(response => response.json())
        .then(data => {
            const rule = data.rules.find(r => r.id === ruleId);
            if (rule) {
                document.getElementById('rule-id').value = rule.id;
                document.getElementById('rule-name').value = rule.name || '';
                document.getElementById('rule-type').value = rule.rule_type;
                document.getElementById('rule-severity').value = rule.severity || 'warning';
                document.getElementById('rule-threshold').value = rule.threshold || '';
                document.getElementById('rule-config').value = rule.config_id || '';
                document.getElementById('rule-email').checked = rule.email_enabled;
                document.getElementById('rule-webhook').checked = rule.webhook_enabled;
                document.getElementById('rule-enabled').checked = rule.enabled;

            updateThresholdLabel();
            new bootstrap.Modal(document.getElementById('ruleModal')).show();
        }
    })
    .catch(error => {
        console.error('Error fetching rule:', error);
        alert('Failed to load rule details');
                updateThresholdLabel();
                new bootstrap.Modal(document.getElementById('ruleModal')).show();
            }
        })
        .catch(error => {
            console.error('Error fetching rule:', error);
            alert('Failed to load rule details');
        });
    });
}

@@ -353,7 +384,7 @@ function saveRule() {
    const ruleType = document.getElementById('rule-type').value;
    const severity = document.getElementById('rule-severity').value;
    const threshold = document.getElementById('rule-threshold').value;
    const configFile = document.getElementById('rule-config').value;
    const configId = document.getElementById('rule-config').value;
    const emailEnabled = document.getElementById('rule-email').checked;
    const webhookEnabled = document.getElementById('rule-webhook').checked;
    const enabled = document.getElementById('rule-enabled').checked;
@@ -368,7 +399,7 @@ function saveRule() {
        rule_type: ruleType,
        severity: severity,
        threshold: threshold ? parseInt(threshold) : null,
        config_file: configFile || null,
        config_id: configId ? parseInt(configId) : null,
        email_enabled: emailEnabled,
        webhook_enabled: webhookEnabled,
        enabled: enabled

@@ -53,6 +53,10 @@
                    <a class="nav-link {% if request.endpoint and 'schedule' in request.endpoint %}active{% endif %}"
                       href="{{ url_for('main.schedules') }}">Schedules</a>
                </li>
                <li class="nav-item">
                    <a class="nav-link {% if request.endpoint == 'main.sites' %}active{% endif %}"
                       href="{{ url_for('main.sites') }}">Sites</a>
                </li>
                <li class="nav-item">
                    <a class="nav-link {% if request.endpoint and 'config' in request.endpoint %}active{% endif %}"
                       href="{{ url_for('main.configs') }}">Configs</a>

@@ -1,20 +1,16 @@
{% extends "base.html" %}

{% block title %}Configuration Files - SneakyScanner{% endblock %}

{% block extra_styles %}
<link rel="stylesheet" href="{{ url_for('static', filename='css/config-manager.css') }}">
{% endblock %}
{% block title %}Scan Configurations - SneakyScanner{% endblock %}

{% block content %}
<div class="row mt-4">
    <div class="col-12">
        <div class="d-flex justify-content-between align-items-center mb-4">
            <h1 style="color: #60a5fa;">Configuration Files</h1>
            <h1 style="color: #60a5fa;">Scan Configurations</h1>
            <div>
                <a href="{{ url_for('main.upload_config') }}" class="btn btn-primary">
                <button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#createConfigModal">
                    <i class="bi bi-plus-circle"></i> Create New Config
                </a>
                </button>
            </div>
        </div>
    </div>
@@ -30,14 +26,14 @@
    </div>
    <div class="col-md-4">
        <div class="stat-card">
            <div class="stat-value" id="configs-in-use">-</div>
            <div class="stat-label">In Use by Schedules</div>
            <div class="stat-value" id="total-sites-used">-</div>
            <div class="stat-label">Total Sites Referenced</div>
        </div>
    </div>
    <div class="col-md-4">
        <div class="stat-card">
            <div class="stat-value" id="total-size">-</div>
            <div class="stat-label">Total Size</div>
            <div class="stat-value" id="recent-updates">-</div>
            <div class="stat-label">Updated This Week</div>
        </div>
    </div>
</div>
@@ -68,11 +64,10 @@
            <table class="table table-hover">
                <thead>
                    <tr>
                        <th>Filename</th>
                        <th>Title</th>
                        <th>Created</th>
                        <th>Size</th>
                        <th>Used By</th>
                        <th>Description</th>
                        <th>Sites</th>
                        <th>Updated</th>
                        <th>Actions</th>
                    </tr>
                </thead>

@@ -82,12 +77,12 @@
            </table>
        </div>
        <div id="empty-state" style="display: none;" class="text-center py-5">
            <i class="bi bi-file-earmark-text" style="font-size: 3rem; color: #64748b;"></i>
            <h5 class="mt-3 text-muted">No configuration files</h5>
            <p class="text-muted">Create your first config to define scan targets</p>
            <a href="{{ url_for('main.upload_config') }}" class="btn btn-primary mt-2">
            <i class="bi bi-gear" style="font-size: 3rem; color: #64748b;"></i>
            <h5 class="mt-3 text-muted">No configurations defined</h5>
            <p class="text-muted">Create your first scan configuration</p>
            <button class="btn btn-primary mt-2" data-bs-toggle="modal" data-bs-target="#createConfigModal">
                <i class="bi bi-plus-circle"></i> Create Config
            </a>
            </button>
        </div>
    </div>
</div>
@@ -95,23 +90,141 @@
    </div>
</div>

<!-- Delete Confirmation Modal -->
<div class="modal fade" id="deleteModal" tabindex="-1">
    <div class="modal-dialog">
<!-- Create Config Modal -->
<div class="modal fade" id="createConfigModal" tabindex="-1">
    <div class="modal-dialog modal-lg">
        <div class="modal-content" style="background-color: #1e293b; border: 1px solid #334155;">
            <div class="modal-header" style="border-bottom: 1px solid #334155;">
                <h5 class="modal-title" style="color: #f87171;">
                    <i class="bi bi-exclamation-triangle"></i> Confirm Deletion
                <h5 class="modal-title" style="color: #60a5fa;">
                    <i class="bi bi-plus-circle"></i> Create New Configuration
                </h5>
                <button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal"></button>
            </div>
            <div class="modal-body">
                <p style="color: #e2e8f0;">Are you sure you want to delete the config file:</p>
                <p style="color: #60a5fa; font-weight: bold;" id="delete-config-name"></p>
                <p style="color: #fbbf24;" id="delete-warning-schedules" style="display: none;">
                    <i class="bi bi-exclamation-circle"></i>
                    This config is used by schedules and cannot be deleted.
                </p>
                <form id="create-config-form">
                    <div class="mb-3">
                        <label for="config-title" class="form-label">Title <span class="text-danger">*</span></label>
                        <input type="text" class="form-control" id="config-title" required
                               placeholder="e.g., Production Weekly Scan">
                    </div>

                    <div class="mb-3">
                        <label for="config-description" class="form-label">Description</label>
                        <textarea class="form-control" id="config-description" rows="3"
                                  placeholder="Optional description of this configuration"></textarea>
                    </div>

                    <div class="mb-3">
                        <label class="form-label">Sites <span class="text-danger">*</span></label>
                        <div id="sites-loading-modal" class="text-center py-3">
                            <div class="spinner-border spinner-border-sm text-primary" role="status">
                                <span class="visually-hidden">Loading...</span>
                            </div>
                            <span class="ms-2 text-muted">Loading available sites...</span>
                        </div>
                        <div id="sites-list" style="display: none;">
                            <!-- Populated by JavaScript -->
                        </div>
                        <small class="form-text text-muted">Select at least one site to include in this configuration</small>
                    </div>

                    <div class="alert alert-danger" id="create-config-error" style="display: none;">
                        <span id="create-config-error-message"></span>
                    </div>
                </form>
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                <button type="button" class="btn btn-primary" id="create-config-btn">
                    <i class="bi bi-check-circle"></i> Create Configuration
                </button>
            </div>
        </div>
    </div>
</div>

<!-- Edit Config Modal -->
<div class="modal fade" id="editConfigModal" tabindex="-1">
    <div class="modal-dialog modal-lg">
        <div class="modal-content" style="background-color: #1e293b; border: 1px solid #334155;">
            <div class="modal-header" style="border-bottom: 1px solid #334155;">
                <h5 class="modal-title" style="color: #60a5fa;">
                    <i class="bi bi-pencil"></i> Edit Configuration
                </h5>
                <button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal"></button>
            </div>
            <div class="modal-body">
                <form id="edit-config-form">
                    <input type="hidden" id="edit-config-id">

                    <div class="mb-3">
                        <label for="edit-config-title" class="form-label">Title <span class="text-danger">*</span></label>
                        <input type="text" class="form-control" id="edit-config-title" required>
                    </div>

                    <div class="mb-3">
                        <label for="edit-config-description" class="form-label">Description</label>
                        <textarea class="form-control" id="edit-config-description" rows="3"></textarea>
                    </div>

                    <div class="mb-3">
                        <label class="form-label">Sites <span class="text-danger">*</span></label>
                        <div id="edit-sites-list">
                            <!-- Populated by JavaScript -->
                        </div>
                    </div>

                    <div class="alert alert-danger" id="edit-config-error" style="display: none;">
                        <span id="edit-config-error-message"></span>
                    </div>
                </form>
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                <button type="button" class="btn btn-primary" id="edit-config-btn">
                    <i class="bi bi-check-circle"></i> Save Changes
                </button>
            </div>
        </div>
    </div>
</div>

<!-- View Config Modal -->
<div class="modal fade" id="viewConfigModal" tabindex="-1">
    <div class="modal-dialog modal-lg">
        <div class="modal-content" style="background-color: #1e293b; border: 1px solid #334155;">
            <div class="modal-header" style="border-bottom: 1px solid #334155;">
                <h5 class="modal-title" style="color: #60a5fa;">
                    <i class="bi bi-eye"></i> Configuration Details
                </h5>
                <button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal"></button>
            </div>
            <div class="modal-body">
                <div id="view-config-content">
                    <!-- Populated by JavaScript -->
                </div>
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
            </div>
        </div>
    </div>
</div>

<!-- Delete Confirmation Modal -->
<div class="modal fade" id="deleteConfigModal" tabindex="-1">
    <div class="modal-dialog">
        <div class="modal-content" style="background-color: #1e293b; border: 1px solid #334155;">
            <div class="modal-header" style="border-bottom: 1px solid #334155;">
                <h5 class="modal-title" style="color: #ef4444;">
                    <i class="bi bi-trash"></i> Delete Configuration
                </h5>
                <button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal"></button>
            </div>
            <div class="modal-body">
                <p>Are you sure you want to delete configuration <strong id="delete-config-name"></strong>?</p>
                <p class="text-warning"><i class="bi bi-exclamation-triangle"></i> This action cannot be undone.</p>
                <input type="hidden" id="delete-config-id">
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
@@ -123,76 +236,94 @@
    </div>
</div>

<!-- View Config Modal -->
<div class="modal fade" id="viewModal" tabindex="-1">
    <div class="modal-dialog modal-lg">
        <div class="modal-content" style="background-color: #1e293b; border: 1px solid #334155;">
            <div class="modal-header" style="border-bottom: 1px solid #334155;">
                <h5 class="modal-title" style="color: #60a5fa;">
                    <i class="bi bi-file-earmark-code"></i> Config File Details
                </h5>
                <button type="button" class="btn-close btn-close-white" data-bs-dismiss="modal"></button>
            </div>
            <div class="modal-body">
                <h6 style="color: #94a3b8;">Filename: <span id="view-filename" style="color: #e2e8f0;"></span></h6>
                <h6 class="mt-3" style="color: #94a3b8;">Content:</h6>
                <pre style="background-color: #0f172a; border: 1px solid #334155; padding: 15px; border-radius: 5px; max-height: 400px; overflow-y: auto;"><code id="view-content" style="color: #e2e8f0;"></code></pre>
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
                <a id="download-link" href="#" class="btn btn-primary">
                    <i class="bi bi-download"></i> Download
                </a>
            </div>
        </div>
    </div>
</div>

{% endblock %}

{% block scripts %}
<script>
// Global variables
let configsData = [];
let selectedConfigForDeletion = null;
// Global state
let allConfigs = [];
let allSites = [];

// Format file size
function formatFileSize(bytes) {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
}
// Load data on page load
document.addEventListener('DOMContentLoaded', function() {
    loadSites();
    loadConfigs();
});

// Format date
function formatDate(timestamp) {
    if (!timestamp) return 'Unknown';
    const date = new Date(timestamp);
    return date.toLocaleString();
}

// Load configs from API
async function loadConfigs() {
// Load all sites
async function loadSites() {
    try {
        document.getElementById('configs-loading').style.display = 'block';
        document.getElementById('configs-error').style.display = 'none';
        document.getElementById('configs-content').style.display = 'none';

        const response = await fetch('/api/configs');
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }
        const response = await fetch('/api/sites?all=true');
        if (!response.ok) throw new Error('Failed to load sites');

        const data = await response.json();
        configsData = data.configs || [];
        allSites = data.sites || [];

        renderSitesCheckboxes();
    } catch (error) {
        console.error('Error loading sites:', error);
        document.getElementById('sites-loading-modal').innerHTML =
            '<div class="alert alert-danger">Failed to load sites</div>';
    }
}
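From the fields this code reads (data.sites, site.id, site.name, site.ip_count), the sites endpoint presumably returns a payload shaped like the sketch below. The example values are invented for illustration; the exact schema is not shown in this diff:

    // GET /api/sites?all=true — inferred response shape (values are made up)
    {
        "sites": [
            { "id": 1, "name": "HQ LAN", "ip_count": 254 },
            { "id": 2, "name": "DMZ", "ip_count": 14 }
        ]
    }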
// Render sites checkboxes
function renderSitesCheckboxes(selectedIds = [], isEditMode = false) {
    const container = isEditMode ? document.getElementById('edit-sites-list') : document.getElementById('sites-list');

    if (!container) return;

    if (allSites.length === 0) {
        const message = '<div class="alert alert-info">No sites available. <a href="/sites">Create a site first</a>.</div>';
        container.innerHTML = message;
        if (!isEditMode) {
            document.getElementById('sites-loading-modal').style.display = 'none';
            container.style.display = 'block';
        }
        return;
    }

    const prefix = isEditMode ? 'edit-site' : 'site';
    const checkboxClass = isEditMode ? 'edit-site-checkbox' : 'site-checkbox';

    let html = '<div style="max-height: 300px; overflow-y: auto;">';
    allSites.forEach(site => {
        const isChecked = selectedIds.includes(site.id);
        html += `
            <div class="form-check">
                <input class="form-check-input ${checkboxClass}" type="checkbox" value="${site.id}"
                       id="${prefix}-${site.id}" ${isChecked ? 'checked' : ''}>
                <label class="form-check-label" for="${prefix}-${site.id}">
                    ${escapeHtml(site.name)}
                    <small class="text-muted">(${site.ip_count || 0} IP${site.ip_count !== 1 ? 's' : ''})</small>
                </label>
            </div>
        `;
    });
    html += '</div>';

    container.innerHTML = html;

    if (!isEditMode) {
        document.getElementById('sites-loading-modal').style.display = 'none';
        container.style.display = 'block';
    }
}
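renderSitesCheckboxes() serves both modals: the isEditMode flag switches the target container (sites-list vs edit-sites-list), the checkbox class, and the element id prefix, so the two modals never collide on element ids. A usage sketch (the site IDs are hypothetical):

    renderSitesCheckboxes();             // create modal: render all sites, none pre-checked
    renderSitesCheckboxes([3, 7], true); // edit modal: pre-check sites 3 and 7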
// Load all configs
async function loadConfigs() {
    try {
        const response = await fetch('/api/configs');
        if (!response.ok) throw new Error('Failed to load configs');

        const data = await response.json();
        allConfigs = data.configs || [];

        renderConfigs();
        updateStats();
        renderConfigs(configsData);

        document.getElementById('configs-loading').style.display = 'none';
        document.getElementById('configs-content').style.display = 'block';

    } catch (error) {
        console.error('Error loading configs:', error);
        document.getElementById('configs-loading').style.display = 'none';
@@ -201,177 +332,249 @@ async function loadConfigs() {
    }
}

// Update summary stats
function updateStats() {
    const totalConfigs = configsData.length;
    const configsInUse = configsData.filter(c => c.used_by_schedules && c.used_by_schedules.length > 0).length;
    const totalSize = configsData.reduce((sum, c) => sum + (c.size_bytes || 0), 0);

    document.getElementById('total-configs').textContent = totalConfigs;
    document.getElementById('configs-in-use').textContent = configsInUse;
    document.getElementById('total-size').textContent = formatFileSize(totalSize);
}

// Render configs table
function renderConfigs(configs) {
function renderConfigs(filter = '') {
    const tbody = document.getElementById('configs-tbody');
    const emptyState = document.getElementById('empty-state');

    if (configs.length === 0) {
    const filteredConfigs = filter
        ? allConfigs.filter(c =>
            c.title.toLowerCase().includes(filter.toLowerCase()) ||
            (c.description && c.description.toLowerCase().includes(filter.toLowerCase()))
        )
        : allConfigs;

    if (filteredConfigs.length === 0) {
        tbody.innerHTML = '';
        emptyState.style.display = 'block';
        return;
    }

    emptyState.style.display = 'none';

    tbody.innerHTML = configs.map(config => {
        const usedByBadge = config.used_by_schedules && config.used_by_schedules.length > 0
            ? `<span class="badge bg-info" title="${config.used_by_schedules.join(', ')}">${config.used_by_schedules.length} schedule(s)</span>`
            : '<span class="badge bg-secondary">Not used</span>';

        return `
            <tr>
                <td><code>${config.filename}</code></td>
                <td>${config.title || config.filename}</td>
                <td>${formatDate(config.created_at)}</td>
                <td>${formatFileSize(config.size_bytes || 0)}</td>
                <td>${usedByBadge}</td>
                <td>
                    <div class="btn-group btn-group-sm" role="group">
                        <button class="btn btn-outline-primary" onclick="viewConfig('${config.filename}')" title="View">
                            <i class="bi bi-eye"></i>
                        </button>
                        <a href="/configs/edit/${config.filename}" class="btn btn-outline-info" title="Edit">
                            <i class="bi bi-pencil"></i>
                        </a>
                        <a href="/api/configs/${config.filename}/download" class="btn btn-outline-success" title="Download">
                            <i class="bi bi-download"></i>
                        </a>
                        <button class="btn btn-outline-danger" onclick="confirmDelete('${config.filename}', ${config.used_by_schedules.length > 0})" title="Delete">
                            <i class="bi bi-trash"></i>
                        </button>
                    </div>
                </td>
            </tr>
        `;
    }).join('');
    tbody.innerHTML = filteredConfigs.map(config => `
        <tr>
            <td><strong>${escapeHtml(config.title)}</strong></td>
            <td>${config.description ? escapeHtml(config.description) : '<span class="text-muted">-</span>'}</td>
            <td>
                <span class="badge bg-primary">${config.site_count} site${config.site_count !== 1 ? 's' : ''}</span>
            </td>
            <td>${formatDate(config.updated_at)}</td>
            <td>
                <button class="btn btn-sm btn-info" onclick="viewConfig(${config.id})" title="View">
                    <i class="bi bi-eye"></i>
                </button>
                <button class="btn btn-sm btn-warning" onclick="editConfig(${config.id})" title="Edit">
                    <i class="bi bi-pencil"></i>
                </button>
                <button class="btn btn-sm btn-danger" onclick="deleteConfig(${config.id}, '${escapeHtml(config.title).replace(/'/g, "\\'")}');" title="Delete">
                    <i class="bi bi-trash"></i>
                </button>
            </td>
        </tr>
    `).join('');
}
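One fragile spot in the new renderer: config titles are interpolated into inline onclick attributes, which is why deleteConfig's argument needs the extra `.replace(/'/g, "\\'")` escaping, and a title containing a backslash or a closing quote could still break the attribute. A sketch of an alternative that avoids attribute escaping entirely (an editor's illustration, not part of the commit), using data-* attributes plus one delegated listener:

    // Assumes the row template is changed to emit, e.g.:
    //   <button data-action="delete" data-id="${config.id}" data-title="${escapeHtml(config.title)}">
    document.getElementById('configs-tbody').addEventListener('click', (e) => {
        const btn = e.target.closest('button[data-action="delete"]');
        if (!btn) return;
        // dataset values arrive already decoded, so no quote escaping is needed
        deleteConfig(parseInt(btn.dataset.id), btn.dataset.title);
    });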
// View config details
async function viewConfig(filename) {
    try {
        const response = await fetch(`/api/configs/${filename}`);
        if (!response.ok) {
            throw new Error(`Failed to load config: ${response.statusText}`);
        }
// Update stats
function updateStats() {
    document.getElementById('total-configs').textContent = allConfigs.length;

        const data = await response.json();
    const uniqueSites = new Set();
    allConfigs.forEach(c => c.sites.forEach(s => uniqueSites.add(s.id)));
    document.getElementById('total-sites-used').textContent = uniqueSites.size;

        document.getElementById('view-filename').textContent = data.filename;
        document.getElementById('view-content').textContent = data.content;
        document.getElementById('download-link').href = `/api/configs/${filename}/download`;

        new bootstrap.Modal(document.getElementById('viewModal')).show();

    } catch (error) {
        console.error('Error viewing config:', error);
        alert(`Error: ${error.message}`);
    }
    const oneWeekAgo = new Date();
    oneWeekAgo.setDate(oneWeekAgo.getDate() - 7);
    const recentUpdates = allConfigs.filter(c => new Date(c.updated_at) > oneWeekAgo).length;
    document.getElementById('recent-updates').textContent = recentUpdates;
}

// Confirm delete
function confirmDelete(filename, isInUse) {
    selectedConfigForDeletion = filename;
    document.getElementById('delete-config-name').textContent = filename;

    const warningDiv = document.getElementById('delete-warning-schedules');
    const deleteBtn = document.getElementById('confirm-delete-btn');

    if (isInUse) {
        warningDiv.style.display = 'block';
        deleteBtn.disabled = true;
        deleteBtn.classList.add('disabled');
    } else {
        warningDiv.style.display = 'none';
        deleteBtn.disabled = false;
        deleteBtn.classList.remove('disabled');
    }

    new bootstrap.Modal(document.getElementById('deleteModal')).show();
}

// Delete config
async function deleteConfig() {
    if (!selectedConfigForDeletion) return;

    try {
        const response = await fetch(`/api/configs/${selectedConfigForDeletion}`, {
            method: 'DELETE'
        });

        if (!response.ok) {
            const error = await response.json();
            throw new Error(error.message || `HTTP ${response.status}`);
        }

        // Hide modal
        bootstrap.Modal.getInstance(document.getElementById('deleteModal')).hide();

        // Reload configs
        await loadConfigs();

        // Show success message
        showAlert('success', `Config "${selectedConfigForDeletion}" deleted successfully`);

    } catch (error) {
        console.error('Error deleting config:', error);
        showAlert('danger', `Error deleting config: ${error.message}`);
    }
}

// Show alert
function showAlert(type, message) {
    const alertHtml = `
        <div class="alert alert-${type} alert-dismissible fade show mt-3" role="alert">
            ${message}
            <button type="button" class="btn-close" data-bs-dismiss="alert"></button>
        </div>
    `;

    const container = document.querySelector('.container-fluid');
    container.insertAdjacentHTML('afterbegin', alertHtml);

    // Auto-dismiss after 5 seconds
    setTimeout(() => {
        const alert = container.querySelector('.alert');
        if (alert) {
            bootstrap.Alert.getInstance(alert)?.close();
        }
    }, 5000);
}

// Search filter
// Search functionality
document.getElementById('search-input').addEventListener('input', function(e) {
    const searchTerm = e.target.value.toLowerCase();
    renderConfigs(e.target.value);
});

    if (!searchTerm) {
        renderConfigs(configsData);
// Create config
document.getElementById('create-config-btn').addEventListener('click', async function() {
    const title = document.getElementById('config-title').value.trim();
    const description = document.getElementById('config-description').value.trim();
    const siteCheckboxes = document.querySelectorAll('.site-checkbox:checked');
    const siteIds = Array.from(siteCheckboxes).map(cb => parseInt(cb.value));

    if (!title) {
        showError('create-config-error', 'Title is required');
        return;
    }

    const filtered = configsData.filter(config =>
        config.filename.toLowerCase().includes(searchTerm) ||
        (config.title && config.title.toLowerCase().includes(searchTerm))
    );
    if (siteIds.length === 0) {
        showError('create-config-error', 'At least one site must be selected');
        return;
    }

    renderConfigs(filtered);
    try {
        const response = await fetch('/api/configs', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ title, description: description || null, site_ids: siteIds })
        });

        if (!response.ok) {
            const data = await response.json();
            throw new Error(data.message || 'Failed to create config');
        }

        // Close modal and reload
        bootstrap.Modal.getInstance(document.getElementById('createConfigModal')).hide();
        document.getElementById('create-config-form').reset();
        renderSitesCheckboxes(); // Reset checkboxes
        await loadConfigs();
    } catch (error) {
        showError('create-config-error', error.message);
    }
});

// Setup delete button
document.getElementById('confirm-delete-btn').addEventListener('click', deleteConfig);
// View config
async function viewConfig(id) {
    try {
        const response = await fetch(`/api/configs/${id}`);
        if (!response.ok) throw new Error('Failed to load config');

// Load configs on page load
document.addEventListener('DOMContentLoaded', loadConfigs);
        const config = await response.json();

        let html = `
            <div class="mb-3">
                <strong>Title:</strong> ${escapeHtml(config.title)}
            </div>
            <div class="mb-3">
                <strong>Description:</strong> ${config.description ? escapeHtml(config.description) : '<span class="text-muted">None</span>'}
            </div>
            <div class="mb-3">
                <strong>Sites (${config.site_count}):</strong>
                <ul class="mt-2">
                    ${config.sites.map(site => `
                        <li>${escapeHtml(site.name)} <small class="text-muted">(${site.ip_count} IP${site.ip_count !== 1 ? 's' : ''})</small></li>
                    `).join('')}
                </ul>
            </div>
            <div class="mb-3">
                <strong>Created:</strong> ${formatDate(config.created_at)}
            </div>
            <div class="mb-3">
                <strong>Last Updated:</strong> ${formatDate(config.updated_at)}
            </div>
        `;

        document.getElementById('view-config-content').innerHTML = html;
        new bootstrap.Modal(document.getElementById('viewConfigModal')).show();
    } catch (error) {
        alert('Error loading config: ' + error.message);
    }
}

// Edit config
async function editConfig(id) {
    try {
        const response = await fetch(`/api/configs/${id}`);
        if (!response.ok) throw new Error('Failed to load config');

        const config = await response.json();

        document.getElementById('edit-config-id').value = config.id;
        document.getElementById('edit-config-title').value = config.title;
        document.getElementById('edit-config-description').value = config.description || '';

        const selectedIds = config.sites.map(s => s.id);
        renderSitesCheckboxes(selectedIds, true); // true = isEditMode

        new bootstrap.Modal(document.getElementById('editConfigModal')).show();
    } catch (error) {
        alert('Error loading config: ' + error.message);
    }
}

// Save edited config
document.getElementById('edit-config-btn').addEventListener('click', async function() {
    const id = document.getElementById('edit-config-id').value;
    const title = document.getElementById('edit-config-title').value.trim();
    const description = document.getElementById('edit-config-description').value.trim();
    const siteCheckboxes = document.querySelectorAll('.edit-site-checkbox:checked');
    const siteIds = Array.from(siteCheckboxes).map(cb => parseInt(cb.value));

    if (!title) {
        showError('edit-config-error', 'Title is required');
        return;
    }

    if (siteIds.length === 0) {
        showError('edit-config-error', 'At least one site must be selected');
        return;
    }

    try {
        const response = await fetch(`/api/configs/${id}`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ title, description: description || null, site_ids: siteIds })
        });

        if (!response.ok) {
            const data = await response.json();
            throw new Error(data.message || 'Failed to update config');
        }

        // Close modal and reload
        bootstrap.Modal.getInstance(document.getElementById('editConfigModal')).hide();
        await loadConfigs();
    } catch (error) {
        showError('edit-config-error', error.message);
    }
});

// Delete config
function deleteConfig(id, name) {
    document.getElementById('delete-config-id').value = id;
    document.getElementById('delete-config-name').textContent = name;
    new bootstrap.Modal(document.getElementById('deleteConfigModal')).show();
}

// Confirm delete
document.getElementById('confirm-delete-btn').addEventListener('click', async function() {
    const id = document.getElementById('delete-config-id').value;

    try {
        const response = await fetch(`/api/configs/${id}`, { method: 'DELETE' });

        if (!response.ok) {
            const data = await response.json();
            throw new Error(data.message || 'Failed to delete config');
        }

        // Close modal and reload
        bootstrap.Modal.getInstance(document.getElementById('deleteConfigModal')).hide();
        await loadConfigs();
    } catch (error) {
        alert('Error deleting config: ' + error.message);
    }
});

// Utility functions
function showError(elementId, message) {
    const errorEl = document.getElementById(elementId);
    const messageEl = document.getElementById(elementId + '-message');
    messageEl.textContent = message;
    errorEl.style.display = 'block';
}

function escapeHtml(text) {
    if (!text) return '';
    const div = document.createElement('div');
    div.textContent = text;
    return div.innerHTML;
}

function formatDate(dateStr) {
    if (!dateStr) return '-';
    const date = new Date(dateStr);
    return date.toLocaleDateString() + ' ' + date.toLocaleTimeString();
}
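escapeHtml() leans on the browser's own serializer (set textContent, read back innerHTML), so it covers the HTML-significant characters without a hand-rolled replace chain. A quick usage sketch:

    escapeHtml('<b>DMZ & Lab</b>');  // "&lt;b&gt;DMZ &amp; Lab&lt;/b&gt;"
    escapeHtml(null);                // ""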
</script>
{% endblock %}

@@ -153,34 +153,28 @@
            <div class="modal-body">
                <form id="trigger-scan-form">
                    <div class="mb-3">
                        <label for="config-file" class="form-label">Config File</label>
                        <select class="form-select" id="config-file" name="config_file" required {% if not config_files %}disabled{% endif %}>
                            <option value="">Select a config file...</option>
                            {% for config in config_files %}
                            <option value="{{ config }}">{{ config }}</option>
                            {% endfor %}
                        <label for="config-select" class="form-label">Scan Configuration</label>
                        <select class="form-select" id="config-select" name="config_id" required>
                            <option value="">Loading configurations...</option>
                        </select>
                        {% if config_files %}
                        <div class="form-text text-muted">
                            Select a scan configuration file
                        <div class="form-text text-muted" id="config-help-text">
                            Select a scan configuration
                        </div>
                        {% else %}
                        <div class="alert alert-warning mt-2 mb-0" role="alert">
                        <div id="no-configs-warning" class="alert alert-warning mt-2 mb-0" role="alert" style="display: none;">
                            <i class="bi bi-exclamation-triangle"></i>
                            <strong>No configurations available</strong>
                            <p class="mb-2 mt-2">You need to create a configuration file before you can trigger a scan.</p>
                            <a href="{{ url_for('main.upload_config') }}" class="btn btn-sm btn-primary">
                            <p class="mb-2 mt-2">You need to create a configuration before you can trigger a scan.</p>
                            <a href="{{ url_for('main.configs') }}" class="btn btn-sm btn-primary">
                                <i class="bi bi-plus-circle"></i> Create Configuration
                            </a>
                        </div>
                        {% endif %}
                    </div>
                    <div id="trigger-error" class="alert alert-danger" style="display: none;"></div>
                </form>
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                <button type="button" class="btn btn-primary" onclick="triggerScan()" {% if not config_files %}disabled{% endif %}>
                <button type="button" class="btn btn-primary" id="trigger-scan-btn" onclick="triggerScan()">
                    <span id="modal-trigger-text">Trigger Scan</span>
                    <span id="modal-trigger-spinner" class="spinner-border spinner-border-sm ms-2" style="display: none;"></span>
                </button>

@@ -323,23 +317,75 @@
    });
}

// Load available configs
async function loadConfigs() {
    const selectEl = document.getElementById('config-select');
    const helpTextEl = document.getElementById('config-help-text');
    const noConfigsWarning = document.getElementById('no-configs-warning');
    const triggerBtn = document.getElementById('trigger-scan-btn');

    try {
        const response = await fetch('/api/configs');
        if (!response.ok) {
            throw new Error('Failed to load configurations');
        }

        const data = await response.json();
        const configs = data.configs || [];

        // Clear existing options
        selectEl.innerHTML = '';

        if (configs.length === 0) {
            selectEl.innerHTML = '<option value="">No configurations available</option>';
            selectEl.disabled = true;
            triggerBtn.disabled = true;
            helpTextEl.style.display = 'none';
            noConfigsWarning.style.display = 'block';
        } else {
            selectEl.innerHTML = '<option value="">Select a configuration...</option>';
            configs.forEach(config => {
                const option = document.createElement('option');
                option.value = config.id;
                const siteText = config.site_count === 1 ? 'site' : 'sites';
                option.textContent = `${config.title} (${config.site_count} ${siteText})`;
                selectEl.appendChild(option);
            });
            selectEl.disabled = false;
            triggerBtn.disabled = false;
            helpTextEl.style.display = 'block';
            noConfigsWarning.style.display = 'none';
        }
    } catch (error) {
        console.error('Error loading configs:', error);
        selectEl.innerHTML = '<option value="">Error loading configurations</option>';
        selectEl.disabled = true;
        triggerBtn.disabled = true;
        helpTextEl.style.display = 'none';
    }
}
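Each config is rendered into the dropdown as its title plus a site count. For a config with id 4 titled "Production Weekly Scan" spanning three sites (sample values, purely illustrative), the generated option would be:

    <option value="4">Production Weekly Scan (3 sites)</option>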
// Show trigger scan modal
function showTriggerScanModal() {
    const modal = new bootstrap.Modal(document.getElementById('triggerScanModal'));
    document.getElementById('trigger-error').style.display = 'none';
    document.getElementById('trigger-scan-form').reset();

    // Load configs when modal is shown
    loadConfigs();

    modal.show();
}

// Trigger scan
async function triggerScan() {
    const configFile = document.getElementById('config-file').value;
    const configId = document.getElementById('config-select').value;
    const errorEl = document.getElementById('trigger-error');
    const btnText = document.getElementById('modal-trigger-text');
    const btnSpinner = document.getElementById('modal-trigger-spinner');

    if (!configFile) {
        errorEl.textContent = 'Please enter a config file path.';
    if (!configId) {
        errorEl.textContent = 'Please select a configuration.';
        errorEl.style.display = 'block';
        return;
    }

@@ -356,7 +402,7 @@
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({
            config_file: configFile
            config_id: parseInt(configId)
        })
    });


@@ -79,14 +79,6 @@
            </div>
        </div>
    </div>
    <div class="row">
        <div class="col-md-12">
            <div class="mb-0">
                <label class="form-label text-muted">Config File</label>
                <div id="scan-config-file" class="mono">-</div>
            </div>
        </div>
    </div>
</div>
</div>
</div>

@@ -113,34 +113,28 @@
            <div class="modal-body">
                <form id="trigger-scan-form">
                    <div class="mb-3">
                        <label for="config-file" class="form-label">Config File</label>
                        <select class="form-select" id="config-file" name="config_file" required {% if not config_files %}disabled{% endif %}>
                            <option value="">Select a config file...</option>
                            {% for config in config_files %}
                            <option value="{{ config }}">{{ config }}</option>
                            {% endfor %}
                        <label for="config-select" class="form-label">Scan Configuration</label>
                        <select class="form-select" id="config-select" name="config_id" required>
                            <option value="">Loading configurations...</option>
                        </select>
                        {% if config_files %}
                        <div class="form-text text-muted">
                            Select a scan configuration file
                        <div class="form-text text-muted" id="config-help-text">
                            Select a scan configuration
                        </div>
                        {% else %}
                        <div class="alert alert-warning mt-2 mb-0" role="alert">
                        <div id="no-configs-warning" class="alert alert-warning mt-2 mb-0" role="alert" style="display: none;">
                            <i class="bi bi-exclamation-triangle"></i>
                            <strong>No configurations available</strong>
                            <p class="mb-2 mt-2">You need to create a configuration file before you can trigger a scan.</p>
                            <a href="{{ url_for('main.upload_config') }}" class="btn btn-sm btn-primary">
                            <p class="mb-2 mt-2">You need to create a configuration before you can trigger a scan.</p>
                            <a href="{{ url_for('main.configs') }}" class="btn btn-sm btn-primary">
                                <i class="bi bi-plus-circle"></i> Create Configuration
                            </a>
                        </div>
                        {% endif %}
                    </div>
                    <div id="trigger-error" class="alert alert-danger" style="display: none;"></div>
                </form>
            </div>
            <div class="modal-footer" style="border-top: 1px solid #334155;">
                <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Cancel</button>
                <button type="button" class="btn btn-primary" onclick="triggerScan()" {% if not config_files %}disabled{% endif %}>
                <button type="button" class="btn btn-primary" id="trigger-scan-btn" onclick="triggerScan()">
                    <span id="modal-trigger-text">Trigger Scan</span>
                    <span id="modal-trigger-spinner" class="spinner-border spinner-border-sm ms-2" style="display: none;"></span>
                </button>

@@ -359,23 +353,75 @@
    });
}

// Load available configs
async function loadConfigs() {
    const selectEl = document.getElementById('config-select');
    const helpTextEl = document.getElementById('config-help-text');
    const noConfigsWarning = document.getElementById('no-configs-warning');
    const triggerBtn = document.getElementById('trigger-scan-btn');

    try {
        const response = await fetch('/api/configs');
        if (!response.ok) {
            throw new Error('Failed to load configurations');
        }

        const data = await response.json();
        const configs = data.configs || [];

        // Clear existing options
        selectEl.innerHTML = '';

        if (configs.length === 0) {
            selectEl.innerHTML = '<option value="">No configurations available</option>';
            selectEl.disabled = true;
            triggerBtn.disabled = true;
            helpTextEl.style.display = 'none';
            noConfigsWarning.style.display = 'block';
        } else {
            selectEl.innerHTML = '<option value="">Select a configuration...</option>';
            configs.forEach(config => {
                const option = document.createElement('option');
                option.value = config.id;
                const siteText = config.site_count === 1 ? 'site' : 'sites';
                option.textContent = `${config.title} (${config.site_count} ${siteText})`;
                selectEl.appendChild(option);
            });
            selectEl.disabled = false;
            triggerBtn.disabled = false;
            helpTextEl.style.display = 'block';
            noConfigsWarning.style.display = 'none';
        }
    } catch (error) {
        console.error('Error loading configs:', error);
        selectEl.innerHTML = '<option value="">Error loading configurations</option>';
        selectEl.disabled = true;
        triggerBtn.disabled = true;
        helpTextEl.style.display = 'none';
    }
}

// Show trigger scan modal
function showTriggerScanModal() {
    const modal = new bootstrap.Modal(document.getElementById('triggerScanModal'));
    document.getElementById('trigger-error').style.display = 'none';
    document.getElementById('trigger-scan-form').reset();

    // Load configs when modal is shown
    loadConfigs();

    modal.show();
}

// Trigger scan
async function triggerScan() {
    const configFile = document.getElementById('config-file').value;
    const configId = document.getElementById('config-select').value;
    const errorEl = document.getElementById('trigger-error');
    const btnText = document.getElementById('modal-trigger-text');
    const btnSpinner = document.getElementById('modal-trigger-spinner');

    if (!configFile) {
        errorEl.textContent = 'Please enter a config file path.';
    if (!configId) {
        errorEl.textContent = 'Please select a configuration.';
        errorEl.style.display = 'block';
        return;
    }

@@ -392,13 +438,13 @@
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({
            config_file: configFile
            config_id: parseInt(configId)
        })
    });

    if (!response.ok) {
        const data = await response.json();
        throw new Error(data.error || 'Failed to trigger scan');
        throw new Error(data.message || data.error || 'Failed to trigger scan');
    }

    const data = await response.json();
1214
app/web/templates/sites.html
Normal file
File diff suppressed because it is too large
@@ -4,7 +4,7 @@ Pagination utilities for SneakyScanner web application.

Provides helper functions for paginating SQLAlchemy queries.
"""

from typing import Any, Dict, List
from typing import Any, Callable, Dict, List, Optional
from sqlalchemy.orm import Query


@@ -114,6 +114,7 @@ class PaginatedResult:


def paginate(query: Query, page: int = 1, per_page: int = 20,
             transform: Optional[Callable[[Any], Dict[str, Any]]] = None,
             max_per_page: int = 100) -> PaginatedResult:
    """
    Paginate a SQLAlchemy query.

@@ -122,6 +123,7 @@ def paginate(query: Query, page: int = 1, per_page: int = 20,
        query: SQLAlchemy query to paginate
        page: Page number (1-indexed, default: 1)
        per_page: Items per page (default: 20)
        transform: Optional function to transform each item (default: None)
        max_per_page: Maximum items per page (default: 100)

    Returns:

@@ -133,6 +135,11 @@ def paginate(query: Query, page: int = 1, per_page: int = 20,
        >>> result = paginate(query, page=1, per_page=20)
        >>> scans = result.items
        >>> total_pages = result.pages

        >>> # With transform function
        >>> def scan_to_dict(scan):
        ...     return {'id': scan.id, 'name': scan.name}
        >>> result = paginate(query, page=1, per_page=20, transform=scan_to_dict)
    """
    # Validate and sanitize parameters
    page = max(1, page)  # Page must be at least 1

@@ -147,6 +154,10 @@ def paginate(query: Query, page: int = 1, per_page: int = 20,
    # Execute query with limit and offset
    items = query.limit(per_page).offset(offset).all()

    # Apply transform if provided
    if transform is not None:
        items = [transform(item) for item in items]

    return PaginatedResult(
        items=items,
        total=total,
73
destroy_everything.sh
Executable file
@@ -0,0 +1,73 @@
#!/bin/bash

# SneakyScan Fresh Start Script
# This script removes all data, configs, and scan output for a clean slate

set -e

# Check for root/sudo access
if [ "$EUID" -ne 0 ]; then
    echo "============================================"
    echo "  ERROR: Root access required"
    echo "============================================"
    echo ""
    echo "This script needs to run with sudo because"
    echo "Docker creates files with root ownership."
    echo ""
    echo "Please run:"
    echo "  sudo ./destroy_everything.sh"
    echo ""
    exit 1
fi

echo "============================================"
echo "  SneakyScan Fresh Start - DESTROY EVERYTHING"
echo "============================================"
echo ""
echo "This will remove:"
echo "  - All database files in ./data/"
echo "  - All config files in ./configs/"
echo "  - All scan outputs in ./output/"
echo ""
read -p "Are you sure you want to continue? (yes/no): " -r
echo ""

if [[ ! $REPLY =~ ^[Yy][Ee][Ss]$ ]]; then
    echo "Aborted."
    exit 0
fi

echo "Starting cleanup..."
echo ""

# Clean data directory (database files)
if [ -d "data" ]; then
    echo "Cleaning data directory..."
    rm -rfv data/*
    echo "  Data directory cleaned"
else
    echo "  ⚠ Data directory not found"
fi

# Clean configs directory
if [ -d "configs" ]; then
    echo "Cleaning configs directory..."
    rm -rfv configs/*
    echo "  Configs directory cleaned"
else
    echo "  ⚠ Configs directory not found"
fi

# Clean output directory (scan results)
if [ -d "output" ]; then
    echo "Cleaning output directory..."
    rm -rfv output/*
    echo "  Output directory cleaned"
else
    echo "  ⚠ Output directory not found"
fi

echo ""
echo "============================================"
echo "  Fresh start complete! All data removed."
echo "============================================"