restructure of dirs, huge docs update
0
app/web/api/__init__.py
Normal file
144
app/web/api/alerts.py
Normal file
@@ -0,0 +1,144 @@
"""
Alerts API blueprint.

Handles endpoints for viewing alert history and managing alert rules.
"""

from flask import Blueprint, jsonify, request

from web.auth.decorators import api_auth_required

bp = Blueprint('alerts', __name__)


@bp.route('', methods=['GET'])
@api_auth_required
def list_alerts():
    """
    List recent alerts.

    Query params:
        page: Page number (default: 1)
        per_page: Items per page (default: 20)
        alert_type: Filter by alert type
        severity: Filter by severity (info, warning, critical)
        start_date: Filter alerts after this date
        end_date: Filter alerts before this date

    Returns:
        JSON response with alerts list
    """
    # TODO: Implement in Phase 4
    return jsonify({
        'alerts': [],
        'total': 0,
        'page': 1,
        'per_page': 20,
        'message': 'Alerts list endpoint - to be implemented in Phase 4'
    })


@bp.route('/rules', methods=['GET'])
@api_auth_required
def list_alert_rules():
    """
    List all alert rules.

    Returns:
        JSON response with alert rules
    """
    # TODO: Implement in Phase 4
    return jsonify({
        'rules': [],
        'message': 'Alert rules list endpoint - to be implemented in Phase 4'
    })


@bp.route('/rules', methods=['POST'])
@api_auth_required
def create_alert_rule():
    """
    Create a new alert rule.

    Request body:
        rule_type: Type of alert rule
        threshold: Threshold value (e.g., days for cert expiry)
        enabled: Whether rule is active (default: true)
        email_enabled: Send email for this rule (default: false)

    Returns:
        JSON response with created rule ID
    """
    # TODO: Implement in Phase 4
    data = request.get_json() or {}

    return jsonify({
        'rule_id': None,
        'status': 'not_implemented',
        'message': 'Alert rule creation endpoint - to be implemented in Phase 4',
        'data': data
    }), 501


@bp.route('/rules/<int:rule_id>', methods=['PUT'])
@api_auth_required
def update_alert_rule(rule_id):
    """
    Update an existing alert rule.

    Args:
        rule_id: Alert rule ID to update

    Request body:
        threshold: Threshold value (optional)
        enabled: Whether rule is active (optional)
        email_enabled: Send email for this rule (optional)

    Returns:
        JSON response with update status
    """
    # TODO: Implement in Phase 4
    data = request.get_json() or {}

    return jsonify({
        'rule_id': rule_id,
        'status': 'not_implemented',
        'message': 'Alert rule update endpoint - to be implemented in Phase 4',
        'data': data
    }), 501


@bp.route('/rules/<int:rule_id>', methods=['DELETE'])
@api_auth_required
def delete_alert_rule(rule_id):
    """
    Delete an alert rule.

    Args:
        rule_id: Alert rule ID to delete

    Returns:
        JSON response with deletion status
    """
    # TODO: Implement in Phase 4
    return jsonify({
        'rule_id': rule_id,
        'status': 'not_implemented',
        'message': 'Alert rule deletion endpoint - to be implemented in Phase 4'
    }), 501


# Health check endpoint
@bp.route('/health', methods=['GET'])
def health_check():
    """
    Health check endpoint for monitoring.

    Returns:
        JSON response with API health status
    """
    return jsonify({
        'status': 'healthy',
        'api': 'alerts',
        'version': '1.0.0-phase1'
    })
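The alert endpoints above are Phase 4 stubs, but the documented request shapes can already be exercised. A minimal client sketch, assuming the blueprint is registered under /api/alerts and that api_auth_required accepts an X-API-Key header (both assumptions; the URL prefix, auth scheme, and the cert_expiry rule type are not defined in this file):

import requests

BASE = "http://localhost:5000/api/alerts"      # assumed mount point
HEADERS = {"X-API-Key": "changeme"}            # assumed auth header

# List recent alerts (returns an empty list until Phase 4)
resp = requests.get(BASE, params={"page": 1, "per_page": 20}, headers=HEADERS)
print(resp.json())

# Create an alert rule using the documented body (currently answers 501)
rule = {"rule_type": "cert_expiry", "threshold": 30, "enabled": True, "email_enabled": False}
resp = requests.post(f"{BASE}/rules", json=rule, headers=HEADERS)
print(resp.status_code, resp.json())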
452
app/web/api/configs.py
Normal file
@@ -0,0 +1,452 @@
"""
Configs API blueprint.

Handles endpoints for managing scan configuration files, including CSV/YAML upload,
template download, and config management.
"""

import logging
import io
from flask import Blueprint, jsonify, request, send_file
from werkzeug.utils import secure_filename

from web.auth.decorators import api_auth_required
from web.services.config_service import ConfigService

bp = Blueprint('configs', __name__)
logger = logging.getLogger(__name__)


@bp.route('', methods=['GET'])
@api_auth_required
def list_configs():
    """
    List all config files with metadata.

    Returns:
        JSON response with list of configs:
        {
            "configs": [
                {
                    "filename": "prod-scan.yaml",
                    "title": "Prod Scan",
                    "path": "/app/configs/prod-scan.yaml",
                    "created_at": "2025-11-15T10:30:00Z",
                    "size_bytes": 1234,
                    "used_by_schedules": ["Daily Scan"]
                }
            ]
        }
    """
    try:
        config_service = ConfigService()
        configs = config_service.list_configs()

        logger.info(f"Listed {len(configs)} config files")

        return jsonify({
            'configs': configs
        })

    except Exception as e:
        logger.error(f"Unexpected error listing configs: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>', methods=['GET'])
@api_auth_required
def get_config(filename: str):
    """
    Get config file content and parsed data.

    Args:
        filename: Config filename

    Returns:
        JSON response with config content:
        {
            "filename": "prod-scan.yaml",
            "content": "title: Prod Scan\n...",
            "parsed": {"title": "Prod Scan", "sites": [...]}
        }
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)

        config_service = ConfigService()
        config_data = config_service.get_config(filename)

        logger.info(f"Retrieved config file: {filename}")

        return jsonify(config_data)

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except ValueError as e:
        logger.warning(f"Invalid config file: {filename} - {str(e)}")
        return jsonify({
            'error': 'Invalid config',
            'message': str(e)
        }), 400

    except Exception as e:
        logger.error(f"Unexpected error getting config {filename}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/create-from-cidr', methods=['POST'])
@api_auth_required
def create_from_cidr():
    """
    Create config from CIDR range.

    Request:
        JSON with:
        {
            "title": "My Scan",
            "cidr": "10.0.0.0/24",
            "site_name": "Production" (optional),
            "ping_default": false (optional)
        }

    Returns:
        JSON response with created config info:
        {
            "success": true,
            "filename": "my-scan.yaml",
            "preview": "title: My Scan\n..."
        }
    """
    try:
        data = request.get_json()

        if not data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Request body must be JSON'
            }), 400

        # Validate required fields
        if 'title' not in data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Missing required field: title'
            }), 400

        if 'cidr' not in data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Missing required field: cidr'
            }), 400

        title = data['title']
        cidr = data['cidr']
        site_name = data.get('site_name', None)
        ping_default = data.get('ping_default', False)

        # Validate title
        if not title or not title.strip():
            return jsonify({
                'error': 'Validation error',
                'message': 'Title cannot be empty'
            }), 400

        # Create config from CIDR
        config_service = ConfigService()
        filename, yaml_preview = config_service.create_from_cidr(
            title=title,
            cidr=cidr,
            site_name=site_name,
            ping_default=ping_default
        )

        logger.info(f"Created config from CIDR {cidr}: {filename}")

        return jsonify({
            'success': True,
            'filename': filename,
            'preview': yaml_preview
        })

    except ValueError as e:
        logger.warning(f"CIDR validation failed: {str(e)}")
        return jsonify({
            'error': 'Validation error',
            'message': str(e)
        }), 400

    except Exception as e:
        logger.error(f"Unexpected error creating config from CIDR: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/upload-yaml', methods=['POST'])
@api_auth_required
def upload_yaml():
    """
    Upload YAML config file directly.

    Request:
        multipart/form-data with 'file' field containing YAML file
        Optional 'filename' field for custom filename

    Returns:
        JSON response with created config info:
        {
            "success": true,
            "filename": "prod-scan.yaml"
        }
    """
    try:
        # Check if file is present
        if 'file' not in request.files:
            return jsonify({
                'error': 'Bad request',
                'message': 'No file provided'
            }), 400

        file = request.files['file']

        # Check if file is selected
        if file.filename == '':
            return jsonify({
                'error': 'Bad request',
                'message': 'No file selected'
            }), 400

        # Check file extension
        if not (file.filename.endswith('.yaml') or file.filename.endswith('.yml')):
            return jsonify({
                'error': 'Bad request',
                'message': 'File must be a YAML file (.yaml or .yml extension)'
            }), 400

        # Read YAML content
        yaml_content = file.read().decode('utf-8')

        # Get filename (use uploaded filename or custom)
        filename = request.form.get('filename', file.filename)
        filename = secure_filename(filename)

        # Create config from YAML
        config_service = ConfigService()
        final_filename = config_service.create_from_yaml(filename, yaml_content)

        logger.info(f"Created config from YAML upload: {final_filename}")

        return jsonify({
            'success': True,
            'filename': final_filename
        })

    except ValueError as e:
        logger.warning(f"YAML validation failed: {str(e)}")
        return jsonify({
            'error': 'Validation error',
            'message': str(e)
        }), 400

    except UnicodeDecodeError:
        logger.warning("YAML file encoding error")
        return jsonify({
            'error': 'Encoding error',
            'message': 'YAML file must be UTF-8 encoded'
        }), 400

    except Exception as e:
        logger.error(f"Unexpected error uploading YAML: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>/download', methods=['GET'])
@api_auth_required
def download_config(filename: str):
    """
    Download existing config file.

    Args:
        filename: Config filename

    Returns:
        YAML file download
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)

        config_service = ConfigService()
        config_data = config_service.get_config(filename)

        # Create file-like object
        yaml_file = io.BytesIO(config_data['content'].encode('utf-8'))
        yaml_file.seek(0)

        logger.info(f"Config file downloaded: {filename}")

        # Send file
        return send_file(
            yaml_file,
            mimetype='application/x-yaml',
            as_attachment=True,
            download_name=filename
        )

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except Exception as e:
        logger.error(f"Unexpected error downloading config {filename}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>', methods=['PUT'])
@api_auth_required
def update_config(filename: str):
    """
    Update existing config file with new YAML content.

    Args:
        filename: Config filename

    Request:
        JSON with:
        {
            "content": "title: My Scan\nsites: ..."
        }

    Returns:
        JSON response with success status:
        {
            "success": true,
            "message": "Config updated successfully"
        }

    Error responses:
        - 400: Invalid YAML or config structure
        - 404: Config file not found
        - 500: Internal server error
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)

        data = request.get_json()

        if not data or 'content' not in data:
            return jsonify({
                'error': 'Bad request',
                'message': 'Missing required field: content'
            }), 400

        yaml_content = data['content']

        # Update config
        config_service = ConfigService()
        config_service.update_config(filename, yaml_content)

        logger.info(f"Updated config file: {filename}")

        return jsonify({
            'success': True,
            'message': 'Config updated successfully'
        })

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except ValueError as e:
        logger.warning(f"Invalid config content for {filename}: {str(e)}")
        return jsonify({
            'error': 'Validation error',
            'message': str(e)
        }), 400

    except Exception as e:
        logger.error(f"Unexpected error updating config {filename}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<filename>', methods=['DELETE'])
@api_auth_required
def delete_config(filename: str):
    """
    Delete config file and cascade delete associated schedules.

    When a config is deleted, all schedules using that config (both enabled
    and disabled) are automatically deleted as well.

    Args:
        filename: Config filename

    Returns:
        JSON response with success status:
        {
            "success": true,
            "message": "Config deleted successfully"
        }

    Error responses:
        - 404: Config file not found
        - 500: Internal server error
    """
    try:
        # Sanitize filename
        filename = secure_filename(filename)

        config_service = ConfigService()
        config_service.delete_config(filename)

        logger.info(f"Deleted config file: {filename}")

        return jsonify({
            'success': True,
            'message': 'Config deleted successfully'
        })

    except FileNotFoundError as e:
        logger.warning(f"Config file not found: {filename}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404

    except Exception as e:
        logger.error(f"Unexpected error deleting config {filename}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500
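A sketch of how a client might drive the config endpoints above: create a config from a CIDR, upload a YAML file, then download it. The /api/configs prefix and the X-API-Key header are assumptions; the paths, payloads, and field names come from the blueprint itself:

import requests

BASE = "http://localhost:5000/api/configs"     # assumed mount point
HEADERS = {"X-API-Key": "changeme"}            # assumed auth header

# Create a config from a CIDR range
body = {"title": "My Scan", "cidr": "10.0.0.0/24", "site_name": "Production"}
created = requests.post(f"{BASE}/create-from-cidr", json=body, headers=HEADERS).json()
print(created["filename"], created["preview"])

# Upload a YAML config directly (multipart/form-data, 'file' field)
with open("prod-scan.yaml", "rb") as fh:
    requests.post(f"{BASE}/upload-yaml", files={"file": fh}, headers=HEADERS)

# Download an existing config
resp = requests.get(f"{BASE}/prod-scan.yaml/download", headers=HEADERS)
with open("prod-scan-copy.yaml", "wb") as out:
    out.write(resp.content)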
338
app/web/api/scans.py
Normal file
@@ -0,0 +1,338 @@
"""
Scans API blueprint.

Handles endpoints for triggering scans, listing scan history, and retrieving
scan results.
"""

import logging
from flask import Blueprint, current_app, jsonify, request
from sqlalchemy.exc import SQLAlchemyError

from web.auth.decorators import api_auth_required
from web.services.scan_service import ScanService
from web.utils.validators import validate_config_file
from web.utils.pagination import validate_page_params

bp = Blueprint('scans', __name__)
logger = logging.getLogger(__name__)


@bp.route('', methods=['GET'])
@api_auth_required
def list_scans():
    """
    List all scans with pagination.

    Query params:
        page: Page number (default: 1)
        per_page: Items per page (default: 20, max: 100)
        status: Filter by status (running, completed, failed)

    Returns:
        JSON response with scans list and pagination info
    """
    try:
        # Get and validate query parameters
        page = request.args.get('page', 1, type=int)
        per_page = request.args.get('per_page', 20, type=int)
        status_filter = request.args.get('status', None, type=str)

        # Validate pagination params
        page, per_page = validate_page_params(page, per_page)

        # Get scans from service
        scan_service = ScanService(current_app.db_session)
        paginated_result = scan_service.list_scans(
            page=page,
            per_page=per_page,
            status_filter=status_filter
        )

        logger.info(f"Listed scans: page={page}, per_page={per_page}, status={status_filter}, total={paginated_result.total}")

        return jsonify({
            'scans': paginated_result.items,
            'total': paginated_result.total,
            'page': paginated_result.page,
            'per_page': paginated_result.per_page,
            'total_pages': paginated_result.pages,
            'has_prev': paginated_result.has_prev,
            'has_next': paginated_result.has_next
        })

    except ValueError as e:
        logger.warning(f"Invalid request parameters: {str(e)}")
        return jsonify({
            'error': 'Invalid request',
            'message': str(e)
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error listing scans: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve scans'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error listing scans: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:scan_id>', methods=['GET'])
@api_auth_required
def get_scan(scan_id):
    """
    Get details for a specific scan.

    Args:
        scan_id: Scan ID

    Returns:
        JSON response with scan details
    """
    try:
        # Get scan from service
        scan_service = ScanService(current_app.db_session)
        scan = scan_service.get_scan(scan_id)

        if not scan:
            logger.warning(f"Scan not found: {scan_id}")
            return jsonify({
                'error': 'Not found',
                'message': f'Scan with ID {scan_id} not found'
            }), 404

        logger.info(f"Retrieved scan details: {scan_id}")
        return jsonify(scan)

    except SQLAlchemyError as e:
        logger.error(f"Database error retrieving scan {scan_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve scan'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error retrieving scan {scan_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('', methods=['POST'])
@api_auth_required
def trigger_scan():
    """
    Trigger a new scan.

    Request body:
        config_file: Path to YAML config file

    Returns:
        JSON response with scan_id and status
    """
    try:
        # Get request data
        data = request.get_json() or {}
        config_file = data.get('config_file')

        # Validate required fields
        if not config_file:
            logger.warning("Scan trigger request missing config_file")
            return jsonify({
                'error': 'Invalid request',
                'message': 'config_file is required'
            }), 400

        # Trigger scan via service
        scan_service = ScanService(current_app.db_session)
        scan_id = scan_service.trigger_scan(
            config_file=config_file,
            triggered_by='api',
            scheduler=current_app.scheduler
        )

        logger.info(f"Scan {scan_id} triggered via API: config={config_file}")

        return jsonify({
            'scan_id': scan_id,
            'status': 'running',
            'message': 'Scan queued successfully'
        }), 201

    except ValueError as e:
        # Config file validation error
        error_message = str(e)
        logger.warning(f"Invalid config file: {error_message}")
        logger.warning(f"Request data: config_file='{config_file}'")
        return jsonify({
            'error': 'Invalid request',
            'message': error_message
        }), 400
    except SQLAlchemyError as e:
        logger.error(f"Database error triggering scan: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to create scan'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error triggering scan: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:scan_id>', methods=['DELETE'])
@api_auth_required
def delete_scan(scan_id):
    """
    Delete a scan and its associated files.

    Args:
        scan_id: Scan ID to delete

    Returns:
        JSON response with deletion status
    """
    try:
        # Delete scan via service
        scan_service = ScanService(current_app.db_session)
        scan_service.delete_scan(scan_id)

        logger.info(f"Scan {scan_id} deleted successfully")

        return jsonify({
            'scan_id': scan_id,
            'message': 'Scan deleted successfully'
        }), 200

    except ValueError as e:
        # Scan not found
        logger.warning(f"Scan deletion failed: {str(e)}")
        return jsonify({
            'error': 'Not found',
            'message': str(e)
        }), 404
    except SQLAlchemyError as e:
        logger.error(f"Database error deleting scan {scan_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to delete scan'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error deleting scan {scan_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:scan_id>/status', methods=['GET'])
@api_auth_required
def get_scan_status(scan_id):
    """
    Get current status of a running scan.

    Args:
        scan_id: Scan ID

    Returns:
        JSON response with scan status and progress
    """
    try:
        # Get scan status from service
        scan_service = ScanService(current_app.db_session)
        status = scan_service.get_scan_status(scan_id)

        if not status:
            logger.warning(f"Scan not found for status check: {scan_id}")
            return jsonify({
                'error': 'Not found',
                'message': f'Scan with ID {scan_id} not found'
            }), 404

        logger.debug(f"Retrieved status for scan {scan_id}: {status['status']}")
        return jsonify(status)

    except SQLAlchemyError as e:
        logger.error(f"Database error retrieving scan status {scan_id}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to retrieve scan status'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error retrieving scan status {scan_id}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


@bp.route('/<int:scan_id1>/compare/<int:scan_id2>', methods=['GET'])
@api_auth_required
def compare_scans(scan_id1, scan_id2):
    """
    Compare two scans and show differences.

    Compares ports, services, and certificates between two scans,
    highlighting added, removed, and changed items.

    Args:
        scan_id1: First (older) scan ID
        scan_id2: Second (newer) scan ID

    Returns:
        JSON response with comparison results including:
        - scan1, scan2: Metadata for both scans
        - ports: Added, removed, and unchanged ports
        - services: Added, removed, and changed services
        - certificates: Added, removed, and changed certificates
        - drift_score: Overall drift metric (0.0-1.0)
    """
    try:
        # Compare scans using service
        scan_service = ScanService(current_app.db_session)
        comparison = scan_service.compare_scans(scan_id1, scan_id2)

        if not comparison:
            logger.warning(f"Scan comparison failed: one or both scans not found ({scan_id1}, {scan_id2})")
            return jsonify({
                'error': 'Not found',
                'message': 'One or both scans not found'
            }), 404

        logger.info(f"Compared scans {scan_id1} and {scan_id2}: drift_score={comparison['drift_score']}")
        return jsonify(comparison), 200

    except SQLAlchemyError as e:
        logger.error(f"Database error comparing scans {scan_id1} and {scan_id2}: {str(e)}")
        return jsonify({
            'error': 'Database error',
            'message': 'Failed to compare scans'
        }), 500
    except Exception as e:
        logger.error(f"Unexpected error comparing scans {scan_id1} and {scan_id2}: {str(e)}", exc_info=True)
        return jsonify({
            'error': 'Internal server error',
            'message': 'An unexpected error occurred'
        }), 500


# Health check endpoint
@bp.route('/health', methods=['GET'])
def health_check():
    """
    Health check endpoint for monitoring.

    Returns:
        JSON response with API health status
    """
    return jsonify({
        'status': 'healthy',
        'api': 'scans',
        'version': '1.0.0-phase1'
    })
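A client sketch for the scan lifecycle defined above: trigger a scan, poll its status, then compare it against an earlier run. The base URL and X-API-Key header are assumptions; endpoint paths, request bodies, and response fields are taken from the blueprint:

import time
import requests

BASE = "http://localhost:5000/api/scans"       # assumed mount point
HEADERS = {"X-API-Key": "changeme"}            # assumed auth header

# Trigger a scan from a config file (201 on success)
resp = requests.post(BASE, json={"config_file": "prod-scan.yaml"}, headers=HEADERS)
scan_id = resp.json()["scan_id"]

# Poll the status endpoint until the scan leaves the 'running' state
while True:
    status = requests.get(f"{BASE}/{scan_id}/status", headers=HEADERS).json()
    if status["status"] != "running":
        break
    time.sleep(10)

# Compare against a previous scan (id 1 here is just an example)
diff = requests.get(f"{BASE}/1/compare/{scan_id}", headers=HEADERS).json()
print(diff["drift_score"])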
331
app/web/api/schedules.py
Normal file
@@ -0,0 +1,331 @@
"""
Schedules API blueprint.

Handles endpoints for managing scheduled scans including CRUD operations
and manual triggering.
"""

import logging

from flask import Blueprint, jsonify, request, current_app

from web.auth.decorators import api_auth_required
from web.services.schedule_service import ScheduleService
from web.services.scan_service import ScanService

logger = logging.getLogger(__name__)

bp = Blueprint('schedules', __name__)


@bp.route('', methods=['GET'])
@api_auth_required
def list_schedules():
    """
    List all schedules with pagination and filtering.

    Query parameters:
        page: Page number (default: 1)
        per_page: Items per page (default: 20)
        enabled: Filter by enabled status (true/false)

    Returns:
        JSON response with paginated schedules list
    """
    try:
        # Parse query parameters
        page = request.args.get('page', 1, type=int)
        per_page = request.args.get('per_page', 20, type=int)
        enabled_str = request.args.get('enabled', type=str)

        # Parse enabled filter
        enabled_filter = None
        if enabled_str is not None:
            enabled_filter = enabled_str.lower() == 'true'

        # Get schedules
        schedule_service = ScheduleService(current_app.db_session)
        result = schedule_service.list_schedules(page, per_page, enabled_filter)

        return jsonify(result), 200

    except Exception as e:
        logger.error(f"Error listing schedules: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500


@bp.route('/<int:schedule_id>', methods=['GET'])
@api_auth_required
def get_schedule(schedule_id):
    """
    Get details for a specific schedule.

    Args:
        schedule_id: Schedule ID

    Returns:
        JSON response with schedule details including execution history
    """
    try:
        schedule_service = ScheduleService(current_app.db_session)
        schedule = schedule_service.get_schedule(schedule_id)

        return jsonify(schedule), 200

    except ValueError as e:
        # Schedule not found
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"Error getting schedule {schedule_id}: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500


@bp.route('', methods=['POST'])
@api_auth_required
def create_schedule():
    """
    Create a new schedule.

    Request body:
        name: Schedule name (required)
        config_file: Path to YAML config (required)
        cron_expression: Cron expression (required, e.g., '0 2 * * *')
        enabled: Whether schedule is active (optional, default: true)

    Returns:
        JSON response with created schedule ID
    """
    try:
        data = request.get_json() or {}

        # Validate required fields
        required = ['name', 'config_file', 'cron_expression']
        missing = [field for field in required if field not in data]
        if missing:
            return jsonify({'error': f'Missing required fields: {", ".join(missing)}'}), 400

        # Create schedule
        schedule_service = ScheduleService(current_app.db_session)
        schedule_id = schedule_service.create_schedule(
            name=data['name'],
            config_file=data['config_file'],
            cron_expression=data['cron_expression'],
            enabled=data.get('enabled', True)
        )

        # Get the created schedule
        schedule = schedule_service.get_schedule(schedule_id)

        # Add to APScheduler if enabled
        if schedule['enabled'] and hasattr(current_app, 'scheduler'):
            try:
                current_app.scheduler.add_scheduled_scan(
                    schedule_id=schedule_id,
                    config_file=schedule['config_file'],
                    cron_expression=schedule['cron_expression']
                )
                logger.info(f"Schedule {schedule_id} added to APScheduler")
            except Exception as e:
                logger.error(f"Failed to add schedule {schedule_id} to APScheduler: {str(e)}")
                # Continue anyway - schedule is created in DB

        return jsonify({
            'schedule_id': schedule_id,
            'message': 'Schedule created successfully',
            'schedule': schedule
        }), 201

    except ValueError as e:
        # Validation error
        return jsonify({'error': str(e)}), 400
    except Exception as e:
        logger.error(f"Error creating schedule: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500


@bp.route('/<int:schedule_id>', methods=['PUT'])
@api_auth_required
def update_schedule(schedule_id):
    """
    Update an existing schedule.

    Args:
        schedule_id: Schedule ID to update

    Request body:
        name: Schedule name (optional)
        config_file: Path to YAML config (optional)
        cron_expression: Cron expression (optional)
        enabled: Whether schedule is active (optional)

    Returns:
        JSON response with updated schedule
    """
    try:
        data = request.get_json() or {}

        if not data:
            return jsonify({'error': 'No update data provided'}), 400

        # Update schedule
        schedule_service = ScheduleService(current_app.db_session)

        # Store old state to check if scheduler update needed
        old_schedule = schedule_service.get_schedule(schedule_id)

        # Perform update
        updated_schedule = schedule_service.update_schedule(schedule_id, **data)

        # Update in APScheduler if needed
        if hasattr(current_app, 'scheduler'):
            try:
                # If cron expression or config changed, or enabled status changed
                cron_changed = 'cron_expression' in data
                config_changed = 'config_file' in data
                enabled_changed = 'enabled' in data

                if enabled_changed:
                    if updated_schedule['enabled']:
                        # Re-add to scheduler (replaces existing)
                        current_app.scheduler.add_scheduled_scan(
                            schedule_id=schedule_id,
                            config_file=updated_schedule['config_file'],
                            cron_expression=updated_schedule['cron_expression']
                        )
                        logger.info(f"Schedule {schedule_id} enabled and added to APScheduler")
                    else:
                        # Remove from scheduler
                        current_app.scheduler.remove_scheduled_scan(schedule_id)
                        logger.info(f"Schedule {schedule_id} disabled and removed from APScheduler")
                elif (cron_changed or config_changed) and updated_schedule['enabled']:
                    # Reload schedule in APScheduler
                    current_app.scheduler.add_scheduled_scan(
                        schedule_id=schedule_id,
                        config_file=updated_schedule['config_file'],
                        cron_expression=updated_schedule['cron_expression']
                    )
                    logger.info(f"Schedule {schedule_id} reloaded in APScheduler")
            except Exception as e:
                logger.error(f"Failed to update schedule {schedule_id} in APScheduler: {str(e)}")
                # Continue anyway - schedule is updated in DB

        return jsonify({
            'message': 'Schedule updated successfully',
            'schedule': updated_schedule
        }), 200

    except ValueError as e:
        # Schedule not found or validation error
        if 'not found' in str(e):
            return jsonify({'error': str(e)}), 404
        return jsonify({'error': str(e)}), 400
    except Exception as e:
        logger.error(f"Error updating schedule {schedule_id}: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500


@bp.route('/<int:schedule_id>', methods=['DELETE'])
@api_auth_required
def delete_schedule(schedule_id):
    """
    Delete a schedule.

    Note: Associated scans are NOT deleted (schedule_id becomes null).
    Active scans will complete normally.

    Args:
        schedule_id: Schedule ID to delete

    Returns:
        JSON response with deletion status
    """
    try:
        # Remove from APScheduler first
        if hasattr(current_app, 'scheduler'):
            try:
                current_app.scheduler.remove_scheduled_scan(schedule_id)
                logger.info(f"Schedule {schedule_id} removed from APScheduler")
            except Exception as e:
                logger.warning(f"Failed to remove schedule {schedule_id} from APScheduler: {str(e)}")
                # Continue anyway

        # Delete from database
        schedule_service = ScheduleService(current_app.db_session)
        schedule_service.delete_schedule(schedule_id)

        return jsonify({
            'message': 'Schedule deleted successfully',
            'schedule_id': schedule_id
        }), 200

    except ValueError as e:
        # Schedule not found
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"Error deleting schedule {schedule_id}: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500


@bp.route('/<int:schedule_id>/trigger', methods=['POST'])
@api_auth_required
def trigger_schedule(schedule_id):
    """
    Manually trigger a scheduled scan.

    Creates a new scan with the schedule's configuration and queues it
    for immediate execution.

    Args:
        schedule_id: Schedule ID to trigger

    Returns:
        JSON response with triggered scan ID
    """
    try:
        # Get schedule
        schedule_service = ScheduleService(current_app.db_session)
        schedule = schedule_service.get_schedule(schedule_id)

        # Trigger scan
        scan_service = ScanService(current_app.db_session)

        # Get scheduler if available
        scheduler = current_app.scheduler if hasattr(current_app, 'scheduler') else None

        scan_id = scan_service.trigger_scan(
            config_file=schedule['config_file'],
            triggered_by='manual',
            schedule_id=schedule_id,
            scheduler=scheduler
        )

        logger.info(f"Manual trigger of schedule {schedule_id} created scan {scan_id}")

        return jsonify({
            'message': 'Scan triggered successfully',
            'schedule_id': schedule_id,
            'scan_id': scan_id
        }), 201

    except ValueError as e:
        # Schedule not found
        return jsonify({'error': str(e)}), 404
    except Exception as e:
        logger.error(f"Error triggering schedule {schedule_id}: {str(e)}", exc_info=True)
        return jsonify({'error': 'Internal server error'}), 500


# Health check endpoint
@bp.route('/health', methods=['GET'])
def health_check():
    """
    Health check endpoint for monitoring.

    Returns:
        JSON response with API health status
    """
    return jsonify({
        'status': 'healthy',
        'api': 'schedules',
        'version': '1.0.0-phase1'
    })
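A sketch of the schedule CRUD flow, assuming the blueprint is mounted at /api/schedules and the same X-API-Key auth assumption as above; the field names are the ones create_schedule requires:

import requests

BASE = "http://localhost:5000/api/schedules"   # assumed mount point
HEADERS = {"X-API-Key": "changeme"}            # assumed auth header

# Create a schedule that runs the prod config every night at 02:00
body = {
    "name": "Daily Scan",
    "config_file": "prod-scan.yaml",
    "cron_expression": "0 2 * * *",
    "enabled": True,
}
schedule_id = requests.post(BASE, json=body, headers=HEADERS).json()["schedule_id"]

# Disable it (removes the job from APScheduler, keeps the DB row)
requests.put(f"{BASE}/{schedule_id}", json={"enabled": False}, headers=HEADERS)

# Run it once immediately, regardless of the cron expression
requests.post(f"{BASE}/{schedule_id}/trigger", headers=HEADERS)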
271
app/web/api/settings.py
Normal file
@@ -0,0 +1,271 @@
"""
Settings API blueprint.

Handles endpoints for managing application settings including SMTP configuration,
authentication, and system preferences.
"""

from flask import Blueprint, current_app, jsonify, request

from web.auth.decorators import api_auth_required
from web.utils.settings import PasswordManager, SettingsManager

bp = Blueprint('settings', __name__)


def get_settings_manager():
    """Get SettingsManager instance with current DB session."""
    return SettingsManager(current_app.db_session)


@bp.route('', methods=['GET'])
@api_auth_required
def get_settings():
    """
    Get all settings (sanitized - encrypted values masked).

    Returns:
        JSON response with all settings
    """
    try:
        settings_manager = get_settings_manager()
        settings = settings_manager.get_all(decrypt=False, sanitize=True)

        return jsonify({
            'status': 'success',
            'settings': settings
        })
    except Exception as e:
        current_app.logger.error(f"Failed to retrieve settings: {e}")
        return jsonify({
            'status': 'error',
            'message': 'Failed to retrieve settings'
        }), 500


@bp.route('', methods=['PUT'])
@api_auth_required
def update_settings():
    """
    Update multiple settings at once.

    Request body:
        settings: Dictionary of setting key-value pairs

    Returns:
        JSON response with update status
    """
    data = request.get_json() or {}
    settings_dict = data.get('settings', {})

    if not settings_dict:
        return jsonify({
            'status': 'error',
            'message': 'No settings provided'
        }), 400

    try:
        settings_manager = get_settings_manager()

        # Update each setting
        for key, value in settings_dict.items():
            settings_manager.set(key, value)

        return jsonify({
            'status': 'success',
            'message': f'Updated {len(settings_dict)} settings'
        })
    except Exception as e:
        current_app.logger.error(f"Failed to update settings: {e}")
        return jsonify({
            'status': 'error',
            'message': 'Failed to update settings'
        }), 500


@bp.route('/<string:key>', methods=['GET'])
@api_auth_required
def get_setting(key):
    """
    Get a specific setting by key.

    Args:
        key: Setting key

    Returns:
        JSON response with setting value
    """
    try:
        settings_manager = get_settings_manager()
        value = settings_manager.get(key)

        if value is None:
            return jsonify({
                'status': 'error',
                'message': f'Setting "{key}" not found'
            }), 404

        # Sanitize if encrypted key
        if settings_manager._should_encrypt(key):
            value = '***ENCRYPTED***'

        return jsonify({
            'status': 'success',
            'key': key,
            'value': value
        })
    except Exception as e:
        current_app.logger.error(f"Failed to retrieve setting {key}: {e}")
        return jsonify({
            'status': 'error',
            'message': 'Failed to retrieve setting'
        }), 500


@bp.route('/<string:key>', methods=['PUT'])
@api_auth_required
def update_setting(key):
    """
    Update a specific setting.

    Args:
        key: Setting key

    Request body:
        value: New value for the setting

    Returns:
        JSON response with update status
    """
    data = request.get_json() or {}
    value = data.get('value')

    if value is None:
        return jsonify({
            'status': 'error',
            'message': 'No value provided'
        }), 400

    try:
        settings_manager = get_settings_manager()
        settings_manager.set(key, value)

        return jsonify({
            'status': 'success',
            'message': f'Setting "{key}" updated'
        })
    except Exception as e:
        current_app.logger.error(f"Failed to update setting {key}: {e}")
        return jsonify({
            'status': 'error',
            'message': 'Failed to update setting'
        }), 500


@bp.route('/<string:key>', methods=['DELETE'])
@api_auth_required
def delete_setting(key):
    """
    Delete a setting.

    Args:
        key: Setting key to delete

    Returns:
        JSON response with deletion status
    """
    try:
        settings_manager = get_settings_manager()
        deleted = settings_manager.delete(key)

        if not deleted:
            return jsonify({
                'status': 'error',
                'message': f'Setting "{key}" not found'
            }), 404

        return jsonify({
            'status': 'success',
            'message': f'Setting "{key}" deleted'
        })
    except Exception as e:
        current_app.logger.error(f"Failed to delete setting {key}: {e}")
        return jsonify({
            'status': 'error',
            'message': 'Failed to delete setting'
        }), 500


@bp.route('/password', methods=['POST'])
@api_auth_required
def set_password():
    """
    Set the application password.

    Request body:
        password: New password

    Returns:
        JSON response with status
    """
    data = request.get_json() or {}
    password = data.get('password')

    if not password:
        return jsonify({
            'status': 'error',
            'message': 'No password provided'
        }), 400

    if len(password) < 8:
        return jsonify({
            'status': 'error',
            'message': 'Password must be at least 8 characters'
        }), 400

    try:
        settings_manager = get_settings_manager()
        PasswordManager.set_app_password(settings_manager, password)

        return jsonify({
            'status': 'success',
            'message': 'Password updated successfully'
        })
    except Exception as e:
        current_app.logger.error(f"Failed to set password: {e}")
        return jsonify({
            'status': 'error',
            'message': 'Failed to set password'
        }), 500


@bp.route('/test-email', methods=['POST'])
@api_auth_required
def test_email():
    """
    Test email configuration by sending a test email.

    Returns:
        JSON response with test result
    """
    # TODO: Implement in Phase 4 (email support)
    return jsonify({
        'status': 'not_implemented',
        'message': 'Email testing endpoint - to be implemented in Phase 4'
    }), 501


# Health check endpoint
@bp.route('/health', methods=['GET'])
def health_check():
    """
    Health check endpoint for monitoring.

    Returns:
        JSON response with API health status
    """
    return jsonify({
        'status': 'healthy',
        'api': 'settings',
        'version': '1.0.0-phase1'
    })
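A sketch of typical settings API calls, under the same assumed /api/settings prefix and X-API-Key header; setting keys such as smtp_host are illustrative only, since the blueprint does not enumerate valid keys:

import requests

BASE = "http://localhost:5000/api/settings"    # assumed mount point
HEADERS = {"X-API-Key": "changeme"}            # assumed auth header

# Bulk-update settings (keys shown here are hypothetical examples)
body = {"settings": {"smtp_host": "mail.example.com", "smtp_port": "587"}}
requests.put(BASE, json=body, headers=HEADERS)

# Read a single setting back; encrypted keys come back masked as ***ENCRYPTED***
print(requests.get(f"{BASE}/smtp_host", headers=HEADERS).json())

# Rotate the application password (must be at least 8 characters)
requests.post(f"{BASE}/password", json={"password": "correct-horse-battery"}, headers=HEADERS)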
258
app/web/api/stats.py
Normal file
@@ -0,0 +1,258 @@
"""
Stats API blueprint.

Handles endpoints for dashboard statistics, trending data, and analytics.
"""

import logging
from datetime import datetime, timedelta
from flask import Blueprint, current_app, jsonify, request
from sqlalchemy import func, Date
from sqlalchemy.exc import SQLAlchemyError

from web.auth.decorators import api_auth_required
from web.models import Scan

bp = Blueprint('stats', __name__)
logger = logging.getLogger(__name__)


@bp.route('/scan-trend', methods=['GET'])
@api_auth_required
def scan_trend():
    """
    Get scan activity trend data for charts.

    Query params:
        days: Number of days to include (default: 30, max: 365)

    Returns:
        JSON response with labels and values arrays for Chart.js
        {
            "labels": ["2025-01-01", "2025-01-02", ...],
            "values": [5, 3, 7, 2, ...]
        }
    """
    try:
        # Get and validate query parameters
        days = request.args.get('days', 30, type=int)

        # Validate days parameter
        if days < 1:
            return jsonify({'error': 'days parameter must be at least 1'}), 400
        if days > 365:
            return jsonify({'error': 'days parameter cannot exceed 365'}), 400

        # Calculate date range
        end_date = datetime.utcnow().date()
        start_date = end_date - timedelta(days=days - 1)

        # Query scan counts per day
        db_session = current_app.db_session
        scan_counts = (
            db_session.query(
                func.date(Scan.timestamp).label('scan_date'),
                func.count(Scan.id).label('scan_count')
            )
            .filter(func.date(Scan.timestamp) >= start_date)
            .filter(func.date(Scan.timestamp) <= end_date)
            .group_by(func.date(Scan.timestamp))
            .order_by('scan_date')
            .all()
        )

        # Create a dictionary of date -> count
        scan_dict = {str(row.scan_date): row.scan_count for row in scan_counts}

        # Generate all dates in range (fill missing dates with 0)
        labels = []
        values = []
        current_date = start_date
        while current_date <= end_date:
            date_str = str(current_date)
            labels.append(date_str)
            values.append(scan_dict.get(date_str, 0))
            current_date += timedelta(days=1)

        return jsonify({
            'labels': labels,
            'values': values,
            'start_date': str(start_date),
            'end_date': str(end_date),
            'total_scans': sum(values)
        }), 200

    except SQLAlchemyError as e:
        logger.error(f"Database error in scan_trend: {str(e)}")
        return jsonify({'error': 'Database error occurred'}), 500
    except Exception as e:
        logger.error(f"Error in scan_trend: {str(e)}")
        return jsonify({'error': 'An error occurred'}), 500


@bp.route('/summary', methods=['GET'])
@api_auth_required
def summary():
    """
    Get dashboard summary statistics.

    Returns:
        JSON response with summary stats
        {
            "total_scans": 150,
            "completed_scans": 140,
            "failed_scans": 5,
            "running_scans": 5,
            "scans_today": 3,
            "scans_this_week": 15
        }
    """
    try:
        db_session = current_app.db_session

        # Get total counts by status
        total_scans = db_session.query(func.count(Scan.id)).scalar() or 0
        completed_scans = db_session.query(func.count(Scan.id)).filter(
            Scan.status == 'completed'
        ).scalar() or 0
        failed_scans = db_session.query(func.count(Scan.id)).filter(
            Scan.status == 'failed'
        ).scalar() or 0
        running_scans = db_session.query(func.count(Scan.id)).filter(
            Scan.status == 'running'
        ).scalar() or 0

        # Get scans today
        today = datetime.utcnow().date()
        scans_today = db_session.query(func.count(Scan.id)).filter(
            func.date(Scan.timestamp) == today
        ).scalar() or 0

        # Get scans this week (last 7 days)
        week_ago = today - timedelta(days=6)
        scans_this_week = db_session.query(func.count(Scan.id)).filter(
            func.date(Scan.timestamp) >= week_ago
        ).scalar() or 0

        return jsonify({
            'total_scans': total_scans,
            'completed_scans': completed_scans,
            'failed_scans': failed_scans,
            'running_scans': running_scans,
            'scans_today': scans_today,
            'scans_this_week': scans_this_week
        }), 200

    except SQLAlchemyError as e:
        logger.error(f"Database error in summary: {str(e)}")
        return jsonify({'error': 'Database error occurred'}), 500
    except Exception as e:
        logger.error(f"Error in summary: {str(e)}")
        return jsonify({'error': 'An error occurred'}), 500


@bp.route('/scan-history/<int:scan_id>', methods=['GET'])
@api_auth_required
def scan_history(scan_id):
    """
    Get historical trend data for scans with the same config file.

    Returns port counts and other metrics over time for the same
    configuration/target as the specified scan.

    Args:
        scan_id: Reference scan ID

    Query params:
        limit: Maximum number of historical scans to include (default: 10, max: 50)

    Returns:
        JSON response with historical scan data
        {
            "scans": [
                {
                    "id": 123,
                    "timestamp": "2025-01-01T12:00:00",
                    "title": "Scan title",
                    "port_count": 25,
                    "ip_count": 5
                },
                ...
            ],
            "labels": ["2025-01-01", ...],
            "port_counts": [25, 26, 24, ...]
        }
    """
    try:
        # Get query parameters
        limit = request.args.get('limit', 10, type=int)
        if limit > 50:
            limit = 50

        db_session = current_app.db_session

        # Get the reference scan to find its config file
        from web.models import ScanPort
        reference_scan = db_session.query(Scan).filter(Scan.id == scan_id).first()

        if not reference_scan:
            return jsonify({'error': 'Scan not found'}), 404

        config_file = reference_scan.config_file

        # Query historical scans with the same config file
        historical_scans = (
            db_session.query(Scan)
            .filter(Scan.config_file == config_file)
            .filter(Scan.status == 'completed')
            .order_by(Scan.timestamp.desc())
            .limit(limit)
            .all()
        )

        # Build result data
        scans_data = []
        labels = []
        port_counts = []

        for scan in reversed(historical_scans):  # Reverse to get chronological order
            # Count ports for this scan
            port_count = (
                db_session.query(func.count(ScanPort.id))
                .filter(ScanPort.scan_id == scan.id)
                .scalar() or 0
            )

            # Count unique IPs for this scan
            from web.models import ScanIP
            ip_count = (
                db_session.query(func.count(ScanIP.id))
                .filter(ScanIP.scan_id == scan.id)
                .scalar() or 0
            )

            scans_data.append({
                'id': scan.id,
                'timestamp': scan.timestamp.isoformat() if scan.timestamp else None,
                'title': scan.title,
                'port_count': port_count,
                'ip_count': ip_count
            })

            # For chart data
            labels.append(scan.timestamp.strftime('%Y-%m-%d %H:%M') if scan.timestamp else '')
            port_counts.append(port_count)

        return jsonify({
            'scans': scans_data,
            'labels': labels,
            'port_counts': port_counts,
            'config_file': config_file
        }), 200

    except SQLAlchemyError as e:
        logger.error(f"Database error in scan_history: {str(e)}")
        return jsonify({'error': 'Database error occurred'}), 500
    except Exception as e:
        logger.error(f"Error in scan_history: {str(e)}")
        return jsonify({'error': 'An error occurred'}), 500
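The stats endpoints are meant to feed Chart.js on the dashboard; below is a sketch of pulling the same data from Python, with the /api/stats prefix and X-API-Key header again assumed:

import requests

BASE = "http://localhost:5000/api/stats"       # assumed mount point
HEADERS = {"X-API-Key": "changeme"}            # assumed auth header

# 30-day scan activity trend: parallel 'labels' and 'values' arrays
trend = requests.get(f"{BASE}/scan-trend", params={"days": 30}, headers=HEADERS).json()
for day, count in zip(trend["labels"], trend["values"]):
    print(day, count)

# Dashboard summary counters
print(requests.get(f"{BASE}/summary", headers=HEADERS).json())

# Port-count history for scans sharing scan 42's config file (42 is just an example id)
history = requests.get(f"{BASE}/scan-history/42", params={"limit": 10}, headers=HEADERS).json()
print(history["port_counts"])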