diff --git a/app.py b/app.py
index 1986d6f..feac89d 100644
--- a/app.py
+++ b/app.py
@@ -1,14 +1,15 @@
-from flask import Flask, render_template, render_template_string
-import configparser
+import os
+import sys
 import ssl
+
+import configparser
+from flask import Flask, render_template, render_template_string
 from routes.main_routes import main_bp
 from routes.edit_routes import edit_bp
 from utils.stats_utils import fetch_haproxy_stats, parse_haproxy_stats
 from auth.auth_middleware import setup_auth
 from log_parser import parse_log_file
-import os
-import sys
 from utils.haproxy_config import update_haproxy_config, is_frontend_exist, count_frontends_and_backends
 
 BASE_DIR = os.path.abspath(os.path.dirname(__file__))
@@ -73,23 +74,23 @@ try:
         certificate_path = config2.get('ssl', 'certificate_path')
         private_key_path = config2.get('ssl', 'private_key_path')
     else:
-        print(f"[APP] ✗ No [ssl] section in {SSL_INI}", flush=True)
+        print(f"[APP] No [ssl] section in {SSL_INI}", flush=True)
         sys.exit(1)
 
     if not os.path.exists(certificate_path):
-        print(f"[APP] ✗ Certificate not found: {certificate_path}", flush=True)
+        print(f"[APP] Certificate not found: {certificate_path}", flush=True)
         sys.exit(1)
 
     if not os.path.exists(private_key_path):
-        print(f"[APP] ✗ Private key not found: {private_key_path}", flush=True)
+        print(f"[APP] Private key not found: {private_key_path}", flush=True)
         sys.exit(1)
 
     ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
     ssl_context.load_cert_chain(certfile=certificate_path, keyfile=private_key_path)
-    print(f"[APP] ✓ SSL context loaded", flush=True)
+    print(f"[APP] SSL context loaded", flush=True)
 
 except Exception as e:
-    print(f"[APP] ✗ SSL error: {e}", flush=True)
+    print(f"[APP] SSL error: {e}", flush=True)
     sys.exit(1)
@@ -105,7 +106,6 @@ def display_logs():
     parsed_entries = parse_log_file(log_file_path)
     return render_template('logs.html', entries=parsed_entries)
 
-
 @app.route('/home')
 def home():
     frontend_count, backend_count, acl_count, layer7_count, layer4_count = count_frontends_and_backends()
diff --git a/log_parser.py b/log_parser.py
index 414b506..984ac37 100644
--- a/log_parser.py
+++ b/log_parser.py
@@ -1,7 +1,11 @@
 import re
+from collections import defaultdict
+from datetime import datetime
 
 
 def parse_log_file(log_file_path):
+    parsed_entries = []
+
     xss_patterns = [
         r'<\s*script\s*',
         r'javascript:',
@@ -16,88 +20,170 @@ def parse_log_file(log_file_path):
         r'alert',
         r'onerror',
         r'onload',
-        r'javascript'
     ]
-
+
     sql_patterns = [
-        r';',
-        r'substring',
-        r'extract',
-        r'union\s+all',
-        r'order\s+by',
+        r'(union|select|insert|update|delete|drop)\s+(from|into|table)',
+        r';\s*(union|select|insert|update|delete|drop)',
+        r'substring\s*\(',
+        r'extract\s*\(',
+        r'order\s+by\s+\d+',
         r'--\+',
-        r'union',
-        r'select',
-        r'insert',
-        r'update',
-        r'delete',
-        r'drop',
-        r'@@',
-        r'1=1',
+        r'1\s*=\s*1',
+        r'@@\w+',
         r'`1',
-        r'union',
-        r'select',
-        r'insert',
-        r'update',
-        r'delete',
-        r'drop',
-        r'@@',
-        r'1=1',
-        r'`1'
+        r'\|\|\s*chr\(',
     ]
-
+
     webshells_patterns = [
-        r'payload',
-        r'eval|system|passthru|shell_exec|exec|popen|proc_open|pcntl_exec|cmd|shell|backdoor|webshell|phpspy|c99|kacak|b374k|log4j|log4shell|wsos|madspot|malicious|evil.*\.php.*'
+        r'eval\s*\(',
+        r'system\s*\(',
+        r'passthru\s*\(',
+        r'shell_exec\s*\(',
+        r'exec\s*\(',
+        r'popen\s*\(',
+        r'proc_open\s*\(',
+        r'pcntl_exec\s*\(',
+        r'\.php\?cmd=',
+        r'\.php\?id=',
+        r'backdoor|webshell|phpspy|c99|kacak|b374k|wsos|madspot|r57|c100|r57shell',
    ]
-
-    combined_xss_pattern = re.compile('|'.join(xss_patterns), re.IGNORECASE)
-    combined_sql_pattern = re.compile('|'.join(sql_patterns), re.IGNORECASE)
-    combined_webshells_pattern = re.compile('|'.join(webshells_patterns), re.IGNORECASE)
-
-    with open(log_file_path, 'r') as log_file:
-        log_lines = log_file.readlines()
+
+    xss_pattern = re.compile('|'.join(xss_patterns), re.IGNORECASE)
+    sql_pattern = re.compile('|'.join(sql_patterns), re.IGNORECASE)
+    webshell_pattern = re.compile('|'.join(webshells_patterns), re.IGNORECASE)
+
+    try:
+        with open(log_file_path, 'r') as log_file:
+            log_lines = log_file.readlines()
+
         for line in log_lines:
-            if " 403 " in line:  # Check if the line contains " 403 " indicating a 403 status code
-                match = re.search(r'(\w+\s+\d+\s\d+:\d+:\d+).*\s(\d+\.\d+\.\d+\.\d+).*"\s*(GET|POST|PUT|DELETE)\s+([^"]+)"', line)
-                if match:
-                    timestamp = match.group(1)  # Extract the date and time
-                    ip_address = match.group(2)
-                    http_method = match.group(3)
-                    requested_url = match.group(4)
+            if not line.strip():
+                continue
+
+            match = re.search(
+                r'(\w+\s+\d+\s\d+:\d+:\d+).*\s(\d+\.\d+\.\d+\.\d+).*"?\s*(GET|POST|PUT|DELETE|PATCH|HEAD|OPTIONS)\s+([^"\s]+)"?\s+(\d{3})',
+                line
+            )
+
+            if not match:
+                continue
+
+            timestamp = match.group(1)
+            ip_address = match.group(2)
+            http_method = match.group(3)
+            requested_url = match.group(4)
+            status_code = int(match.group(5))
+
+            threats = []
+            threat_level = 'info'
+
+            if xss_pattern.search(line):
+                threats.append('XSS Attack')
+                threat_level = 'danger'
+
+            if sql_pattern.search(line):
+                threats.append('SQL Injection')
+                threat_level = 'danger'
+
+            if webshell_pattern.search(line):
+                threats.append('Webshell')
+                threat_level = 'danger'
+
+            if http_method == 'PUT':
+                threats.append('Remote Upload')
+                threat_level = 'warning'
+
+            if 'admin' in requested_url.lower() or 'config' in requested_url.lower():
+                if status_code == 403:
+                    threats.append('Unauthorized Access')
+                    threat_level = 'warning'
+
+            status_category = 'info'
+            if 200 <= status_code < 300:
+                status_category = 'success'
+            elif 300 <= status_code < 400:
+                status_category = 'secondary'
+            elif 400 <= status_code < 500:
+                status_category = 'warning'
+            elif status_code >= 500:
+                status_category = 'danger'
+
+            parsed_entries.append({
+                'timestamp': timestamp,
+                'ip_address': ip_address,
+                'http_method': http_method,
+                'requested_url': requested_url,
+                'status_code': status_code,
+                'status_category': status_category,
+                'threats': threats if threats else ['None'],
+                'threat_level': threat_level if threats else 'info',
+                'is_threat': bool(threats),
+            })
+
+    except FileNotFoundError:
+        return [{'error': f'Log file not found: {log_file_path}'}]
+    except Exception as e:
+        return [{'error': f'Error parsing log: {str(e)}'}]
+
+    return parsed_entries
 
-                    if combined_xss_pattern.search(line):
-                        xss_alert = 'Possible XSS Attack Was Identified.'
-                    else:
-                        xss_alert = ''
-                    if combined_sql_pattern.search(line):
-                        sql_alert = 'Possible SQL Injection Attempt Was Made.'
-                    else:
-                        sql_alert = ''
-                    if "PUT" in line:
-                        put_method = 'Possible Remote File Upload Attempt Was Made.'
-                    else:
-                        put_method = ''
-                    if "admin" in line:
-                        illegal_resource = 'Possible Illegal Resource Access Attempt Was Made.'
-                    else:
-                        illegal_resource = ''
+def get_log_statistics(parsed_entries):
 
-                    if combined_webshells_pattern.search(line):
-                        webshell_alert = 'Possible WebShell Attack Attempt Was Made.'
-                    else:
-                        webshell_alert = ''
+    stats = {
+        'total_requests': len(parsed_entries),
+        'threat_count': sum(1 for e in parsed_entries if e.get('is_threat')),
+        'status_codes': defaultdict(int),
+        'http_methods': defaultdict(int),
+        'top_ips': defaultdict(int),
+        'threat_types': defaultdict(int),
+    }
+
+    for entry in parsed_entries:
+        if 'error' in entry:
+            continue
+
+        stats['status_codes'][entry['status_code']] += 1
+        stats['http_methods'][entry['http_method']] += 1
+        stats['top_ips'][entry['ip_address']] += 1
+
+        for threat in entry.get('threats', []):
+            if threat != 'None':
+                stats['threat_types'][threat] += 1
+
+    stats['top_ips'] = sorted(
+        stats['top_ips'].items(),
+        key=lambda x: x[1],
+        reverse=True
+    )[:5]
+
+    stats['status_codes'] = dict(stats['status_codes'])
+    stats['http_methods'] = dict(stats['http_methods'])
+    stats['threat_types'] = dict(stats['threat_types'])
+
+    return stats
 
-                    parsed_entries.append({
-                        'timestamp': timestamp,
-                        'ip_address': ip_address,
-                        'http_method': http_method,
-                        'requested_url': requested_url,
-                        'xss_alert': xss_alert,
-                        'sql_alert': sql_alert,
-                        'put_method': put_method,
-                        'illegal_resource': illegal_resource,
-                        'webshell_alert': webshell_alert
-                    })
-    return parsed_entries
\ No newline at end of file
+
+def filter_logs(parsed_entries, filters=None):
+    if not filters:
+        return parsed_entries
+
+    filtered = parsed_entries
+
+    if 'status_code' in filters and filters['status_code']:
+        filtered = [e for e in filtered if e.get('status_code') == int(filters['status_code'])]
+
+    if 'threat_level' in filters and filters['threat_level']:
+        filtered = [e for e in filtered if e.get('threat_level') == filters['threat_level']]
+
+    if 'http_method' in filters and filters['http_method']:
+        filtered = [e for e in filtered if e.get('http_method') == filters['http_method']]
+
+    if 'ip_address' in filters and filters['ip_address']:
+        filtered = [e for e in filtered if e.get('ip_address') == filters['ip_address']]
+
+    if 'has_threat' in filters and filters['has_threat']:
+        filtered = [e for e in filtered if e.get('is_threat')]
+
+    return filtered
diff --git a/static/js/logs.js b/static/js/logs.js
index e69de29..5fe277c 100644
--- a/static/js/logs.js
+++ b/static/js/logs.js
@@ -0,0 +1,22 @@
+document.getElementById('filter_status')?.addEventListener('change', filterLogs);
+document.getElementById('filter_threat')?.addEventListener('change', filterLogs);
+document.getElementById('filter_method')?.addEventListener('change', filterLogs);
+document.getElementById('filter_threats_only')?.addEventListener('change', filterLogs);
+
+function filterLogs() {
+    const statusFilter = document.getElementById('filter_status')?.value;
+    const threatFilter = document.getElementById('filter_threat')?.value;
+    const methodFilter = document.getElementById('filter_method')?.value;
+    const threatsOnly = document.getElementById('filter_threats_only')?.checked;
+
+    document.querySelectorAll('.log-row').forEach(row => {
+        let show = true;
+
+        if (statusFilter && row.dataset.status !== statusFilter) show = false;
+        if (threatFilter && row.dataset.threat !== threatFilter) show = false;
+        if (methodFilter && row.dataset.method !== methodFilter) show = false;
+        if (threatsOnly && row.dataset.threatCount === '0') show = false;
+
+        row.style.display = show ? '' : 'none';
+    });
+}
\ No newline at end of file
diff --git a/templates/logs.html b/templates/logs.html
index 94f62b7..47a26e0 100644
--- a/templates/logs.html
+++ b/templates/logs.html
@@ -1,50 +1,162 @@
 {% extends "base.html" %}
-{% set active_page = "" %}
-{% block title %}HAProxy • Logs{% endblock %}
-{% block breadcrumb %}{% endblock %}
+
+{% set active_page = "logs" %}
+
+{% block title %}HAProxy • Access Logs{% endblock %}
+
+{% block breadcrumb %}Access Logs{% endblock %}
+
 {% block content %}
-    {{ entry['xss_alert'] }}
-    {{ entry['sql_alert'] }}
-    {{ entry['put_method'] }}
-    {{ entry['illegal_resource'] }}
-    {{ entry['webshell_alert'] }}
+<table>
+    <thead>
+        <tr>
+            <th>Timestamp</th>
+            <th>IP Address</th>
+            <th>Method</th>
+            <th>URL</th>
+            <th>Status</th>
+            <th>Threats</th>
+        </tr>
+    </thead>
+    <tbody>
+        {% for log in entries %}
+        <tr class="log-row">
+            <td>{{ log.timestamp }}</td>
+            <td>{{ log.ip_address }}</td>
+            <td>{{ log.http_method }}</td>
+            <td>{{ log.requested_url }}</td>
+            <td>{{ log.status_code }}</td>
+            <td>
+                {% if log.is_threat %}
+                {% for threat in log.threats %}
+                {{ threat }}
+                {% endfor %}
+                {% else %}
+                —
+                {% endif %}
+            </td>
+        </tr>
+        {% endfor %}
+    </tbody>
+</table>
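
Taken together, the new log_parser helpers compose as follows. This is a minimal sketch rather than code from the patch: the log path and filter values are illustrative assumptions, and get_log_statistics / filter_logs are added by this diff but not yet called from any route.

# Sketch only: exercises parse_log_file, filter_logs and get_log_statistics
# from the patched log_parser.py. The path and filter values are assumptions.
from log_parser import parse_log_file, get_log_statistics, filter_logs

entries = parse_log_file('/var/log/haproxy.log')

# parse_log_file signals failure with a single {'error': ...} entry.
if entries and 'error' in entries[0]:
    raise SystemExit(entries[0]['error'])

# Keep only flagged requests, then narrow to POSTs.
flagged_posts = filter_logs(entries, {'has_threat': True, 'http_method': 'POST'})
print(f"flagged POST requests: {len(flagged_posts)}")

# Aggregate counters for a dashboard-style summary.
stats = get_log_statistics(entries)
print(f"total={stats['total_requests']} threats={stats['threat_count']}")
for ip, hits in stats['top_ips']:  # top_ips is a sorted list of (ip, count) pairs
    print(f"{ip}: {hits} requests")
for threat, count in stats['threat_types'].items():
    print(f"{threat}: {count}")

The filters dict above mirrors the criteria the client-side controls in logs.js filter on (status, threat level, method, threats-only).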