diff --git a/app.py b/app.py index 2027b2f..bd27ef4 100644 --- a/app.py +++ b/app.py @@ -1,10 +1,8 @@ - import os import sys import ssl - import configparser -from flask import Flask, render_template, render_template_string +from flask import Flask, render_template, render_template_string, request, jsonify from routes.main_routes import main_bp from routes.edit_routes import edit_bp from utils.stats_utils import fetch_haproxy_stats, parse_haproxy_stats @@ -59,7 +57,6 @@ except Exception as e: app.register_blueprint(main_bp) app.register_blueprint(edit_bp) - setup_auth(app) certificate_path = None @@ -69,71 +66,131 @@ ssl_context = None try: config2 = configparser.ConfigParser() config2.read(SSL_INI) - if config2.has_section('ssl'): certificate_path = config2.get('ssl', 'certificate_path') private_key_path = config2.get('ssl', 'private_key_path') else: print(f"[APP] No [ssl] section in {SSL_INI}", flush=True) sys.exit(1) - + if not os.path.exists(certificate_path): print(f"[APP] Certificate not found: {certificate_path}", flush=True) sys.exit(1) - + if not os.path.exists(private_key_path): print(f"[APP] Private key not found: {private_key_path}", flush=True) sys.exit(1) - + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) ssl_context.load_cert_chain(certfile=certificate_path, keyfile=private_key_path) print(f"[APP] SSL context loaded", flush=True) - except Exception as e: print(f"[APP] SSL error: {e}", flush=True) sys.exit(1) - @app.route('/statistics') def display_haproxy_stats(): haproxy_stats = fetch_haproxy_stats() parsed_stats = parse_haproxy_stats(haproxy_stats) return render_template('statistics.html', stats=parsed_stats) - @app.route('/logs', endpoint='display_logs') -#@requires_auth def display_haproxy_logs(): log_file_path = '/var/log/haproxy.log' - if not os.path.exists(log_file_path): - return render_template('logs.html', - logs=[], + return render_template('logs.html', + logs=[], + total_logs=0, error_message=f"Log file not found: {log_file_path}") try: logs = parse_log_file(log_file_path) - if not logs: - return render_template('logs.html', - logs=[], - error_message="Log file is empty or unreadable") - return render_template('logs.html', logs=logs) - except Exception as e: + total_logs = len(logs) + # Load the last 200 log entries + initial_logs = logs[-200:] if len(logs) > 200 else logs + return render_template('logs.html', - logs=[], + logs=initial_logs, + total_logs=total_logs, + loaded_count=len(initial_logs)) + except Exception as e: + return render_template('logs.html', + logs=[], + total_logs=0, error_message=f"Error parsing logs: {str(e)}") +@app.route('/api/logs', methods=['POST']) +def api_get_logs(): + """API endpoint for paginated and filtered logs""" + try: + log_file_path = '/var/log/haproxy.log' + + if not os.path.exists(log_file_path): + return jsonify({'error': 'Log file not found', 'success': False}), 404 + + page = request.json.get('page', 1) + per_page = request.json.get('per_page', 50) + search_query = request.json.get('search', '').lower() + exclude_phrases = request.json.get('exclude', []) + + if page < 1: + page = 1 + if per_page < 1 or per_page > 500: + per_page = 50 + + print(f"[API] page={page}, per_page={per_page}, search={search_query}, exclude={len(exclude_phrases)}", flush=True) + + # Parse all logs + all_logs = parse_log_file(log_file_path) + total_logs = len(all_logs) + + # Reverse to show newest first + all_logs = all_logs[::-1] + + # Apply filters + filtered_logs = all_logs + + if search_query: + filtered_logs = [log for log in filtered_logs 
if search_query in + f"{log.get('timestamp', '')} {log.get('ip_address', '')} {log.get('http_method', '')} {log.get('requested_url', '')}".lower()] + + if exclude_phrases: + filtered_logs = [log for log in filtered_logs if not any( + phrase in f"{log.get('message', '')}" for phrase in exclude_phrases + )] + + total_filtered = len(filtered_logs) + + # Paginate + offset = (page - 1) * per_page + paginated_logs = filtered_logs[offset:offset + per_page] + + print(f"[API] total={total_logs}, filtered={total_filtered}, returned={len(paginated_logs)}", flush=True) + + return jsonify({ + 'success': True, + 'logs': paginated_logs, + 'page': page, + 'per_page': per_page, + 'total': total_logs, + 'total_filtered': total_filtered, + 'loaded_count': len(paginated_logs), + 'has_more': offset + per_page < total_filtered + }) + except Exception as e: + print(f"[API] Error: {e}", flush=True) + return jsonify({'error': str(e), 'success': False}), 500 + @app.route('/home') def home(): frontend_count, backend_count, acl_count, layer7_count, layer4_count = count_frontends_and_backends() - return render_template('home.html', - frontend_count=frontend_count, - backend_count=backend_count, + return render_template('home.html', + frontend_count=frontend_count, + backend_count=backend_count, acl_count=acl_count, - layer7_count=layer7_count, + layer7_count=layer7_count, layer4_count=layer4_count) - if __name__ == '__main__': - app.run(host='::', port=5000, ssl_context=ssl_context, debug=True) \ No newline at end of file + app.run(host='::', port=5000, ssl_context=ssl_context, debug=True) diff --git a/log_parser.py b/log_parser.py index 330d489..5704e18 100644 --- a/log_parser.py +++ b/log_parser.py @@ -1,5 +1,6 @@ import re + def parse_log_file(log_file_path): """ Parse HAProxy syslog format and identify security threats. 
@@ -78,7 +79,7 @@ def parse_log_file(log_file_path): ip_address = ip_match.group(1) - # Extract date/time in brackets + # Extract date/time in brackets (preferred format) datetime_match = re.search(r'\[(\d{2}/\w+/\d{4}:\d{2}:\d{2}:\d{2})', line) if datetime_match: timestamp = datetime_match.group(1) @@ -95,10 +96,17 @@ # Extract HTTP method and URL http_match = re.search(r'"(\w+)\s+([^\s]+)\s+HTTP', line) if not http_match: - continue - - http_method = http_match.group(1) - requested_url = http_match.group(2) + # Fallback: extract entire request line + request_match = re.search(r'"([^"]*)"', line) + if request_match: + request_line = request_match.group(1).split() + http_method = request_line[0] if len(request_line) > 0 else 'UNKNOWN' + requested_url = request_line[1] if len(request_line) > 1 else '/' + else: + continue + else: + http_method = http_match.group(1) + requested_url = http_match.group(2) # Detect threats xss_alert = bool(xss_pattern.search(line)) @@ -107,6 +115,24 @@ put_method = http_method == 'PUT' illegal_resource = status_code == '403' + # Determine status class for UI coloring + status_class = 'secondary' + if status_code.startswith('2'): + status_class = 'success' + elif status_code.startswith('3'): + status_class = 'info' + elif status_code.startswith('4'): + status_class = 'warning' + if illegal_resource: + status_class = 'warning' + elif status_code.startswith('5'): + status_class = 'danger' + + # Add threat flag if any security issue detected + has_threat = xss_alert or sql_alert or webshell_alert or put_method or illegal_resource + if has_threat: + status_class = 'danger' + parsed_entries.append({ 'timestamp': timestamp, 'ip_address': ip_address, @@ -120,16 +146,20 @@ 'put_method': put_method, 'illegal_resource': illegal_resource, 'webshell_alert': webshell_alert, + 'status_class': status_class, + 'has_threat': has_threat, + 'message': f"{frontend}~ {backend} [{status_code}] {http_method} {requested_url}" }) except Exception as e: - print(f"Error parsing line: {e}") + print(f"[LOG_PARSER] Error parsing line: {e}", flush=True) continue except FileNotFoundError: - print(f"Log file not found: {log_file_path}") + print(f"[LOG_PARSER] Log file not found: {log_file_path}", flush=True) return [] except Exception as e: - print(f"Error reading log file: {e}") + print(f"[LOG_PARSER] Error reading log file: {e}", flush=True) return [] + print(f"[LOG_PARSER] Parsed {len(parsed_entries)} log entries", flush=True) return parsed_entries diff --git a/routes/main_routes.py b/routes/main_routes.py index 4fbc5fd..525e75d 100644 --- a/routes/main_routes.py +++ b/routes/main_routes.py @@ -60,10 +60,9 @@ def index(): # Server header removal del_server_header = 'del_server_header' in request.form - # Backend SSL redirect backend_ssl_redirect = 'backend_ssl_redirect' in request.form ssl_redirect_backend_name = request.form.get('ssl_redirect_backend_name', '').strip() if backend_ssl_redirect else '' - ssl_redirect_port = request.form.get('ssl_redirect_port', '80') + ssl_redirect_port = request.form.get('ssl_redirect_port', '80') # read the redirect port from the form # Backend servers backend_server_names = request.form.getlist('backend_server_names[]') diff --git a/static/js/logs.js b/static/js/logs.js index d0d790f..6babcaf 100644 --- a/static/js/logs.js +++ b/static/js/logs.js @@ -1,103 +1,269 @@ +/** + * HAProxy Logs Management with Security Alerts + * Fixed pagination + */ + 
document.addEventListener('DOMContentLoaded', function() { - const filterIp = document.getElementById('filter_ip'); - const filterStatus = document.getElementById('filter_status'); - const filterMethod = document.getElementById('filter_method'); - const filterThreats = document.getElementById('filter_threats'); - const filterHideStats = document.getElementById('filter_hide_stats'); - const resetBtn = document.getElementById('reset_filters'); + let currentPage = 1; + let perPage = 50; + let totalLogs = parseInt(document.getElementById('total_count').textContent); + let allLoadedLogs = []; + let excludePhrases = []; - const logsTable = document.getElementById('logs_table'); - if (!logsTable) return; // Exit if no logs + const logsContainer = document.getElementById('logs_container'); + const searchFilter = document.getElementById('search_filter'); + const excludeFilter = document.getElementById('exclude_filter'); + const excludeBtn = document.getElementById('exclude_btn'); + const perPageSelect = document.getElementById('logs_per_page'); + const refreshBtn = document.getElementById('refresh_logs_btn'); + const prevBtn = document.getElementById('prev_btn'); + const nextBtn = document.getElementById('next_btn'); + const loadAllBtn = document.getElementById('load_all_btn'); + const clearFilterBtn = document.getElementById('clear_filter_btn'); + const loadedSpan = document.getElementById('loaded_count'); + const matchSpan = document.getElementById('match_count'); + const currentPageSpan = document.getElementById('current_page'); + const totalPagesSpan = document.getElementById('total_pages'); - const allRows = Array.from(document.querySelectorAll('.log-row')); + // Event Listeners + searchFilter.addEventListener('keyup', debounce(function() { + console.log('[Logs] Search changed'); + currentPage = 1; + loadLogsWithPage(); + }, 300)); - // Filter function - function applyFilters() { - const ipValue = filterIp.value.toLowerCase(); - const statusValue = filterStatus.value; - const methodValue = filterMethod.value; - const showThreats = filterThreats.checked; - const hideStats = filterHideStats.checked; - - let visibleCount = 0; - let threatCount = 0; - let count2xx = 0, count4xx = 0, count5xx = 0; - const uniqueIps = new Set(); - - allRows.forEach(row => { - const ip = row.dataset.ip; - const status = row.dataset.status; - const method = row.dataset.method; - const hasThreat = row.dataset.threats === '1'; - const url = row.querySelector('td:nth-child(4)').textContent.trim(); - - let show = true; - - // IP filter - if (ipValue && !ip.includes(ipValue)) { - show = false; + excludeBtn.addEventListener('click', function() { + const phrase = excludeFilter.value.trim(); + if (phrase) { + if (!excludePhrases.includes(phrase)) { + excludePhrases.push(phrase); + updateExcludeUI(); + currentPage = 1; + loadLogsWithPage(); } - - // Status filter - if (statusValue) { - const statusStart = statusValue; - if (!status.startsWith(statusStart)) { - show = false; - } - } - - // Method filter - if (methodValue && method !== methodValue) { - show = false; - } - - // Threats filter - if (!showThreats && hasThreat) { - show = false; - } - - // Hide /stats filter - if (hideStats && url.includes('/stats')) { - show = false; - } - - row.style.display = show ? 
'' : 'none'; - - if (show) { - visibleCount++; - if (hasThreat) threatCount++; - if (status.startsWith('2')) count2xx++; - if (status.startsWith('4')) count4xx++; - if (status.startsWith('5')) count5xx++; - uniqueIps.add(ip); - } - }); - - // Update stats - document.getElementById('stat_total').textContent = visibleCount; - document.getElementById('stat_threats').textContent = threatCount; - document.getElementById('stat_2xx').textContent = count2xx; - document.getElementById('stat_4xx').textContent = count4xx; - document.getElementById('stat_5xx').textContent = count5xx; - document.getElementById('stat_ips').textContent = uniqueIps.size; - } - - // Event listeners - filterIp.addEventListener('input', applyFilters); - filterStatus.addEventListener('change', applyFilters); - filterMethod.addEventListener('change', applyFilters); - filterThreats.addEventListener('change', applyFilters); - filterHideStats.addEventListener('change', applyFilters); - - // Reset button - resetBtn.addEventListener('click', function() { - filterIp.value = ''; - filterStatus.value = ''; - filterMethod.value = ''; - filterThreats.checked = true; - filterHideStats.checked = true; - applyFilters(); + excludeFilter.value = ''; + } }); - applyFilters(); + excludeFilter.addEventListener('keypress', function(e) { + if (e.key === 'Enter') excludeBtn.click(); + }); + + clearFilterBtn.addEventListener('click', function() { + console.log('[Logs] Clear filters'); + searchFilter.value = ''; + excludePhrases = []; + excludeFilter.value = ''; + updateExcludeUI(); + currentPage = 1; + loadLogsWithPage(); + }); + + perPageSelect.addEventListener('change', function() { + console.log(`[Logs] Per page changed to ${this.value}`); + perPage = parseInt(this.value); + currentPage = 1; + loadLogsWithPage(); + }); + + refreshBtn.addEventListener('click', function() { + console.log('[Logs] Refresh clicked'); + searchFilter.value = ''; + excludePhrases = []; + excludeFilter.value = ''; + updateExcludeUI(); + currentPage = 1; + loadLogsWithPage(); + }); + + prevBtn.addEventListener('click', function() { + if (currentPage > 1) { + console.log(`[Logs] Prev button: page ${currentPage} -> ${currentPage - 1}`); + currentPage--; + loadLogsWithPage(); + } + }); + + nextBtn.addEventListener('click', function() { + const totalPages = parseInt(document.getElementById('total_pages').textContent); + if (currentPage < totalPages) { + console.log(`[Logs] Next button: page ${currentPage} -> ${currentPage + 1}`); + currentPage++; + loadLogsWithPage(); + } + }); + + loadAllBtn.addEventListener('click', function() { + console.log('[Logs] Load all clicked'); + perPage = totalLogs > 500 ? 
500 : totalLogs; + currentPage = 1; + perPageSelect.value = perPage; + loadLogsWithPage(); + }); + + /** + * Debounce function + */ + function debounce(func, wait) { + let timeout; + return function() { + clearTimeout(timeout); + timeout = setTimeout(func, wait); + }; + } + + /** + * Load logs with pagination from API + */ + function loadLogsWithPage() { + console.log(`[Logs] loadLogsWithPage: page=${currentPage}, per_page=${perPage}, search="${searchFilter.value.trim()}", exclude=${excludePhrases.length}`); + + logsContainer.innerHTML = 'Loading logs...'; + + fetch('/api/logs', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + page: currentPage, + per_page: perPage, + search: searchFilter.value.trim(), + exclude: excludePhrases + }) + }) + .then(r => r.json()) + .then(data => { + console.log('[Logs] API Response:', data); + + if (data.success) { + allLoadedLogs = data.logs; + loadedSpan.textContent = data.loaded_count; + totalLogs = data.total; + document.getElementById('total_count').textContent = data.total; + + const totalPages = Math.ceil(data.total_filtered / perPage) || 1; + totalPagesSpan.textContent = totalPages; + matchSpan.textContent = data.total_filtered; + currentPageSpan.textContent = data.page; + + renderLogs(data.logs); + + // Update button states + prevBtn.disabled = currentPage === 1; + nextBtn.disabled = !data.has_more; + + console.log(`[Logs] Updated: page ${data.page}/${totalPages}, has_more=${data.has_more}, prev_disabled=${prevBtn.disabled}, next_disabled=${nextBtn.disabled}`); + } else { + showError(data.error); + } + }) + .catch(e => { + console.error('[Logs] Error:', e); + showError('Failed to load logs: ' + e.message); + }); + } + + /** + * Render logs as table rows + */ + function renderLogs(logs) { + if (!logs || logs.length === 0) { + logsContainer.innerHTML = 'No logs found'; + return; + } + + logsContainer.innerHTML = logs.map((entry) => { + const threat_badges = []; + if (entry.xss_alert) threat_badges.push('XSS'); + if (entry.sql_alert) threat_badges.push('SQL'); + if (entry.webshell_alert) threat_badges.push('SHELL'); + if (entry.put_method) threat_badges.push('PUT'); + if (entry.illegal_resource) threat_badges.push('403'); + + const threat_html = threat_badges.length > 0 ? `
${threat_badges.join('')}
` : ''; + + let row_class = ''; + if (entry.has_threat) { + row_class = 'table-danger'; + } else if (entry.status_code.startsWith('5')) { + row_class = 'table-danger'; + } else if (entry.status_code.startsWith('4')) { + row_class = 'table-warning'; + } else if (entry.status_code.startsWith('2')) { + row_class = 'table-light'; + } else { + row_class = 'table-light'; + } + + return ` + + + ${threat_html} + ${escapeHtml(entry.timestamp)}
+ ${escapeHtml(entry.ip_address)} + ${escapeHtml(entry.http_method)} + ${escapeHtml(entry.requested_url)} + ${escapeHtml(entry.status_code)} +
+ ${escapeHtml(entry.frontend)}~ ${escapeHtml(entry.backend)} + + `; + }).join(''); + } + + /** + * Update exclude UI + */ + function updateExcludeUI() { + if (excludePhrases.length > 0) { + const tags = excludePhrases.map((phrase, idx) => ` + + ${escapeHtml(phrase)} + + `).join(''); + + const container = document.createElement('div'); + container.className = 'small mt-2'; + container.innerHTML = `Hiding: ${tags}`; + + const existing = document.getElementById('exclude_ui'); + if (existing) existing.remove(); + + container.id = 'exclude_ui'; + excludeFilter.parentElement.parentElement.after(container); + } else { + const existing = document.getElementById('exclude_ui'); + if (existing) existing.remove(); + } + } + + /** + * Remove exclude phrase + */ + window.removeExcludePhrase = function(idx) { + console.log(`[Logs] Remove exclude phrase at index ${idx}`); + excludePhrases.splice(idx, 1); + updateExcludeUI(); + currentPage = 1; + loadLogsWithPage(); + }; + + /** + * Show error + */ + function showError(msg) { + logsContainer.innerHTML = `${escapeHtml(msg)}`; + } + + /** + * Escape HTML + */ + function escapeHtml(text) { + const map = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#039;'}; + return (text || '').replace(/[&<>"']/g, m => map[m]); + } + + // Initial load + console.log('[Logs] Initial load'); + loadLogsWithPage(); }); diff --git a/templates/index.html b/templates/index.html index 452f664..02c5f91 100644 --- a/templates/index.html +++ b/templates/index.html @@ -52,7 +52,7 @@ {% if message %} @@ -98,12 +98,13 @@ 
@@ -138,7 +139,7 @@
@@ -147,16 +148,23 @@
-    Creates additional frontend on port 80
+    Creates additional frontend to redirect HTTP traffic to HTTPS
+    Name for the redirect backend
+    Default: 80 (leave empty for standard)
diff --git a/templates/logs.html b/templates/logs.html index 53c3f82..372667b 100644 --- a/templates/logs.html +++ b/templates/logs.html @@ -8,7 +8,7 @@ {% endblock %} @@ -17,192 +17,85 @@
-    HAProxy Access Logs
+    HAProxy Logs
     {% if error_message %}
+        {{ error_message }}
     {% endif %}
-    {% if logs and logs|length > 0 %}
-    Total {{ logs|length }}
-    Threats 0
-    2xx 0
-    4xx 0
-    5xx 0
-    Unique IPs 0
+    Total: {{ total_logs|default(0) }} logs |
+    Loaded: {{ loaded_count|default(0) }} |
+    Displayed: 0
     Timestamp IP Address HTTP Method Requested URL Status Code Alerts
-    {% for entry in logs %}
-    {{ entry['timestamp'] }} {{ entry['ip_address'] }} {{ entry['http_method'] }} {{ entry['requested_url'] }} {{ entry['status_code'] }}
-    {% if entry['xss_alert'] %} XSS {% endif %}
-    {% if entry['sql_alert'] %} SQL {% endif %}
-    {% if entry['put_method'] %} PUT {% endif %}
-    {% if entry['webshell_alert'] %} Webshell {% endif %}
-    {% if entry['illegal_resource'] %} 403 {% endif %}
-    {% endfor %}
+    Loading logs...
-    {% elif logs %}
-    No log entries match your filters.
-    {% else %}
-    {% endif %}
+    Page 1 / 1
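For quick manual verification of the new /api/logs endpoint, the sketch below posts one filter request and prints the pagination metadata. It assumes a local instance on https://localhost:5000 (matching app.run in app.py) with a self-signed certificate, that the requests package is installed, and that setup_auth does not guard this route; the search and exclude values are illustrative only.

    import requests

    # Assumed local development instance; verify=False only because the
    # certificate referenced by SSL_INI is typically self-signed in dev.
    BASE_URL = "https://localhost:5000"

    payload = {
        "page": 1,             # clamped to >= 1 by api_get_logs
        "per_page": 50,        # clamped to 1..500 by api_get_logs
        "search": "GET",       # illustrative search term
        "exclude": ["/stats"]  # illustrative exclude phrase
    }

    resp = requests.post(f"{BASE_URL}/api/logs", json=payload, verify=False)
    data = resp.json()

    if data.get("success"):
        print(f"total={data['total']} filtered={data['total_filtered']} "
              f"returned={data['loaded_count']} has_more={data['has_more']}")
        for entry in data["logs"][:5]:
            print(entry["timestamp"], entry["ip_address"],
                  entry["http_method"], entry["requested_url"], entry["status_code"])
    else:
        print("Error:", data.get("error"))

The response fields mirror what logs.js consumes (total, total_filtered, loaded_count, has_more), so the same call is useful for debugging the pagination buttons.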
diff --git a/utils/haproxy_config.py b/utils/haproxy_config.py index 9b85215..8aa0347 100644 --- a/utils/haproxy_config.py +++ b/utils/haproxy_config.py @@ -18,11 +18,11 @@ def frontend_exists_at_port(frontend_ip, frontend_port): for i, line in enumerate(lines): if line.strip().startswith('frontend'): - # Look for the bind line for j in range(i+1, min(i+10, len(lines))): if lines[j].strip().startswith('bind'): bind_info = lines[j].strip().split(' ', 1)[1] - if f"{frontend_ip}:{frontend_port}" in bind_info: + bind_part = bind_info.split(' ssl ')[0].strip() + if f"{frontend_ip}:{frontend_port}" in bind_part: return line.strip().split(' ', 1)[1] # Return the frontend name elif lines[j].strip().startswith('frontend') or lines[j].strip().startswith('backend'): break @@ -32,7 +32,6 @@ return None def add_acl_to_frontend(frontend_name, acl_name, hostname, backend_name): - """Add an ACL and use_backend to an existing frontend""" if not os.path.exists(HAPROXY_CFG): return False @@ -40,7 +39,6 @@ with open(HAPROXY_CFG, 'r') as f: lines = f.readlines() - # Find the frontend frontend_idx = -1 for i, line in enumerate(lines): if 'frontend' in line and frontend_name in line: @@ -48,19 +46,19 @@ break if frontend_idx == -1: + print(f"[HAPROXY_CONFIG] Frontend '{frontend_name}' not found", flush=True) return False - # Check whether the ACL already exists for line in lines[frontend_idx:]: if acl_name in line and 'acl' in line: - return True # Already exists + print(f"[HAPROXY_CONFIG] ACL '{acl_name}' already exists", flush=True) + return True if line.strip().startswith('backend'): break - # Find the last ACL/use_backend line in this frontend insert_idx = frontend_idx + 1 for i in range(frontend_idx + 1, len(lines)): - if lines[i].strip().startswith('backend'): + if lines[i].strip().startswith('backend') or lines[i].strip().startswith('frontend'): insert_idx = i break if 'use_backend' in lines[i] or 'default_backend' in lines[i]: @@ -76,6 +74,7 @@ with open(HAPROXY_CFG, 'w') as f: f.writelines(lines) + print(f"[HAPROXY_CONFIG] ACL '{acl_name}' added to frontend '{frontend_name}'", flush=True) return True except Exception as e: print(f"[HAPROXY_CONFIG] Error adding ACL: {e}", flush=True) @@ -158,7 +157,6 @@ def update_haproxy_config(frontend_name, frontend_ip, frontend_port, lb_method, existing_frontend = frontend_exists_at_port(frontend_ip, frontend_port) if existing_frontend: - # Frontend already exists - only add backend + ACL print(f"[HAPROXY] Found existing frontend '{existing_frontend}' at {frontend_ip}:{frontend_port}", flush=True) with open(HAPROXY_CFG, 'a') as haproxy_cfg: @@ -198,16 +196,53 @@ else: haproxy_cfg.write(f" server {server_name} {server_ip}:{server_port}{maxconn_str}\n") - # Add ACL to the existing frontend acl_name_sanitized = f"is_{sanitize_name(frontend_hostname)}" if frontend_hostname else f"is_{unique_backend_name}" add_acl_to_frontend(existing_frontend, acl_name_sanitized, frontend_hostname or 'localhost', unique_backend_name) + # ===== REDIRECT HTTP→HTTPS (if selected) ===== + if backend_ssl_redirect and ssl_redirect_backend_name: + unique_redirect_backend_name = f"{ssl_redirect_backend_name}_redirect_{sanitize_name(frontend_hostname)}" if frontend_hostname else f"{ssl_redirect_backend_name}_redirect" + + existing_http_frontend = frontend_exists_at_port(frontend_ip, ssl_redirect_port) + + if existing_http_frontend: + print(f"[HAPROXY] Adding redirect ACL to existing HTTP frontend '{existing_http_frontend}'", flush=True) + + with open(HAPROXY_CFG, 'a') as haproxy_cfg: + haproxy_cfg.write(f"\nbackend {unique_redirect_backend_name}\n") + haproxy_cfg.write(f" mode http\n") + haproxy_cfg.write(f" redirect scheme https code 301 if !{{ ssl_fc }}\n") + + if frontend_hostname: + acl_name_redirect = f"is_{sanitize_name(frontend_hostname)}_redirect" + add_acl_to_frontend(existing_http_frontend, acl_name_redirect, frontend_hostname, unique_redirect_backend_name) + else: + print(f"[HAPROXY] Creating new HTTP redirect frontend at {frontend_ip}:{ssl_redirect_port}", flush=True) + + with open(HAPROXY_CFG, 'a') as haproxy_cfg: + generic_http_redirect_name = f"http_redirect_frontend" + + haproxy_cfg.write(f"\nfrontend {generic_http_redirect_name}\n") + haproxy_cfg.write(f" bind {frontend_ip}:{ssl_redirect_port}\n") + haproxy_cfg.write(f" mode http\n") + + if frontend_hostname: + acl_name_redirect = f"is_{sanitize_name(frontend_hostname)}_redirect" + haproxy_cfg.write(f" acl {acl_name_redirect} hdr(host) -i {frontend_hostname}\n") + haproxy_cfg.write(f" use_backend {unique_redirect_backend_name} if {acl_name_redirect}\n") + else: + haproxy_cfg.write(f" default_backend {unique_redirect_backend_name}\n") + + # Redirect backend + haproxy_cfg.write(f"\nbackend {unique_redirect_backend_name}\n") + haproxy_cfg.write(f" mode http\n") + haproxy_cfg.write(f" redirect scheme https code 301 if !{{ ssl_fc }}\n") + return f"Backend added to existing frontend" # ===== CREATE NEW FRONTEND (GENERIC NAMES) ===== # Generate a generic frontend name generic_frontend_name = f"https_frontend" if use_ssl else f"http_frontend" - generic_http_redirect_name = f"http_redirect_frontend" print(f"[HAPROXY] Creating new frontend '{generic_frontend_name}' at {frontend_ip}:{frontend_port}", flush=True) @@ -314,13 +349,14 @@ # ===== REDIRECT HTTP -> HTTPS (GENERIC NAME) ===== if backend_ssl_redirect and ssl_redirect_backend_name: - unique_redirect_backend_name = f"{ssl_redirect_backend_name}_redirect_{sanitize_name(frontend_hostname)}" if frontend_hostname else ssl_redirect_backend_name + unique_redirect_backend_name = f"{ssl_redirect_backend_name}_redirect_{sanitize_name(frontend_hostname)}" if frontend_hostname else f"{ssl_redirect_backend_name}_redirect" - # Check if HTTP redirect frontend exists + # Check if HTTP frontend exists existing_http_frontend = frontend_exists_at_port(frontend_ip, ssl_redirect_port) if not existing_http_frontend: - # Create a new HTTP redirect frontend (generic name) + generic_http_redirect_name = f"http_redirect_frontend" + haproxy_cfg.write(f"\nfrontend {generic_http_redirect_name}\n") haproxy_cfg.write(f" bind {frontend_ip}:{ssl_redirect_port}\n") haproxy_cfg.write(f" mode http\n") @@ -332,7 +368,6 @@ else: haproxy_cfg.write(f" default_backend {unique_redirect_backend_name}\n") else: - # Add ACL to the existing HTTP frontend if frontend_hostname: acl_name_redirect = f"is_{sanitize_name(frontend_hostname)}_redirect" add_acl_to_frontend(existing_http_frontend, acl_name_redirect, frontend_hostname, unique_redirect_backend_name)
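As a review aid (not part of the patch), the snippet below reproduces the f-string writes from the redirect branch of update_haproxy_config to show the text it appends to haproxy.cfg when backend_ssl_redirect is set, no HTTP frontend exists on the redirect port yet, and a hostname is given. The input values and the sanitize_name stand-in are assumptions for illustration only.

    # Illustration of the HTTP -> HTTPS redirect block appended by
    # update_haproxy_config(); the input values below are assumed examples.
    frontend_ip = "192.0.2.10"
    ssl_redirect_port = "80"                # form default
    frontend_hostname = "example.com"
    ssl_redirect_backend_name = "web"

    sanitized = frontend_hostname.replace(".", "_")   # stand-in for sanitize_name()
    unique_redirect_backend_name = f"{ssl_redirect_backend_name}_redirect_{sanitized}"
    acl_name_redirect = f"is_{sanitized}_redirect"

    block = (
        f"\nfrontend http_redirect_frontend\n"
        f"  bind {frontend_ip}:{ssl_redirect_port}\n"
        f"  mode http\n"
        f"  acl {acl_name_redirect} hdr(host) -i {frontend_hostname}\n"
        f"  use_backend {unique_redirect_backend_name} if {acl_name_redirect}\n"
        f"\nbackend {unique_redirect_backend_name}\n"
        f"  mode http\n"
        f"  redirect scheme https code 301 if !{{ ssl_fc }}\n"
    )
    print(block)

Running it prints the frontend/backend pair that answers plain-HTTP requests for the assumed hostname and redirects them to HTTPS with a 301.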