diff --git a/.env.example b/.env.example index 0593823..fd1ae85 100644 --- a/.env.example +++ b/.env.example @@ -39,3 +39,15 @@ BIND_PORT=8283 # Domyślny URL źródłowy (opcjonalnie) DEFAULT_SOURCE_URL="https://raw.githubusercontent.com/217heidai/adblockfilters/main/rules/adblockdns.txt" + +# Debug /convert +# Włącz/wyłącz tryb debug (domyślnie false) +DEBUG_ENABLE=false + +# Tajny klucz do debug (opcjonalny). +# Jeśli pusty: debug tylko z prywatnych adresów (10.x.x.x, 192.168.x.x itp.) +# Jeśli ustawiony: debug dostępny po podaniu nagłówka X-Debug-Key: +DEBUG_KEY=supersekretnyklucz + +# Limit zapytań debug per-IP (np. 5 per minute) +DEBUG_RATE_LIMIT="5 per minute" diff --git a/app.py b/app.py index 06975b4..413b56a 100644 --- a/app.py +++ b/app.py @@ -4,20 +4,22 @@ import requests import socket import time import json -import base64 import hashlib +import ipaddress from datetime import datetime from urllib.parse import urlparse, quote, unquote, urljoin from functools import wraps from typing import Optional +from datetime import timezone +import json as _json from flask import Flask, request, render_template, abort, jsonify, stream_with_context, g, Response from flask_compress import Compress from flask_limiter import Limiter - import config app = Flask(__name__) + app.config["MAX_CONTENT_LENGTH"] = config.MAX_CONTENT_LENGTH app.config["SECRET_KEY"] = config.SECRET_KEY app.debug = config.FLASK_DEBUG @@ -51,6 +53,19 @@ def track_request_data(): redis_client.incr(f"stats:client_ips:{get_client_ip()}") redis_client.incr(f"stats:methods:{request.method}") +@app.after_request +def add_cache_headers(response): + if request.path.startswith("/static/"): + + response.headers.pop("Content-Disposition", None) + + if request.path.endswith((".css", ".js")): + response.headers["Cache-Control"] = "public, max-age=31536000, immutable" + else: + response.headers["Cache-Control"] = "public, max-age=86400" + return response + + @app.after_request def after_request(response): elapsed = time.perf_counter() - g.start_time @@ -84,15 +99,12 @@ def basic_auth_required(realm: str, user: str, password: str): def wrapper(*args, **kwargs): if not config.STATS_BASIC_AUTH_ENABLED: return f(*args, **kwargs) - auth = request.headers.get("Authorization", "") - if auth.startswith("Basic "): - try: - decoded = base64.b64decode(auth[6:]).decode("utf-8", errors="ignore") - u, p = decoded.split(":", 1) - if u == user and p == password: - return f(*args, **kwargs) - except Exception: - pass + + auth = request.authorization + + if auth and auth.type == "basic" and auth.username == user and auth.password == password: + return f(*args, **kwargs) + resp = Response(status=401) resp.headers["WWW-Authenticate"] = f'Basic realm="{realm}"' return resp @@ -111,18 +123,22 @@ def should_ignore_line(line): def is_valid_domain(domain): return bool(re.compile(r"^(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}$").match(domain)) +def is_private_client_ip() -> bool: + ip = get_client_ip() + try: + return ipaddress.ip_address(ip).is_private + except Exception: + return False + def convert_host_line(line: str, target_ip: str): # szybkie odrzucenia if not line: return None line = line.strip() - # komentarze/puste if not line or line.startswith(("!", "#", "/", ";")): return None - # wytnij komentarz końcowy (# lub ;) – ostrożnie ze 'http://' - # usuwamy wszystko od ' #' lub ' ;' (spacja przed znacznikiem komentarza) for sep in (" #", " ;"): idx = line.find(sep) if idx != -1: @@ -131,7 +147,6 @@ def convert_host_line(line: str, target_ip: str): if not line: return 
None - # 1) AdGuard / uBlock DNS: ||domain^ (opcjonalnie z dodatkami po '^') m = re.match(r"^\|\|([a-z0-9.-]+)\^", line, re.IGNORECASE) if m: domain = m.group(1).strip(".") @@ -141,7 +156,6 @@ def convert_host_line(line: str, target_ip: str): parts = line.split() - # 2) Klasyczny hosts: "IP domena [...]" (IPv4 lub IPv6) if len(parts) >= 2 and ( re.match(r"^\d{1,3}(?:\.\d{1,3}){3}$", parts[0]) or ":" in parts[0] ): @@ -150,7 +164,6 @@ def convert_host_line(line: str, target_ip: str): return f"{target_ip} {domain}" return None - # 3) dnsmasq: address=/domain/0.0.0.0 czy server=/domain/... m = re.match(r"^(?:address|server)=/([a-z0-9.-]+)/", line, re.IGNORECASE) if m: domain = m.group(1).strip(".") @@ -158,7 +171,6 @@ def convert_host_line(line: str, target_ip: str): return f"{target_ip} {domain}" return None - # 4) Domain-only: "example.com" lub "example.com # komentarz" token = parts[0].split("#", 1)[0].strip().strip(".") if token and not should_ignore_domain(token) and is_valid_domain(token): return f"{target_ip} {token}" @@ -176,7 +188,7 @@ def cache_headers(etag: str, up_lm: Optional[str]): "Vary": "Accept-Encoding", "Content-Type": "text/plain; charset=utf-8", "X-Content-Type-Options": "nosniff", - "Content-Disposition": "inline; filename=converted_hosts.txt", + #"Content-Disposition": "inline; filename=converted_hosts.txt", } if config.CACHE_ENABLED: headers["Cache-Control"] = f"public, s-maxage={config.CACHE_S_MAXAGE}, max-age={config.CACHE_MAX_AGE}" @@ -198,6 +210,7 @@ def validate_and_normalize_url(url): def track_url_request(url): redis_client.incr(f"stats:url_requests:{quote(url, safe='')}") + def add_recent_link(url, target_ip): ts = datetime.now().isoformat() link_data = f"{ts}|{url}|{target_ip}" @@ -238,7 +251,7 @@ def add_recent_convert(): url = request.full_path data = {"url": url, "ip": ip, "hostname": hostname, "time": time_str, "user_agent": ua} redis_client.lpush("recent_converts", json.dumps(data)) - redis_client.ltrim("recent_converts", 0, 49) + redis_client.ltrim("recent_converts", 0, 99) @app.route("/favicon.ico", methods=["GET"]) def favicon(): @@ -282,22 +295,78 @@ def index(): @app.route("/convert") @limiter.limit(config.RATE_LIMIT_CONVERT) def convert(): + import hmac, ipaddress + + def is_private_client_ip() -> bool: + ip = get_client_ip() + try: + return ipaddress.ip_address(ip).is_private + except Exception: + return False + + requested_debug = request.args.get("debug", "").lower() in ("1","true","t","yes","y","on") + debug_allowed = False + if config.DEBUG_ENABLE: + header_key = request.headers.get("X-Debug-Key", "") + if config.DEBUG_KEY and header_key and hmac.compare_digest(header_key, config.DEBUG_KEY): + debug_allowed = True + elif is_private_client_ip(): + debug_allowed = True + + if requested_debug and not debug_allowed: + abort(403) + + debug_mode = requested_debug and debug_allowed + debug_lines = [] + + def d(msg): + ts = datetime.now().isoformat() + line = f"# [DEBUG {ts}] {msg}" + debug_lines.append(line) + app.logger.debug(line) + + def debug_response(status=200): + body = "\n".join(debug_lines) + ("\n" if debug_lines else "") + resp = Response(body, mimetype="text/plain; charset=utf-8", status=status) + resp.headers["X-Debug-Mode"] = "1" + resp.headers["Cache-Control"] = "no-store" + return resp + try: redis_client.incr("stats:convert_requests") add_recent_convert() + if debug_mode: + d("Start /convert w trybie debug") encoded_url = request.args.get("url") if not encoded_url: + if debug_mode: + d("Brak parametru ?url") + return 
debug_response(status=400) redis_client.incr("stats:errors_400") abort(400, description="Missing URL parameter") decoded_url = unquote(encoded_url) - normalized_url = validate_and_normalize_url(decoded_url) + try: + normalized_url = validate_and_normalize_url(decoded_url) + except ValueError as e: + if debug_mode: + d(f"Błąd walidacji URL: {e}") + return debug_response(status=400) + redis_client.incr("stats:errors_400") + abort(400) + target_ip = request.args.get("ip", "127.0.0.1") + if debug_mode: + d(f"URL (encoded): {encoded_url}") + d(f"URL (decoded): {decoded_url}") + d(f"URL (norm): {normalized_url}") + d(f"target_ip: {target_ip}") track_url_request(normalized_url) redis_client.incr(f"stats:target_ips:{target_ip}") + # nagłówki If-* req_headers = {} inm = request.headers.get("If-None-Match") ims = request.headers.get("If-Modified-Since") @@ -305,59 +374,99 @@ def convert(): req_headers["If-None-Match"] = inm if ims: req_headers["If-Modified-Since"] = ims + if debug_mode: + d("Wysyłam GET do upstreamu") + d(f"Nagłówki: {req_headers or '{}'}") - with requests.get(normalized_url, headers=req_headers, stream=True, timeout=(10, 60)) as r: - ct = r.headers.get("Content-Type", "") - # pozwól na text/* oraz octet-stream (często używane przez listy) - if "text" not in ct and "octet-stream" not in ct and ct != "": - abort(415, description="Unsupported Media Type") + r = requests.get(normalized_url, headers=req_headers, stream=True, timeout=(10, 60)) - if r.status_code == 304: - etag = build_etag(r.headers.get("ETag"), r.headers.get("Last-Modified"), target_ip) - resp = Response(status=304) - resp.headers.update(cache_headers(etag, r.headers.get("Last-Modified"))) - resp.direct_passthrough = True - return resp + ct = r.headers.get("Content-Type", "") + if debug_mode: + d(f"Upstream status: {r.status_code}") + d(f"Content-Type: {ct or '(brak)'}") + d(f"ETag: {r.headers.get('ETag')}") + d(f"Last-Modified: {r.headers.get('Last-Modified')}") - up_etag = r.headers.get("ETag") - up_lm = r.headers.get("Last-Modified") - etag = build_etag(up_etag, up_lm, target_ip) + if "text" not in ct and "octet-stream" not in ct and ct != "": + if debug_mode: + d("Unsupported Media Type -> 415") + r.close() + return debug_response(status=415) + r.close() + abort(415, description="Unsupported Media Type") - @stream_with_context - def body_gen(): - total = 0 - # iter_lines pewnie tnie po \n/\r\n i dekoduje do str + if r.status_code == 304: + etag = build_etag(r.headers.get("ETag"), r.headers.get("Last-Modified"), target_ip) + if debug_mode: + d("Upstream 304 – zwracam 304") + r.close() + return debug_response(status=304) + resp = Response(status=304) + resp.headers.update(cache_headers(etag, r.headers.get("Last-Modified"))) + resp.direct_passthrough = True + r.close() + return resp + + up_etag = r.headers.get("ETag") + up_lm = r.headers.get("Last-Modified") + etag = build_etag(up_etag, up_lm, target_ip) + if debug_mode: + d(f"Etag dla klienta: {etag}") + + @stream_with_context + def body_gen(): + lines_read = 0 + lines_emitted = 0 + bytes_emitted = 0 + try: + if debug_mode: + yield "\n".join(debug_lines) + "\n" + debug_lines.clear() for line in r.iter_lines(decode_unicode=True, chunk_size=config.READ_CHUNK): if line is None: continue - # zabezpieczenie przed megadługimi wierszami + lines_read += 1 if len(line) > config.STREAM_LINE_LIMIT: + if debug_mode and lines_read <= 5: + yield f"# [DEBUG] pominięto długi wiersz ({len(line)} bajtów)\n" continue out = convert_host_line(line, target_ip) if out: - s = out + "\n" - total += len(s) - 
yield s - # statystyki po zakończeniu streamu - redis_client.incrby("stats:content_size_total", total) + lines_emitted += 1 + s = out + "\n" + bytes_emitted += len(s) + yield s + if debug_mode and lines_read <= 5: + preview = line[:200].replace("\r", "\\r").replace("\n", "\\n") + yield f"# [DEBUG] podgląd linii {lines_read}: {preview}\n" + if debug_mode: + yield f"# [DEBUG] podsumowanie: przeczytano={lines_read}, wyemitowano={lines_emitted}\n" + if lines_emitted == 0: + yield "# [DEBUG] Uwaga: 0 linii wynikowych – czy format listy pasuje?\n" + redis_client.incrby("stats:content_size_total", bytes_emitted) redis_client.incr("stats:content_size_count") + finally: + r.close() - resp = Response(body_gen(), mimetype="text/plain; charset=utf-8") - resp.headers.update(cache_headers(etag, up_lm)) - # wyłącz kompresję/buforowanie dla strumienia - resp.direct_passthrough = True - redis_client.incr("stats:conversions_success") - return resp + resp = Response(body_gen(), mimetype="text/plain; charset=utf-8") + resp.headers.update(cache_headers(etag, up_lm)) + resp.direct_passthrough = True + redis_client.incr("stats:conversions_success") + return resp except requests.exceptions.RequestException as e: app.logger.error(f"Request error: {str(e)}") redis_client.incr("stats:errors_500") + if debug_mode: + d(f"Wyjątek requests: {e}") + return debug_response(status=502) abort(500) except ValueError as e: app.logger.error(f"URL validation error: {str(e)}") redis_client.incr("stats:errors_400") + if debug_mode: + d(f"Wyjątek ValueError: {e}") + return debug_response(status=400) abort(400) + @app.route("/convert", methods=["HEAD"]) def convert_head(): encoded_url = request.args.get("url", config.DEFAULT_SOURCE_URL) @@ -372,6 +481,7 @@ def convert_head(): resp.direct_passthrough = True return resp + @app.route("/stats") @basic_auth_required( realm=config.STATS_BASIC_AUTH_REALM, @@ -379,11 +489,95 @@ def convert_head(): password=config.STATS_BASIC_AUTH_PASS, ) def stats(): + + stats_data, target_ips, url_requests, user_agents, client_ips = {}, {}, {}, {}, {} + + # Zbierz klucze stats:* + for key in redis_client.scan_iter("stats:*"): + key_str = key.decode() + value = (redis_client.get(key) or b"0").decode() + if key_str.startswith("stats:target_ips:"): + ip = key_str.split(":", 2)[2] + target_ips[ip] = value + elif key_str.startswith("stats:url_requests:"): + url = unquote(key_str.split(":", 2)[2]) + url_requests[url] = value + elif key_str.startswith("stats:user_agents:"): + ua = unquote(key_str.split(":", 2)[2]) + user_agents[ua] = value + elif key_str.startswith("stats:client_ips:"): + ip = key_str.split(":", 2)[2] + client_ips[ip] = value + else: + stats_data[key_str] = value + + + recent_converts = [] + for entry in redis_client.lrange("recent_converts", 0, 99): + try: + recent_converts.append(json.loads(entry.decode())) + except Exception: + pass + + # Agregaty szczegółowe + processing_time_total = float(redis_client.get("stats:processing_time_total") or 0) + processing_time_count = int(redis_client.get("stats:processing_time_count") or 0) + avg_processing_time = processing_time_total / processing_time_count if processing_time_count > 0 else 0 + + content_size_total = int(redis_client.get("stats:content_size_total") or 0) + content_size_count = int(redis_client.get("stats:content_size_count") or 0) + avg_content_size = content_size_total / content_size_count if content_size_count > 0 else 0 + + detailed_stats = { + "processing_time_total_sec": processing_time_total, + "processing_time_count": processing_time_count, + "processing_time_avg_sec": 
avg_processing_time, + "processing_time_min_sec": float(redis_client.get("stats:processing_time_min") or 0), + "processing_time_max_sec": float(redis_client.get("stats:processing_time_max") or 0), + "content_size_total_bytes": content_size_total, + "content_size_count": content_size_count, + "content_size_avg_bytes": avg_content_size, + } + + # Surowe JSON do sekcji "Raw JSON" na stronie + raw_json = _json.dumps( + { + **stats_data, + "target_ips": target_ips, + "url_requests": url_requests, + "user_agents": user_agents, + "client_ips": client_ips, + "recent_converts": recent_converts, + "detailed_stats": detailed_stats, + }, + indent=2, + ) + + return render_template( + "stats.html", + stats=stats_data, + target_ips=target_ips, + url_requests=url_requests, + user_agents=user_agents, + client_ips=client_ips, + recent=recent_converts, + detailed=detailed_stats, + raw_json=raw_json, + ) + + +@app.route("/stats.json") +@basic_auth_required( + realm=config.STATS_BASIC_AUTH_REALM, + user=config.STATS_BASIC_AUTH_USER, + password=config.STATS_BASIC_AUTH_PASS, +) +def stats_json(): stats_data, target_ips, url_requests, user_agents, client_ips = {}, {}, {}, {}, {} for key in redis_client.scan_iter("stats:*"): key_str = key.decode() - value = redis_client.get(key).decode() + value = (redis_client.get(key) or b"0").decode() if key_str.startswith("stats:target_ips:"): ip = key_str.split(":", 2)[2] target_ips[ip] = value @@ -400,7 +594,7 @@ def stats(): stats_data[key_str] = value recent_converts = [] - for entry in redis_client.lrange("recent_converts", 0, 49): + for entry in redis_client.lrange("recent_converts", 0, 99): try: recent_converts.append(json.loads(entry.decode())) except Exception: @@ -437,6 +631,7 @@ def stats(): } ) + @app.errorhandler(400) @app.errorhandler(403) @app.errorhandler(404) @@ -445,9 +640,12 @@ def stats(): @app.errorhandler(500) def handle_errors(e): try: - return render_template("error.html", error=e), e.code + + now_iso = datetime.now().astimezone().isoformat() + return render_template("error.html", error=e, code=getattr(e, "code", 500), now_iso=now_iso), getattr(e, "code", 500) except Exception: - return jsonify({"error": getattr(e, "description", str(e)), "code": e.code}), e.code + return jsonify({"error": getattr(e, "description", str(e)), "code": getattr(e, "code", 500)}), getattr(e, "code", 500) + if __name__ == "__main__": app.run(host=config.BIND_HOST, port=config.BIND_PORT) diff --git a/app_1.py b/app_1.py deleted file mode 100644 index eda803d..0000000 --- a/app_1.py +++ /dev/null @@ -1,259 +0,0 @@ -import re -import redis -import requests -from datetime import datetime -from flask import Flask, request, render_template, abort, jsonify -from urllib.parse import urlparse, quote, unquote, urljoin -from functools import wraps - -app = Flask(__name__) -app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024 # limit -redis_client = redis.Redis(host='localhost', port=6379, db=7) - -ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'} -ALLOWED_DOMAIN = '' - -@app.before_request -def track_request_data(): - """Track client IP and User-Agent for all requests""" - client_ip = get_client_ip() - user_agent = request.headers.get('User-Agent', 'Unknown') - - # Track User-Agents - redis_client.incr(f'stats:user_agents:{quote(user_agent, safe="")}') - - # Track client IPs - redis_client.incr(f'stats:client_ips:{client_ip}') - -def get_client_ip(): - """Get real client IP considering proxies""" - x_forwarded_for = request.headers.get('X-Forwarded-For', '').split(',') - if 
x_forwarded_for and x_forwarded_for[0].strip(): - return x_forwarded_for[0].strip() - return request.remote_addr - -@app.template_filter('datetimeformat') -def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'): - try: - dt = datetime.fromisoformat(value) - return dt.strftime(format) - except (ValueError, AttributeError): - return value - -def ip_restriction(f): - @wraps(f) - def decorated(*args, **kwargs): - client_ip = get_client_ip() - host = request.host.split(':')[0] - - allowed_conditions = [ - client_ip in ALLOWED_IPS, - host == ALLOWED_DOMAIN, - request.headers.get('X-Forwarded-For', '').split(',')[0].strip() in ALLOWED_IPS - ] - - if any(allowed_conditions): - return f(*args, **kwargs) - redis_client.incr('stats:errors_403') - abort(403) - return decorated - -def cache_key(source_url, ip): - return f"cache:{source_url}:{ip}" - -#def convert_hosts(content, target_ip): -# """Convert IPs in hosts file content""" -# pattern = r'^\s*?(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s+(?P\S+).*$' -# return re.sub(pattern, f"{target_ip} \\g", content, flags=re.MULTILINE) - -def convert_hosts(content, target_ip): - """Convert with enhanced validation""" - converted = [] - - for line in content.splitlines(): - line = line.strip() - - # Skip empty/comments - if not line or line[0] in ('!', '#', '/') or '$' in line: - continue - - # AdGuard domains - if line.startswith(('||', '|')): - domain = line.split('^')[0].lstrip('|') - if 1 < len(domain) <= 253 and '.' in domain[1:-1]: - converted.append(f"{target_ip} {domain}") - continue - - # Classic hosts format - if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+', line): - converted.append(re.sub(r'^\S+', target_ip, line, count=1)) - - return '\n'.join(converted) - -def validate_and_normalize_url(url): - """Validate and normalize input URL""" - parsed = urlparse(url) - if not parsed.scheme: - url = f'https://{url}' - parsed = urlparse(url) - if not parsed.netloc: - raise ValueError("Missing host in URL") - return parsed.geturl() - -def track_url_request(url): - """Track requests for specific URLs""" - redis_key = f"stats:url_requests:{quote(url, safe='')}" - redis_client.incr(redis_key) - -def add_recent_link(url, target_ip): - """Add to recent links history""" - timestamp = datetime.now().isoformat() - link_data = f"{timestamp}|{url}|{target_ip}" - - with redis_client.pipeline() as pipe: - pipe.lpush("recent_links", link_data) - pipe.ltrim("recent_links", 0, 9) - pipe.execute() - redis_client.incr('stats:recent_links_added') - -def get_recent_links(): - """Get last 10 recent links""" - links = redis_client.lrange("recent_links", 0, 9) - parsed_links = [] - for link in links: - parts = link.decode().split("|") - if len(parts) >= 3: - parsed_links.append((parts[0], parts[1], parts[2])) - elif len(parts) == 2: - parsed_links.append((parts[0], parts[1], "127.0.0.1")) - return parsed_links - -@app.route('/', methods=['GET']) -def index(): - """Main form page""" - generated_link = None - recent_links = get_recent_links() - url_param = request.args.get('url') - target_ip = request.args.get('ip', '127.0.0.1') - - if url_param: - try: - normalized_url = validate_and_normalize_url(unquote(url_param)) - encoded_url = quote(normalized_url, safe='') - generated_link = urljoin( - request.host_url, - f"convert?url={encoded_url}&ip={target_ip}" - ) - add_recent_link(normalized_url, target_ip) - recent_links = get_recent_links() - except Exception as e: - app.logger.error(f"Error processing URL: {str(e)}") - - return render_template('form.html', - 
generated_link=generated_link, - recent_links=recent_links) - -@app.route('/convert') -def convert(): - """Conversion endpoint""" - try: - redis_client.incr('stats:convert_requests') - encoded_url = request.args.get('url') - - if not encoded_url: - redis_client.incr('stats:errors_400') - abort(400, description="Missing URL parameter") - - decoded_url = unquote(encoded_url) - normalized_url = validate_and_normalize_url(decoded_url) - target_ip = request.args.get('ip', '127.0.0.1') - - # Track statistics - track_url_request(normalized_url) - redis_client.incr(f'stats:target_ips:{target_ip}') - - # Check cache - cached = redis_client.get(cache_key(normalized_url, target_ip)) - if cached: - redis_client.incr('stats:cache_hits') - return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'} - - redis_client.incr('stats:cache_misses') - - # Fetch and process - response = requests.get(normalized_url, stream=True, timeout=15) - response.raise_for_status() - - content = b'' - for chunk in response.iter_content(2048): - content += chunk - if len(content) > app.config['MAX_CONTENT_LENGTH']: - redis_client.incr('stats:errors_413') - abort(413) - - converted = convert_hosts(content.decode('utf-8'), target_ip) - redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted) # 12h cache - redis_client.incr('stats:conversions_success') - return converted, 200, {'Content-Type': 'text/plain'} - - except requests.RequestException as e: - app.logger.error(f"Request error: {str(e)}") - redis_client.incr('stats:errors_500') - abort(500) - except ValueError as e: - app.logger.error(f"URL validation error: {str(e)}") - redis_client.incr('stats:errors_400') - abort(400) - -@app.route('/stats') -@ip_restriction -def stats(): - """Statistics endpoint""" - stats_data = {} - target_ips = {} - url_requests = {} - user_agents = {} - client_ips = {} - - # Aggregate stats from Redis - for key in redis_client.scan_iter("stats:*"): - key_str = key.decode() - value = redis_client.get(key).decode() - - if key_str.startswith('stats:target_ips:'): - ip = key_str.split(':', 2)[2] - target_ips[ip] = value - elif key_str.startswith('stats:url_requests:'): - url = unquote(key_str.split(':', 2)[2]) - url_requests[url] = value - elif key_str.startswith('stats:user_agents:'): - ua = unquote(key_str.split(':', 2)[2]) - user_agents[ua] = value - elif key_str.startswith('stats:client_ips:'): - ip = key_str.split(':', 2)[2] - client_ips[ip] = value - else: - stats_data[key_str] = value - - # Structure response - response_data = { - **stats_data, - 'target_ips': target_ips, - 'url_requests': url_requests, - 'user_agents': user_agents, - 'client_ips': client_ips - } - - return jsonify(response_data) - -@app.errorhandler(400) -@app.errorhandler(403) -@app.errorhandler(404) -@app.errorhandler(413) -@app.errorhandler(500) -def handle_errors(e): - """Error handling""" - return render_template('error.html', error=e), e.code - -if __name__ == '__main__': - app.run(host='0.0.0.0', port=8283) diff --git a/app_gpt.py b/app_gpt.py deleted file mode 100644 index 6d2e9a5..0000000 --- a/app_gpt.py +++ /dev/null @@ -1,350 +0,0 @@ -import re -import redis -import requests -from datetime import datetime -from flask import Flask, request, render_template, abort, jsonify, g -from urllib.parse import urlparse, quote, unquote, urljoin -from functools import wraps -import json -import socket -import time - -app = Flask(__name__) -app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024 # limit -redis_client = redis.Redis(host='localhost', 
port=6379, db=7) - -ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'} -ALLOWED_DOMAIN = '' - -@app.before_request -def track_request_data(): - """Rejestracja IP klienta, User-Agent, metody HTTP oraz rozpoczęcie pomiaru czasu requestu""" - g.start_time = time.perf_counter() # rozpoczęcie pomiaru czasu - client_ip = get_client_ip() - user_agent = request.headers.get('User-Agent', 'Unknown') - method = request.method - - # Rejestracja User-Agent - redis_client.incr(f'stats:user_agents:{quote(user_agent, safe="")}') - # Rejestracja adresu IP klienta - redis_client.incr(f'stats:client_ips:{client_ip}') - # Rejestracja metody HTTP - redis_client.incr(f'stats:methods:{method}') - -def get_client_ip(): - """Pobranie prawdziwego adresu IP klienta (uwzględniając proxy)""" - x_forwarded_for = request.headers.get('X-Forwarded-For', '').split(',') - if x_forwarded_for and x_forwarded_for[0].strip(): - return x_forwarded_for[0].strip() - return request.remote_addr - -@app.after_request -def after_request(response): - """Pomiar i rejestracja czasu przetwarzania żądania""" - elapsed = time.perf_counter() - g.start_time - # Aktualizacja statystyk czasu przetwarzania (w sekundach) - redis_client.incrbyfloat('stats:processing_time_total', elapsed) - redis_client.incr('stats:processing_time_count') - - # Aktualizacja minimalnego czasu przetwarzania - try: - current_min = float(redis_client.get('stats:processing_time_min') or elapsed) - if elapsed < current_min: - redis_client.set('stats:processing_time_min', elapsed) - except Exception: - redis_client.set('stats:processing_time_min', elapsed) - - # Aktualizacja maksymalnego czasu przetwarzania - try: - current_max = float(redis_client.get('stats:processing_time_max') or elapsed) - if elapsed > current_max: - redis_client.set('stats:processing_time_max', elapsed) - except Exception: - redis_client.set('stats:processing_time_max', elapsed) - - return response - -@app.template_filter('datetimeformat') -def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'): - try: - dt = datetime.fromisoformat(value) - return dt.strftime(format) - except (ValueError, AttributeError): - return value - -def ip_restriction(f): - @wraps(f) - def decorated(*args, **kwargs): - client_ip = get_client_ip() - host = request.host.split(':')[0] - - allowed_conditions = [ - client_ip in ALLOWED_IPS, - host == ALLOWED_DOMAIN, - request.headers.get('X-Forwarded-For', '').split(',')[0].strip() in ALLOWED_IPS - ] - - if any(allowed_conditions): - return f(*args, **kwargs) - redis_client.incr('stats:errors_403') - abort(403) - return decorated - -def cache_key(source_url, ip): - return f"cache:{source_url}:{ip}" - -def convert_hosts(content, target_ip): - """Konwersja treści pliku hosts z uwzględnieniem walidacji""" - converted = [] - - for line in content.splitlines(): - line = line.strip() - - # Pomijanie pustych linii i komentarzy - if not line or line[0] in ('!', '#', '/') or '$' in line: - continue - - # Reguły AdGuard - if line.startswith(('||', '|')): - domain = line.split('^')[0].lstrip('|') - if 1 < len(domain) <= 253 and '.' 
in domain[1:-1]: - converted.append(f"{target_ip} {domain}") - continue - - # Klasyczny format hosts - if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+', line): - converted.append(re.sub(r'^\S+', target_ip, line, count=1)) - - return '\n'.join(converted) - -def validate_and_normalize_url(url): - """Walidacja i normalizacja adresu URL""" - parsed = urlparse(url) - if not parsed.scheme: - url = f'https://{url}' - parsed = urlparse(url) - if not parsed.netloc: - raise ValueError("Missing host in URL") - return parsed.geturl() - -def track_url_request(url): - """Rejestracja żądania dla określonego URL""" - redis_key = f"stats:url_requests:{quote(url, safe='')}" - redis_client.incr(redis_key) - -def add_recent_link(url, target_ip): - """Dodanie ostatniego linku do historii (ostatnie 10)""" - timestamp = datetime.now().isoformat() - link_data = f"{timestamp}|{url}|{target_ip}" - - with redis_client.pipeline() as pipe: - pipe.lpush("recent_links", link_data) - pipe.ltrim("recent_links", 0, 9) - pipe.execute() - redis_client.incr('stats:recent_links_added') - -def get_recent_links(): - """Pobranie ostatnich 10 linków""" - links = redis_client.lrange("recent_links", 0, 9) - parsed_links = [] - for link in links: - parts = link.decode().split("|") - if len(parts) >= 3: - parsed_links.append((parts[0], parts[1], parts[2])) - elif len(parts) == 2: - parsed_links.append((parts[0], parts[1], "127.0.0.1")) - return parsed_links - -# Nowa funkcja do logowania requestów dla endpointu /convert -def add_recent_convert(): - """Dodaje dane żądania do listy ostatnich konwersji (/convert)""" - ip = get_client_ip() - try: - hostname = socket.gethostbyaddr(ip)[0] - except Exception: - hostname = ip - user_agent = request.headers.get('User-Agent', 'Unknown') - time_str = datetime.now().astimezone().isoformat() - url = request.full_path # pełna ścieżka wraz z query string - data = { - "url": url, - "ip": ip, - "hostname": hostname, - "time": time_str, - "user_agent": user_agent - } - json_data = json.dumps(data) - redis_client.lpush("recent_converts", json_data) - redis_client.ltrim("recent_converts", 0, 49) - -@app.route('/', methods=['GET']) -def index(): - """Strona główna z formularzem""" - generated_link = None - recent_links = get_recent_links() - url_param = request.args.get('url') - target_ip = request.args.get('ip', '127.0.0.1') - - if url_param: - try: - normalized_url = validate_and_normalize_url(unquote(url_param)) - encoded_url = quote(normalized_url, safe='') - generated_link = urljoin( - request.host_url, - f"convert?url={encoded_url}&ip={target_ip}" - ) - add_recent_link(normalized_url, target_ip) - recent_links = get_recent_links() - except Exception as e: - app.logger.error(f"Error processing URL: {str(e)}") - - return render_template('form.html', - generated_link=generated_link, - recent_links=recent_links) - -@app.route('/convert') -def convert(): - """Endpoint do konwersji""" - try: - redis_client.incr('stats:convert_requests') - # Logowanie danych dla requestu do /convert - add_recent_convert() - - encoded_url = request.args.get('url') - - if not encoded_url: - redis_client.incr('stats:errors_400') - abort(400, description="Missing URL parameter") - - decoded_url = unquote(encoded_url) - normalized_url = validate_and_normalize_url(decoded_url) - target_ip = request.args.get('ip', '127.0.0.1') - - # Rejestracja statystyk dotyczących URL - track_url_request(normalized_url) - redis_client.incr(f'stats:target_ips:{target_ip}') - - # Sprawdzenie pamięci podręcznej - cached = 
redis_client.get(cache_key(normalized_url, target_ip)) - if cached: - redis_client.incr('stats:cache_hits') - return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'} - - redis_client.incr('stats:cache_misses') - - # Pobranie i przetworzenie treści - response = requests.get(normalized_url, stream=True, timeout=15) - response.raise_for_status() - - content = b'' - for chunk in response.iter_content(2048): - content += chunk - if len(content) > app.config['MAX_CONTENT_LENGTH']: - redis_client.incr('stats:errors_413') - abort(413) - - # Rejestracja rozmiaru pobranej treści - content_size = len(content) - redis_client.incrby('stats:content_size_total', content_size) - redis_client.incr('stats:content_size_count') - - converted = convert_hosts(content.decode('utf-8'), target_ip) - redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted) # 12h cache - redis_client.incr('stats:conversions_success') - return converted, 200, {'Content-Type': 'text/plain'} - - except requests.RequestException as e: - app.logger.error(f"Request error: {str(e)}") - redis_client.incr('stats:errors_500') - abort(500) - except ValueError as e: - app.logger.error(f"URL validation error: {str(e)}") - redis_client.incr('stats:errors_400') - abort(400) - -@app.route('/stats') -@ip_restriction -def stats(): - """Endpoint statystyk""" - stats_data = {} - target_ips = {} - url_requests = {} - user_agents = {} - client_ips = {} - - # Agregacja statystyk z Redisa - for key in redis_client.scan_iter("stats:*"): - key_str = key.decode() - value = redis_client.get(key).decode() - - if key_str.startswith('stats:target_ips:'): - ip = key_str.split(':', 2)[2] - target_ips[ip] = value - elif key_str.startswith('stats:url_requests:'): - url = unquote(key_str.split(':', 2)[2]) - url_requests[url] = value - elif key_str.startswith('stats:user_agents:'): - ua = unquote(key_str.split(':', 2)[2]) - user_agents[ua] = value - elif key_str.startswith('stats:client_ips:'): - ip = key_str.split(':', 2)[2] - client_ips[ip] = value - else: - stats_data[key_str] = value - - # Pobranie ostatnich 50 requestów dla endpointu /convert - recent_converts = [] - convert_entries = redis_client.lrange("recent_converts", 0, 49) - for entry in convert_entries: - try: - data = json.loads(entry.decode()) - recent_converts.append(data) - except Exception: - pass - - # Obliczenie średniego czasu przetwarzania żądań - processing_time_total = float(redis_client.get('stats:processing_time_total') or 0) - processing_time_count = int(redis_client.get('stats:processing_time_count') or 0) - avg_processing_time = processing_time_total / processing_time_count if processing_time_count > 0 else 0 - - # Obliczenie średniego rozmiaru pobranej treści dla /convert - content_size_total = int(redis_client.get('stats:content_size_total') or 0) - content_size_count = int(redis_client.get('stats:content_size_count') or 0) - avg_content_size = content_size_total / content_size_count if content_size_count > 0 else 0 - - # Rozszerzone statystyki dotyczące wydajności i rozmiarów danych - detailed_stats = { - "processing_time_total_sec": processing_time_total, - "processing_time_count": processing_time_count, - "processing_time_avg_sec": avg_processing_time, - "processing_time_min_sec": float(redis_client.get('stats:processing_time_min') or 0), - "processing_time_max_sec": float(redis_client.get('stats:processing_time_max') or 0), - "content_size_total_bytes": content_size_total, - "content_size_count": content_size_count, - "content_size_avg_bytes": 
avg_content_size - } - - # Struktura odpowiedzi - response_data = { - **stats_data, - 'target_ips': target_ips, - 'url_requests': url_requests, - 'user_agents': user_agents, - 'client_ips': client_ips, - 'recent_converts': recent_converts, - 'detailed_stats': detailed_stats - } - - return jsonify(response_data) - -@app.errorhandler(400) -@app.errorhandler(403) -@app.errorhandler(404) -@app.errorhandler(413) -@app.errorhandler(500) -def handle_errors(e): - """Obsługa błędów""" - return render_template('error.html', error=e), e.code - -if __name__ == '__main__': - app.run(host='0.0.0.0', port=8283) diff --git a/app_timeout.py b/app_timeout.py deleted file mode 100644 index 5f35439..0000000 --- a/app_timeout.py +++ /dev/null @@ -1,383 +0,0 @@ -import re -import redis -import requests -import aiohttp -import asyncio -import socket -import time -import json -from datetime import datetime -from flask import Flask, request, render_template, abort, jsonify, g -from urllib.parse import urlparse, quote, unquote, urljoin -from functools import wraps -from flask_compress import Compress -from flask_limiter import Limiter -from flask_limiter.util import get_remote_address - -app = Flask(__name__) -app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024 # limit -redis_client = redis.Redis(host='localhost', port=6379, db=7) - -# Ustawienia do rate limiting – 100 żądań na minutę -def get_client_ip(): - """Pobranie prawdziwego adresu IP klienta (uwzględniając proxy)""" - x_forwarded_for = request.headers.get('X-Forwarded-For', '').split(',') - if x_forwarded_for and x_forwarded_for[0].strip(): - return x_forwarded_for[0].strip() - return request.remote_addr - -limiter = Limiter(key_func=get_client_ip, default_limits=["100 per minute"], app=app) -Compress(app) - -ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'} -ALLOWED_DOMAIN = '' - -@app.before_request -def track_request_data(): - """Rejestracja IP klienta, User-Agent, metody HTTP oraz rozpoczęcie pomiaru czasu requestu""" - g.start_time = time.perf_counter() # rozpoczęcie pomiaru czasu - client_ip = get_client_ip() - user_agent = request.headers.get('User-Agent', 'Unknown') - method = request.method - - # Rejestracja User-Agent - redis_client.incr(f'stats:user_agents:{quote(user_agent, safe="")}') - # Rejestracja adresu IP klienta - redis_client.incr(f'stats:client_ips:{client_ip}') - # Rejestracja metody HTTP - redis_client.incr(f'stats:methods:{method}') - -@app.after_request -def after_request(response): - """Pomiar i rejestracja czasu przetwarzania żądania""" - elapsed = time.perf_counter() - g.start_time - # Aktualizacja statystyk czasu przetwarzania (w sekundach) - redis_client.incrbyfloat('stats:processing_time_total', elapsed) - redis_client.incr('stats:processing_time_count') - - # Aktualizacja minimalnego czasu przetwarzania - try: - current_min = float(redis_client.get('stats:processing_time_min') or elapsed) - if elapsed < current_min: - redis_client.set('stats:processing_time_min', elapsed) - except Exception: - redis_client.set('stats:processing_time_min', elapsed) - - # Aktualizacja maksymalnego czasu przetwarzania - try: - current_max = float(redis_client.get('stats:processing_time_max') or elapsed) - if elapsed > current_max: - redis_client.set('stats:processing_time_max', elapsed) - except Exception: - redis_client.set('stats:processing_time_max', elapsed) - - return response - -@app.template_filter('datetimeformat') -def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'): - try: - dt = datetime.fromisoformat(value) - return 
dt.strftime(format) - except (ValueError, AttributeError): - return value - -def ip_restriction(f): - @wraps(f) - def decorated(*args, **kwargs): - client_ip = get_client_ip() - host = request.host.split(':')[0] - - allowed_conditions = [ - client_ip in ALLOWED_IPS, - host == ALLOWED_DOMAIN, - request.headers.get('X-Forwarded-For', '').split(',')[0].strip() in ALLOWED_IPS - ] - - if any(allowed_conditions): - return f(*args, **kwargs) - redis_client.incr('stats:errors_403') - abort(403) - return decorated - -def cache_key(source_url, ip): - return f"cache:{source_url}:{ip}" - -def convert_hosts(content, target_ip): - """Konwersja treści pliku hosts z uwzględnieniem walidacji""" - converted = [] - - for line in content.splitlines(): - line = line.strip() - - # Pomijanie pustych linii i komentarzy - if not line or line[0] in ('!', '#', '/') or '$' in line: - continue - - # Reguły AdGuard - if line.startswith(('||', '|')): - domain = line.split('^')[0].lstrip('|') - if 1 < len(domain) <= 253 and '.' in domain[1:-1]: - converted.append(f"{target_ip} {domain}") - continue - - # Klasyczny format hosts - if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+', line): - converted.append(re.sub(r'^\S+', target_ip, line, count=1)) - - return '\n'.join(converted) - -def validate_and_normalize_url(url): - """Walidacja i normalizacja adresu URL""" - parsed = urlparse(url) - if not parsed.scheme: - url = f'https://{url}' - parsed = urlparse(url) - if not parsed.netloc: - raise ValueError("Missing host in URL") - return parsed.geturl() - -def track_url_request(url): - """Rejestracja żądania dla określonego URL""" - redis_key = f"stats:url_requests:{quote(url, safe='')}" - redis_client.incr(redis_key) - -def add_recent_link(url, target_ip): - """Dodanie ostatniego linku do historii (ostatnie 10)""" - timestamp = datetime.now().isoformat() - link_data = f"{timestamp}|{url}|{target_ip}" - - with redis_client.pipeline() as pipe: - pipe.lpush("recent_links", link_data) - pipe.ltrim("recent_links", 0, 9) - pipe.execute() - redis_client.incr('stats:recent_links_added') - -def get_recent_links(): - """Pobranie ostatnich 10 linków""" - links = redis_client.lrange("recent_links", 0, 9) - parsed_links = [] - for link in links: - parts = link.decode().split("|") - if len(parts) >= 3: - parsed_links.append((parts[0], parts[1], parts[2])) - elif len(parts) == 2: - parsed_links.append((parts[0], parts[1], "127.0.0.1")) - return parsed_links - -def get_hostname(ip): - """Cache’owanie wyników reverse DNS dla danego IP""" - key = f"reverse_dns:{ip}" - cached = redis_client.get(key) - if cached: - return cached.decode() - try: - hostname = socket.gethostbyaddr(ip)[0] - except Exception: - hostname = ip - # Cache na 1 godzinę - redis_client.setex(key, 3600, hostname) - return hostname - -# Nowa funkcja do logowania requestów dla endpointu /convert -def add_recent_convert(): - """Dodaje dane żądania do listy ostatnich konwersji (/convert)""" - ip = get_client_ip() - hostname = get_hostname(ip) - user_agent = request.headers.get('User-Agent', 'Unknown') - time_str = datetime.now().astimezone().isoformat() - url = request.full_path # pełna ścieżka wraz z query string - data = { - "url": url, - "ip": ip, - "hostname": hostname, - "time": time_str, - "user_agent": user_agent - } - json_data = json.dumps(data) - redis_client.lpush("recent_converts", json_data) - redis_client.ltrim("recent_converts", 0, 49) - -@app.route('/', methods=['GET']) -def index(): - """Strona główna z formularzem""" - generated_link = None - recent_links = 
get_recent_links() - url_param = request.args.get('url') - target_ip = request.args.get('ip', '127.0.0.1') - - if url_param: - try: - normalized_url = validate_and_normalize_url(unquote(url_param)) - encoded_url = quote(normalized_url, safe='') - generated_link = urljoin( - request.host_url, - f"convert?url={encoded_url}&ip={target_ip}" - ) - add_recent_link(normalized_url, target_ip) - recent_links = get_recent_links() - except Exception as e: - app.logger.error(f"Error processing URL: {str(e)}") - - return render_template('form.html', - generated_link=generated_link, - recent_links=recent_links) - -@app.route('/convert') -@limiter.limit("100 per minute") -async def convert(): - """Asynchroniczny endpoint do konwersji z weryfikacją typu zawartości""" - try: - redis_client.incr('stats:convert_requests') - # Logowanie danych dla requestu do /convert - add_recent_convert() - - encoded_url = request.args.get('url') - if not encoded_url: - redis_client.incr('stats:errors_400') - abort(400, description="Missing URL parameter") - - decoded_url = unquote(encoded_url) - normalized_url = validate_and_normalize_url(decoded_url) - target_ip = request.args.get('ip', '127.0.0.1') - - # Rejestracja statystyk dotyczących URL - track_url_request(normalized_url) - redis_client.incr(f'stats:target_ips:{target_ip}') - - # Sprawdzenie pamięci podręcznej - cached = redis_client.get(cache_key(normalized_url, target_ip)) - if cached: - redis_client.incr('stats:cache_hits') - return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'} - - redis_client.incr('stats:cache_misses') - - # Asynchroniczne pobranie zasobu za pomocą aiohttp - async with aiohttp.ClientSession() as session: - async with session.get(normalized_url, timeout=15) as response: - # Sprawdzanie typu zawartości – musi zawierać "text" - content_type = response.headers.get("Content-Type", "") - if "text" not in content_type: - abort(415, description="Unsupported Media Type") - content = b"" - while True: - chunk = await response.content.read(2048) - if not chunk: - break - content += chunk - if len(content) > app.config['MAX_CONTENT_LENGTH']: - redis_client.incr('stats:errors_413') - abort(413) - - # Rejestracja rozmiaru pobranej treści - content_size = len(content) - redis_client.incrby('stats:content_size_total', content_size) - redis_client.incr('stats:content_size_count') - - converted = convert_hosts(content.decode('utf-8'), target_ip) - redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted) # 12h cache - redis_client.incr('stats:conversions_success') - return converted, 200, {'Content-Type': 'text/plain'} - - except aiohttp.ClientError as e: - app.logger.error(f"Request error: {str(e)}") - redis_client.incr('stats:errors_500') - abort(500) - except ValueError as e: - app.logger.error(f"URL validation error: {str(e)}") - redis_client.incr('stats:errors_400') - abort(400) - -@app.route('/stats') -@ip_restriction -def stats(): - """Endpoint statystyk""" - stats_data = {} - target_ips = {} - url_requests = {} - user_agents = {} - client_ips = {} - - # Agregacja statystyk z Redisa - for key in redis_client.scan_iter("stats:*"): - key_str = key.decode() - value = redis_client.get(key).decode() - - if key_str.startswith('stats:target_ips:'): - ip = key_str.split(':', 2)[2] - target_ips[ip] = value - elif key_str.startswith('stats:url_requests:'): - url = unquote(key_str.split(':', 2)[2]) - url_requests[url] = value - elif key_str.startswith('stats:user_agents:'): - ua = unquote(key_str.split(':', 2)[2]) - user_agents[ua] = 
value - elif key_str.startswith('stats:client_ips:'): - ip = key_str.split(':', 2)[2] - client_ips[ip] = value - else: - stats_data[key_str] = value - - # Pobranie ostatnich 50 requestów dla endpointu /convert - recent_converts = [] - convert_entries = redis_client.lrange("recent_converts", 0, 49) - for entry in convert_entries: - try: - data = json.loads(entry.decode()) - recent_converts.append(data) - except Exception: - pass - - # Obliczenie średniego czasu przetwarzania żądań - processing_time_total = float(redis_client.get('stats:processing_time_total') or 0) - processing_time_count = int(redis_client.get('stats:processing_time_count') or 0) - avg_processing_time = processing_time_total / processing_time_count if processing_time_count > 0 else 0 - - # Obliczenie średniego rozmiaru pobranej treści dla /convert - content_size_total = int(redis_client.get('stats:content_size_total') or 0) - content_size_count = int(redis_client.get('stats:content_size_count') or 0) - avg_content_size = content_size_total / content_size_count if content_size_count > 0 else 0 - - # Rozszerzone statystyki dotyczące wydajności i rozmiarów danych - detailed_stats = { - "processing_time_total_sec": processing_time_total, - "processing_time_count": processing_time_count, - "processing_time_avg_sec": avg_processing_time, - "processing_time_min_sec": float(redis_client.get('stats:processing_time_min') or 0), - "processing_time_max_sec": float(redis_client.get('stats:processing_time_max') or 0), - "content_size_total_bytes": content_size_total, - "content_size_count": content_size_count, - "content_size_avg_bytes": avg_content_size - } - - # Struktura odpowiedzi - response_data = { - **stats_data, - 'target_ips': target_ips, - 'url_requests': url_requests, - 'user_agents': user_agents, - 'client_ips': client_ips, - 'recent_converts': recent_converts, - 'detailed_stats': detailed_stats - } - - return jsonify(response_data) - -@app.errorhandler(400) -@app.errorhandler(403) -@app.errorhandler(404) -@app.errorhandler(413) -@app.errorhandler(415) -@app.errorhandler(500) -def handle_errors(e): - """Obsługa błędów""" - return render_template('error.html', error=e), e.code - -# Jeśli aplikacja jest uruchamiana bezpośrednio, korzystamy z Flask's run -if __name__ == '__main__': - app.run(host='0.0.0.0', port=8283) -# W przeciwnym razie (np. 
przy uruchamianiu przez Gunicorn) opakowujemy aplikację w adapter ASGI -else: - from asgiref.wsgi import WsgiToAsgi - asgi_app = WsgiToAsgi(app) - diff --git a/config.py b/config.py index e704d23..21752b8 100644 --- a/config.py +++ b/config.py @@ -20,7 +20,7 @@ def getenv_float(key: str, default: float) -> float: # Podstawowe FLASK_DEBUG = getenv_bool("FLASK_DEBUG", True) -SECRET_KEY = os.getenv("SECRET_KEY", "change-me") +SECRET_KEY = os.getenv("SECRET_KEY", "secretkey") # Rozmiary/limity MAX_CONTENT_LENGTH = getenv_int("MAX_CONTENT_LENGTH", 50 * 1024 * 1024) # 50MB @@ -36,8 +36,8 @@ REDIS_DB = getenv_int("REDIS_DB", 7) # Basic Auth dla /stats STATS_BASIC_AUTH_ENABLED = getenv_bool("STATS_BASIC_AUTH_ENABLED", True) STATS_BASIC_AUTH_REALM = os.getenv("STATS_BASIC_AUTH_REALM", "Stats") -STATS_BASIC_AUTH_USER = os.getenv("STATS_BASIC_AUTH_USER", "admin") -STATS_BASIC_AUTH_PASS = os.getenv("STATS_BASIC_AUTH_PASS", "change-me") +STATS_BASIC_AUTH_USER = os.getenv("STATS_BASIC_AUTH_USER", "admin").strip() +STATS_BASIC_AUTH_PASS = os.getenv("STATS_BASIC_AUTH_PASS", "admin").strip() # Cache/ETag dla Varnisha CACHE_ENABLED = getenv_bool("CACHE_ENABLED", True) @@ -63,3 +63,8 @@ DEFAULT_SOURCE_URL = os.getenv( "DEFAULT_SOURCE_URL", "" ) + +# Debug trybu /convert +DEBUG_ENABLE = getenv_bool("DEBUG_ENABLE", False) +DEBUG_KEY = os.getenv("DEBUG_KEY", "") # ustaw w env bezpieczny losowy sekret +DEBUG_RATE_LIMIT = os.getenv("DEBUG_RATE_LIMIT", "5 per minute") \ No newline at end of file diff --git a/start_dev.sh b/start_dev.sh index 9e3e4e1..1d55796 100644 --- a/start_dev.sh +++ b/start_dev.sh @@ -1 +1,3 @@ -venv/bin/gunicorn -k uvicorn.workers.UvicornWorker --workers 4 --bind 127.0.0.1:8283 --keep-alive 30 --timeout 90 app:asgi_app \ No newline at end of file +#!/bin/bash + +venv/bin/gunicorn -k uvicorn.workers.UvicornWorker --workers 1 --bind 127.0.0.1:8283 --keep-alive 30 --timeout 60 app:asgi_app \ No newline at end of file diff --git a/static/css/main.css b/static/css/main.css new file mode 100644 index 0000000..bdf6e09 --- /dev/null +++ b/static/css/main.css @@ -0,0 +1,688 @@ +:root { + --bg: #0f1115; + --bg-elev: #131722; + --card: #161b26; + --text: #e7eef7; + --muted: #a9b4c3; + --border: #243043; + --brand: #5b9dff; + --brand-2: #7bd4ff; + --success: #29c36a; + --danger: #ff5d5d; + --shadow: 0 10px 30px rgba(0, 0, 0, .35); + color-scheme: dark; +} + +[data-theme="light"] { + --bg: #f6f8fb; + --bg-elev: #fff; + --card: #fff; + --text: #1d2433; + --muted: #5b6678; + --border: #e6eaf2; + --brand: #0054e6; + --brand-2: #3aa2ff; + --success: #1a9a56; + --danger: #d14646; + --shadow: 0 8px 24px rgba(0, 0, 0, .08); + color-scheme: light; +} + +* { + box-sizing: border-box +} + +html, +body { + height: 100% +} + +body { + margin: 0; + font-family: ui-sans-serif, system-ui, "Segoe UI", Roboto, Arial, sans-serif; + background: + radial-gradient(1200px 600px at 10% -10%, rgba(91, 157, 255, .08), transparent 60%), + radial-gradient(900px 500px at 110% 0%, rgba(123, 212, 255, .10), transparent 60%), + var(--bg); + color: var(--text); +} + +/* Header */ +.site-header { + position: sticky; + top: 0; + z-index: 10; + display: flex; + justify-content: space-between; + align-items: center; + padding: 14px 18px; + background: var(--bg-elev); + border-bottom: 1px solid var(--border); + backdrop-filter: saturate(140%) blur(8px); +} + +.brand { + display: flex; + gap: 10px; + align-items: center; + font-weight: 700; + letter-spacing: .2px +} + +.brand svg { + color: var(--brand) +} + +.actions { + display: flex; 
gap: 8px; + align-items: center +} + +/* Layout */ +.container { + max-width: 980px; + margin: 24px auto; + padding: 0 16px; + display: grid; + gap: 18px +} + +.card { + background: linear-gradient(180deg, var(--card), color-mix(in srgb, var(--card) 80%, #000 20%)); + border: 1px solid var(--border); + border-radius: 16px; + box-shadow: var(--shadow); +} + +.section-head { + display: flex; + justify-content: space-between; + align-items: center; + padding: 14px 16px; + border-bottom: 1px dashed var(--border) +} + +/* Hero */ +.hero { + display: flex; + align-items: center; + justify-content: space-between; + padding: 24px; + gap: 18px +} + +.hero h1 { + margin: 0 0 6px; + font-size: clamp(22px, 3.4vw, 30px) +} + +.hero .muted { + color: var(--muted) +} + +.hero-cta .large { + font-size: 1.05rem; + padding: 14px 20px +} + +/* Grid */ +.grid { + display: grid; + grid-template-columns: repeat(12, minmax(0, 1fr)); + gap: 14px +} + +.col-12 { + grid-column: span 12 +} + +.col-6 { + grid-column: span 6 +} + +@media (max-width:720px) { + .col-6 { + grid-column: span 12 + } +} + +/* Form */ +.form-card { + padding: 16px +} + +.form-group label { + display: block; + font-weight: 600; + margin-bottom: 6px +} + +input[type="url"], +input[type="text"], +select { + width: 100%; + padding: 12px 14px; + border-radius: 12px; + background: linear-gradient(0deg, var(--bg-elev), var(--bg-elev)); + border: 1px solid var(--border); + color: var(--text); + outline: none; + transition: border .15s, box-shadow .15s, transform .05s; +} + +input:focus, +select:focus { + border-color: color-mix(in srgb, var(--brand) 60%, var(--border) 40%); + box-shadow: 0 0 0 3px color-mix(in srgb, var(--brand) 30%, transparent); +} + +.hint { + color: var(--muted); + display: block; + margin-top: 6px; + font-size: .9rem +} + +.error { + color: var(--danger); + min-height: 1.2em; + margin-top: 6px; + font-size: .9rem +} + +.form-actions { + display: flex; + gap: 10px; + align-items: center +} + +/* Result */ +.result-box { + margin-top: 14px; + padding: 12px; + border: 1px dashed var(--border); + border-radius: 12px; + background: var(--bg-elev) +} + +.result-row { + display: flex; + gap: 10px; + align-items: center +} + +.result-row input[readonly] { + flex: 1 1 auto; + min-width: 0 +} + +.result-buttons { + display: flex; + gap: 8px +} + +/* Recent */ +.recent-card { + padding: 0 +} + +.recent-list { + padding: 12px +} + +.link-item { + display: flex; + flex-direction: column; + gap: 6px; + padding: 12px; + border: 1px solid var(--border); + border-radius: 12px; + background: linear-gradient(180deg, var(--bg-elev), color-mix(in srgb, var(--bg-elev) 92%, #000 8%)); + transition: transform .12s ease-out, border-color .15s; +} + +.link-item:hover { + transform: translateY(-2px); + border-color: color-mix(in srgb, var(--brand) 40%, var(--border) 60%) +} + +.link-main { + display: flex; + gap: 8px; + align-items: center; + overflow: auto; + scrollbar-width: thin +} + +.link-main { + display: grid; + grid-template-columns: 1fr auto auto; + gap: 8px; + align-items: center +} + +.link-main .mono.ellipsis:first-child { + min-width: 0 +} + +.mono { + font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; + white-space: nowrap +} + +.arrow { + opacity: .6 +} + +.link-meta { + display: flex; + justify-content: space-between; + gap: 8px; + align-items: center +} + +.timestamp { + color: var(--muted); + font-size: .92rem +} + +.link-actions { + display: flex; + gap: 6px +} + +/* Footer */ +.site-footer { + display: flex; + 
flex-wrap: wrap; + gap: 8px; + align-items: center; + justify-content: space-between; + margin: 28px auto; + padding: 10px 16px; + max-width: 980px; + color: var(--muted) +} + +.site-footer a { + color: color-mix(in srgb, var(--brand) 80%, var(--text) 20%) +} + +/* Buttons */ +.btn { + -webkit-tap-highlight-color: transparent; + appearance: none; + border: none; + cursor: pointer; + user-select: none; + border-radius: 12px; + padding: 10px 14px; + font-weight: 700; + letter-spacing: .2px; + background: linear-gradient(180deg, color-mix(in srgb, var(--brand) 80%, var(--brand-2) 20%), var(--brand)); + color: #fff; + box-shadow: 0 10px 20px color-mix(in srgb, var(--brand) 35%, transparent); + transition: transform .04s ease, filter .15s ease, box-shadow .15s ease; +} + +.btn:hover { + filter: brightness(1.05) +} + +.btn:active { + transform: translateY(1px) +} + +.btn.outline { + background: transparent; + color: var(--text); + border: 1px solid color-mix(in srgb, var(--brand) 60%, var(--border) 40%) +} + +.btn.ghost { + background: transparent; + color: var(--text); + border: 1px solid var(--border) +} + +.btn.tiny { + padding: 6px 10px; + font-weight: 600; + border-radius: 10px +} + +.btn.large { + padding: 14px 20px; + border-radius: 14px +} + +/* Toast */ +#toast { + position: fixed; + left: 50%; + bottom: 24px; + transform: translateX(-50%) translateY(20px); + background: var(--bg-elev); + color: var(--text); + border: 1px solid var(--border); + border-radius: 12px; + padding: 10px 14px; + opacity: 0; + box-shadow: var(--shadow); + pointer-events: none; + transition: opacity .2s, transform .2s; +} + +#toast.show { + opacity: 1; + transform: translateX(-50%) translateY(0) +} + +/* Links & helpers */ +a { + color: color-mix(in srgb, var(--brand) 80%, var(--text) 20%); + text-decoration: none +} + +a:hover { + text-decoration: underline +} + +.ellipsis { + max-width: 100%; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap +} + +select.select, +.select { + appearance: none; + background: var(--bg-elev); + color: var(--text); + border: 1px solid var(--border) +} + +select option { + background: var(--bg-elev); + color: var(--text) +} + +select:focus { + border-color: color-mix(in srgb, var(--brand) 60%, var(--border) 40%); + box-shadow: 0 0 0 3px color-mix(in srgb, var(--brand) 30%, transparent); +} + +.nowrap { + white-space: nowrap +} + +/* Stats */ +.kpi-card { + padding: 16px +} + +.section-title { + margin: 10px 12px 0 +} + +.kpi-grid { + display: grid; + gap: 12px; + grid-template-columns: repeat(6, minmax(0, 1fr)) +} + +@media (max-width:980px) { + .kpi-grid { + grid-template-columns: repeat(3, 1fr) + } +} + +@media (max-width:640px) { + .kpi-grid { + grid-template-columns: repeat(2, 1fr) + } +} + +.kpi { + border: 1px solid var(--border); + border-radius: 12px; + padding: 12px; + background: linear-gradient(180deg, var(--bg-elev), color-mix(in srgb, var(--bg-elev) 92%, #000 8%)) +} + +.kpi-label { + color: var(--muted); + font-weight: 600; + font-size: .9rem +} + +.kpi-value { + font-size: 1.4rem; + font-weight: 800; + margin-top: 4px +} + +.table-wrap { + overflow: auto +} + +.data-table { + width: 100%; + border-collapse: collapse; + font-size: .98rem +} + +.data-table th, +.data-table td { + padding: 10px; + border-bottom: 1px solid var(--border); + vertical-align: top +} + +.data-table thead th { + position: sticky; + top: 0; + background: var(--bg-elev); + z-index: 1 +} + +.data-table .right { + text-align: right +} + +.scrollbox { + max-height: 320px; + 
overflow: auto; + padding: 12px; + margin: 0; + background: linear-gradient(180deg, var(--bg-elev), color-mix(in srgb, var(--bg-elev) 92%, #000 8%)); + border: 1px solid var(--border); + border-radius: 12px; +} + +.subhead { + margin: 10px 6px +} + +.table-filter { + padding: 8px 12px; + border-radius: 10px; + border: 1px solid var(--border); + background: var(--bg-elev); + color: var(--text); + min-width: 220px +} + +/* Stats wide container */ +.container.container--wide { + max-width: 1280px; + padding: 0 20px +} + +.container.container--wide .card { + padding: 16px +} + +.container.container--wide .section-head { + padding: 12px 4px; + border-bottom: 1px dashed var(--border) +} + +.container.container--wide .data-table { + table-layout: fixed; + font-size: .96rem; + line-height: 1.35 +} + +.container.container--wide .data-table th, +.container.container--wide .data-table td { + padding: 8px 10px +} + +.container.container--wide .data-table td:nth-child(2), +.container.container--wide .data-table td:nth-child(5) { + max-width: 0; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.container.container--wide .data-table tbody tr:nth-child(odd) td { + background: color-mix(in srgb, var(--bg-elev) 96%, #000 4%) +} + +.container.container--wide .table-wrap { + overflow: auto; + scrollbar-width: thin; + -webkit-overflow-scrolling: touch +} + +.container.container--wide .kpi-value { + font-size: 1.5rem +} + +@media (max-width:720px) { + .container.container--wide { + max-width: 100%; + padding: 0 12px + } + + .container.container--wide .data-table { + font-size: .94rem + } +} + +/* Error page */ +.error-card { + padding: 16px +} + +.error-hero { + display: flex; + gap: 18px; + align-items: center; + padding: 8px 6px 16px +} + +.error-illustration { + font-size: clamp(44px, 8vw, 72px); + filter: drop-shadow(0 10px 20px rgba(0, 0, 0, .25)) +} + +.error-main { + flex: 1 1 auto +} + +.status-badge { + display: inline-block; + padding: 6px 10px; + border-radius: 999px; + background: color-mix(in srgb, var(--danger) 18%, var(--bg-elev)); + border: 1px solid color-mix(in srgb, var(--danger) 60%, var(--border) 40%); + color: var(--text); + font-weight: 700; + letter-spacing: .2px; + margin-bottom: 6px; +} + +.error-title { + margin: 4px 0 6px; + font-size: clamp(22px, 3.4vw, 28px) +} + +.error-actions { + display: flex; + gap: 10px; + flex-wrap: wrap; + margin-top: 10px +} + +.error-details { + margin-top: 12px +} + +.error-details>summary { + list-style: none; + cursor: pointer; + padding: 10px 12px; + border: 1px solid var(--border); + border-radius: 10px; + background: var(--bg-elev); + color: var(--text); + display: flex; + align-items: center; + gap: 8px; +} + +.error-details>summary::-webkit-details-marker { + display: none +} + +.error-details[open]>summary { + border-bottom-left-radius: 0; + border-bottom-right-radius: 0 +} + +.summary-title { + font-weight: 700 +} + +.summary-hint { + color: var(--muted); + font-size: .9rem +} + +.details-body { + border: 1px solid var(--border); + border-top: none; + border-radius: 0 0 10px 10px; + background: var(--bg-elev) +} + +#error-dump { + margin: 0; + padding: 12px; + max-height: 360px; + overflow: auto; + background: linear-gradient(180deg, var(--bg-elev), color-mix(in srgb, var(--bg-elev) 92%, #000 8%)); + border-bottom: 1px solid var(--border); + white-space: pre-wrap; + word-break: break-word; + overflow-wrap: anywhere; +} + +@media (max-width:720px) { + .error-card { + padding: 12px + } + + .error-details>summary { + 
padding: 8px 10px + } + + #error-dump { + max-height: 300px + } +} \ No newline at end of file diff --git a/static/js/error.js b/static/js/error.js new file mode 100644 index 0000000..671f510 --- /dev/null +++ b/static/js/error.js @@ -0,0 +1,25 @@ +(function () { + const t = localStorage.getItem('theme') || 'dark'; + document.documentElement.setAttribute('data-theme', t); + // prosty "try again" + document.querySelector('[data-action="try-again"]')?.addEventListener('click', () => { + location.reload(); + }); + // kopiowanie logs + document.querySelector('[data-action="copy-text"]')?.addEventListener('click', (e) => { + const sel = e.currentTarget.getAttribute('data-target'); + const el = sel && document.querySelector(sel); + if (!el) return; + const txt = el.textContent || ''; + navigator.clipboard.writeText(txt).then(() => { + const toast = document.getElementById('toast'); + if (toast) { + toast.textContent = 'Copied!'; + toast.classList.add('show'); + setTimeout(() => toast.classList.remove('show'), 1200); + } + }); + }); +})(); + + diff --git a/static/js/main.js b/static/js/main.js new file mode 100644 index 0000000..c73c70d --- /dev/null +++ b/static/js/main.js @@ -0,0 +1,161 @@ +(function () { + + const $ = (q, c = document) => c.querySelector(q); + const $$ = (q, c = document) => Array.from(c.querySelectorAll(q)); + const setTheme = (t) => { document.documentElement.setAttribute('data-theme', t); try { localStorage.setItem('theme', t) } catch { } }; + const toast = (msg) => { + const el = $('#toast'); if (!el) return; + el.textContent = msg; el.classList.add('show'); + clearTimeout(el._t); el._t = setTimeout(() => el.classList.remove('show'), 2000); + }; + const host = () => `${location.protocol}//${location.host}`; + + function buildLink(url, ip) { + if (!url || !ip) return ''; + try { + const enc = encodeURIComponent(url); + const ipClean = (ip || '').trim(); + return `${host()}/convert?url=${enc}&ip=${encodeURIComponent(ipClean)}`; + } catch { return ''; } + } + + document.addEventListener('click', (e) => { + const t = e.target.closest('[data-action="toggle-theme"]'); + if (t) { + e.preventDefault(); + const cur = document.documentElement.getAttribute('data-theme') || 'dark'; + setTheme(cur === 'dark' ? 'light' : 'dark'); + } + }); + + const urlInput = $('#url-input'); + const ipInput = $('#ip-input'); + const ipPreset = $('#ip-preset'); + const out = $('#generated-link'); + const openBtn = $('#open-link'); + + function updatePreview() { + const link = buildLink(urlInput.value.trim(), ipInput.value.trim()); + out.value = link || ''; + if (link) { + openBtn.setAttribute('href', link); + openBtn.setAttribute('aria-disabled', 'false'); + } else { + openBtn.setAttribute('href', '#'); + openBtn.setAttribute('aria-disabled', 'true'); + } + $('.result-box')?.setAttribute('data-state', link ? 
'ready' : 'empty'); + } + + ['input', 'change', 'blur'].forEach(evt => { + urlInput?.addEventListener(evt, updatePreview); + ipInput?.addEventListener(evt, updatePreview); + }); + + ipPreset?.addEventListener('change', () => { + const v = ipPreset.value; + if (!v) return; + if (v !== 'custom') ipInput.value = v; + ipInput.focus(); + updatePreview(); + }); + + document.addEventListener('click', (e) => { + let t = e.target; + + if (t.closest('[data-action="copy"]')) { + e.preventDefault(); + const btn = t.closest('[data-action="copy"]'); + const sel = btn.getAttribute('data-target') || '#generated-link'; + const el = $(sel); + if (!el) return; + const text = el.value || el.textContent || ''; + navigator.clipboard?.writeText(text).then(() => { + btn.classList.add('copied'); setTimeout(() => btn.classList.remove('copied'), 1200); + toast('Link copied'); + }).catch(() => { + // Fallback + const range = document.createRange(); range.selectNodeContents(el); + const selObj = getSelection(); selObj.removeAllRanges(); selObj.addRange(range); + try { document.execCommand('copy'); toast('Link copied'); } catch { } + selObj.removeAllRanges(); + }); + } + + if (t.closest('[data-action="copy-text"]')) { + e.preventDefault(); + const btn = t.closest('[data-action="copy-text"]'); + const text = btn.getAttribute('data-text') || ''; + if (!text) return; + navigator.clipboard?.writeText(text).then(() => toast('Copied')); + } + + if (t.closest('[data-action="clear"]')) { + e.preventDefault(); + urlInput.value = ''; + + updatePreview(); + urlInput.focus(); + } + + if (t.closest('[data-action="collapse"]')) { + e.preventDefault(); + const btn = t.closest('[data-action="collapse"]'); + const panel = $('#' + (btn.getAttribute('aria-controls') || '')); + if (!panel) return; + const expanded = btn.getAttribute('aria-expanded') === 'true'; + btn.setAttribute('aria-expanded', expanded ? 'false' : 'true'); + panel.style.display = expanded ? 'none' : ''; + } + }); + + function showError(input, msg) { + const id = input.getAttribute('id'); + const box = document.querySelector(`.error[data-error-for="${id}"]`); + if (box) box.textContent = msg || ''; + input.setAttribute('aria-invalid', msg ? 'true' : 'false'); + } + + urlInput?.addEventListener('blur', () => { + const v = urlInput.value.trim(); + if (!v) return showError(urlInput, ''); + try { new URL(v); showError(urlInput, ''); } + catch { showError(urlInput, 'Invalid URL'); } + }); + + ipInput?.addEventListener('blur', () => { + const v = ipInput.value.trim(); + if (!v) return showError(ipInput, ''); + const ok = /^\b\d{1,3}(?:\.\d{1,3}){3}\b$/.test(v); + showError(ipInput, ok ? '' : 'Invalid IPv4 address'); + }); + + + (function init() { + const serverLink = out?.value?.trim(); + if (serverLink) { + $('.result-box')?.setAttribute('data-state', 'ready'); + openBtn?.setAttribute('aria-disabled', 'false'); + } else { + updatePreview(); + } + })(); + + document.addEventListener('keydown', (e) => { + if ((e.ctrlKey || e.metaKey) && e.key.toLowerCase() === 'c') { + const text = out?.value?.trim(); if (!text) return; + navigator.clipboard?.writeText(text).then(() => toast('Link copied')); + } + }); +})(); + +function updateThemeColor() { + const meta = document.querySelector('meta[name="theme-color"]'); + if (!meta) return; + const isLight = document.documentElement.getAttribute('data-theme') === 'light'; + meta.setAttribute('content', isLight ? 
'#f6f8fb' : '#0f1115'); +} + +const _setTheme = setTheme; +setTheme = function (t) { _setTheme(t); updateThemeColor(); }; +document.addEventListener('DOMContentLoaded', updateThemeColor); diff --git a/static/js/stats.js b/static/js/stats.js new file mode 100644 index 0000000..79364d8 --- /dev/null +++ b/static/js/stats.js @@ -0,0 +1,11 @@ +document.addEventListener('input', (e) => { + const el = e.target.closest('[data-action="filter-table"]'); + if (!el) return; + const table = document.querySelector(el.getAttribute('data-target') || ''); + if (!table) return; + const q = (el.value || '').toLowerCase(); + table.querySelectorAll('tbody tr').forEach(tr => { + const text = (tr.innerText || tr.textContent || '').toLowerCase(); + tr.style.display = text.includes(q) ? '' : 'none'; + }); +}); \ No newline at end of file diff --git a/templates/error.html b/templates/error.html index 3e82c06..53ec862 100644 --- a/templates/error.html +++ b/templates/error.html @@ -1,87 +1,87 @@ - + + - - - Error {{ error.code }} - + + + Error {{ error.code or 500 }} + + + + -
-    Error {{ error.code }}
-    {{ error.description }}
-    ← Return to Home Page
+    Error {{ error.code or 500 }}
+    {% if (error.code or 500) == 400 %}Bad request
+    {% elif (error.code or 500) == 403 %}Forbidden
+    {% elif (error.code or 500) == 404 %}Not found
+    {% elif (error.code or 500) == 413 %}Payload too large
+    {% elif (error.code or 500) == 415 %}Unsupported media type
+    {% elif (error.code or 500) == 500 %}Internal server error
+    {% else %}Something went wrong
+    {% endif %}
+    {{ (error.description|string)|e }}
+    Go home
+    Error details
+    click to expand
+code: {{ error.code or 500 }}
+message: {{ (error.description|string) }}
+path: {{ request.path if request else '/' }}
+method: {{ request.method if request else 'GET' }}
+user_ip: {{ request.remote_addr if request else '' }}
+user_agent: {{ request.headers.get('User-Agent') if request else '' }}
+    © 2025 linuxiarz.pl
+    Your IP: {{ request.remote_addr if request else '' }}
\ No newline at end of file
diff --git a/templates/form.html b/templates/form.html
index 985de5c..9885f4a 100644
--- a/templates/form.html
+++ b/templates/form.html
@@ -1,292 +1,152 @@
    Hosts Converter

    Hosts File Converter
    {% if generated_link %}
    Link to MikroTik/Adguard:
- {% endif %} - -