clean code
app.py
@@ -13,7 +13,16 @@ from typing import Optional
from datetime import timezone
import json as _json

from flask import Flask, request, render_template, abort, jsonify, stream_with_context, g, Response
from flask import (
    Flask,
    request,
    render_template,
    abort,
    jsonify,
    stream_with_context,
    g,
    Response,
)
from flask_compress import Compress
from flask_limiter import Limiter
import config
@@ -24,35 +33,45 @@ app.config["MAX_CONTENT_LENGTH"] = config.MAX_CONTENT_LENGTH
app.config["SECRET_KEY"] = config.SECRET_KEY
app.debug = config.FLASK_DEBUG


def build_redis():
    if config.REDIS_URL:
        return redis.Redis.from_url(config.REDIS_URL)
    return redis.Redis(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.REDIS_DB)
    return redis.Redis(
        host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.REDIS_DB
    )


redis_client = build_redis()


def get_client_ip():
    xff = request.headers.get("X-Forwarded-For", "").split(",")
    if xff and xff[0].strip():
        return xff[0].strip()
    return request.remote_addr


limiter = Limiter(
    key_func=get_client_ip,
    app=app,
    default_limits=[config.RATE_LIMIT_DEFAULT],
    storage_uri=config.REDIS_URL
    storage_uri=config.REDIS_URL,
)

Compress(app)


@app.before_request
def track_request_data():
    g.start_time = time.perf_counter()
    redis_client.incr(f"stats:user_agents:{quote(request.headers.get('User-Agent', 'Unknown'), safe='')}")
    redis_client.incr(
        f"stats:user_agents:{quote(request.headers.get('User-Agent', 'Unknown'), safe='')}"
    )
    redis_client.incr(f"stats:client_ips:{get_client_ip()}")
    redis_client.incr(f"stats:methods:{request.method}")


@app.after_request
def add_cache_headers(response):
    if request.path.startswith("/static/"):
@@ -85,6 +104,7 @@ def after_request(response):
        redis_client.set("stats:processing_time_max", elapsed)
    return response


@app.template_filter("datetimeformat")
def datetimeformat_filter(value, format="%Y-%m-%d %H:%M"):
    try:
@@ -93,6 +113,7 @@ def datetimeformat_filter(value, format="%Y-%m-%d %H:%M"):
    except (ValueError, AttributeError):
        return value


def basic_auth_required(realm: str, user: str, password: str):
    def decorator(f):
        @wraps(f)
@@ -100,29 +121,43 @@ def basic_auth_required(realm: str, user: str, password: str):
            if not config.STATS_BASIC_AUTH_ENABLED:
                return f(*args, **kwargs)

            auth = request.authorization
            auth = request.authorization

            if auth and auth.type == "basic" and auth.username == user and auth.password == password:
            if (
                auth
                and auth.type == "basic"
                and auth.username == user
                and auth.password == password
            ):
                return f(*args, **kwargs)

            resp = Response(status=401)
            resp.headers["WWW-Authenticate"] = f'Basic realm="{realm}"'
            return resp

        return wrapper

    return decorator


def cache_key(source_url, ip):
    return f"cache:{source_url}:{ip}"


def should_ignore_domain(domain):
    return domain.startswith(".") or any(ch in domain for ch in ["~", "=", "$", "'", "^", "_", ">", "<", ":"])
    return domain.startswith(".") or any(
        ch in domain for ch in ["~", "=", "$", "'", "^", "_", ">", "<", ":"]
    )


def should_ignore_line(line):
    return any(sym in line for sym in ["<", ">", "##", "###", "div", "span"])


def is_valid_domain(domain):
    return bool(re.compile(r"^(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}$").match(domain))


def is_private_client_ip() -> bool:
    ip = get_client_ip()
    try:
@@ -130,6 +165,7 @@ def is_private_client_ip() -> bool:
    except Exception:
        return False


def convert_host_line(line: str, target_ip: str):
    # quick rejections
    if not line:
@@ -178,26 +214,32 @@ def convert_host_line(line: str, target_ip: str):
    return None


def build_etag(up_etag: Optional[str], up_lastmod: Optional[str], target_ip: str) -> str:
def build_etag(
    up_etag: Optional[str], up_lastmod: Optional[str], target_ip: str
) -> str:
    base = (up_etag or up_lastmod or "no-upstream") + f"::{target_ip}::v1"
    return 'W/"' + hashlib.sha1(base.encode("utf-8")).hexdigest() + '"'


def cache_headers(etag: str, up_lm: Optional[str]):
    headers = {
        "ETag": etag,
        "Vary": "Accept-Encoding",
        "Content-Type": "text/plain; charset=utf-8",
        "X-Content-Type-Options": "nosniff",
        #"Content-Disposition": "inline; filename=converted_hosts.txt",
        # "Content-Disposition": "inline; filename=converted_hosts.txt",
    }
    if config.CACHE_ENABLED:
        headers["Cache-Control"] = f"public, s-maxage={config.CACHE_S_MAXAGE}, max-age={config.CACHE_MAX_AGE}"
        headers["Cache-Control"] = (
            f"public, s-maxage={config.CACHE_S_MAXAGE}, max-age={config.CACHE_MAX_AGE}"
        )
    else:
        headers["Cache-Control"] = "no-store"
    if up_lm:
        headers["Last-Modified"] = up_lm
    return headers


def validate_and_normalize_url(url):
    parsed = urlparse(url)
    if not parsed.scheme:
@@ -207,6 +249,7 @@ def validate_and_normalize_url(url):
        raise ValueError("Missing host in URL")
    return parsed.geturl()


def track_url_request(url):
    redis_client.incr(f"stats:url_requests:{quote(url, safe='')}")
@@ -220,6 +263,7 @@ def add_recent_link(url, target_ip):
    pipe.execute()
    redis_client.incr("stats:recent_links_added")


def get_recent_links():
    links = redis_client.lrange("recent_links", 0, 9)
    out = []
@@ -231,6 +275,7 @@ def get_recent_links():
            out.append((parts[0], parts[1], "127.0.0.1"))
    return out


def get_hostname(ip):
    key = f"reverse_dns:{ip}"
    cached = redis_client.get(key)
@@ -243,20 +288,29 @@ def get_hostname(ip):
    redis_client.setex(key, 3600, hostname)
    return hostname


def add_recent_convert():
    ip = get_client_ip()
    hostname = get_hostname(ip)
    ua = request.headers.get("User-Agent", "Unknown")
    time_str = datetime.now().astimezone().isoformat()
    url = request.full_path
    data = {"url": url, "ip": ip, "hostname": hostname, "time": time_str, "user_agent": ua}
    data = {
        "url": url,
        "ip": ip,
        "hostname": hostname,
        "time": time_str,
        "user_agent": ua,
    }
    redis_client.lpush("recent_converts", json.dumps(data))
    redis_client.ltrim("recent_converts", 0, 99)
    redis_client.ltrim("recent_converts", 0, 99)


@app.route("/favicon.ico", methods=["GET"])
def favicon():
    return Response(status=204)


@app.route("/", methods=["GET"])
def index():
    generated_link = None
@@ -268,7 +322,9 @@ def index():
        try:
            normalized = validate_and_normalize_url(unquote(url_param))
            encoded = quote(normalized, safe="")
            generated_link = urljoin(request.host_url, f"convert?url={encoded}&ip={target_ip}")
            generated_link = urljoin(
                request.host_url, f"convert?url={encoded}&ip={target_ip}"
            )
            add_recent_link(normalized, target_ip)
            recent_links = get_recent_links()
        except Exception as e:
@@ -292,6 +348,7 @@ def index():
        }
    )


@app.route("/convert")
@limiter.limit(config.RATE_LIMIT_CONVERT)
def convert():
@@ -304,11 +361,22 @@ def convert():
        except Exception:
            return False

    requested_debug = request.args.get("debug", "").lower() in ("1","true","t","yes","y","on")
    requested_debug = request.args.get("debug", "").lower() in (
        "1",
        "true",
        "t",
        "yes",
        "y",
        "on",
    )
    debug_allowed = False
    if config.DEBUG_ENABLE:
        header_key = request.headers.get("X-Debug-Key", "")
        if config.DEBUG_KEY and header_key and hmac.compare_digest(header_key, config.DEBUG_KEY):
        if (
            config.DEBUG_KEY
            and header_key
            and hmac.compare_digest(header_key, config.DEBUG_KEY)
        ):
            debug_allowed = True
        elif is_private_client_ip():
            debug_allowed = True
@@ -378,7 +446,9 @@ def convert():
        d("Sending GET to upstream")
        d(f"Headers: {req_headers or '{}'}")

    r = requests.get(normalized_url, headers=req_headers, stream=True, timeout=(10, 60))
    r = requests.get(
        normalized_url, headers=req_headers, stream=True, timeout=(10, 60)
    )

    ct = r.headers.get("Content-Type", "")
    if debug_mode:
@@ -396,7 +466,9 @@ def convert():
        abort(415, description="Unsupported Media Type")

    if r.status_code == 304:
        etag = build_etag(r.headers.get("ETag"), r.headers.get("Last-Modified"), target_ip)
        etag = build_etag(
            r.headers.get("ETag"), r.headers.get("Last-Modified"), target_ip
        )
        if debug_mode:
            d("Upstream 304 – returning 304")
        r.close()
@@ -421,7 +493,9 @@ def convert():
        if debug_mode:
            yield "\n".join(debug_lines) + "\n"
            debug_lines.clear()
        for line in r.iter_lines(decode_unicode=True, chunk_size=config.READ_CHUNK):
        for line in r.iter_lines(
            decode_unicode=True, chunk_size=config.READ_CHUNK
        ):
            if line is None:
                continue
            lines_read += 1
@@ -511,7 +585,6 @@ def stats():
        else:
            stats_data[key_str] = value


    recent_converts = []
    for entry in redis_client.lrange("recent_converts", 0, 99):
        try:
@@ -522,18 +595,28 @@ def stats():
    # Detailed aggregates
    processing_time_total = float(redis_client.get("stats:processing_time_total") or 0)
    processing_time_count = int(redis_client.get("stats:processing_time_count") or 0)
    avg_processing_time = processing_time_total / processing_time_count if processing_time_count > 0 else 0
    avg_processing_time = (
        processing_time_total / processing_time_count
        if processing_time_count > 0
        else 0
    )

    content_size_total = int(redis_client.get("stats:content_size_total") or 0)
    content_size_count = int(redis_client.get("stats:content_size_count") or 0)
    avg_content_size = content_size_total / content_size_count if content_size_count > 0 else 0
    avg_content_size = (
        content_size_total / content_size_count if content_size_count > 0 else 0
    )

    detailed_stats = {
        "processing_time_total_sec": processing_time_total,
        "processing_time_count": processing_time_count,
        "processing_time_avg_sec": avg_processing_time,
        "processing_time_min_sec": float(redis_client.get("stats:processing_time_min") or 0),
        "processing_time_max_sec": float(redis_client.get("stats:processing_time_max") or 0),
        "processing_time_min_sec": float(
            redis_client.get("stats:processing_time_min") or 0
        ),
        "processing_time_max_sec": float(
            redis_client.get("stats:processing_time_max") or 0
        ),
        "content_size_total_bytes": content_size_total,
        "content_size_count": content_size_count,
        "content_size_avg_bytes": avg_content_size,
@@ -602,18 +685,28 @@ def stats_json():
    processing_time_total = float(redis_client.get("stats:processing_time_total") or 0)
    processing_time_count = int(redis_client.get("stats:processing_time_count") or 0)
    avg_processing_time = processing_time_total / processing_time_count if processing_time_count > 0 else 0
    avg_processing_time = (
        processing_time_total / processing_time_count
        if processing_time_count > 0
        else 0
    )

    content_size_total = int(redis_client.get("stats:content_size_total") or 0)
    content_size_count = int(redis_client.get("stats:content_size_count") or 0)
    avg_content_size = content_size_total / content_size_count if content_size_count > 0 else 0
    avg_content_size = (
        content_size_total / content_size_count if content_size_count > 0 else 0
    )

    detailed_stats = {
        "processing_time_total_sec": processing_time_total,
        "processing_time_count": processing_time_count,
        "processing_time_avg_sec": avg_processing_time,
        "processing_time_min_sec": float(redis_client.get("stats:processing_time_min") or 0),
        "processing_time_max_sec": float(redis_client.get("stats:processing_time_max") or 0),
        "processing_time_min_sec": float(
            redis_client.get("stats:processing_time_min") or 0
        ),
        "processing_time_max_sec": float(
            redis_client.get("stats:processing_time_max") or 0
        ),
        "content_size_total_bytes": content_size_total,
        "content_size_count": content_size_count,
        "content_size_avg_bytes": avg_content_size,
@@ -642,13 +735,21 @@ def handle_errors(e):
    try:

        now_iso = datetime.now().astimezone().isoformat()
        return render_template("error.html", error=e, code=getattr(e, "code", 500), now_iso=now_iso), getattr(e, "code", 500)
        return render_template(
            "error.html", error=e, code=getattr(e, "code", 500), now_iso=now_iso
        ), getattr(e, "code", 500)
    except Exception:
        return jsonify({"error": getattr(e, "description", str(e)), "code": getattr(e, "code", 500)}), getattr(e, "code", 500)
        return jsonify(
            {
                "error": getattr(e, "description", str(e)),
                "code": getattr(e, "code", 500),
            }
        ), getattr(e, "code", 500)


if __name__ == "__main__":
    app.run(host=config.BIND_HOST, port=config.BIND_PORT)
else:
    from asgiref.wsgi import WsgiToAsgi

    asgi_app = WsgiToAsgi(app)
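For context on the change above: the module now exposes asgi_app so an ASGI server can run it, and the systemd unit below launches it through gunicorn with the UvicornWorker. A minimal sketch of exercising the /convert endpoint once the service is running; the bind address 127.0.0.1:8283 is taken from the unit file below, while the source list URL and the target ip value are illustrative assumptions, not values from the repository:

# Illustrative client call against a locally running instance (not part of this commit).
import requests

source_list = "https://example.com/hosts.txt"  # assumed upstream hosts file
resp = requests.get(
    "http://127.0.0.1:8283/convert",
    params={"url": source_list, "ip": "192.168.88.1"},  # ip: assumed router address
    timeout=30,
)
# The response carries the ETag and Cache-Control headers built in cache_headers().
print(resp.status_code, resp.headers.get("ETag"))
print(resp.text[:200])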
@@ -1,21 +1,19 @@
[Unit]
Description=ListApp - Flask application for hosts file conversion
Description=Mikrotik Adlist - Flask application for hosts file conversion
After=network-online.target redis.service
Wants=network-online.target

[Service]
User=www-data
Group=www-data
WorkingDirectory=/var/www/listapp

# Global env + overrides (optional; the leading "-" means "if the file exists")
EnvironmentFile=-/var/www/listapp/.env
# main application directory
Environment="APP_DIR=/var/www/adlist_mikrotik"
WorkingDirectory=/var/www/adlist_mikrotik
EnvironmentFile=-/var/www/adlist_mikrotik/.env
Environment="PATH=${APP_DIR}/venv/bin"

# Path to the virtualenv
Environment="PATH=/var/www/listapp/venv/bin"

# Gunicorn + UvicornWorker (ASGI)
ExecStart=/var/www/listapp/venv/bin/gunicorn \
ExecStart=${APP_DIR}/venv/bin/gunicorn \
    -k uvicorn.workers.UvicornWorker \
    --workers 4 \
    --bind 127.0.0.1:8283 \