push
This commit is contained in:
9
.gitignore
vendored
Normal file
9
.gitignore
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
venv/
|
||||
env
|
||||
.env
|
||||
bin
|
||||
include
|
||||
lib
|
||||
lib64
|
||||
__pycache__
|
||||
pyvenv.cfg
|
422
app.py
Normal file
422
app.py
Normal file
@@ -0,0 +1,422 @@
|
||||
import re
|
||||
import redis
|
||||
import requests
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import socket
|
||||
import time
|
||||
import json
|
||||
from datetime import datetime
|
||||
from flask import Flask, request, render_template, abort, jsonify, g
|
||||
from urllib.parse import urlparse, quote, unquote, urljoin
|
||||
from functools import wraps
|
||||
from flask_compress import Compress
|
||||
from flask_limiter import Limiter
|
||||
from flask_limiter.util import get_remote_address
|
||||
|
||||
app = Flask(__name__)
# Hard cap (50 MiB) used below in /convert to bound downloaded content size.
app.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024  # limit
# Shared Redis connection for stats counters, caching and history lists (db 7).
redis_client = redis.Redis(host='localhost', port=6379, db=7)
|
||||
|
||||
# Rate-limiting settings – 100 requests per minute
|
||||
def get_client_ip():
    """Return the client's real IP address, honouring proxy headers.

    The first ``X-Forwarded-For`` entry wins when present; otherwise the
    socket-level remote address is used.

    NOTE(review): the header is client-controlled and therefore spoofable
    unless a trusted reverse proxy overwrites it — confirm the deployment.
    """
    forwarded = request.headers.get('X-Forwarded-For', '')
    first_hop = forwarded.split(',')[0].strip()
    if first_hop:
        return first_hop
    return request.remote_addr
|
||||
|
||||
# Rate limiting keyed by the (proxy-aware) client IP: 100 requests/minute.
limiter = Limiter(key_func=get_client_ip, default_limits=["100 per minute"], app=app)
# Enable gzip/deflate compression of responses.
Compress(app)

# Access control for restricted endpoints (/stats): allowed source IPs and
# an optional Host name (empty string = no host-based allowance).
ALLOWED_IPS = {'127.0.0.1', '109.173.163.139'}
ALLOWED_DOMAIN = ''
|
||||
|
||||
@app.before_request
def track_request_data():
    """Start the request timer and count client IP, User-Agent and method.

    The timer value is stored on ``g`` and consumed by ``after_request``.
    """
    g.start_time = time.perf_counter()  # read back in after_request

    ua = request.headers.get('User-Agent', 'Unknown')
    # User-Agent is URL-quoted so it is safe to embed in a Redis key.
    redis_client.incr(f'stats:user_agents:{quote(ua, safe="")}')
    redis_client.incr(f'stats:client_ips:{get_client_ip()}')
    redis_client.incr(f'stats:methods:{request.method}')
|
||||
|
||||
def _update_extremum(key, elapsed, is_better):
    """Best-effort update of a min/max timing stat stored under *key*.

    Not atomic: concurrent requests can race between GET and SET, so the
    stored extremum is approximate under load (same as the original code).
    A corrupt/unparsable stored value resets to the latest measurement.
    """
    try:
        current = float(redis_client.get(key) or elapsed)
        if is_better(elapsed, current):
            redis_client.set(key, elapsed)
    except Exception:
        redis_client.set(key, elapsed)


@app.after_request
def after_request(response):
    """Measure and record per-request processing time statistics in Redis."""
    # Guard: before_request may not have run (e.g. it raised before setting
    # the timer); the original raised AttributeError here in that case.
    start = getattr(g, 'start_time', None)
    if start is None:
        return response

    elapsed = time.perf_counter() - start
    # Running totals (seconds) used by /stats to compute the average.
    redis_client.incrbyfloat('stats:processing_time_total', elapsed)
    redis_client.incr('stats:processing_time_count')

    _update_extremum('stats:processing_time_min', elapsed,
                     lambda new, cur: new < cur)
    _update_extremum('stats:processing_time_max', elapsed,
                     lambda new, cur: new > cur)

    return response
|
||||
|
||||
@app.template_filter('datetimeformat')
def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'):
    """Jinja filter: render an ISO-8601 timestamp string using *format*.

    Unparsable input is returned unchanged so templates never break.
    """
    try:
        dt = datetime.fromisoformat(value)
        return dt.strftime(format)
    except (ValueError, TypeError, AttributeError):
        # TypeError added: fromisoformat(None) (or any non-string) raised an
        # uncaught TypeError in the original, turning a render into a 500.
        return value
|
||||
|
||||
def ip_restriction(f):
    """Decorator: allow only whitelisted clients, otherwise respond 403.

    A request passes when the proxy-aware client IP is whitelisted, the Host
    matches ALLOWED_DOMAIN, or the first X-Forwarded-For hop is whitelisted.
    NOTE(review): Host and X-Forwarded-For are client-supplied headers and
    spoofable without a trusted proxy in front — confirm the deployment.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        client_ip = get_client_ip()
        host = request.host.split(':')[0]  # drop the port
        forwarded_hop = request.headers.get('X-Forwarded-For', '').split(',')[0].strip()

        if (client_ip in ALLOWED_IPS
                or host == ALLOWED_DOMAIN
                or forwarded_hop in ALLOWED_IPS):
            return f(*args, **kwargs)

        redis_client.incr('stats:errors_403')
        abort(403)
    return decorated
|
||||
|
||||
def cache_key(source_url, ip):
    """Build the Redis key under which the converted list for (url, ip) is cached."""
    return ':'.join(('cache', source_url, ip))
|
||||
|
||||
def should_ignore_domain(domain):
    """Return True when *domain* should be skipped.

    Skips names starting with a dot and names containing characters that
    never appear in a plain hostname (rule modifiers, markup leftovers).
    """
    if domain.startswith('.'):
        return True
    junk = set("~=$'^_><:")
    return any(ch in junk for ch in domain)
|
||||
|
||||
def should_ignore_line(line):
    """Return True for lines that look like markup rather than host rules.

    Matches HTML angle brackets, markdown headings and common tag names.
    """
    # '###' was removed from the original marker list: any line containing
    # '###' necessarily contains '##', so the extra check was redundant.
    return any(marker in line for marker in ('<', '>', '##', 'div', 'span'))
|
||||
|
||||
def is_valid_domain(domain):
    """Check basic domain syntax: one or more dotted labels plus a 2+ letter TLD."""
    return re.match(r'^(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}$', domain) is not None
|
||||
|
||||
def convert_hosts(content, target_ip):
    """Convert a hosts file / AdGuard DNS rule list into `target_ip domain` lines.

    Supports two input shapes: AdGuard rules (``||domain^...``) and classic
    hosts entries (``<ip> <domain> ...``). Lines that look like markup,
    comments or malformed domains are skipped; syntactically invalid domains
    are collected and printed for diagnostics.
    """
    adguard_rule = re.compile(r'^\|\|([^\^]+)\^.*')
    converted = []
    invalid_lines = []

    for raw in content.splitlines():
        entry = raw.strip()

        # Blanks, comments and markup-looking lines carry no host data.
        if not entry or entry[0] in ('!', '#', '/') or should_ignore_line(entry):
            continue

        # AdGuard DNS rule: ||domain^...
        rule = adguard_rule.match(entry)
        if rule:
            domain = rule.group(1)
            if should_ignore_domain(domain):
                continue
            if is_valid_domain(domain):
                converted.append(f"{target_ip} {domain}")
            else:
                invalid_lines.append(entry)
            continue

        # Classic hosts entry: replace the leading IP with target_ip.
        fields = entry.split()
        if len(fields) > 1:
            candidate = fields[1]
            if should_ignore_domain(candidate):
                continue
            if is_valid_domain(candidate):
                converted.append(re.sub(r'^\S+', target_ip, entry, count=1))
            else:
                invalid_lines.append(entry)

    if invalid_lines:
        print("Niepoprawne linie:")
        for bad in invalid_lines:
            print(bad)

    return '\n'.join(converted)
|
||||
|
||||
|
||||
def validate_and_normalize_url(url):
    """Validate *url* and normalise it to an absolute http(s) URL.

    A missing scheme defaults to ``https``.

    Raises:
        ValueError: when the URL has no host, or uses a non-http(s) scheme.
            The scheme whitelist is a security fix: this value is fetched
            server-side on behalf of untrusted callers, so ftp://, file://
            and similar schemes must be rejected. Callers already treat
            ValueError as a 400, so the error path is unchanged.
    """
    parsed = urlparse(url)
    if not parsed.scheme:
        url = f'https://{url}'
        parsed = urlparse(url)
    if parsed.scheme not in ('http', 'https'):
        raise ValueError(f"Unsupported URL scheme: {parsed.scheme}")
    if not parsed.netloc:
        raise ValueError("Missing host in URL")
    return parsed.geturl()
|
||||
|
||||
def track_url_request(url):
    """Bump the per-URL request counter (URL-quoted for Redis-key safety)."""
    redis_client.incr(f"stats:url_requests:{quote(url, safe='')}")
|
||||
|
||||
def add_recent_link(url, target_ip):
    """Push a generated link onto the recent-links history, keeping the newest 10."""
    entry = f"{datetime.now().isoformat()}|{url}|{target_ip}"

    # Pipeline so push+trim go to Redis in one round trip.
    with redis_client.pipeline() as pipe:
        pipe.lpush("recent_links", entry)
        pipe.ltrim("recent_links", 0, 9)
        pipe.execute()

    redis_client.incr('stats:recent_links_added')
|
||||
|
||||
def get_recent_links():
    """Return up to 10 (timestamp, url, target_ip) tuples from the history.

    Entries written before the target-IP field existed get a default of
    127.0.0.1; anything shorter is silently dropped.
    """
    history = []
    for raw in redis_client.lrange("recent_links", 0, 9):
        fields = raw.decode().split("|")
        if len(fields) >= 3:
            history.append((fields[0], fields[1], fields[2]))
        elif len(fields) == 2:
            history.append((fields[0], fields[1], "127.0.0.1"))
    return history
|
||||
|
||||
def get_hostname(ip):
    """Reverse-DNS lookup for *ip*, with the result cached in Redis for 1 h."""
    key = f"reverse_dns:{ip}"
    cached = redis_client.get(key)
    if cached:
        return cached.decode()

    try:
        hostname = socket.gethostbyaddr(ip)[0]
    except Exception:
        # Lookup failed or IP is unresolvable — fall back to the bare IP.
        hostname = ip

    redis_client.setex(key, 3600, hostname)  # cache for one hour
    return hostname
|
||||
|
||||
# Helper that logs request metadata for the /convert endpoint.
def add_recent_convert():
    """Append the current request's metadata to the recent-converts list (max 50)."""
    ip = get_client_ip()
    record = {
        "url": request.full_path,  # full path including the query string
        "ip": ip,
        "hostname": get_hostname(ip),
        "time": datetime.now().astimezone().isoformat(),
        "user_agent": request.headers.get('User-Agent', 'Unknown'),
    }
    redis_client.lpush("recent_converts", json.dumps(record))
    redis_client.ltrim("recent_converts", 0, 49)
|
||||
|
||||
@app.route('/', methods=['GET'])
def index():
    """Render the main form; with ?url= present, also build a /convert link.

    A supplied URL is validated, turned into an absolute /convert link and
    recorded in the recent-links history. Validation failures are logged
    and the page renders without a generated link.
    """
    client_ip = get_client_ip()
    user_agent = request.headers.get('User-Agent', 'Unknown')
    url_param = request.args.get('url')
    target_ip = request.args.get('ip', '127.0.0.1')
    generated_link = None
    recent_links = get_recent_links()

    if url_param:
        try:
            normalized_url = validate_and_normalize_url(unquote(url_param))
            encoded_url = quote(normalized_url, safe='')
            generated_link = urljoin(
                request.host_url,
                f"convert?url={encoded_url}&ip={target_ip}"
            )
            add_recent_link(normalized_url, target_ip)
            recent_links = get_recent_links()  # refresh so the new link shows up
        except Exception as e:
            app.logger.error(f"Error processing URL: {str(e)}")

    return render_template('form.html',
                           generated_link=generated_link,
                           recent_links=recent_links,
                           client_ip=client_ip,
                           user_agent=user_agent)
|
||||
@app.route('/convert')
@limiter.limit("100 per minute")
async def convert():
    """Asynchronous conversion endpoint with content-type verification.

    Fetches the hosts/AdGuard list named by ?url=, converts every entry to
    point at ?ip= (default 127.0.0.1), caches the result in Redis for 12 h
    and returns it as text/plain. Errors map to 400 (bad URL), 413 (too
    large), 415 (non-text), 504 (read timeout) and 500 (fetch failure).
    """
    try:
        redis_client.incr('stats:convert_requests')
        # Log request metadata for the /convert history shown on /stats.
        add_recent_convert()

        encoded_url = request.args.get('url')
        if not encoded_url:
            redis_client.incr('stats:errors_400')
            abort(400, description="Missing URL parameter")

        decoded_url = unquote(encoded_url)
        normalized_url = validate_and_normalize_url(decoded_url)
        target_ip = request.args.get('ip', '127.0.0.1')

        # Record URL-level statistics.
        track_url_request(normalized_url)
        redis_client.incr(f'stats:target_ips:{target_ip}')

        # Cache check: serve a previously converted list when available.
        cached = redis_client.get(cache_key(normalized_url, target_ip))
        if cached:
            redis_client.incr('stats:cache_hits')
            return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'}

        redis_client.incr('stats:cache_misses')

        # Fetch the resource asynchronously with aiohttp.
        # NOTE(review): timeout=60 is aiohttp's total-operation timeout; a
        # timeout during connect raises asyncio.TimeoutError, which is only
        # caught around the chunked read below — confirm this is intended.
        async with aiohttp.ClientSession() as session:
            async with session.get(normalized_url, timeout=60) as response:
                # The Content-Type must contain "text" to be convertible.
                content_type = response.headers.get("Content-Type", "")
                if "text" not in content_type:
                    abort(415, description="Unsupported Media Type")
                content = b""
                # Stream in 4 KiB chunks, enforcing the size cap as we go.
                while True:
                    try:
                        chunk = await response.content.read(4096)
                    except asyncio.TimeoutError:
                        abort(504, description="Timeout reading remote data")
                    if not chunk:
                        break
                    content += chunk
                    if len(content) > app.config['MAX_CONTENT_LENGTH']:
                        redis_client.incr('stats:errors_413')
                        abort(413)

                # Record the size of the downloaded content.
                content_size = len(content)
                redis_client.incrby('stats:content_size_total', content_size)
                redis_client.incr('stats:content_size_count')

                converted = convert_hosts(content.decode('utf-8'), target_ip)
                redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted)  # 12h cache
                redis_client.incr('stats:conversions_success')
                return converted, 200, {'Content-Type': 'text/plain'}

    except aiohttp.ClientError as e:
        app.logger.error(f"Request error: {str(e)}")
        redis_client.incr('stats:errors_500')
        abort(500)
    except ValueError as e:
        app.logger.error(f"URL validation error: {str(e)}")
        redis_client.incr('stats:errors_400')
        abort(400)
|
||||
|
||||
|
||||
@app.route('/stats')
@ip_restriction
def stats():
    """Statistics endpoint: aggregate all Redis-backed counters into JSON.

    Scans every ``stats:*`` key, bucketing per-IP / per-URL / per-UA
    counters into their own dicts, then adds the recent /convert history
    and derived timing/size averages. Restricted by @ip_restriction.
    """
    stats_data = {}
    target_ips = {}
    url_requests = {}
    user_agents = {}
    client_ips = {}

    # Aggregate counters from Redis; key layout is "stats:<bucket>:<value>".
    for key in redis_client.scan_iter("stats:*"):
        key_str = key.decode()
        value = redis_client.get(key).decode()

        if key_str.startswith('stats:target_ips:'):
            ip = key_str.split(':', 2)[2]
            target_ips[ip] = value
        elif key_str.startswith('stats:url_requests:'):
            # URL was stored percent-encoded; decode for display.
            url = unquote(key_str.split(':', 2)[2])
            url_requests[url] = value
        elif key_str.startswith('stats:user_agents:'):
            ua = unquote(key_str.split(':', 2)[2])
            user_agents[ua] = value
        elif key_str.startswith('stats:client_ips:'):
            ip = key_str.split(':', 2)[2]
            client_ips[ip] = value
        else:
            # Plain scalar counters (cache hits, error counts, ...).
            stats_data[key_str] = value

    # Last 50 requests made to the /convert endpoint.
    recent_converts = []
    convert_entries = redis_client.lrange("recent_converts", 0, 49)
    for entry in convert_entries:
        try:
            data = json.loads(entry.decode())
            recent_converts.append(data)
        except Exception:
            pass  # skip malformed history entries rather than failing /stats

    # Average request processing time (seconds).
    processing_time_total = float(redis_client.get('stats:processing_time_total') or 0)
    processing_time_count = int(redis_client.get('stats:processing_time_count') or 0)
    avg_processing_time = processing_time_total / processing_time_count if processing_time_count > 0 else 0

    # Average downloaded content size for /convert (bytes).
    content_size_total = int(redis_client.get('stats:content_size_total') or 0)
    content_size_count = int(redis_client.get('stats:content_size_count') or 0)
    avg_content_size = content_size_total / content_size_count if content_size_count > 0 else 0

    # Extended performance / data-size statistics.
    detailed_stats = {
        "processing_time_total_sec": processing_time_total,
        "processing_time_count": processing_time_count,
        "processing_time_avg_sec": avg_processing_time,
        "processing_time_min_sec": float(redis_client.get('stats:processing_time_min') or 0),
        "processing_time_max_sec": float(redis_client.get('stats:processing_time_max') or 0),
        "content_size_total_bytes": content_size_total,
        "content_size_count": content_size_count,
        "content_size_avg_bytes": avg_content_size
    }

    # Response structure: flat counters first, then the bucketed dicts.
    response_data = {
        **stats_data,
        'target_ips': target_ips,
        'url_requests': url_requests,
        'user_agents': user_agents,
        'client_ips': client_ips,
        'recent_converts': recent_converts,
        'detailed_stats': detailed_stats
    }

    return jsonify(response_data)
|
||||
|
||||
@app.errorhandler(400)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(413)
@app.errorhandler(415)
@app.errorhandler(500)
def handle_errors(e):
    """Render the shared error page for every registered HTTP error code."""
    return render_template('error.html', error=e), e.code
|
||||
|
||||
# When the app is run directly, use Flask's built-in development server.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8283)
# Otherwise (e.g. when launched via Gunicorn/Uvicorn) wrap the WSGI app in
# an ASGI adapter so the async /convert view can be served.
else:
    from asgiref.wsgi import WsgiToAsgi
    asgi_app = WsgiToAsgi(app)
|
259
app_1.py
Normal file
259
app_1.py
Normal file
@@ -0,0 +1,259 @@
|
||||
import re
|
||||
import redis
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from flask import Flask, request, render_template, abort, jsonify
|
||||
from urllib.parse import urlparse, quote, unquote, urljoin
|
||||
from functools import wraps
|
||||
|
||||
app = Flask(__name__)
# Hard cap (2 GiB) on downloaded content size, enforced in /convert.
app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024  # limit
# Shared Redis connection for stats counters, caching and history (db 7).
redis_client = redis.Redis(host='localhost', port=6379, db=7)

# Access control for /stats: allowed source IPs and optional Host name
# (empty string = no host-based allowance configured).
ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'}
ALLOWED_DOMAIN = ''
|
||||
|
||||
@app.before_request
def track_request_data():
    """Track client IP and User-Agent for all requests."""
    client_ip = get_client_ip()
    user_agent = request.headers.get('User-Agent', 'Unknown')

    # Track User-Agents (URL-quoted so the UA is safe inside a Redis key).
    redis_client.incr(f'stats:user_agents:{quote(user_agent, safe="")}')

    # Track client IPs.
    redis_client.incr(f'stats:client_ips:{client_ip}')
|
||||
|
||||
def get_client_ip():
    """Get the real client IP, considering proxies.

    Prefers the first X-Forwarded-For entry; falls back to the socket
    address. NOTE(review): the header is client-controlled and spoofable
    unless a trusted reverse proxy overwrites it — confirm the deployment.
    """
    x_forwarded_for = request.headers.get('X-Forwarded-For', '').split(',')
    if x_forwarded_for and x_forwarded_for[0].strip():
        return x_forwarded_for[0].strip()
    return request.remote_addr
|
||||
|
||||
@app.template_filter('datetimeformat')
def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'):
    """Jinja filter: render an ISO-8601 timestamp string using *format*.

    Unparsable strings are returned unchanged. NOTE(review): a non-string
    value raises an uncaught TypeError — confirm templates pass strings only.
    """
    try:
        dt = datetime.fromisoformat(value)
        return dt.strftime(format)
    except (ValueError, AttributeError):
        return value
|
||||
|
||||
def ip_restriction(f):
    """Decorator restricting a view to ALLOWED_IPS / ALLOWED_DOMAIN.

    NOTE(review): Host and X-Forwarded-For are client-supplied headers and
    spoofable without a trusted proxy in front — confirm the deployment.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        client_ip = get_client_ip()
        host = request.host.split(':')[0]  # drop the port

        # Any single match grants access.
        allowed_conditions = [
            client_ip in ALLOWED_IPS,
            host == ALLOWED_DOMAIN,
            request.headers.get('X-Forwarded-For', '').split(',')[0].strip() in ALLOWED_IPS
        ]

        if any(allowed_conditions):
            return f(*args, **kwargs)
        redis_client.incr('stats:errors_403')
        abort(403)
    return decorated
|
||||
|
||||
def cache_key(source_url, ip):
    """Redis key under which the converted list for (url, ip) is cached."""
    return 'cache:' + source_url + ':' + ip
|
||||
|
||||
def convert_hosts(content, target_ip):
    """Convert a hosts/AdGuard list so every entry points at *target_ip*.

    Rewrites AdGuard rules (``||domain^``) and classic hosts lines
    (``<ip> <domain> ...``) to ``target_ip domain``; blank lines, comments
    and rules carrying ``$`` modifiers are skipped.

    (The superseded regex-only implementation that used to sit above this
    function as commented-out code has been removed.)
    """
    converted = []

    for line in content.splitlines():
        line = line.strip()

        # Skip empty lines, comments and AdGuard rules with modifiers.
        if not line or line[0] in ('!', '#', '/') or '$' in line:
            continue

        # AdGuard domains: ||example.com^ or |example.com^
        if line.startswith(('||', '|')):
            domain = line.split('^')[0].lstrip('|')
            # Crude sanity check: plausible length plus an interior dot.
            if 1 < len(domain) <= 253 and '.' in domain[1:-1]:
                converted.append(f"{target_ip} {domain}")
            continue

        # Classic hosts format: swap the leading IPv4 address for target_ip.
        if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+', line):
            converted.append(re.sub(r'^\S+', target_ip, line, count=1))

    return '\n'.join(converted)
|
||||
|
||||
def validate_and_normalize_url(url):
    """Validate and normalize the input URL.

    Prepends https:// when no scheme is given; raises ValueError when the
    result has no host. NOTE(review): the scheme itself is not restricted,
    so non-http(s) URLs pass validation — confirm the downstream fetch
    rejects them.
    """
    parsed = urlparse(url)
    if not parsed.scheme:
        url = f'https://{url}'
        parsed = urlparse(url)
    if not parsed.netloc:
        raise ValueError("Missing host in URL")
    return parsed.geturl()
|
||||
|
||||
def track_url_request(url):
    """Track requests for specific URLs (URL-quoted for Redis-key safety)."""
    redis_key = f"stats:url_requests:{quote(url, safe='')}"
    redis_client.incr(redis_key)
|
||||
|
||||
def add_recent_link(url, target_ip):
    """Add to the recent-links history, keeping only the newest 10."""
    timestamp = datetime.now().isoformat()
    # Pipe-separated record: timestamp|url|target_ip (parsed by get_recent_links).
    link_data = f"{timestamp}|{url}|{target_ip}"

    # Pipeline so push+trim go to Redis in one round trip.
    with redis_client.pipeline() as pipe:
        pipe.lpush("recent_links", link_data)
        pipe.ltrim("recent_links", 0, 9)
        pipe.execute()
    redis_client.incr('stats:recent_links_added')
|
||||
|
||||
def get_recent_links():
    """Get the last 10 recent links as (timestamp, url, target_ip) tuples.

    Older two-field records get a default target IP of 127.0.0.1; anything
    shorter is silently dropped.
    """
    links = redis_client.lrange("recent_links", 0, 9)
    parsed_links = []
    for link in links:
        parts = link.decode().split("|")
        if len(parts) >= 3:
            parsed_links.append((parts[0], parts[1], parts[2]))
        elif len(parts) == 2:
            parsed_links.append((parts[0], parts[1], "127.0.0.1"))
    return parsed_links
|
||||
|
||||
@app.route('/', methods=['GET'])
def index():
    """Main form page.

    When ?url= is supplied, validates it, builds an absolute /convert link
    and stores the URL in the recent-links history. Validation failures are
    logged and the page renders without a generated link.
    """
    generated_link = None
    recent_links = get_recent_links()
    url_param = request.args.get('url')
    target_ip = request.args.get('ip', '127.0.0.1')

    if url_param:
        try:
            normalized_url = validate_and_normalize_url(unquote(url_param))
            encoded_url = quote(normalized_url, safe='')
            generated_link = urljoin(
                request.host_url,
                f"convert?url={encoded_url}&ip={target_ip}"
            )
            add_recent_link(normalized_url, target_ip)
            # Refresh so the newly added link shows up immediately.
            recent_links = get_recent_links()
        except Exception as e:
            app.logger.error(f"Error processing URL: {str(e)}")

    return render_template('form.html',
                           generated_link=generated_link,
                           recent_links=recent_links)
|
||||
|
||||
@app.route('/convert')
def convert():
    """Conversion endpoint.

    Fetches the hosts list named by ?url=, converts every entry to point at
    ?ip= (default 127.0.0.1), caches the result in Redis for 12 h and
    returns it as text/plain. Errors map to 400 (bad URL), 413 (too large)
    and 500 (fetch failure).
    """
    try:
        redis_client.incr('stats:convert_requests')
        encoded_url = request.args.get('url')

        if not encoded_url:
            redis_client.incr('stats:errors_400')
            abort(400, description="Missing URL parameter")

        decoded_url = unquote(encoded_url)
        normalized_url = validate_and_normalize_url(decoded_url)
        target_ip = request.args.get('ip', '127.0.0.1')

        # Track statistics
        track_url_request(normalized_url)
        redis_client.incr(f'stats:target_ips:{target_ip}')

        # Check cache
        cached = redis_client.get(cache_key(normalized_url, target_ip))
        if cached:
            redis_client.incr('stats:cache_hits')
            return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'}

        redis_client.incr('stats:cache_misses')

        # Fetch and process. The context manager releases the streamed
        # connection even when we abort mid-download — the original leaked
        # the connection on the 413 path.
        with requests.get(normalized_url, stream=True, timeout=15) as response:
            response.raise_for_status()

            content = b''
            for chunk in response.iter_content(2048):
                content += chunk
                if len(content) > app.config['MAX_CONTENT_LENGTH']:
                    redis_client.incr('stats:errors_413')
                    abort(413)

        converted = convert_hosts(content.decode('utf-8'), target_ip)
        redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted)  # 12h cache
        redis_client.incr('stats:conversions_success')
        return converted, 200, {'Content-Type': 'text/plain'}

    except requests.RequestException as e:
        app.logger.error(f"Request error: {str(e)}")
        redis_client.incr('stats:errors_500')
        abort(500)
    except ValueError as e:
        app.logger.error(f"URL validation error: {str(e)}")
        redis_client.incr('stats:errors_400')
        abort(400)
|
||||
|
||||
@app.route('/stats')
@ip_restriction
def stats():
    """Statistics endpoint: aggregate all Redis counters into a JSON response."""
    stats_data = {}
    target_ips = {}
    url_requests = {}
    user_agents = {}
    client_ips = {}

    # Aggregate stats from Redis; key layout is "stats:<bucket>:<value>".
    for key in redis_client.scan_iter("stats:*"):
        key_str = key.decode()
        value = redis_client.get(key).decode()

        if key_str.startswith('stats:target_ips:'):
            ip = key_str.split(':', 2)[2]
            target_ips[ip] = value
        elif key_str.startswith('stats:url_requests:'):
            # URLs were stored percent-encoded; decode for display.
            url = unquote(key_str.split(':', 2)[2])
            url_requests[url] = value
        elif key_str.startswith('stats:user_agents:'):
            ua = unquote(key_str.split(':', 2)[2])
            user_agents[ua] = value
        elif key_str.startswith('stats:client_ips:'):
            ip = key_str.split(':', 2)[2]
            client_ips[ip] = value
        else:
            # Plain scalar counters (cache hits, error counts, ...).
            stats_data[key_str] = value

    # Structure response: flat counters first, then the bucketed dicts.
    response_data = {
        **stats_data,
        'target_ips': target_ips,
        'url_requests': url_requests,
        'user_agents': user_agents,
        'client_ips': client_ips
    }

    return jsonify(response_data)
|
||||
|
||||
@app.errorhandler(400)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(413)
@app.errorhandler(500)
def handle_errors(e):
    """Render the shared error page for every registered HTTP error code."""
    return render_template('error.html', error=e), e.code
|
||||
|
||||
if __name__ == '__main__':
    # Development entry point: bind on all interfaces, port 8283.
    app.run(host='0.0.0.0', port=8283)
|
350
app_gpt.py
Normal file
350
app_gpt.py
Normal file
@@ -0,0 +1,350 @@
|
||||
import re
|
||||
import redis
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from flask import Flask, request, render_template, abort, jsonify, g
|
||||
from urllib.parse import urlparse, quote, unquote, urljoin
|
||||
from functools import wraps
|
||||
import json
|
||||
import socket
|
||||
import time
|
||||
|
||||
app = Flask(__name__)
# Hard cap (2 GiB) on downloaded content size.
app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024  # limit
# Shared Redis connection for stats counters, caching and history (db 7).
redis_client = redis.Redis(host='localhost', port=6379, db=7)

# Access control: allowed source IPs and optional Host name
# (empty string = no host-based allowance configured).
ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'}
ALLOWED_DOMAIN = ''
|
||||
|
||||
@app.before_request
def track_request_data():
    """Record client IP, User-Agent and HTTP method; start the request timer."""
    g.start_time = time.perf_counter()  # consumed by after_request
    client_ip = get_client_ip()
    user_agent = request.headers.get('User-Agent', 'Unknown')
    method = request.method

    # Count User-Agent occurrences (URL-quoted to be Redis-key safe).
    redis_client.incr(f'stats:user_agents:{quote(user_agent, safe="")}')
    # Count client IPs.
    redis_client.incr(f'stats:client_ips:{client_ip}')
    # Count HTTP methods.
    redis_client.incr(f'stats:methods:{method}')
|
||||
|
||||
def get_client_ip():
    """Get the real client IP, considering proxies.

    Prefers the first X-Forwarded-For entry; falls back to the socket
    address. NOTE(review): the header is client-controlled and spoofable
    unless a trusted reverse proxy overwrites it — confirm the deployment.
    """
    x_forwarded_for = request.headers.get('X-Forwarded-For', '').split(',')
    if x_forwarded_for and x_forwarded_for[0].strip():
        return x_forwarded_for[0].strip()
    return request.remote_addr
|
||||
|
||||
@app.after_request
def after_request(response):
    """Measure and record per-request processing time statistics in Redis."""
    # Guard: before_request may not have run (e.g. it raised before setting
    # the timer); the original raised AttributeError here in that case.
    start = getattr(g, 'start_time', None)
    if start is None:
        return response

    elapsed = time.perf_counter() - start
    # Running totals (seconds) used to compute the average on /stats.
    redis_client.incrbyfloat('stats:processing_time_total', elapsed)
    redis_client.incr('stats:processing_time_count')

    # Minimum processing time (best effort: GET/SET is not atomic, so
    # concurrent requests may race; the stored extremum is approximate).
    try:
        current_min = float(redis_client.get('stats:processing_time_min') or elapsed)
        if elapsed < current_min:
            redis_client.set('stats:processing_time_min', elapsed)
    except Exception:
        # Corrupt stored value — reset to the latest measurement.
        redis_client.set('stats:processing_time_min', elapsed)

    # Maximum processing time (same caveats as above).
    try:
        current_max = float(redis_client.get('stats:processing_time_max') or elapsed)
        if elapsed > current_max:
            redis_client.set('stats:processing_time_max', elapsed)
    except Exception:
        redis_client.set('stats:processing_time_max', elapsed)

    return response
|
||||
|
||||
@app.template_filter('datetimeformat')
def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'):
    """Jinja filter: render an ISO-8601 timestamp string using *format*.

    Unparsable strings are returned unchanged. NOTE(review): a non-string
    value raises an uncaught TypeError — confirm templates pass strings only.
    """
    try:
        dt = datetime.fromisoformat(value)
        return dt.strftime(format)
    except (ValueError, AttributeError):
        return value
|
||||
|
||||
def ip_restriction(f):
    """Decorator restricting a view to ALLOWED_IPS / ALLOWED_DOMAIN.

    NOTE(review): Host and X-Forwarded-For are client-supplied headers and
    spoofable without a trusted proxy in front — confirm the deployment.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        client_ip = get_client_ip()
        host = request.host.split(':')[0]  # drop the port

        # Any single match grants access.
        allowed_conditions = [
            client_ip in ALLOWED_IPS,
            host == ALLOWED_DOMAIN,
            request.headers.get('X-Forwarded-For', '').split(',')[0].strip() in ALLOWED_IPS
        ]

        if any(allowed_conditions):
            return f(*args, **kwargs)
        redis_client.incr('stats:errors_403')
        abort(403)
    return decorated
|
||||
|
||||
def cache_key(source_url, ip):
    """Redis key under which the converted list for (url, ip) is cached."""
    return ':'.join(('cache', source_url, ip))
|
||||
|
||||
def convert_hosts(content, target_ip):
    """Rewrite a hosts/AdGuard rule list so every entry points at *target_ip*.

    Handles AdGuard rules (``||domain^``) and classic ``<ip> <domain>``
    hosts lines; blanks, comments and ``$``-modifier rules are skipped.
    """
    ipv4_line = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+')
    output = []

    for raw in content.splitlines():
        entry = raw.strip()

        # Blank lines, comments and modifier rules carry no host data.
        if not entry or entry[0] in ('!', '#', '/') or '$' in entry:
            continue

        if entry.startswith(('||', '|')):
            # AdGuard rule: take the part between the pipes and the '^'.
            candidate = entry.split('^')[0].lstrip('|')
            plausible = 1 < len(candidate) <= 253 and '.' in candidate[1:-1]
            if plausible:
                output.append(f"{target_ip} {candidate}")
            continue

        if ipv4_line.match(entry):
            # Classic hosts entry: swap the leading IP for target_ip.
            output.append(re.sub(r'^\S+', target_ip, entry, count=1))

    return '\n'.join(output)
|
||||
|
||||
def validate_and_normalize_url(url):
    """Validate and normalize the input URL.

    Prepends https:// when no scheme is given; raises ValueError when the
    result has no host. NOTE(review): the scheme itself is not restricted,
    so non-http(s) URLs pass validation — confirm the downstream fetch
    rejects them.
    """
    parsed = urlparse(url)
    if not parsed.scheme:
        url = f'https://{url}'
        parsed = urlparse(url)
    if not parsed.netloc:
        raise ValueError("Missing host in URL")
    return parsed.geturl()
|
||||
|
||||
def track_url_request(url):
    """Count a request for a specific URL (URL-quoted for Redis-key safety)."""
    redis_key = f"stats:url_requests:{quote(url, safe='')}"
    redis_client.incr(redis_key)
|
||||
|
||||
def add_recent_link(url, target_ip):
    """Add a link to the history, keeping only the 10 most recent."""
    timestamp = datetime.now().isoformat()
    # Pipe-separated record: timestamp|url|target_ip (parsed by get_recent_links).
    link_data = f"{timestamp}|{url}|{target_ip}"

    # Pipeline so push+trim go to Redis in one round trip.
    with redis_client.pipeline() as pipe:
        pipe.lpush("recent_links", link_data)
        pipe.ltrim("recent_links", 0, 9)
        pipe.execute()
    redis_client.incr('stats:recent_links_added')
|
||||
|
||||
def get_recent_links():
    """Fetch the last 10 links as (timestamp, url, target_ip) tuples.

    Older two-field records get a default target IP of 127.0.0.1; anything
    shorter is silently dropped.
    """
    links = redis_client.lrange("recent_links", 0, 9)
    parsed_links = []
    for link in links:
        parts = link.decode().split("|")
        if len(parts) >= 3:
            parsed_links.append((parts[0], parts[1], parts[2]))
        elif len(parts) == 2:
            parsed_links.append((parts[0], parts[1], "127.0.0.1"))
    return parsed_links
|
||||
|
||||
# Helper that logs requests hitting the /convert endpoint.
def add_recent_convert():
    """Append the current request's metadata to the recent-converts list (max 50)."""
    ip = get_client_ip()
    try:
        hostname = socket.gethostbyaddr(ip)[0]
    except Exception:
        hostname = ip  # reverse DNS failed — fall back to the bare IP
    user_agent = request.headers.get('User-Agent', 'Unknown')
    time_str = datetime.now().astimezone().isoformat()
    url = request.full_path  # full path including the query string
    data = {
        "url": url,
        "ip": ip,
        "hostname": hostname,
        "time": time_str,
        "user_agent": user_agent
    }
    json_data = json.dumps(data)
    redis_client.lpush("recent_converts", json_data)
    redis_client.ltrim("recent_converts", 0, 49)
|
||||
|
||||
@app.route('/', methods=['GET'])
def index():
    """Main page with the conversion form.

    When ?url= is supplied, validates it, builds an absolute /convert link
    and stores the URL in the recent-links history. Validation failures are
    logged and the page renders without a generated link.
    """
    generated_link = None
    recent_links = get_recent_links()
    url_param = request.args.get('url')
    target_ip = request.args.get('ip', '127.0.0.1')

    if url_param:
        try:
            normalized_url = validate_and_normalize_url(unquote(url_param))
            encoded_url = quote(normalized_url, safe='')
            generated_link = urljoin(
                request.host_url,
                f"convert?url={encoded_url}&ip={target_ip}"
            )
            add_recent_link(normalized_url, target_ip)
            # Refresh so the newly added link shows up immediately.
            recent_links = get_recent_links()
        except Exception as e:
            app.logger.error(f"Error processing URL: {str(e)}")

    return render_template('form.html',
                           generated_link=generated_link,
                           recent_links=recent_links)
|
||||
|
||||
@app.route('/convert')
def convert():
    """Fetch a hosts/AdGuard list, rewrite it to `target_ip host` lines, cache 12 h.

    Query params:
        url: source list URL (percent-encoded).
        ip:  IP to substitute (default 127.0.0.1).

    Returns the converted list as text/plain; aborts with 400/413/500.
    """
    try:
        redis_client.incr('stats:convert_requests')
        add_recent_convert()  # request metadata for the /stats view

        encoded_url = request.args.get('url')
        if not encoded_url:
            redis_client.incr('stats:errors_400')
            abort(400, description="Missing URL parameter")

        normalized_url = validate_and_normalize_url(unquote(encoded_url))
        target_ip = request.args.get('ip', '127.0.0.1')

        # Per-URL and per-target-IP counters.
        track_url_request(normalized_url)
        redis_client.incr(f'stats:target_ips:{target_ip}')

        cached = redis_client.get(cache_key(normalized_url, target_ip))
        if cached:
            redis_client.incr('stats:cache_hits')
            return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'}
        redis_client.incr('stats:cache_misses')

        # FIX: context-manage the response so the HTTP connection is released
        # even when abort(413) fires mid-stream (the old code leaked it).
        # Also accumulate into a bytearray instead of quadratic `bytes +=`.
        with requests.get(normalized_url, stream=True, timeout=15) as response:
            response.raise_for_status()
            content = bytearray()
            for chunk in response.iter_content(2048):
                content.extend(chunk)
                if len(content) > app.config['MAX_CONTENT_LENGTH']:
                    redis_client.incr('stats:errors_413')
                    abort(413)

        redis_client.incrby('stats:content_size_total', len(content))
        redis_client.incr('stats:content_size_count')

        converted = convert_hosts(content.decode('utf-8'), target_ip)
        redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted)  # 12h cache
        redis_client.incr('stats:conversions_success')
        return converted, 200, {'Content-Type': 'text/plain'}

    except requests.RequestException as e:
        app.logger.error(f"Request error: {str(e)}")
        redis_client.incr('stats:errors_500')
        abort(500)
    except ValueError as e:
        app.logger.error(f"URL validation error: {str(e)}")
        redis_client.incr('stats:errors_400')
        abort(400)
|
||||
|
||||
@app.route('/stats')
@ip_restriction
def stats():
    """Aggregate all collected Redis counters into one JSON report."""
    stats_data = {}
    target_ips = {}
    url_requests = {}
    user_agents = {}
    client_ips = {}

    # Bucket every stats:* key by its prefix.
    for key in redis_client.scan_iter("stats:*"):
        key_str = key.decode()
        raw = redis_client.get(key)
        # FIX: a key can expire between SCAN and GET; the old code crashed
        # with AttributeError on the resulting None.
        if raw is None:
            continue
        value = raw.decode()

        if key_str.startswith('stats:target_ips:'):
            target_ips[key_str.split(':', 2)[2]] = value
        elif key_str.startswith('stats:url_requests:'):
            url_requests[unquote(key_str.split(':', 2)[2])] = value
        elif key_str.startswith('stats:user_agents:'):
            user_agents[unquote(key_str.split(':', 2)[2])] = value
        elif key_str.startswith('stats:client_ips:'):
            client_ips[key_str.split(':', 2)[2]] = value
        else:
            stats_data[key_str] = value

    # Last 50 logged /convert requests (stored as JSON strings).
    recent_converts = []
    for entry in redis_client.lrange("recent_converts", 0, 49):
        try:
            recent_converts.append(json.loads(entry.decode()))
        except Exception:
            pass  # skip malformed history entries

    # Average request processing time.
    processing_time_total = float(redis_client.get('stats:processing_time_total') or 0)
    processing_time_count = int(redis_client.get('stats:processing_time_count') or 0)
    avg_processing_time = (processing_time_total / processing_time_count
                           if processing_time_count > 0 else 0)

    # Average downloaded-content size for /convert.
    content_size_total = int(redis_client.get('stats:content_size_total') or 0)
    content_size_count = int(redis_client.get('stats:content_size_count') or 0)
    avg_content_size = (content_size_total / content_size_count
                        if content_size_count > 0 else 0)

    detailed_stats = {
        "processing_time_total_sec": processing_time_total,
        "processing_time_count": processing_time_count,
        "processing_time_avg_sec": avg_processing_time,
        "processing_time_min_sec": float(redis_client.get('stats:processing_time_min') or 0),
        "processing_time_max_sec": float(redis_client.get('stats:processing_time_max') or 0),
        "content_size_total_bytes": content_size_total,
        "content_size_count": content_size_count,
        "content_size_avg_bytes": avg_content_size,
    }

    return jsonify({
        **stats_data,
        'target_ips': target_ips,
        'url_requests': url_requests,
        'user_agents': user_agents,
        'client_ips': client_ips,
        'recent_converts': recent_converts,
        'detailed_stats': detailed_stats,
    })
|
||||
|
||||
@app.errorhandler(400)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(413)
@app.errorhandler(500)
def handle_errors(e):
    """Render the shared error template for every registered HTTP error."""
    return render_template('error.html', error=e), e.code
|
||||
|
||||
if __name__ == '__main__':
    # Development entry point; production runs behind Gunicorn instead.
    app.run(host='0.0.0.0', port=8283)
|
383
app_timeout.py
Normal file
383
app_timeout.py
Normal file
@@ -0,0 +1,383 @@
|
||||
import re
|
||||
import redis
|
||||
import requests
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import socket
|
||||
import time
|
||||
import json
|
||||
from datetime import datetime
|
||||
from flask import Flask, request, render_template, abort, jsonify, g
|
||||
from urllib.parse import urlparse, quote, unquote, urljoin
|
||||
from functools import wraps
|
||||
from flask_compress import Compress
|
||||
from flask_limiter import Limiter
|
||||
from flask_limiter.util import get_remote_address
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024 # limit
|
||||
redis_client = redis.Redis(host='localhost', port=6379, db=7)
|
||||
|
||||
# Ustawienia do rate limiting – 100 żądań na minutę
|
||||
def get_client_ip():
    """Best-effort client IP: first X-Forwarded-For hop, else the socket peer."""
    # split(',') always yields at least one element, possibly ''.
    first_hop = request.headers.get('X-Forwarded-For', '').split(',')[0].strip()
    # NOTE(review): the header is client-controlled; trust it only behind a
    # proxy that overwrites it — confirm the deployment topology.
    return first_hop or request.remote_addr
|
||||
|
||||
limiter = Limiter(key_func=get_client_ip, default_limits=["100 per minute"], app=app)
|
||||
Compress(app)
|
||||
|
||||
ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'}
|
||||
ALLOWED_DOMAIN = ''
|
||||
|
||||
@app.before_request
def track_request_data():
    """Start the request timer and bump per-UA / per-IP / per-method counters."""
    g.start_time = time.perf_counter()  # consumed by after_request()
    ua = request.headers.get('User-Agent', 'Unknown')
    redis_client.incr(f'stats:user_agents:{quote(ua, safe="")}')
    redis_client.incr(f'stats:client_ips:{get_client_ip()}')
    redis_client.incr(f'stats:methods:{request.method}')
|
||||
|
||||
@app.after_request
def after_request(response):
    """Record the wall-clock processing time of the finished request."""
    # before_request may not have run (e.g. it raised before setting the
    # timer); guard the attribute instead of crashing.
    start = getattr(g, 'start_time', None)
    if start is None:
        return response
    elapsed = time.perf_counter() - start

    redis_client.incrbyfloat('stats:processing_time_total', elapsed)
    redis_client.incr('stats:processing_time_count')

    def _update_extreme(key, is_better):
        """Set `key` to elapsed when missing or when elapsed beats it."""
        try:
            current = redis_client.get(key)
            # FIX: the old code substituted `elapsed` for a missing value and
            # then compared `elapsed < elapsed`, so min/max were never
            # initialised. Treat "missing" as an unconditional write.
            if current is None or is_better(elapsed, float(current)):
                redis_client.set(key, elapsed)
        except Exception:
            redis_client.set(key, elapsed)

    _update_extreme('stats:processing_time_min', lambda new, cur: new < cur)
    _update_extreme('stats:processing_time_max', lambda new, cur: new > cur)

    return response
|
||||
|
||||
@app.template_filter('datetimeformat')
def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'):
    """Jinja filter: render an ISO-8601 string with `format`, or echo it back."""
    try:
        return datetime.fromisoformat(value).strftime(format)
    except (ValueError, AttributeError):
        # Not a parseable ISO timestamp: display the raw value unchanged.
        return value
|
||||
|
||||
def ip_restriction(f):
    """Decorator: allow only whitelisted client IPs or the allowed domain."""
    @wraps(f)
    def decorated(*args, **kwargs):
        client_ip = get_client_ip()
        host = request.host.split(':')[0]
        forwarded_ip = request.headers.get('X-Forwarded-For', '').split(',')[0].strip()

        if client_ip in ALLOWED_IPS or host == ALLOWED_DOMAIN or forwarded_ip in ALLOWED_IPS:
            return f(*args, **kwargs)

        redis_client.incr('stats:errors_403')
        abort(403)
    return decorated
|
||||
|
||||
def cache_key(source_url, ip):
    """Redis key under which a converted list is cached, per (url, ip) pair."""
    return ":".join(("cache", source_url, ip))
|
||||
|
||||
def convert_hosts(content, target_ip):
    """Rewrite a hosts/AdGuard blocklist so every entry points at `target_ip`.

    Emits one `target_ip domain` line per usable rule; blank lines, comments,
    cosmetic rules and rules carrying '$' modifiers are dropped.
    """
    out = []
    hosts_line = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+')

    for raw in content.splitlines():
        rule = raw.strip()

        # Blank lines, '!'/'#' comments, '/'-prefixed rules, '$' modifiers.
        if not rule or rule[0] in ('!', '#', '/') or '$' in rule:
            continue

        # AdGuard-style `||domain^` / `|domain^` rules.
        if rule.startswith(('||', '|')):
            domain = rule.split('^')[0].lstrip('|')
            # Plausibility check: sane length and a dot strictly inside.
            if 1 < len(domain) <= 253 and '.' in domain[1:-1]:
                out.append(f"{target_ip} {domain}")
            continue

        # Classic `IP hostname [...]` lines: swap in the target IP.
        if hosts_line.match(rule):
            out.append(re.sub(r'^\S+', target_ip, rule, count=1))

    return '\n'.join(out)
|
||||
|
||||
def validate_and_normalize_url(url):
    """Normalize `url`, defaulting to the https scheme when none is given.

    Raises:
        ValueError: if no host component can be extracted.
    """
    parsed = urlparse(url)
    if not parsed.scheme:
        # Bare hostnames get an https:// prefix so urlparse finds the netloc.
        parsed = urlparse(f'https://{url}')
    if not parsed.netloc:
        raise ValueError("Missing host in URL")
    return parsed.geturl()
|
||||
|
||||
def track_url_request(url):
    """Bump the Redis request counter for one source URL."""
    # Encode the URL so embedded ':' cannot break the key namespace.
    redis_client.incr("stats:url_requests:" + quote(url, safe=''))
|
||||
|
||||
def add_recent_link(url, target_ip):
    """Record a generated link in the bounded history (newest 10 kept)."""
    stamped = f"{datetime.now().isoformat()}|{url}|{target_ip}"
    # Push and trim in one pipeline round-trip.
    with redis_client.pipeline() as pipe:
        pipe.lpush("recent_links", stamped)
        pipe.ltrim("recent_links", 0, 9)
        pipe.execute()
    redis_client.incr('stats:recent_links_added')
|
||||
|
||||
def get_recent_links():
    """Return up to 10 most recent (timestamp, url, target_ip) tuples."""
    result = []
    for raw in redis_client.lrange("recent_links", 0, 9):
        parts = raw.decode().split("|")
        if len(parts) >= 3:
            result.append((parts[0], parts[1], parts[2]))
        elif len(parts) == 2:
            # Older entries lack a stored target IP; default to loopback.
            result.append((parts[0], parts[1], "127.0.0.1"))
    return result
|
||||
|
||||
def get_hostname(ip):
    """Reverse-DNS name for `ip`, memoised in Redis for one hour."""
    key = f"reverse_dns:{ip}"
    if (hit := redis_client.get(key)):
        return hit.decode()
    try:
        name = socket.gethostbyaddr(ip)[0]
    except Exception:
        name = ip  # unresolvable: fall back to the literal address
    redis_client.setex(key, 3600, name)  # cache for 1 h
    return name
|
||||
|
||||
# Nowa funkcja do logowania requestów dla endpointu /convert
|
||||
def add_recent_convert():
    """Log metadata for the current /convert request (newest 50 kept)."""
    ip = get_client_ip()
    record = {
        "url": request.full_path,  # path including the query string
        "ip": ip,
        "hostname": get_hostname(ip),  # Redis-cached reverse DNS
        "time": datetime.now().astimezone().isoformat(),
        "user_agent": request.headers.get('User-Agent', 'Unknown'),
    }
    redis_client.lpush("recent_converts", json.dumps(record))
    redis_client.ltrim("recent_converts", 0, 49)
|
||||
|
||||
@app.route('/', methods=['GET'])
def index():
    """Main form page; builds a /convert link when ?url= is supplied."""
    link = None
    history = get_recent_links()
    raw_url = request.args.get('url')
    target_ip = request.args.get('ip', '127.0.0.1')

    if raw_url:
        try:
            normalized = validate_and_normalize_url(unquote(raw_url))
            link = urljoin(
                request.host_url,
                f"convert?url={quote(normalized, safe='')}&ip={target_ip}",
            )
            add_recent_link(normalized, target_ip)
            history = get_recent_links()  # re-read to include the new entry
        except Exception as exc:
            app.logger.error(f"Error processing URL: {str(exc)}")

    return render_template('form.html',
                           generated_link=link,
                           recent_links=history)
|
||||
|
||||
@app.route('/convert')
@limiter.limit("100 per minute")
async def convert():
    """Async /convert: fetch a text blocklist, rewrite it, cache 12 h.

    Query params:
        url: source list URL (percent-encoded).
        ip:  IP to substitute (default 127.0.0.1).

    Aborts with 400 (bad/missing URL), 413 (too large), 415 (non-text),
    500 (fetch failure).
    """
    try:
        redis_client.incr('stats:convert_requests')
        add_recent_convert()  # request metadata for the /stats view

        encoded_url = request.args.get('url')
        if not encoded_url:
            redis_client.incr('stats:errors_400')
            abort(400, description="Missing URL parameter")

        normalized_url = validate_and_normalize_url(unquote(encoded_url))
        target_ip = request.args.get('ip', '127.0.0.1')

        track_url_request(normalized_url)
        redis_client.incr(f'stats:target_ips:{target_ip}')

        cached = redis_client.get(cache_key(normalized_url, target_ip))
        if cached:
            redis_client.incr('stats:cache_hits')
            return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'}
        redis_client.incr('stats:cache_misses')

        # FIX: pass an explicit ClientTimeout — bare-int timeouts are a
        # deprecated aiohttp form.
        timeout = aiohttp.ClientTimeout(total=15)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(normalized_url) as response:
                # Only text payloads make sense for a hosts list.
                if "text" not in response.headers.get("Content-Type", ""):
                    abort(415, description="Unsupported Media Type")
                content = bytearray()  # avoids quadratic `bytes +=`
                while chunk := await response.content.read(2048):
                    content.extend(chunk)
                    if len(content) > app.config['MAX_CONTENT_LENGTH']:
                        redis_client.incr('stats:errors_413')
                        abort(413)

        redis_client.incrby('stats:content_size_total', len(content))
        redis_client.incr('stats:content_size_count')

        converted = convert_hosts(content.decode('utf-8'), target_ip)
        redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted)  # 12h cache
        redis_client.incr('stats:conversions_success')
        return converted, 200, {'Content-Type': 'text/plain'}

    # FIX: aiohttp timeouts raise asyncio.TimeoutError, which ClientError
    # does not cover — the old code let it escape as an unlogged 500.
    except (aiohttp.ClientError, asyncio.TimeoutError) as e:
        app.logger.error(f"Request error: {str(e)}")
        redis_client.incr('stats:errors_500')
        abort(500)
    except ValueError as e:
        app.logger.error(f"URL validation error: {str(e)}")
        redis_client.incr('stats:errors_400')
        abort(400)
|
||||
|
||||
@app.route('/stats')
@ip_restriction
def stats():
    """Aggregate all collected Redis counters into one JSON report."""
    stats_data = {}
    target_ips = {}
    url_requests = {}
    user_agents = {}
    client_ips = {}

    # Bucket every stats:* key by its prefix.
    for key in redis_client.scan_iter("stats:*"):
        key_str = key.decode()
        raw = redis_client.get(key)
        # FIX: a key can expire between SCAN and GET; the old code crashed
        # with AttributeError on the resulting None.
        if raw is None:
            continue
        value = raw.decode()

        if key_str.startswith('stats:target_ips:'):
            target_ips[key_str.split(':', 2)[2]] = value
        elif key_str.startswith('stats:url_requests:'):
            url_requests[unquote(key_str.split(':', 2)[2])] = value
        elif key_str.startswith('stats:user_agents:'):
            user_agents[unquote(key_str.split(':', 2)[2])] = value
        elif key_str.startswith('stats:client_ips:'):
            client_ips[key_str.split(':', 2)[2]] = value
        else:
            stats_data[key_str] = value

    # Last 50 logged /convert requests (stored as JSON strings).
    recent_converts = []
    for entry in redis_client.lrange("recent_converts", 0, 49):
        try:
            recent_converts.append(json.loads(entry.decode()))
        except Exception:
            pass  # skip malformed history entries

    # Average request processing time.
    processing_time_total = float(redis_client.get('stats:processing_time_total') or 0)
    processing_time_count = int(redis_client.get('stats:processing_time_count') or 0)
    avg_processing_time = (processing_time_total / processing_time_count
                           if processing_time_count > 0 else 0)

    # Average downloaded-content size for /convert.
    content_size_total = int(redis_client.get('stats:content_size_total') or 0)
    content_size_count = int(redis_client.get('stats:content_size_count') or 0)
    avg_content_size = (content_size_total / content_size_count
                        if content_size_count > 0 else 0)

    detailed_stats = {
        "processing_time_total_sec": processing_time_total,
        "processing_time_count": processing_time_count,
        "processing_time_avg_sec": avg_processing_time,
        "processing_time_min_sec": float(redis_client.get('stats:processing_time_min') or 0),
        "processing_time_max_sec": float(redis_client.get('stats:processing_time_max') or 0),
        "content_size_total_bytes": content_size_total,
        "content_size_count": content_size_count,
        "content_size_avg_bytes": avg_content_size,
    }

    return jsonify({
        **stats_data,
        'target_ips': target_ips,
        'url_requests': url_requests,
        'user_agents': user_agents,
        'client_ips': client_ips,
        'recent_converts': recent_converts,
        'detailed_stats': detailed_stats,
    })
|
||||
|
||||
@app.errorhandler(400)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(413)
@app.errorhandler(415)
@app.errorhandler(500)
def handle_errors(e):
    """Render the shared error template for every registered HTTP error."""
    return render_template('error.html', error=e), e.code
|
||||
|
||||
if __name__ == '__main__':
    # Direct execution: Flask's built-in development server.
    app.run(host='0.0.0.0', port=8283)
else:
    # Imported by an ASGI server (e.g. Gunicorn + UvicornWorker):
    # expose the WSGI app through an ASGI adapter.
    from asgiref.wsgi import WsgiToAsgi
    asgi_app = WsgiToAsgi(app)
|
||||
|
18
listapp.service
Normal file
18
listapp.service
Normal file
@@ -0,0 +1,18 @@
|
||||
# /etc/systemd/system/listapp.service
|
||||
[Unit]
|
||||
Description=ListApp - Flask application for hosts file conversion
|
||||
After=network.target redis.service
|
||||
|
||||
[Service]
|
||||
User=www-data
|
||||
Group=www-data
|
||||
WorkingDirectory=/var/www/listapp
|
||||
Environment="PATH=/var/www/listapp/venv/bin"
|
||||
#ExecStart=/var/www/listapp/bin/gunicorn -w 2 --bind 127.0.0.1:8283 app:app
|
||||
ExecStart=/var/www/listapp/bin/gunicorn -k uvicorn.workers.UvicornWorker -w 4 --bind 127.0.0.1:8283 app:asgi_app
|
||||
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
87
templates/error.html
Normal file
87
templates/error.html
Normal file
@@ -0,0 +1,87 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Error {{ error.code }}</title>
|
||||
<style>
|
||||
:root {
|
||||
--bg-color: #1a1a1a;
|
||||
--card-bg: #2d2d2d;
|
||||
--text-color: #e0e0e0;
|
||||
--accent: #007bff;
|
||||
--border-color: #404040;
|
||||
--error-color: #ff4444;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Segoe UI', system-ui, sans-serif;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-color);
|
||||
margin: 0;
|
||||
min-height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
text-align: center;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.error-container {
|
||||
max-width: 600px;
|
||||
padding: 40px;
|
||||
background: var(--card-bg);
|
||||
border-radius: 12px;
|
||||
border: 1px solid var(--border-color);
|
||||
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
h1 {
|
||||
color: var(--error-color);
|
||||
font-size: 3.5em;
|
||||
margin: 0 0 20px 0;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
p {
|
||||
font-size: 1.2em;
|
||||
margin: 10px 0;
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
a {
|
||||
color: var(--accent);
|
||||
text-decoration: none;
|
||||
margin-top: 20px;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.error-container {
|
||||
padding: 25px;
|
||||
margin: 15px;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2.5em;
|
||||
}
|
||||
|
||||
p {
|
||||
font-size: 1em;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="error-container">
|
||||
<h1>Error {{ error.code }}</h1>
|
||||
<p>{{ error.description }}</p>
|
||||
<a href="/">← Return to Home Page</a>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
292
templates/form.html
Normal file
292
templates/form.html
Normal file
@@ -0,0 +1,292 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Hosts Converter</title>
|
||||
<style>
|
||||
:root {
|
||||
--bg-color: #1a1a1a;
|
||||
--card-bg: #2d2d2d;
|
||||
--text-color: #e0e0e0;
|
||||
--accent: #007bff;
|
||||
--accent-light: #4da6ff;
|
||||
--border-color: #404040;
|
||||
--link-color: #4da6ff;
|
||||
}
|
||||
|
||||
[data-theme="light"] {
|
||||
--bg-color: #f5f5f5;
|
||||
--card-bg: #ffffff;
|
||||
--text-color: #333333;
|
||||
--border-color: #dddddd;
|
||||
--link-color: #0066cc;
|
||||
--accent: #0066cc;
|
||||
--accent-light: #007bff;
|
||||
}
|
||||
|
||||
* {
|
||||
transition: background-color 0.3s, color 0.3s;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Segoe UI', system-ui, sans-serif;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-color);
|
||||
max-width: 800px;
|
||||
margin: 20px auto;
|
||||
padding: 20px;
|
||||
line-height: 1.6;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.theme-toggle {
|
||||
position: fixed;
|
||||
top: 20px;
|
||||
right: 20px;
|
||||
background: var(--card-bg);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 20px;
|
||||
padding: 8px 15px;
|
||||
cursor: pointer;
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
h1 {
|
||||
color: var(--accent);
|
||||
margin-bottom: 30px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
form {
|
||||
background: var(--card-bg);
|
||||
padding: 25px;
|
||||
border-radius: 12px;
|
||||
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.form-group {
|
||||
margin: 15px 0;
|
||||
padding: 0 15px;
|
||||
}
|
||||
|
||||
input[type="text"] {
|
||||
width: calc(100% - 30px);
|
||||
padding: 10px 15px;
|
||||
margin: 8px 0;
|
||||
background: var(--bg-color);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 6px;
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
button {
|
||||
background: linear-gradient(135deg, var(--accent), var(--accent-light));
|
||||
color: white;
|
||||
padding: 12px 25px;
|
||||
border: none;
|
||||
border-radius: 6px;
|
||||
cursor: pointer;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
margin: 0 15px;
|
||||
}
|
||||
|
||||
button:hover {
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.result-box {
|
||||
background: var(--card-bg);
|
||||
padding: 20px;
|
||||
border-radius: 12px;
|
||||
margin: 25px 15px;
|
||||
border: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.recent-links {
|
||||
margin: 35px 15px 0;
|
||||
padding: 25px 15px 0;
|
||||
border-top: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.link-item {
|
||||
background: var(--card-bg);
|
||||
padding: 15px;
|
||||
margin: 12px 0;
|
||||
border-radius: 8px;
|
||||
border: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.link-item:hover {
|
||||
transform: translateX(5px);
|
||||
transition: transform 0.2s;
|
||||
}
|
||||
|
||||
.timestamp {
|
||||
color: #888;
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
a {
|
||||
color: var(--link-color);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
footer {
|
||||
text-align: center;
|
||||
margin-top: 40px;
|
||||
padding: 20px;
|
||||
border-top: 1px solid var(--border-color);
|
||||
color: #888;
|
||||
}
|
||||
|
||||
footer a {
|
||||
color: var(--link-color);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
footer a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
body {
|
||||
padding: 15px;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
form {
|
||||
padding: 15px 0;
|
||||
}
|
||||
|
||||
.form-group {
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
input[type="text"] {
|
||||
width: calc(100% - 20px);
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
button {
|
||||
width: calc(100% - 20px);
|
||||
padding: 15px;
|
||||
margin: 0 10px;
|
||||
}
|
||||
|
||||
.result-box {
|
||||
margin: 25px 10px;
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
.recent-links {
|
||||
margin: 35px 10px 0;
|
||||
padding: 25px 10px 0;
|
||||
}
|
||||
}
|
||||
|
||||
.copy-btn {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.copy-btn::after {
|
||||
content: "Copied!";
|
||||
position: absolute;
|
||||
background: var(--card-bg);
|
||||
color: var(--text-color);
|
||||
padding: 5px 10px;
|
||||
border-radius: 4px;
|
||||
right: -80px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
opacity: 0;
|
||||
transition: opacity 0.3s;
|
||||
}
|
||||
|
||||
.copy-btn.copied::after {
|
||||
opacity: 1;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body data-theme="dark">
|
||||
<button class="theme-toggle" onclick="toggleTheme()">🌓 Toggle Theme</button>
|
||||
|
||||
<h1>Hosts File Converter</h1>
|
||||
|
||||
<form method="GET" action="/">
|
||||
<div class="form-group">
|
||||
<label>URL to hosts file:</label>
|
||||
<input type="text" name="url" required
|
||||
placeholder="ex. https://paulgb.github.io/BarbBlock/blacklists/hosts-file.txt">
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label>Target IP:</label>
|
||||
<input type="text" name="ip" pattern="\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
|
||||
value="195.187.6.34" required>
|
||||
</div>
|
||||
|
||||
<button type="submit">Generate convert link</button>
|
||||
</form>
|
||||
|
||||
{% if generated_link %}
|
||||
<div class="result-box">
|
||||
<h3>Link to MikroTik/Adguard:</h3>
|
||||
<input type="text" value="{{ generated_link }}" readonly>
|
||||
<button class="copy-btn" onclick="copyToClipboard(this)">Copy link</button>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="recent-links">
|
||||
<h3>Last converts:</h3>
|
||||
{% if recent_links %}
|
||||
{% for link_data in recent_links %}
|
||||
<div class="link-item">
|
||||
<div class="timestamp">{{ link_data[0]|datetimeformat }}</div>
|
||||
<a href="/convert?url={{ link_data[1]|urlencode }}&ip={{ link_data[2] }}" target="_blank">
|
||||
{{ link_data[1] }} → {{ link_data[2] }}
|
||||
</a>
|
||||
</div>
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<p>No conversions yet.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<footer>
|
||||
© 2025 <a href="https://www.linuxiarz.pl" target="_blank">linuxiarz.pl</a> - All rights reserved <br>
|
||||
Your IP address: <strong>{{ client_ip }}</strong> | Your User Agent: <strong>{{ user_agent }}</strong>
|
||||
</footer>
|
||||
|
||||
<script>
|
||||
function toggleTheme() {
|
||||
const body = document.body;
|
||||
body.setAttribute('data-theme',
|
||||
body.getAttribute('data-theme') === 'dark' ? 'light' : 'dark');
|
||||
localStorage.setItem('theme', body.getAttribute('data-theme'));
|
||||
}
|
||||
|
||||
function copyToClipboard(btn) {
|
||||
const copyText = document.querySelector("input[readonly]");
|
||||
copyText.select();
|
||||
document.execCommand("copy");
|
||||
|
||||
btn.classList.add('copied');
|
||||
setTimeout(() => btn.classList.remove('copied'), 2000);
|
||||
}
|
||||
|
||||
// Load saved theme
|
||||
const savedTheme = localStorage.getItem('theme') || 'dark';
|
||||
document.body.setAttribute('data-theme', savedTheme);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
70
templates/form_light.html
Normal file
70
templates/form_light.html
Normal file
@@ -0,0 +1,70 @@
|
||||
<!DOCTYPE html>
<!-- Light-theme page for the hosts-file converter:
     input form, generated /convert link, and the recent-conversions history. -->
<html>
<head>
    <title>Hosts Converter</title>
    <style>
        body { font-family: Arial, sans-serif; max-width: 800px; margin: 20px auto; padding: 20px; }
        form { background: #f5f5f5; padding: 20px; border-radius: 5px; }
        input[type="text"] { width: 100%; padding: 8px; margin: 5px 0; }
        .result-box { margin: 20px 0; padding: 15px; border: 1px solid #ddd; background: #fff; }
        .recent-links { margin-top: 30px; border-top: 1px solid #eee; padding-top: 20px; }
        .link-item { margin: 10px 0; padding: 10px; background: #f8f9fa; border-radius: 3px; }
        .timestamp { color: #666; font-size: 0.9em; }
        button { padding: 8px 15px; background: #007bff; color: white; border: none; border-radius: 3px; cursor: pointer; }
        button:hover { background: #0056b3; }
    </style>
</head>
<body>
    <h1>Hosts File Converter</h1>

    <form method="GET" action="/">
        <p>
            <label>URL to hosts file:<br>
                <input type="text" name="url" required
                       placeholder="np. paulgb.github.io/BarbBlock/blacklists/hosts-file.txt">
            </label>
        </p>
        <p>
            <label>Target IP:
                <!-- Each octet is restricted to 0-255; the previous
                     \d{1,3}(...) pattern accepted values like 999.999.999.999. -->
                <input type="text" name="ip"
                       pattern="((25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])"
                       value="195.187.6.34" required>
            </label>
        </p>
        <button type="submit">Generate convert link</button>
    </form>

    {% if generated_link %}
    <div class="result-box">
        <h3>Link to MikroTik/Adguard:</h3>
        <input type="text" value="{{ generated_link }}" readonly
               style="width: 100%; padding: 8px; margin: 5px 0;">
        <button onclick="copyToClipboard()">Copy link</button>
    </div>
    {% endif %}

    <div class="recent-links">
        <h3>Last converts:</h3>
        {% if recent_links %}
            {% for link_data in recent_links %}
            <div class="link-item">
                <div class="timestamp">{{ link_data[0]|datetimeformat }}</div>
                <!-- Both query parameters are url-encoded so special
                     characters survive the round trip to /convert. -->
                <a href="/convert?url={{ link_data[1]|urlencode }}&ip={{ link_data[2]|urlencode }}" target="_blank">
                    {{ link_data[1] }} → {{ link_data[2] }}
                </a>
            </div>
            {% endfor %}
        {% else %}
            <p>Empty..</p>
        {% endif %}
    </div>

    <script>
        function copyToClipboard() {
            // NOTE(review): document.execCommand is deprecated; kept for
            // legacy-browser support because navigator.clipboard requires
            // a secure context (HTTPS) — confirm deployment before swapping.
            const copyText = document.querySelector("input[readonly]");
            copyText.select();
            document.execCommand("copy");
            alert("OK!");
        }
    </script>
</body>
</html>
|
16
templates/stats.html
Normal file
16
templates/stats.html
Normal file
@@ -0,0 +1,16 @@
|
||||
<!-- templates/stats.html -->
<!-- Read-only statistics view: renders the per-URL download counters
     passed in via the `stats` mapping as a plain two-column table. -->
<!DOCTYPE html>
<html>
<head>
    <title>Statistics</title>
</head>
<body>
    <h1>Download Statistics</h1>
    <table>
        <tr><th>URL</th><th>Hits</th></tr>
        {% for url, count in stats.items() %}
        <tr><td>{{ url }}</td><td>{{ count }}</td></tr>
        {% endfor %}
    </table>
</body>
</html>
|
Reference in New Issue
Block a user