# hosts-file / blocklist converter service (Flask + Redis)
import re
|
|
import redis
|
|
import requests
|
|
from datetime import datetime
|
|
from flask import Flask, request, render_template, abort, jsonify
|
|
from urllib.parse import urlparse, quote, unquote, urljoin
|
|
from functools import wraps
|
|
|
|
app = Flask(__name__)

# Size cap used by /convert when downloading a remote blocklist (2 GiB).
# NOTE(review): Flask also applies MAX_CONTENT_LENGTH to inbound request
# bodies — confirm the double meaning is intended.
app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024 * 1024  # 2 GiB download limit

# Counters, cache, and recent-links history all live in Redis DB 7.
redis_client = redis.Redis(host='localhost', port=6379, db=7)

# Client IPs permitted to view /stats (see ip_restriction).
ALLOWED_IPS = {'127.0.0.1', '109.173.163.86'}

# Host name that may also reach /stats. NOTE(review): this is compared with
# `host == ALLOWED_DOMAIN` in ip_restriction — while empty it can match an
# empty Host header; verify the decorator guards against that.
ALLOWED_DOMAIN = ''
|
|
|
|
@app.before_request
def track_request_data():
    """Record per-request statistics (User-Agent and client IP) in Redis."""
    ua = request.headers.get('User-Agent', 'Unknown')
    ip = get_client_ip()

    # Percent-encode the UA so arbitrary header bytes cannot mangle the key.
    redis_client.incr(f'stats:user_agents:{quote(ua, safe="")}')
    redis_client.incr(f'stats:client_ips:{ip}')
|
|
|
|
def get_client_ip():
    """Best-effort client IP: first X-Forwarded-For hop, else the socket peer.

    NOTE(review): X-Forwarded-For is client-supplied; it is only trustworthy
    when a fronting proxy overwrites the header — confirm deployment.
    """
    forwarded = request.headers.get('X-Forwarded-For', '')
    first_hop = forwarded.split(',')[0].strip()
    return first_hop if first_hop else request.remote_addr
|
|
|
|
@app.template_filter('datetimeformat')
|
|
def datetimeformat_filter(value, format='%Y-%m-%d %H:%M'):
    """Jinja filter: render an ISO-8601 datetime string using *format*.

    Returns *value* unchanged when it cannot be parsed, so templates never
    crash on malformed history entries.
    """
    try:
        dt = datetime.fromisoformat(value)
        return dt.strftime(format)
    except (ValueError, TypeError, AttributeError):
        # TypeError added: datetime.fromisoformat(None) (or any non-str)
        # raises TypeError, which the original let propagate and 500 the
        # whole page render.
        return value
|
|
|
|
def ip_restriction(f):
    """Restrict a view to trusted clients.

    Access is granted when the derived client IP (or the first X-Forwarded-For
    hop) is in ALLOWED_IPS, or the request's Host matches a non-empty
    ALLOWED_DOMAIN. Denied requests bump a 403 counter and abort.

    SECURITY(review): the X-Forwarded-For check is client-forgeable — anyone
    can send `X-Forwarded-For: 127.0.0.1` and pass unless a trusted proxy
    strips/overwrites the header. Confirm the deployment before relying on it.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        client_ip = get_client_ip()
        host = request.host.split(':')[0]
        forwarded_ip = request.headers.get('X-Forwarded-For', '').split(',')[0].strip()

        allowed = (
            client_ip in ALLOWED_IPS
            # Fix: with ALLOWED_DOMAIN == '' the original `host == ALLOWED_DOMAIN`
            # matched an empty Host header and bypassed the restriction entirely.
            or (bool(ALLOWED_DOMAIN) and host == ALLOWED_DOMAIN)
            or forwarded_ip in ALLOWED_IPS
        )
        if allowed:
            return f(*args, **kwargs)

        redis_client.incr('stats:errors_403')
        abort(403)
    return decorated
|
|
|
|
def cache_key(source_url, ip):
    """Redis key for the conversion cache, scoped per source URL and target IP."""
    return "cache:{}:{}".format(source_url, ip)
|
|
|
|
#def convert_hosts(content, target_ip):
|
|
# """Convert IPs in hosts file content"""
|
|
# pattern = r'^\s*?(?P<ip>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s+(?P<host>\S+).*$'
|
|
# return re.sub(pattern, f"{target_ip} \\g<host>", content, flags=re.MULTILINE)
|
|
|
|
def convert_hosts(content, target_ip):
    """Rewrite a blocklist into hosts-file lines pointing at *target_ip*.

    Two input formats are recognised:
      * AdBlock/AdGuard domain rules (``||domain^`` or ``|domain``)
      * classic hosts entries (``IPv4 host ...``, first token replaced)
    Blank lines, comment lines (``!``, ``#``, ``/``), and rules carrying
    ``$`` modifiers are dropped.
    """
    ipv4_prefix = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\s+')
    out = []

    for raw in content.splitlines():
        entry = raw.strip()

        # Drop empties, comments, and modifier-bearing rules.
        if not entry or entry[0] in ('!', '#', '/') or '$' in entry:
            continue

        # AdBlock-style domain rules.
        if entry.startswith(('||', '|')):
            domain = entry.split('^')[0].lstrip('|')
            # Plausible hostname: 2..253 chars with an interior dot.
            if 1 < len(domain) <= 253 and '.' in domain[1:-1]:
                out.append(f"{target_ip} {domain}")
            continue

        # Classic hosts line: swap the leading IP for the target.
        if ipv4_prefix.match(entry):
            out.append(re.sub(r'^\S+', target_ip, entry, count=1))

    return '\n'.join(out)
|
|
|
|
def validate_and_normalize_url(url):
    """Normalize *url* (defaulting the scheme to https) and require a host.

    Returns the normalized URL string.
    Raises ValueError when no host can be extracted.

    SECURITY(review): the result is fetched server-side in /convert —
    consider rejecting private/loopback hosts to limit SSRF exposure.
    """
    # Fix: urlparse("host.com:8080") parses "host.com" as the *scheme*, so the
    # original skipped the https:// prefix and then raised "Missing host" for
    # perfectly good host:port input. A real scheme is detected by "://".
    if '://' not in url:
        url = f'https://{url}'
    parsed = urlparse(url)
    if not parsed.netloc:
        raise ValueError("Missing host in URL")
    return parsed.geturl()
|
|
|
|
def track_url_request(url):
    """Bump the per-URL request counter (URL percent-encoded into the key)."""
    redis_client.incr('stats:url_requests:' + quote(url, safe=''))
|
|
|
|
def add_recent_link(url, target_ip):
    """Prepend a "timestamp|url|ip" record to the bounded recent-links list."""
    entry = f"{datetime.now().isoformat()}|{url}|{target_ip}"

    # LPUSH + LTRIM in one pipeline round-trip; keeps only the newest 10.
    with redis_client.pipeline() as pipe:
        pipe.lpush("recent_links", entry).ltrim("recent_links", 0, 9).execute()

    redis_client.incr('stats:recent_links_added')
|
|
|
|
def get_recent_links():
    """Return up to 10 (timestamp, url, target_ip) tuples, newest first.

    Legacy two-field records (no target IP) default to 127.0.0.1; records
    with fewer than two fields are skipped.
    """
    entries = []
    for raw in redis_client.lrange("recent_links", 0, 9):
        fields = raw.decode().split("|")
        if len(fields) == 2:
            entries.append((fields[0], fields[1], "127.0.0.1"))
        elif len(fields) > 2:
            entries.append(tuple(fields[:3]))
    return entries
|
|
|
|
@app.route('/', methods=['GET'])
def index():
    """Main form page; with ?url= (and optional ?ip=) also builds a /convert link."""
    generated_link = None
    recent_links = get_recent_links()
    url_param = request.args.get('url')
    target_ip = request.args.get('ip', '127.0.0.1')

    if url_param:
        try:
            normalized_url = validate_and_normalize_url(unquote(url_param))
            query = f"convert?url={quote(normalized_url, safe='')}&ip={target_ip}"
            generated_link = urljoin(request.host_url, query)
            add_recent_link(normalized_url, target_ip)
            # Re-read so the freshly added entry shows up in this render.
            recent_links = get_recent_links()
        except Exception as exc:
            # Best-effort: a bad URL just means no generated link this render.
            app.logger.error(f"Error processing URL: {str(exc)}")

    return render_template(
        'form.html',
        generated_link=generated_link,
        recent_links=recent_links,
    )
|
|
|
|
@app.route('/convert')
def convert():
    """Fetch a remote blocklist, convert it to hosts format, cache and return it.

    Query params: ``url`` (required, percent-encoded) and ``ip`` (target IP,
    default 127.0.0.1). Responses are plain text; results are cached 12h.

    SECURITY(review): the URL is fetched server-side — see the SSRF note on
    validate_and_normalize_url.
    """
    try:
        redis_client.incr('stats:convert_requests')
        encoded_url = request.args.get('url')

        if not encoded_url:
            redis_client.incr('stats:errors_400')
            abort(400, description="Missing URL parameter")

        decoded_url = unquote(encoded_url)
        normalized_url = validate_and_normalize_url(decoded_url)
        target_ip = request.args.get('ip', '127.0.0.1')

        # Track statistics
        track_url_request(normalized_url)
        redis_client.incr(f'stats:target_ips:{target_ip}')

        # Serve from cache when possible
        cached = redis_client.get(cache_key(normalized_url, target_ip))
        if cached:
            redis_client.incr('stats:cache_hits')
            return cached.decode('utf-8'), 200, {'Content-Type': 'text/plain'}

        redis_client.incr('stats:cache_misses')

        # Fetch with a size cap. Fixes vs original:
        #  * `with` guarantees the streamed connection is closed (it leaked,
        #    most visibly when aborting with 413 mid-download);
        #  * chunks are joined once instead of `bytes += chunk`, which is
        #    quadratic on large lists.
        chunks = []
        total = 0
        with requests.get(normalized_url, stream=True, timeout=15) as response:
            response.raise_for_status()
            for chunk in response.iter_content(2048):
                total += len(chunk)
                if total > app.config['MAX_CONTENT_LENGTH']:
                    redis_client.incr('stats:errors_413')
                    abort(413)
                chunks.append(chunk)

        converted = convert_hosts(b''.join(chunks).decode('utf-8'), target_ip)
        redis_client.setex(cache_key(normalized_url, target_ip), 43200, converted)  # 12h cache
        redis_client.incr('stats:conversions_success')
        return converted, 200, {'Content-Type': 'text/plain'}

    except requests.RequestException as e:
        app.logger.error(f"Request error: {str(e)}")
        redis_client.incr('stats:errors_500')
        abort(500)
    except ValueError as e:
        # UnicodeDecodeError is a ValueError subclass, so undecodable
        # payloads land here as a 400 too (same as the original).
        app.logger.error(f"URL validation error: {str(e)}")
        redis_client.incr('stats:errors_400')
        abort(400)
|
|
|
|
@app.route('/stats')
@ip_restriction
def stats():
    """JSON statistics: flat counters plus per-target-IP / per-URL /
    per-User-Agent / per-client-IP breakdowns, aggregated from `stats:*` keys.
    """
    stats_data = {}
    target_ips = {}
    url_requests = {}
    user_agents = {}
    client_ips = {}

    # Aggregate stats from Redis
    for key in redis_client.scan_iter("stats:*"):
        key_str = key.decode()
        raw = redis_client.get(key)
        if raw is None:
            # Fix: a key can vanish (expire / be deleted) between SCAN and
            # GET; the original then crashed on None.decode() with a 500.
            continue
        value = raw.decode()

        # Dimension keys are "stats:<dimension>:<member>"; split at most twice
        # so members containing ':' stay intact.
        if key_str.startswith('stats:target_ips:'):
            target_ips[key_str.split(':', 2)[2]] = value
        elif key_str.startswith('stats:url_requests:'):
            url_requests[unquote(key_str.split(':', 2)[2])] = value
        elif key_str.startswith('stats:user_agents:'):
            user_agents[unquote(key_str.split(':', 2)[2])] = value
        elif key_str.startswith('stats:client_ips:'):
            client_ips[key_str.split(':', 2)[2]] = value
        else:
            stats_data[key_str] = value

    # Structure response
    return jsonify({
        **stats_data,
        'target_ips': target_ips,
        'url_requests': url_requests,
        'user_agents': user_agents,
        'client_ips': client_ips,
    })
|
|
|
|
@app.errorhandler(400)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(413)
@app.errorhandler(500)
def handle_errors(e):
    """Render the shared error template for all handled HTTP error codes,
    preserving the original status code on the response."""
    return render_template('error.html', error=e), e.code
|
|
|
|
if __name__ == '__main__':
    # Dev entry point. NOTE(review): 0.0.0.0 binds every interface — front
    # with a reverse proxy (or bind 127.0.0.1) in production.
    app.run(host='0.0.0.0', port=8283)
|