4113 lines
141 KiB
Python
4113 lines
141 KiB
Python
#!/usr/bin/env python3
|
|
from __future__ import annotations
|
|
|
|
import argparse, os, sys, json, shutil, subprocess, tarfile, tempfile, urllib.request, re, time, threading, signal, shutil, filecmp
|
|
from pathlib import Path
|
|
from glob import glob
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from contextlib import contextmanager
|
|
|
|
DEBUG = False  # global verbosity switch: True echoes commands and disables spinners

# ========== Configuration ==========

# Minimum required Node.js version for NPM 2.12.6+
MIN_NODEJS_VERSION = 20

# Maximum supported Node.js version
MAX_NODEJS_VERSION = 21

# Theme.Park settings (for --dark-mode or --tp-theme)
# Popular themes: organizr, dark, plex, nord, dracula, space-gray, hotline, aquamarine
TP_DOMAIN = "theme-park.dev"
TP_SCHEME = "https"
# NOTE: kept as the string "false" (not a bool) — presumably substituted
# verbatim into theme configuration; confirm before changing the type.
TP_COMMUNITY_THEME = "false"
TP_DEFAULT_THEME = "organizr"

# NPM Admin Interface Configuration
NPM_ADMIN_ENABLE_SSL = True
NPM_ADMIN_HTTP_PORT = 81
NPM_ADMIN_HTTPS_PORT = 8181
NPM_ADMIN_ROOT_PATH = "/opt/npm/frontend"
NPM_ADMIN_CERT_PATH = "/etc/nginx/ssl/npm-admin.crt"
NPM_ADMIN_KEY_PATH = "/etc/nginx/ssl/npm-admin.key"
NPM_ADMIN_CERT_DAYS = 3650  # self-signed admin cert validity (~10 years)

# min. RAM settings
MIN_MEMORY_GB = 3.5  # below this much available RAM a swap file is set up
SWAP_SIZE_GB = 2.0   # size of the swap file created when memory is low
|
|
|
|
# ========== UI / Spinner ==========
|
|
|
|
class Spinner:
    """Animated terminal spinner driven by a background daemon thread.

    Renders ``<frame> <text>`` on one line using ANSI escapes
    (``\\r\\033[K`` = return to column 0 and clear the line).  When DEBUG
    is on or stdout is not a TTY, no thread is started and a plain
    ``• text ...`` line is printed instead.  A lock guards the frame
    index and text so ``update_text()`` is safe from other threads.
    """

    # Animation frame sets, selected via the ``style`` constructor argument.
    FRAMES = {
        'dots': ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"],
        'line': ['|', '/', '-', '\\'],
        'arrow': ['←', '↖', '↑', '↗', '→', '↘', '↓', '↙'],
        'braille': ['⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷'],
        'circle': ['◐', '◓', '◑', '◒'],
        'bounce': ['⠁', '⠂', '⠄', '⡀', '⢀', '⠠', '⠐', '⠈'],
    }

    def __init__(self, text, style='dots'):
        # Unknown styles silently fall back to 'dots'.
        self.text = text
        self.style = style
        self.frames = self.FRAMES.get(style, self.FRAMES['dots'])
        self._stop_event = threading.Event()
        self._lock = threading.Lock()
        self._thread = None  # spinner thread; created lazily in start()
        self._frame_index = 0
        self._is_running = False

    def _spin(self):
        """Thread body: redraw the spinner line until the stop event fires."""
        try:
            while not self._stop_event.is_set():
                with self._lock:
                    frame = self.frames[self._frame_index % len(self.frames)]
                    sys.stdout.write(f"\r\033[K{frame} {self.text}")
                    sys.stdout.flush()
                    self._frame_index += 1
                time.sleep(0.08)
        except Exception:
            # Never let a drawing error kill the program.
            pass

    def start(self):
        """Start the animation; returns self (fluent / context-manager use).

        In DEBUG or non-TTY mode, prints one static line and starts no thread.
        """
        if DEBUG:
            print(f"• {self.text} ...")
            return self

        if not sys.stdout.isatty():
            print(f"• {self.text} ...")
            return self

        with self._lock:
            if not self._is_running:
                self._stop_event.clear()
                self._frame_index = 0
                self._thread = threading.Thread(target=self._spin, daemon=True)
                self._thread.start()
                self._is_running = True
        return self

    def stop_ok(self, final_text=None):
        """Stop the spinner and print a success (✔) line."""
        text = final_text or self.text
        self._stop(f"✔ {text}", " " * 20)

    def stop_fail(self, final_text=None):
        """Stop the spinner and print a failure (✖) line."""
        text = final_text or self.text
        self._stop(f"✖ {text}", " " * 20)

    def stop_warning(self, final_text=None):
        """Stop the spinner and print a warning (⚠) line."""
        text = final_text or self.text
        self._stop(f"⚠ {text}", " " * 20)

    def _stop(self, message, padding=""):
        """Stop the thread (if any) and replace the spinner line with *message*.

        *padding* overwrites leftover characters from a longer spinner line.
        """
        if DEBUG or not sys.stdout.isatty():
            # Static mode: no thread was started, just print the outcome.
            print(message)
            self._is_running = False
            return

        with self._lock:
            self._stop_event.set()
            self._is_running = False

        # Bounded join so a stuck drawing thread cannot hang shutdown.
        if self._thread and self._thread.is_alive():
            self._thread.join(timeout=0.5)

        sys.stdout.write(f"\r\033[K{message}{padding}\n")
        sys.stdout.flush()

    def update_text(self, new_text):
        """Change the message shown next to the spinner (thread-safe)."""
        with self._lock:
            self.text = new_text

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # ✔ on clean exit, ✖ when an exception escaped the body;
        # returning False means the exception is not suppressed.
        if exc_type is not None:
            self.stop_fail()
        else:
            self.stop_ok()
        return False
|
|
|
|
@contextmanager
def step(text, style='dots'):
    """Context manager that shows a Spinner for the duration of a step.

    Yields the Spinner so callers may update its text.  On normal exit
    the step is marked OK; on any exception it is marked failed and the
    exception is re-raised.

    Fixes: dropped the unused exception variable, and moved ``stop_ok``
    into an ``else`` clause so a failure raised by ``stop_ok`` itself
    cannot also trigger ``stop_fail``.
    """
    spinner = Spinner(text, style=style)
    spinner.start()
    try:
        yield spinner
    except Exception:
        spinner.stop_fail()
        raise
    else:
        spinner.stop_ok()
|
|
|
|
def signal_handler(signum, frame):
    """SIGINT handler: wipe the current spinner line and exit with code 130."""
    # Clear the in-progress spinner line before printing the abort notice.
    print("\r\033[K", end="", flush=True)
    print("\nAborted by user")
    sys.exit(130)


# Register the Ctrl-C handler as soon as the module loads.
signal.signal(signal.SIGINT, signal_handler)
|
|
|
|
|
|
def _devnull():
    """Return DEVNULL for quiet subprocess calls, or None (inherit) in DEBUG."""
    return None if DEBUG else subprocess.DEVNULL
|
|
|
|
|
|
def run(cmd, check=True, env=None):
    """Run *cmd*; output is silenced unless DEBUG (which also echoes the command)."""
    if DEBUG:
        print("+", " ".join(cmd))
    sink = None if DEBUG else subprocess.DEVNULL
    return subprocess.run(cmd, check=check, env=env, stdout=sink, stderr=sink)
|
|
|
|
|
|
def run_out(cmd, check=True):
    """Run *cmd* and return its captured stdout as text."""
    if DEBUG:
        print("+", " ".join(cmd))
    proc = subprocess.run(cmd, check=check, capture_output=True, text=True)
    return proc.stdout
|
|
|
|
|
|
# ========== Utils ==========
|
|
|
|
|
|
def ensure_root():
    """Exit with status 1 unless the process runs as root (euid 0)."""
    if os.geteuid() == 0:
        return
    print("Run as root.", file=sys.stderr)
    sys.exit(1)
|
|
|
|
|
|
def os_release():
|
|
data = {}
|
|
try:
|
|
for line in Path("/etc/os-release").read_text().splitlines():
|
|
if "=" in line:
|
|
k, v = line.split("=", 1)
|
|
data[k] = v.strip().strip('"')
|
|
except Exception:
|
|
pass
|
|
pretty = (
|
|
data.get("PRETTY_NAME")
|
|
or f"{data.get('ID','linux')} {data.get('VERSION_ID','')}".strip()
|
|
)
|
|
return {
|
|
"ID": data.get("ID", ""),
|
|
"VERSION_ID": data.get("VERSION_ID", ""),
|
|
"CODENAME": data.get("VERSION_CODENAME", ""),
|
|
"PRETTY": pretty,
|
|
}
|
|
|
|
|
|
def apt_update_upgrade():
    """Refresh apt package lists, then upgrade installed packages."""
    with step("Updating package lists and system"):
        for cmd in (["apt-get", "update", "-y"], ["apt-get", "-y", "upgrade"]):
            run(cmd)
|
|
|
|
|
|
def apt_install(pkgs):
    """Install *pkgs* via apt-get; no-op for an empty list."""
    if not pkgs:
        return
    with step(f"Installing packages: {', '.join(pkgs)}"):
        run(["apt-get", "install", "-y", *pkgs])
|
|
|
|
|
|
def apt_try_install(pkgs):
    """Install only the *pkgs* present in the apt cache; silently skip the rest."""
    if not pkgs:
        return
    available = []
    for name in pkgs:
        # apt-cache exits non-zero when the package is unknown.
        probe = subprocess.run(
            ["apt-cache", "show", name], stdout=_devnull(), stderr=_devnull()
        )
        if probe.returncode == 0:
            available.append(name)
        elif DEBUG:
            print(f"skip missing pkg: {name}")
    if available:
        apt_install(available)
|
|
|
|
|
|
def apt_purge(pkgs):
    """Best-effort purge of *pkgs* followed by autoremove (failures ignored)."""
    if not pkgs:
        return
    with step(f"Removing conflicting packages: {', '.join(pkgs)}"):
        run(["apt-get", "purge", "-y", *pkgs], check=False)
        run(["apt-get", "autoremove", "-y"], check=False)
|
|
|
|
|
|
def write_file(path: Path, content: str, mode=0o644):
    """Write *content* to *path* as UTF-8, creating parent dirs, then chmod *mode*."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
    os.chmod(path, mode)
|
|
|
|
|
|
def append_unique(path: Path, lines: str):
    """Append each non-blank line of *lines* to *path* unless already present.

    "Present" is a substring test against the file's current content plus
    anything appended earlier in the same call, so duplicate input lines
    are written at most once.  Creates the file (and parents) if needed.

    Fixes: dedupe previously compared only against the file's original
    content (duplicates inside *lines* were appended repeatedly), and an
    empty file gained a spurious leading newline.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    existing = path.read_text(encoding="utf-8") if path.exists() else ""
    out = existing
    for line in lines.splitlines():
        if line.strip() and line not in out:
            # Separate from prior content with a newline, but don't
            # prepend one to a previously empty file.
            if out and not out.endswith("\n"):
                out += "\n"
            out += line + "\n"
    path.write_text(out, encoding="utf-8")
|
|
|
|
|
|
def parse_version(version_str: str) -> tuple:
    """Parse a leading "X.Y.Z" from *version_str* into an int 3-tuple.

    Returns (0, 0, 0) when the string (after stripping) does not begin
    with three dotted numbers, or when the input is not a string at all.

    Fix: replaced a bare ``except:`` (which also swallowed SystemExit and
    KeyboardInterrupt) with the exceptions that can actually occur.
    """
    try:
        match = re.match(r'(\d+)\.(\d+)\.(\d+)', version_str.strip())
    except (AttributeError, TypeError):
        # Non-string input (e.g. None) -> unparseable.
        return (0, 0, 0)
    if not match:
        return (0, 0, 0)
    return tuple(int(part) for part in match.groups())
|
|
|
|
def interactive_install_mode():
    """
    Interactive mode - asks user for installation preferences when no args provided.

    Returns dict with user choices.
    DEFAULT: Tagged release (stable) instead of branch

    Fix: the version-parsing guard used a bare ``except:`` which also
    swallowed SystemExit/KeyboardInterrupt; narrowed to ValueError.
    """
    print("\n" + "="*70)
    print("NGINX PROXY MANAGER - INTERACTIVE INSTALLATION")
    print("="*70 + "\n")

    print("1. Select mode:")
    print(" [1] Fresh Install (default)")
    print(" [2] Update existing installation")

    mode_choice = input("\nYour choice [1]: ").strip() or "1"
    is_update = (mode_choice == "2")

    if is_update:
        print("\n✓ Update mode selected")
        return {"update": True}

    print("\n2. Installation source:")
    print(" [1] Tagged release (stable version) - recommended")
    print(" [2] Branch (master - latest development)")

    source_choice = input("\nYour choice [1]: ").strip() or "1"

    if source_choice == "2":
        # ========== INSTALL BRANCH ==========
        print("\n3. Select branch:")
        print(" [1] master (default)")
        print(" [2] dev")
        print(" [3] custom branch name")

        branch_choice = input("\nYour choice [1]: ").strip() or "1"

        if branch_choice == "1":
            branch_name = "master"
        elif branch_choice == "2":
            branch_name = "dev"
        else:
            branch_name = input("Enter custom branch name: ").strip() or "master"

        print(f"\n✓ Installing from branch: {branch_name}")

        return {
            "update": False,
            "branch": branch_name,
            "npm_version": None,
            "dark_mode": False,
            "tp_theme": None,
        }

    else:
        # ========== INSTALL TAG ==========
        print("\n3. Select NPM version:")
        print(" [1] Latest stable release (auto-detect)")
        print(" [2] Specific version (e.g., 2.12.6)")

        version_choice = input("\nYour choice [1]: ").strip() or "1"

        if version_choice == "1":
            npm_version = None
            print("\n✓ Will install latest stable release")
        else:
            npm_version = input("Enter version (e.g., 2.12.6): ").strip()
            if npm_version:
                print(f"\n✓ Will install NPM v{npm_version}")
            else:
                # Blank answer falls back to auto-detect.
                npm_version = None
                print("\n✓ Will install latest stable release")

        # ========== DARK MODE / THEME ==========
        # Theme injection is only offered for NPM < 2.13.0.
        ask_about_theme = True
        if npm_version:
            try:
                version_parts = [int(x) for x in npm_version.split(".")[:3]]
                if len(version_parts) < 3:
                    # Pad e.g. "2.13" to (2, 13, 0) for the comparison.
                    version_parts.extend([0] * (3 - len(version_parts)))

                if tuple(version_parts) >= (2, 13, 0):
                    ask_about_theme = False
                    print("\n✓ Dark mode not available for NPM >= 2.13.0")
            except ValueError:
                # Non-numeric version component: keep asking about themes.
                pass

        if ask_about_theme:
            print("\n4. Dark mode / Theme:")
            print(" [1] No theme (default)")
            print(" [2] Dark mode with default theme (organizr)")
            print(" [3] Custom theme (nord, dracula, plex, etc.)")
            print(" Note: Dark mode only works with NPM < 2.13.0")

            theme_choice = input("\nYour choice [1]: ").strip() or "1"

            if theme_choice == "1":
                dark_mode = False
                tp_theme = None
            elif theme_choice == "2":
                dark_mode = True
                tp_theme = None
            else:
                tp_theme = input("Enter theme name (e.g., nord, dracula): ").strip()
                dark_mode = bool(tp_theme)

            if dark_mode:
                if tp_theme:
                    print(f"\n✓ Dark mode enabled with theme: {tp_theme}")
                else:
                    print("\n✓ Dark mode enabled with default theme (organizr)")
        else:
            dark_mode = False
            tp_theme = None

        return {
            "update": False,
            "branch": None,
            "npm_version": npm_version,
            "dark_mode": dark_mode,
            "tp_theme": tp_theme,
        }
|
|
|
|
|
|
def apply_interactive_choices(args, choices):
    """
    Apply user choices from interactive mode to args object.

    IMPORTANT:
    - Sets ALL provided values, even when they are None.
    - branch=None together with npm_version=None means the user picked
      "latest release" (tag).
    """
    # Copy every recognised key that the interactive step produced.
    for key in ("update", "branch", "npm_version", "dark_mode", "tp_theme"):
        if key in choices:
            setattr(args, key, choices.get(key))

    # ========== DEBUG LOGGING ==========
    if DEBUG:
        print(f"\n[DEBUG] Interactive choices applied:")
        print(f" - update: {args.update}")
        print(f" - branch: {args.branch}")
        print(f" - npm_version: {args.npm_version}")
        print(f" - dark_mode: {args.dark_mode}")
        print(f" - tp_theme: {args.tp_theme}")
        print(f" - Logic: branch={args.branch is not None}, npm_version={args.npm_version is not None}")

        if args.branch is None and args.npm_version is None:
            print(f" → Installing from LATEST RELEASE (tag) - auto-detect latest")
        elif args.branch is not None and args.npm_version is None:
            print(f" → Installing from BRANCH: {args.branch}")
        elif args.npm_version is not None:
            print(f" → Installing from TAG/VERSION: {args.npm_version}")

    return args
|
|
|
|
|
|
def check_memory_and_create_swap():
    """Check available memory and create swap if needed - portable version.

    Detection order: psutil -> /proc/meminfo -> os.sysconf; when all fail,
    assume 2 GB and skip swap handling.  Below MIN_MEMORY_GB of available
    RAM, a SWAP_SIZE_GB swap file at /swapfile is reused, reactivated, or
    created.  Returns {"total_gb", "available_gb", "needs_swap"}.

    Fix: the sysconf fallback multiplied SC_PAGE_SIZE by SC_PAGESIZE
    (page size squared) instead of by SC_AVPHYS_PAGES (available pages);
    bare ``except:`` clauses narrowed to ``except Exception`` so that
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    try:
        try:
            import psutil
            total_memory_gb = psutil.virtual_memory().total / (1024 ** 3)
            available_memory_gb = psutil.virtual_memory().available / (1024 ** 3)
        except ImportError:
            try:
                # /proc/meminfo values are in kB, hence 1024**2 -> GB.
                with open('/proc/meminfo', 'r') as f:
                    meminfo = {}
                    for line in f:
                        key, val = line.split(':')
                        meminfo[key.strip()] = int(val.split()[0])

                total_memory_gb = meminfo.get('MemTotal', 0) / (1024 ** 2)
                available_memory_gb = meminfo.get('MemAvailable', meminfo.get('MemFree', 0)) / (1024 ** 2)
            except Exception:
                try:
                    total_memory = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
                    # BUGFIX: available bytes = page size * available pages
                    # (was SC_PAGE_SIZE * SC_PAGESIZE = page size squared).
                    available_memory = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_AVPHYS_PAGES")
                    total_memory_gb = total_memory / (1024 ** 3)
                    available_memory_gb = available_memory / (1024 ** 3)
                except Exception:
                    if DEBUG:
                        print("⚠ Could not detect system memory, assuming 2 GB available")
                    return {"total_gb": 2.0, "available_gb": 2.0, "needs_swap": False}

        print(f"\n{'='*70}")
        print("MEMORY CHECK")
        print(f"{'='*70}")
        print(f"Total RAM: {total_memory_gb:.1f} GB")
        print(f"Available: {available_memory_gb:.1f} GB")
        print(f"Threshold: {MIN_MEMORY_GB} GB")

        memory_info = {
            "total_gb": total_memory_gb,
            "available_gb": available_memory_gb,
            "needs_swap": available_memory_gb < MIN_MEMORY_GB,
        }

        if memory_info["needs_swap"]:
            print(f"⚠ Low memory detected! ({available_memory_gb:.1f} GB < {MIN_MEMORY_GB} GB)")

            swap_file = Path("/swapfile")

            # Case 1: /swapfile is already active -> nothing to do.
            try:
                swapon_output = run_out(["swapon", "--show"], check=False)
                if swapon_output and "/swapfile" in swapon_output:
                    print(f"✓ Swap file (/swapfile) already active")
                    print(f"{'='*70}\n")
                    return memory_info
            except Exception as e:
                if DEBUG:
                    print(f" Debug: swapon check failed: {e}")

            # Case 2: file exists but is inactive -> try to activate it.
            if swap_file.exists():
                print(f"✓ Swap file already exists at /swapfile")
                file_size_bytes = swap_file.stat().st_size
                file_size_gb = file_size_bytes / (1024 ** 3)
                print(f" File size: {file_size_gb:.1f} GB")

                try:
                    run(["swapon", str(swap_file)], check=False)
                except Exception:
                    pass  # best effort

                print(f"{'='*70}\n")
                return memory_info

            # Case 3: create a fresh swap file.
            print(f"Creating {SWAP_SIZE_GB} GB swap file at /swapfile...")

            try:
                with step("Creating swap file"):
                    run(["dd", "if=/dev/zero", f"of={swap_file}", f"bs=1G", f"count={int(SWAP_SIZE_GB)}"])
                    run(["chmod", "600", str(swap_file)])
                    run(["mkswap", str(swap_file)])
                    run(["swapon", str(swap_file)])
                print(f"✓ Swap ({SWAP_SIZE_GB} GB) created and activated")
            except Exception as e:
                print(f"⚠ Could not create swap: {e}")
                print(f" Continuing anyway, installation may be slower...")
        else:
            print(f"✓ Memory sufficient ({available_memory_gb:.1f} GB >= {MIN_MEMORY_GB} GB)")

        print(f"{'='*70}\n")
        return memory_info

    except Exception as e:
        print(f"⚠ Error checking memory: {e}")
        print(f" Assuming sufficient memory and continuing...")
        return {"total_gb": 2.0, "available_gb": 2.0, "needs_swap": False}
|
|
|
|
|
|
def cleanup_swap():
    """Deactivate and delete the temporary /swapfile, if present (best effort)."""
    try:
        swap_file = Path("/swapfile")
        if not swap_file.exists():
            return
        with step("Cleaning up swap"):
            run(["swapoff", str(swap_file)], check=False)
            swap_file.unlink()
        print("✓ Temporary swap removed")
    except Exception as e:
        print(f"⚠ Could not remove swap: {e}")
|
|
|
|
|
|
def github_latest_release_tag(repo: str, override: str | None) -> str:
|
|
if override:
|
|
return override.lstrip("v")
|
|
url = f"https://api.github.com/repos/{repo}/releases/latest"
|
|
with step(f"Downloading from GitGub: {repo}"):
|
|
with urllib.request.urlopen(url) as r:
|
|
data = json.load(r)
|
|
tag = data["tag_name"]
|
|
return tag.lstrip("v")
|
|
|
|
|
|
def write_resolvers_conf(ipv6_enabled: bool):
    """Generate /etc/angie/conf.d/include/resolvers.conf from host DNS settings.

    Nameservers are read from /etc/resolv.conf; when none are found the
    file falls back to Cloudflare + Google public DNS.  IPv6 servers are
    used only when *ipv6_enabled* is true.
    """
    v4, v6 = [], []
    try:
        for raw in Path("/etc/resolv.conf").read_text().splitlines():
            raw = raw.strip()
            if raw.startswith("nameserver"):
                # Drop any "%zone" suffix from link-local IPv6 addresses.
                addr = raw.split()[1].split("%")[0]
                (v6 if ":" in addr else v4).append(addr)
    except Exception:
        pass  # unreadable resolv.conf -> public DNS fallback below

    ips = v4 + (v6 if ipv6_enabled else [])
    cloudflare_ips = ["1.1.1.1"] + (["2606:4700:4700::1111"] if ipv6_enabled else [])
    google_ips = ["8.8.8.8"] + (["2001:4860:4860::8888"] if ipv6_enabled else [])

    if not ips:
        ips = cloudflare_ips + google_ips

    ipv6_flag = " ipv6=on" if ipv6_enabled and any(":" in x for x in ips) else ""

    # Name the status zone after where the resolver list came from.
    if v4 or v6:
        status_zone = "status_zone=default_resolver"
    elif all(ip in cloudflare_ips for ip in ips):
        status_zone = "status_zone=cloudflare_resolver"
    elif all(ip in google_ips for ip in ips):
        status_zone = "status_zone=google_resolver"
    else:
        status_zone = "status_zone=mixed_resolver"

    content = f"resolver {' '.join(ips)} valid=10s {status_zone}{ipv6_flag};\n"
    write_file(Path("/etc/angie/conf.d/include/resolvers.conf"), content, 0o644)
|
|
|
|
|
|
def validate_nodejs_version(version: str) -> tuple[bool, str, str | None]:
    """Resolve and validate a requested Node.js version string.

    Accepts aliases ("latest", "lts", "current") or a numeric version.
    Returns (ok, resolved_version, message): ok is False when the input
    is malformed or exceeds MAX_NODEJS_VERSION, in which case the
    resolved version is clamped to the maximum and a warning returned.
    """
    aliases = {"latest": "21", "lts": "18", "current": "21"}
    resolved = aliases.get(version.lower(), version)

    digits = re.match(r"(\d+)", resolved)
    if digits is None:
        return False, resolved, f"Invalid version format: {version}"

    major_version = int(digits.group(1))
    if major_version > MAX_NODEJS_VERSION:
        warning = (
            f"⚠ WARNING: Requested Node.js v{major_version} exceeds maximum tested version (v{MAX_NODEJS_VERSION}).\n"
            f" NPM may not be compatible with Node.js v{major_version}.\n"
            f" Falling back to Node.js v{MAX_NODEJS_VERSION}."
        )
        return False, str(MAX_NODEJS_VERSION), warning

    return True, resolved, None
|
|
|
|
|
|
def validate_supported_os():
    """Verify the detected distro/version is supported.

    Unknown distributions abort immediately; unknown versions of a known
    distribution prompt the user, who may continue at their own risk.
    """
    distro_id = OSREL.get("ID", "").lower()
    version_id = OSREL.get("VERSION_ID", "").strip()

    SUPPORTED = {"debian": ["11", "12", "13"], "ubuntu": ["20.04", "22.04", "24.04"]}

    if distro_id not in SUPPORTED:
        print(f"\n ⚠ ERROR: Unsupported distribution: {distro_id}")
        print(f" Detected: {OSREL.get('PRETTY', 'Unknown')}")
        print(f"\n Supported distributions:")
        print(f" • Debian 11 (Bullseye), 12 (Bookworm), 13 (Trixie)")
        print(f" • Ubuntu 20.04 LTS, 22.04 LTS, 24.04 LTS")
        print(f" • Debian derivatives: Proxmox, armbian")
        print(f"\n Your distribution may work but is not tested.")
        print(f" Continue at your own risk or install on a supported system.\n")
        sys.exit(1)

    supported_versions = SUPPORTED[distro_id]
    if any(version_id.startswith(v) for v in supported_versions):
        print(f"✓ Supported OS detected: {OSREL.get('PRETTY', 'Unknown')}\n")
        return

    # Known distro, unrecognised version: warn and ask before continuing.
    print(f"\n ⚠ WARNING: Unsupported version of {distro_id}: {version_id}")
    print(f" Detected: {OSREL.get('PRETTY', 'Unknown')}")
    print(f" Supported versions: {', '.join(supported_versions)}")
    print(f"\n This version is not officially tested.")
    print(f" Prerequisites:")
    print(f" • Angie packages must be available for your distribution")
    print(
        f" • Check: https://en.angie.software/angie/docs/installation/oss_packages/"
    )
    print(f" • Your system should be Debian/Ubuntu compatible (apt-based)")

    response = input("\n Continue anyway? [y/N]: ").strip().lower()
    if response not in ["y", "yes"]:
        print("\n Installation cancelled.\n")
        sys.exit(1)
    print()
|
|
|
|
|
|
def save_installer_config(config: dict):
    """Persist installer settings to /data/installer.json (best effort).

    Stamps *config* with a "last_modified" timestamp before writing;
    failures are reported as warnings, never raised.
    """
    config_path = Path("/data/installer.json")
    config_path.parent.mkdir(parents=True, exist_ok=True)

    config["last_modified"] = time.strftime("%Y-%m-%d %H:%M:%S")

    try:
        config_path.write_text(json.dumps(config, indent=2), encoding="utf-8")
        if DEBUG:
            print(f"✓ Saved installer config to {config_path}")
    except Exception as e:
        print(f"⚠ Warning: Could not save installer config: {e}")
|
|
|
|
|
|
def load_installer_config() -> dict:
    """Load installer settings from /data/installer.json; {} when absent or bad."""
    config_path = Path("/data/installer.json")

    if not config_path.exists():
        if DEBUG:
            print(f"No installer config found at {config_path}")
        return {}

    try:
        config = json.loads(config_path.read_text(encoding="utf-8"))
        if DEBUG:
            print(f"✓ Loaded installer config from {config_path}")
        return config
    except Exception as e:
        print(f"⚠ Warning: Could not load installer config: {e}")
        return {}
|
|
|
|
|
|
def comment_x_served_by_step(path="/etc/angie/conf.d/include/proxy.conf"):
    """Comment out every active `add_header X-Served-By $host;` line in *path*.

    A ``.bak`` backup is made first and the file is replaced atomically
    via a same-directory temp file.  Returns the number of lines
    commented out (0 when nothing matched, in which case the file is
    untouched).  Raises FileNotFoundError when *path* does not exist.
    """
    conf = Path(path)
    if not conf.exists():
        raise FileNotFoundError(path)

    text = conf.read_text()
    # (?!#) skips lines that are already commented out.
    directive = re.compile(
        r"^(?P<ws>\s*)(?!#)\s*add_header\s+X-Served-By\s+\$host\s*;\s*$", re.MULTILINE
    )
    matches = directive.findall(text)
    if not matches:
        return 0

    backup = conf.with_suffix(conf.suffix + ".bak")
    shutil.copy2(conf, backup)

    rewritten = directive.sub(
        lambda m: f"{m.group('ws')}# add_header X-Served-By $host;", text
    )

    # Write to a temp file beside the target, then atomically swap it in.
    fd, tmp_name = tempfile.mkstemp(dir=str(conf.parent))
    os.close(fd)
    Path(tmp_name).write_text(rewritten)
    shutil.copymode(conf, tmp_name)
    os.replace(tmp_name, conf)

    print(f"✔ Hide X-Served-by header | backup: {backup}")
    return len(matches)
|
|
|
|
|
|
def set_file_ownership(files: list[str | Path], owner: str, mode: int | None = None):
    """chown each of *files* to *owner*, optionally chmod to *mode*.

    Missing files are recorded as failures rather than raising.  Prints a
    summary of successes and failures; returns True only when every file
    was handled without error.
    """
    succeeded = []
    failures = []

    for entry in files:
        path = Path(entry)

        if not path.exists():
            failures.append((str(path), "File not found"))
            continue

        try:
            run(["chown", owner, str(path)])
            if mode is not None:
                os.chmod(path, mode)
        except Exception as e:
            failures.append((str(path), str(e)))
        else:
            succeeded.append(str(path))

    if succeeded:
        print(f"✔ Set ownership '{owner}' for {len(succeeded)} file(s)")
        if DEBUG:
            for f in succeeded:
                print(f" - {f}")

    if failures:
        print(f"⚠ Failed to set ownership for {len(failures)} file(s):")
        for f, err in failures:
            print(f" - {f}: {err}")

    return not failures
|
|
|
|
|
|
def check_distro_nodejs_available():
    """Query apt for the distribution's nodejs package.

    Returns (available, major_version, version_string);
    (False, None, None) when the package is missing or the query fails.
    """
    try:
        probe = subprocess.run(
            ["apt-cache", "show", "nodejs"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        if probe.returncode == 0:
            for line in probe.stdout.splitlines():
                if not line.startswith("Version:"):
                    continue
                version_str = line.split(":", 1)[1].strip()
                digits = re.match(r"(\d+)", version_str)
                if digits:
                    major = int(digits.group(1))
                    if DEBUG:
                        print(
                            f"✓ Distro has nodejs v{version_str} (major: {major})"
                        )
                    return True, major, version_str
        return False, None, None
    except Exception as e:
        if DEBUG:
            print(f"Failed to check distro nodejs: {e}")
        return False, None, None
|
|
|
|
|
|
def install_nodejs_from_distro():
    """Install nodejs (and npm, when packaged separately) from the distro repos.

    Returns True once both node and npm binaries are present, else False.
    """
    with step("Installing Node.js from distribution repositories"):
        apt_install(["nodejs"])

        # Some distros ship npm as a separate package.
        if not shutil.which("npm"):
            apt_try_install(["npm"])

        if shutil.which("node"):
            detected_node = run_out(["node", "--version"], check=False).strip()
            print(f" Node.js: {detected_node}")

        if shutil.which("npm"):
            detected_npm = run_out(["npm", "--version"], check=False).strip()
            print(f" npm: {detected_npm}")
            return True

    return False
|
|
|
|
|
|
def ensure_minimum_nodejs(min_version=MIN_NODEJS_VERSION, user_requested_version=None):
    """Ensure a suitable Node.js (and npm) installation exists.

    Preference order: keep the already-installed node when it satisfies
    the constraints, then the distro package, then NodeSource.  A user
    requested major version is clamped into
    [MIN_NODEJS_VERSION, MAX_NODEJS_VERSION].  Returns True when a node
    binary is present at the end, False otherwise.
    """
    with step("Checking Node.js version requirements\n"):
        # --- Phase 1: is the currently installed node already acceptable? ---
        try:
            node_ver = run_out(["node", "--version"], check=False).strip()
            match = re.match(r"v?(\d+)", node_ver)
            if match:
                current_major = int(match.group(1))

                if user_requested_version:
                    requested_match = re.match(r"(\d+)", str(user_requested_version))
                    if requested_match:
                        requested_major = int(requested_match.group(1))
                        # Clamp the request into the supported range.
                        if requested_major < MIN_NODEJS_VERSION:
                            requested_major = MIN_NODEJS_VERSION
                        elif requested_major > MAX_NODEJS_VERSION:
                            requested_major = MAX_NODEJS_VERSION

                        if current_major == requested_major:
                            if shutil.which("npm"):
                                npm_ver = run_out(
                                    ["npm", "--version"], check=False
                                ).strip()
                                print(f" Node.js: {node_ver}")
                                print(f" npm: {npm_ver}")
                            else:
                                print(f" Node.js: {node_ver}")
                            return True
                else:
                    # No explicit request: any installed version >= minimum is fine.
                    if current_major >= min_version:
                        if shutil.which("npm"):
                            npm_ver = run_out(["npm", "--version"], check=False).strip()
                            print(f" Node.js: {node_ver}")
                            print(f" npm: {npm_ver}")
                        else:
                            print(f" Node.js: {node_ver}")
                        return True
        except FileNotFoundError:
            pass  # node binary not installed at all
        except Exception:
            pass  # probe failure -> fall through to installation

        # --- Phase 2: install Node.js ---
        if user_requested_version:
            requested_match = re.match(r"(\d+)", str(user_requested_version))
            if requested_match:
                requested_major = int(requested_match.group(1))

                if requested_major < MIN_NODEJS_VERSION:
                    print(
                        f"⚠ Requested version {requested_major} < minimum {MIN_NODEJS_VERSION}"
                    )
                    print(f" Installing minimum version: v{MIN_NODEJS_VERSION}")
                    install_node_from_nodesource(str(MIN_NODEJS_VERSION))
                elif requested_major > MAX_NODEJS_VERSION:
                    print(
                        f"⚠ Requested version {requested_major} > maximum {MAX_NODEJS_VERSION}"
                    )
                    print(f" Installing maximum version: v{MAX_NODEJS_VERSION}")
                    install_node_from_nodesource(str(MAX_NODEJS_VERSION))
                else:
                    install_node_from_nodesource(str(requested_major))
            else:
                # Unparseable request: fall back to the minimum version.
                install_node_from_nodesource(str(MIN_NODEJS_VERSION))
        else:
            # No explicit request: prefer the distro package when new enough.
            has_nodejs, major, version_str = check_distro_nodejs_available()

            if has_nodejs and major and major >= min_version:
                print(f"✓ Distribution provides Node.js v{version_str} (>= v{min_version})")
                if install_nodejs_from_distro():
                    return True
                else:
                    print(f"⚠ Failed to install from distro, falling back to NodeSource")
                    install_node_from_nodesource(str(min_version))
            else:
                if has_nodejs:
                    print(f"⚠ Distribution Node.js v{version_str} < minimum v{min_version}")
                else:
                    print(f"✓ Distribution doesn't provide Node.js package")
                print(f" Installing from NodeSource: v{min_version}")
                install_node_from_nodesource(str(min_version))

        # --- Phase 3: verify the result ---
        if shutil.which("node"):
            node_ver = run_out(["node", "--version"], check=False).strip()
            if shutil.which("npm"):
                npm_ver = run_out(["npm", "--version"], check=False).strip()
            # NOTE(review): success appears to require only the node binary;
            # node_ver/npm_ver are probed but unused here — confirm intent.
            return True

    return False
|
|
|
|
def cleanup_dev_config() -> None:
    """
    Remove development config file if it exists.
    Ensures production-ready configuration.
    For versions 2.12.X
    """
    dev_conf = Path("/etc/nginx/conf.d/dev.conf")

    if not dev_conf.exists():
        if DEBUG:
            print(f"✓ Development config not found: {dev_conf}")
        return

    try:
        dev_conf.unlink()
        print(f"✓ Removed development config: {dev_conf}")
    except Exception as e:
        print(f" Warning: Could not remove {dev_conf}: {e}")
|
|
|
|
|
|
def download_extract_tar_gz(url: str, dest_dir: Path) -> Path:
    """Download a .tar.gz from *url*, extract it into *dest_dir*.

    Returns the path of the archive's top-level directory inside
    *dest_dir*.  Extraction uses the safe ``filter="data"`` mode on
    Python >= 3.12 and retries without it on older interpreters or when
    the archive contains out-of-destination links.

    Fixes: the downloaded temp file was leaked when extraction raised
    (unlink now sits in a ``finally``); the exception-name check used a
    redundant substring test on ``type(e).__name__``.
    """
    dest_dir.mkdir(parents=True, exist_ok=True)
    with step("Downloading and untaring"):
        with urllib.request.urlopen(url) as r, tempfile.NamedTemporaryFile(
            delete=False
        ) as tf:
            shutil.copyfileobj(r, tf)
            tf.flush()
            tf_path = Path(tf.name)
        try:
            with tarfile.open(tf_path, "r:gz") as t:
                try:
                    t.extractall(dest_dir, filter="data")
                except TypeError:
                    # Python < 3.12: extractall() has no *filter* argument.
                    t.extractall(dest_dir)
                except Exception as e:
                    if type(e).__name__ == "LinkOutsideDestinationError":
                        # Archive has links pointing outside dest; retry unfiltered.
                        t.extractall(dest_dir)
                    else:
                        raise
                top = t.getmembers()[0].name.split("/")[0]
        finally:
            os.unlink(tf_path)
        return dest_dir / top
|
|
|
|
|
|
# Distro info (used in banners & repo setup).
# NOTE: read once at import time; later changes to /etc/os-release are
# not picked up.
OSREL = os_release()
|
|
|
|
|
|
# === extra sync ===
|
|
def sync_backup_nginx_conf():
    """Copy files from /etc/nginx.bak/conf.d into /etc/angie/conf.d.

    Only files that are missing or differ (byte-for-byte comparison) are
    copied; per-file failures are reported but do not stop the sync.
    No-op when the backup directory does not exist.
    """
    src = Path("/etc/nginx.bak/conf.d")
    dst = Path("/etc/angie/conf.d")
    if not src.exists():
        return
    with step("Sync /etc/nginx.bak/conf.d -> /etc/angie/conf.d"):
        for source in src.rglob("*"):
            if source.is_dir():
                continue
            target = dst / source.relative_to(src)
            target.parent.mkdir(parents=True, exist_ok=True)
            try:
                unchanged = target.exists() and filecmp.cmp(source, target, shallow=False)
                if not unchanged:
                    shutil.copy2(source, target)
            except Exception as e:
                print(f"Warning: sync failed for {source} -> {target}: {e}")
|
|
|
|
|
|
def _provision_certbot_venv(
    python_cmd: str,
    venv_dir: Path,
    python_label: str,
    recreate: bool = True,
) -> None:
    """Install the certbot stack into *venv_dir* and expose it system-wide.

    python_cmd   -- interpreter used to create the virtualenv
    python_label -- human-readable interpreter description for the summary
    recreate     -- when False, skip `python -m venv` if pip already exists

    This is the venv-provisioning sequence that was previously duplicated
    three times (system python, deadsnakes, pyenv) in setup_certbot_venv.
    """
    venv_bin = venv_dir / "bin"
    pip_path = venv_bin / "pip"
    certbot_path = venv_bin / "certbot"

    venv_dir.mkdir(parents=True, exist_ok=True)
    if recreate or not pip_path.exists():
        run([python_cmd, "-m", "venv", str(venv_dir)])

    env_build = os.environ.copy()
    # Work around distutils removal on recent setuptools/Python combos.
    env_build["SETUPTOOLS_USE_DISTUTILS"] = "local"

    run(
        [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"],
        env=env_build,
    )
    run(
        [
            str(pip_path),
            "install",
            "-U",
            "cryptography",
            "cffi",
            "certbot",
            "tldextract",
        ],
        env=env_build,
    )

    # Make the venv's certbot the system default via /usr/local/bin.
    Path("/usr/local/bin").mkdir(parents=True, exist_ok=True)
    target = Path("/usr/local/bin/certbot")
    if target.exists() or target.is_symlink():
        try:
            target.unlink()
        except Exception:
            pass
    target.symlink_to(certbot_path)

    cb_ver = run_out([str(certbot_path), "--version"], check=False) or ""
    pip_ver = run_out([str(pip_path), "--version"], check=False) or ""
    print(f" Python: {python_label}")
    print(f" Certbot: {cb_ver.strip()}")
    print(f" Pip: {pip_ver.strip().split(' from ')[0]}")


def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")):
    """Install certbot into a dedicated Python 3.11 virtualenv at *venv_dir*.

    Interpreter selection order:
      1. a python3.11 already on PATH,
      2. Ubuntu: python3.11 from the deadsnakes PPA,
      3. otherwise (Debian): Python 3.11 built via pyenv under /opt/npm/.pyenv.
    """
    info = os_release()
    distro_id = (info.get("ID") or "").lower()

    # ============================================================
    # STEP 1: Check if Python 3.11 is already available
    # ============================================================
    system_python311 = None  # holds the "Python 3.11.x" version string
    if shutil.which("python3.11"):
        try:
            ver_output = run_out(["python3.11", "--version"], check=False).strip()
            match = re.search(r"Python (\d+)\.(\d+)", ver_output)
            if match and (int(match.group(1)), int(match.group(2))) == (3, 11):
                system_python311 = ver_output
                if DEBUG:
                    print(f"✔ Found system Python 3.11: {ver_output}")
        except Exception:
            pass

    # ============================================================
    # STEP 2: Use system Python 3.11 if available
    # ============================================================
    if system_python311:
        with step("Using system Python 3.11 for certbot venv"):
            # Ensure python3.11-venv is installed
            apt_try_install(["python3.11-venv", "python3-pip"])
            _provision_certbot_venv("python3.11", venv_dir, system_python311)
        return

    # ============================================================
    # STEP 3: Ubuntu - install Python 3.11 from deadsnakes PPA
    # ============================================================
    if distro_id == "ubuntu":
        with step(
            f"Ubuntu detected: {info.get('PRETTY','Ubuntu')}. Install Python 3.11 via deadsnakes"
        ):
            try:
                run(["apt-get", "update", "-y"], check=False)
                apt_try_install(["software-properties-common"])
            except Exception:
                run(
                    ["apt-get", "install", "-y", "software-properties-common"],
                    check=False,
                )

            run(["add-apt-repository", "-y", "ppa:deadsnakes/ppa"])
            run(["apt-get", "update", "-y"], check=False)
            run(["apt-get", "install", "-y", "python3.11", "python3.11-venv"])

        with step(f"Create venv at {venv_dir} using python3.11"):
            _provision_certbot_venv(
                "python3.11", venv_dir, "Python 3.11 (deadsnakes)"
            )
        return

    # ============================================================
    # STEP 4: Debian - install Python 3.11 via pyenv
    # ============================================================
    PYENV_ROOT = Path("/opt/npm/.pyenv")
    PYENV_OWNER = "npm"
    PYTHON_VERSION = "3.11.14"

    # Build dependencies for pyenv
    with step("Installing pyenv build dependencies"):
        apt_install([
            "build-essential",
            "gcc",
            "make",
            "pkg-config",
            "libssl-dev",
            "zlib1g-dev",
            "libbz2-dev",
            "libreadline-dev",
            "libsqlite3-dev",
            "tk-dev",
            "libncursesw5-dev",
            "libgdbm-dev",
            "libffi-dev",
            "uuid-dev",
            "liblzma-dev",
        ])

    Path("/opt/npm").mkdir(parents=True, exist_ok=True)
    PYENV_ROOT.mkdir(parents=True, exist_ok=True)
    run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False)

    # Idempotent shell snippet: clone pyenv as the npm user when missing.
    clone_pyenv_sh = (
        'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then '
        " command -v git >/dev/null 2>&1 || sudo apt-get install -y git; "
        " git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; "
        "fi"
    )

    with step(f"Ensuring pyenv is available at {PYENV_ROOT}"):
        pyenv_bin_path = PYENV_ROOT / "bin" / "pyenv"
        if not pyenv_bin_path.exists():
            run(["sudo", "-u", PYENV_OWNER, "bash", "-lc", clone_pyenv_sh])

    PYENV_BIN_CANDIDATES = [
        str(PYENV_ROOT / "bin" / "pyenv"),
        "pyenv",
        "/usr/bin/pyenv",
        "/usr/lib/pyenv/bin/pyenv",
    ]

    pyenv_bin = next(
        (c for c in PYENV_BIN_CANDIDATES if shutil.which(c) or Path(c).exists()), None
    )
    if not pyenv_bin:
        raise RuntimeError("No 'pyenv' found even after git clone attempt.")

    with step(f"Installing Python {PYTHON_VERSION} via pyenv into {PYENV_ROOT}"):
        run(["mkdir", "-p", str(PYENV_ROOT)])
        run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False)
        # Re-run the clone guard (idempotent) before building.
        run(["sudo", "-u", PYENV_OWNER, "bash", "-lc", clone_pyenv_sh])
        install_cmd = (
            "export HOME=/opt/npm; "
            "export PYENV_ROOT=/opt/npm/.pyenv; "
            'export PATH="$PYENV_ROOT/bin:/usr/bin:/bin"; '
            'mkdir -p "$PYENV_ROOT"; cd "$HOME"; '
            f"pyenv install -s {PYTHON_VERSION}"
        )
        # env -i: build with a clean environment owned by the npm user.
        run(
            [
                "sudo",
                "-u",
                PYENV_OWNER,
                "env",
                "-i",
                "HOME=/opt/npm",
                f"PYENV_ROOT={PYENV_ROOT}",
                f"PATH={PYENV_ROOT}/bin:/usr/bin:/bin",
                "bash",
                "-lc",
                install_cmd,
            ]
        )

    # Shell profile hook so interactive npm-user shells pick up pyenv.
    profile_snippet = f"""# Auto-generated by npm-angie-auto-install
# pyenv for '{PYENV_OWNER}'
if [ -d "{PYENV_ROOT}" ]; then
export PYENV_ROOT="{PYENV_ROOT}"
case ":$PATH:" in *":{PYENV_ROOT}/bin:"*) ;; *) PATH="{PYENV_ROOT}/bin:$PATH";; esac
case ":$PATH:" in *":/usr/lib/pyenv/bin:"*) ;; *) PATH="/usr/lib/pyenv/bin:$PATH";; esac
export PATH
case "$-" in *i*) _interactive=1 ;; *) _interactive=0 ;; esac
if [ "$_interactive" = 1 ] && {{ [ "${{USER:-}}" = "{PYENV_OWNER}" ] || [ "${{SUDO_USER:-}}" = "{PYENV_OWNER}" ]; }}; then
if command -v pyenv >/dev/null 2>&1; then
eval "$(pyenv init -)"
elif [ -x "{PYENV_ROOT}/bin/pyenv" ]; then
eval "$("{PYENV_ROOT}/bin/pyenv" init -)"
fi
fi
fi
"""
    write_file(Path("/etc/profile.d/npm-pyenv.sh"), profile_snippet, 0o644)

    python311 = PYENV_ROOT / "versions" / PYTHON_VERSION / "bin" / "python3.11"
    if not python311.exists():
        python311 = PYENV_ROOT / "versions" / PYTHON_VERSION / "bin" / "python3"
    if not python311.exists():
        raise RuntimeError(f"No python {PYTHON_VERSION} in {PYENV_ROOT}/versions/.")

    with step(f"Preparing Certbot venv at {venv_dir} (Python {PYTHON_VERSION})"):
        # recreate=False: keep an existing venv if pip is already present.
        _provision_certbot_venv(
            str(python311),
            venv_dir,
            f"{PYTHON_VERSION} (pyenv)",
            recreate=False,
        )

    run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", str(PYENV_ROOT)], check=False)
|
|
|
|
|
|
def configure_letsencrypt():
    """Prepare Let's Encrypt directories, ownership and default certbot config."""
    with step("configure letsencrypt"):
        chown_npm = ["chown", "-R", "npm:npm"]
        run(chown_npm + ["/opt/certbot"], check=False)
        Path("/etc/letsencrypt").mkdir(parents=True, exist_ok=True)
        run(chown_npm + ["/etc/letsencrypt"], check=False)
        # Best-effort distro certbot (the venv certbot is the primary one).
        run(
            ["apt-get", "install", "-y", "--no-install-recommends", "certbot"],
            check=False,
        )
        defaults = """text = True
non-interactive = True
webroot-path = /data/letsencrypt-acme-challenge
key-type = ecdsa
elliptic-curve = secp384r1
preferred-chain = ISRG Root X1
"""
        write_file(Path("/etc/letsencrypt.ini"), defaults, 0o644)
        run(chown_npm + ["/etc/letsencrypt"], check=False)
|
|
|
|
|
|
def ensure_nginx_symlink():
    """Point /etc/nginx at /etc/angie, backing up a real directory first."""
    angie_dir = Path("/etc/angie")
    nginx_path = Path("/etc/nginx")
    try:
        # Already linked to Angie: nothing to do.
        if nginx_path.is_symlink() and nginx_path.resolve() == angie_dir:
            print("✔ Created symlink /etc/nginx -> /etc/angie")
            return

        # A real (non-symlink) /etc/nginx gets moved aside to /etc/nginx.bak.
        if nginx_path.exists() and not nginx_path.is_symlink():
            backup = Path("/etc/nginx.bak")
            try:
                if backup.exists() and (backup.is_symlink() or backup.is_file()):
                    backup.unlink()
                nginx_path.rename(backup)
                print("✔ Backed up /etc/nginx to /etc/nginx.bak")
            except Exception as e:
                print(f"Warning: could not backup /etc/nginx: {e}")

        # Drop whatever remains at /etc/nginx (stale or dangling symlink).
        try:
            if nginx_path.exists() or nginx_path.is_symlink():
                nginx_path.unlink()
        except Exception:
            pass

        try:
            nginx_path.symlink_to(angie_dir)
            print("✔ Created symlink /etc/nginx -> /etc/angie")
        except Exception as e:
            print(f"Warning: could not create /etc/nginx symlink: {e}")
    except Exception as e:
        print(f"Warning: symlink check failed: {e}")
|
|
|
|
|
|
# ========== Angie / NPM template ==========

# Main Angie configuration written to /etc/angie/angie.conf by setup_angie().
# The bracketed `[.]conf` include names are optional-include globs: they match
# the literal file but never error when it is absent.
ANGIE_CONF_TEMPLATE = """# run nginx in foreground
#daemon off;

load_module /etc/angie/modules/ngx_http_headers_more_filter_module.so;
load_module /etc/angie/modules/ngx_http_brotli_filter_module.so;
load_module /etc/angie/modules/ngx_http_brotli_static_module.so;
load_module /etc/angie/modules/ngx_http_zstd_filter_module.so;
load_module /etc/angie/modules/ngx_http_zstd_static_module.so;

# other modules
include /data/nginx/custom/modules[.]conf;

pid /run/angie/angie.pid;
user root;

worker_processes auto;
pcre_jit on;

error_log /data/logs/fallback_error.log warn;

# Custom
include /data/nginx/custom/root_top[.]conf;

events {
include /data/nginx/custom/events[.]conf;
}

http {
include /etc/angie/mime.types;
default_type application/octet-stream;
sendfile on;
server_tokens off;
tcp_nopush on;
tcp_nodelay on;
client_body_temp_path /tmp/angie/body 1 2;
keepalive_timeout 90s;
proxy_connect_timeout 90s;
proxy_send_timeout 90s;
proxy_read_timeout 90s;
ssl_prefer_server_ciphers on;
gzip on;
proxy_ignore_client_abort off;
client_max_body_size 2000m;
server_names_hash_bucket_size 1024;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Accept-Encoding "";
proxy_cache off;

#proxy_cache_path /var/lib/angie/cache/public levels=1:2 keys_zone=public-cache:30m max_size=1024m;
#proxy_cache_path /var/lib/angie/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
proxy_cache_path /var/lib/angie/cache/public levels=1:2 keys_zone=public-cache:50m max_size=2g inactive=7d use_temp_path=off;
proxy_cache_path /var/lib/angie/cache/private levels=1:2 keys_zone=private-cache:10m max_size=256m inactive=1h use_temp_path=off;

# HTTP/3 global settings
http3_max_concurrent_streams 128;
http3_stream_buffer_size 64k;

# QUIC settings
quic_retry on;
quic_gso on; # Performance boost dla Linux z UDP_SEGMENT
quic_active_connection_id_limit 2;

# Enable BPF for connection migration (Linux 5.7+)
# quic_bpf on; # Uncomment if your kernel supports it

include /etc/angie/conf.d/include/log.conf;
include /etc/angie/conf.d/include/resolvers.conf;

map $host $forward_scheme { default http; }

# Real IP Determination (IPv4 only by default)
set_real_ip_from 10.0.0.0/8;
set_real_ip_from 172.16.0.0/12;
set_real_ip_from 192.168.0.0/16;
include /etc/angie/conf.d/include/ip_ranges.conf;
real_ip_header X-Real-IP;
real_ip_recursive on;

# custom
brotli off;
brotli_comp_level 6;
brotli_static on;
brotli_types *;
zstd on;
zstd_min_length 256;
zstd_comp_level 3;
more_clear_headers "Server";
more_set_headers 'X-by: linuxiarz.pl';

# npm
include /data/nginx/custom/http_top[.]conf;
include /etc/nginx/conf.d/*.conf;
include /data/nginx/default_host/*.conf;
include /data/nginx/proxy_host/*.conf;
include /data/nginx/redirection_host/*.conf;
include /data/nginx/dead_host/*.conf;
include /data/nginx/temp/*.conf;
include /data/nginx/custom/http[.]conf;

# metrics & console
include /etc/angie/metrics.conf;
}

stream {
# npm
include /data/nginx/stream/*.conf;
include /data/nginx/custom/stream[.]conf;
}
# npm
include /data/nginx/custom/root[.]conf;
"""

# systemd unit installed to /etc/systemd/system/angie.service by setup_angie().
# Type=forking with an explicit PIDFile matching the pid directive above.
ANGIE_UNIT = """[Unit]
Description=Angie - high performance web server
Documentation=https://en.angie.software/angie/docs/
After=network-online.target remote-fs.target nss-lookup.target
Wants=network-online.target

[Service]
Type=forking
PIDFile=/run/angie/angie.pid
ExecStartPre=/bin/mkdir -p /run/angie
ExecStartPre=/bin/mkdir -p /tmp/angie/body
ExecStart=/usr/sbin/angie -c /etc/angie/angie.conf
ExecReload=/bin/sh -c "/bin/kill -s HUP $(/bin/cat /run/angie/angie.pid)"
ExecStop=/bin/sh -c "/bin/kill -s TERM $(/bin/cat /run/angie/angie.pid)"
Restart=on-failure
RestartSec=3s

[Install]
WantedBy=multi-user.target
"""
|
|
|
|
|
|
def lsb_info():
    """Return distro info via lsb_release, falling back to os_release()."""
    try:
        apt_try_install(["lsb-release"])

        def _lsb(flag: str) -> str:
            # Login shell so lsb_release resolves even with a minimal PATH.
            return run_out(["bash", "-lc", f"lsb_release -{flag}"]).strip()

        dist = _lsb("si").lower().replace(" ", "")
        rel = _lsb("sr")
        code = _lsb("sc")
        return {
            "ID": dist,
            "VERSION_ID": rel,
            "CODENAME": code,
            "PRETTY": f"{dist} {rel} ({code})",
        }
    except Exception:
        return os_release()
|
|
|
|
|
|
# ========== Angie ==========
|
|
def setup_angie(ipv6_enabled: bool):
    """Add the Angie apt repository, install Angie + modules, and write the
    main config, the nginx compatibility wrapper, cache dirs and the
    systemd unit.

    ipv6_enabled controls the resolver config written by
    write_resolvers_conf().
    """

    def _norm(s: str, allow_dot: bool = False) -> str:
        # Sanitize distro/release strings for use in the apt repo URL.
        pat = r"[^a-z0-9+\-\.]" if allow_dot else r"[^a-z0-9+\-]"
        return re.sub(pat, "", s.strip().lower())

    with step("Adding Angie repo and installing Angie packages"):
        # Fetch the Angie signing key into apt's trusted keyring.
        run(
            [
                "curl",
                "-fsSL",
                "-o",
                "/etc/apt/trusted.gpg.d/angie-signing.gpg",
                "https://angie.software/keys/angie-signing.gpg",
            ]
        )

        # Detect distro/release/codename; fall back to /etc/os-release when
        # lsb_release is unavailable.
        try:
            dist = run_out(["lsb_release", "-si"])
            rel = run_out(["lsb_release", "-sr"])
            code = run_out(["lsb_release", "-sc"])
        except Exception:
            dist = run_out(["bash", "-c", '. /etc/os-release && printf %s "$ID"'])
            rel = run_out(
                ["bash", "-c", '. /etc/os-release && printf %s "$VERSION_ID"']
            )
            code = run_out(
                ["bash", "-c", '. /etc/os-release && printf %s "$VERSION_CODENAME"']
            )

        dist = _norm(dist)
        rel = _norm(rel, allow_dot=True)
        code = _norm(code)

        # Repo layout is <distro>/<release>; codename is optional.
        os_id = f"{dist}/{rel}" if rel else dist
        if code:
            line = f"deb https://download.angie.software/angie/{os_id} {code} main\n"
        else:
            line = f"deb https://download.angie.software/angie/{os_id} main\n"

        write_file(Path("/etc/apt/sources.list.d/angie.list"), line)
        run(["apt-get", "update"])

        # Core packages must install; metrics/console packages are optional.
        base = [
            "angie",
            "angie-module-headers-more",
            "angie-module-brotli",
            "angie-module-zstd",
        ]
        optional = ["angie-module-prometheus", "angie-console-light"]
        apt_install(base)
        apt_try_install(optional)

    with step("Configuring modules and main Angie config"):
        modules_dir = Path("/etc/nginx/modules")
        modules_dir.mkdir(parents=True, exist_ok=True)
        write_file(Path("/etc/angie/angie.conf"), ANGIE_CONF_TEMPLATE, 0o644)

        # Compatibility shim: tools that invoke `nginx` actually run angie
        # (via sudo; see create_sudoers_for_npm for the matching rule).
        WRAP = """#!/bin/sh
exec sudo -n /usr/sbin/angie "$@"
"""
        write_file(Path("/usr/sbin/nginx"), WRAP, 0o755)
        Path("/etc/nginx/conf.d/include").mkdir(parents=True, exist_ok=True)

    with step("Setting resolver(s) and cache directories"):
        write_resolvers_conf(ipv6_enabled)
        # Cache paths referenced by proxy_cache_path in ANGIE_CONF_TEMPLATE.
        for p in ["/var/lib/angie/cache/public", "/var/lib/angie/cache/private"]:
            Path(p).mkdir(parents=True, exist_ok=True)
            os.chmod(p, 0o755)

    with step("Installing corrected systemd unit for Angie"):
        write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644)
|
|
|
|
|
|
def write_metrics_files():
    """Create /etc/angie/metrics.conf (port 82 with console & status)."""
    with step("Adding Angie metrics & console on :82"):
        # Dedicated monitoring vhost: stub_status, the native Angie /status
        # API, the angie-console-light web UI and a Prometheus endpoint.
        metrics = """include /etc/angie/prometheus_all.conf;
server {
listen 82;

location /nginx_status {
stub_status on;
access_log off;
allow all;
}

auto_redirect on;

location /status/ {
api /status/;
api_config_files on;
}

location /console/ {
alias /usr/share/angie-console-light/html/;
index index.html;
}

location /console/api/ {
api /status/;
}

location =/p8s {
prometheus all;
}
}
"""
        write_file(Path("/etc/angie/metrics.conf"), metrics, 0o644)
|
|
|
|
|
|
def ensure_angie_runtime_perms():
    """Make /run/angie and its pid file group-writable for the 'angie' group."""
    runtime_dir = Path("/run/angie")
    pidfile = runtime_dir / "angie.pid"

    runtime_dir.mkdir(parents=True, exist_ok=True)
    os.chmod(runtime_dir, 0o2775)  # setgid: new files inherit the group
    try:
        import grp

        os.chown(runtime_dir, -1, grp.getgrnam("angie").gr_gid)
    except Exception:
        # Group may not exist yet; provisioning continues regardless.
        pass

    if not pidfile.exists():
        pidfile.touch()
    os.chmod(pidfile, 0o664)
    try:
        import grp
        import pwd

        os.chown(
            pidfile,
            pwd.getpwnam("root").pw_uid,
            grp.getgrnam("angie").gr_gid,
        )
    except Exception:
        pass
|
|
|
|
|
|
def ensure_user_and_dirs():
    """Create the system 'npm' user, the 'angie' group, and every data/log
    directory the stack needs, with group-friendly permissions.
    """
    with step("Creating npm user and app/log directories"):
        # Create the npm system user only if `id -u npm` fails.
        try:
            run(["id", "-u", "npm"])
        except subprocess.CalledProcessError:
            run(
                [
                    "useradd",
                    "--system",
                    "--home",
                    "/opt/npm",
                    "--create-home",
                    "--shell",
                    "/usr/sbin/nologin",
                    "npm",
                ]
            )
        # Ensure the 'angie' group exists and npm is a member of it.
        rc = subprocess.run(
            ["getent", "group", "angie"], stdout=_devnull(), stderr=_devnull()
        ).returncode
        if rc != 0:
            run(["groupadd", "angie"])
        run(["usermod", "-aG", "angie", "npm"], check=False)

        # Directory layout shared by NPM and the Angie config templates.
        dirs = [
            "/data",
            "/data/nginx",
            "/data/custom_ssl",
            "/data/logs",
            "/data/access",
            "/data/nginx/default_host",
            "/data/nginx/default_www",
            "/data/nginx/proxy_host",
            "/data/nginx/redirection_host",
            "/data/nginx/stream",
            "/data/nginx/dead_host",
            "/data/nginx/temp",
            "/data/letsencrypt-acme-challenge",
            "/opt/npm",
            "/opt/npm/frontend",
            "/opt/npm/global",
            "/run/nginx",
            "/run/angie",
            "/tmp/angie/body",
        ]
        for d in dirs:
            Path(d).mkdir(parents=True, exist_ok=True)
        # setgid on /run/angie so pid/lock files inherit the angie group.
        run(["chgrp", "-h", "angie", "/run/angie"], check=False)
        os.chmod("/run/angie", 0o2775)
        Path("/var/log/angie").mkdir(parents=True, exist_ok=True)
        for f in ["access.log", "error.log"]:
            (Path("/var/log/angie") / f).touch(exist_ok=True)
        # Group-writable logs so the angie group can append.
        paths = ["/var/log/angie"] + glob("/var/log/angie/*.log")
        for pth in paths:
            run(["chgrp", "-h", "angie", pth], check=False)
        run(["chmod", "775", "/var/log/angie"], check=False)
        for pth in glob("/var/log/angie/*.log"):
            run(["chmod", "664", pth], check=False)
        Path("/var/log/nginx").mkdir(parents=True, exist_ok=True)
        Path("/var/log/nginx/error.log").touch(exist_ok=True)
        os.chmod("/var/log/nginx/error.log", 0o666)
        run(["chown", "-R", "npm:npm", "/opt/npm", "/data"])
        ensure_angie_runtime_perms()
|
|
|
|
|
|
def create_sudoers_for_npm():
    """Allow the npm user to run /usr/sbin/angie as root without a password."""
    with step("Configuring sudoers for npm -> angie"):
        rules = """User_Alias NPMUSERS = npm
NPMUSERS ALL=(root) NOPASSWD: /usr/sbin/angie
"""
        sudoers_file = Path("/etc/sudoers.d/npm")
        write_file(sudoers_file, rules, 0o440)
        # Validate when visudo exists; a malformed drop-in would otherwise
        # break sudo system-wide.
        if shutil.which("visudo"):
            run(["visudo", "-cf", str(sudoers_file)], check=False)
|
|
|
|
|
|
def adjust_nginx_like_paths_in_tree(root: Path):
    """Rewrite nginx-style paths in every *.conf under *root* (recursive).

    - "include conf.d" and "include /etc/angie/conf.d" become
      "include /etc/nginx/conf.d".
    - In any nginx.conf: force `user root;`, point the pid file at
      /run/angie/angie.pid and comment out `daemon on;`.

    Unreadable files are skipped. Files are only rewritten when their
    content actually changed (the original rewrote every nginx.conf
    unconditionally, churning mtimes).
    """
    for p in root.rglob("*.conf"):
        try:
            txt = p.read_text(encoding="utf-8")
        except Exception:
            continue  # binary or unreadable -- leave untouched
        txt2 = txt.replace("include conf.d", "include /etc/nginx/conf.d").replace(
            "include /etc/angie/conf.d", "include /etc/nginx/conf.d"
        )
        if txt2 != txt:
            p.write_text(txt2, encoding="utf-8")

    for cand in root.rglob("nginx.conf"):
        try:
            txt = cand.read_text(encoding="utf-8")
        except Exception:
            continue
        new = re.sub(r"^user\s+\S+.*", "user root;", txt, flags=re.M)
        new = re.sub(r"^pid\s+.*", "pid /run/angie/angie.pid;", new, flags=re.M)
        new = new.replace("daemon on;", "#daemon on;")
        # Skip the write when nothing changed, matching the *.conf loop above.
        if new != txt:
            cand.write_text(new, encoding="utf-8")
|
|
|
|
|
|
def install_node_from_nodesource(version: str):
    """Install Node.js from the NodeSource apt repository.

    *version* is resolved via validate_nodejs_version(); only its major
    number selects the NodeSource setup_<major>.x script. Distro
    Node.js/npm packages are purged first so they cannot shadow the new
    install. Raises ValueError for an unparsable version and RuntimeError
    when no `node` binary exists afterwards.
    """
    is_valid, resolved_version, warning = validate_nodejs_version(version)

    if warning:
        print(warning)

    # Only the leading major number matters for the setup script URL.
    match = re.match(r"(\d+)", resolved_version)
    if not match:
        raise ValueError(f"Invalid Node.js version: {version}")

    major_version = match.group(1)

    with step("Removing old Node.js installations"):
        run(
            ["apt-get", "remove", "-y", "nodejs", "npm", "libnode-dev", "libnode72"],
            check=False,
        )
        run(
            ["apt-get", "purge", "-y", "nodejs", "npm", "libnode-dev", "libnode72"],
            check=False,
        )
        run(["apt-get", "autoremove", "-y"], check=False)

        # Drop stale NodeSource repo/key files so the setup starts clean.
        for f in [
            "/etc/apt/sources.list.d/nodesource.list",
            "/etc/apt/keyrings/nodesource.gpg",
            "/usr/share/keyrings/nodesource.gpg",
            "/etc/apt/trusted.gpg.d/nodesource.gpg",
        ]:
            if Path(f).exists():
                Path(f).unlink()

    with step(f"Installing Node.js v{major_version}.x from NodeSource repository"):
        setup_url = f"https://deb.nodesource.com/setup_{major_version}.x"

        # Reserve a temp path for the setup script; curl fills it below.
        with tempfile.NamedTemporaryFile(mode="w", suffix=".sh", delete=False) as tf:
            script_path = tf.name

        try:
            run(["curl", "-fsSL", setup_url, "-o", script_path])

            os.chmod(script_path, 0o755)

            # In debug mode show the script's output directly.
            if DEBUG:
                subprocess.run(["bash", script_path], check=True)
            else:
                run(["bash", script_path])

            run(["apt-get", "update", "-y"])
            run(["apt-get", "install", "-y", "nodejs"])

        finally:
            # Always remove the downloaded setup script.
            if Path(script_path).exists():
                os.unlink(script_path)

    if shutil.which("node"):
        node_ver = run_out(["node", "--version"], check=False).strip()

        # NodeSource falls back to a distro package when the requested
        # major is unavailable for this release; warn when that happened.
        installed_major = re.match(r"v?(\d+)", node_ver)
        if installed_major and installed_major.group(1) != major_version:
            print(f"⚠ WARNING: Requested Node.js v{major_version}.x but got {node_ver}")
            print(
                f" This likely means NodeSource doesn't support your distribution yet."
            )

        if shutil.which("npm"):
            npm_ver = run_out(["npm", "--version"], check=False).strip()
            print(f" Node.js: {node_ver}")
            print(f" npm: {npm_ver}")
        else:
            print(f" Node.js: {node_ver}")
            # npm missing: try distro npm, then corepack as a last resort.
            apt_try_install(["npm"])

            if shutil.which("npm"):
                npm_ver = run_out(["npm", "--version"], check=False).strip()
                print(f" npm: {npm_ver}")

            if not shutil.which("npm"):
                run(["corepack", "enable"], check=False)

            if shutil.which("npm"):
                npm_ver = run_out(["npm", "--version"], check=False).strip()
                print(f"\n✔ npm {npm_ver} installed successfully")
            else:
                print(f"✖ npm could not be installed - manual intervention required")
    else:
        print("✖ Node.js installation failed")
        raise RuntimeError("Node.js installation failed")
|
|
|
|
|
|
def install_yarn():
    """
    Install yarn package manager with cross-distro conflict handling.
    Handles Ubuntu 24.04 where old yarnpkg conflicts with npm global yarn.
    Safe for Debian (no-op if yarn files don't exist).

    Returns True when a yarn executable is available afterwards.
    """

    with step("Preparing yarn installation"):
        # Distro-packaged yarn/yarnpkg binaries shadow the npm-installed
        # one; remove them (and any stray symlinks) first.
        old_yarn_paths = [
            "/usr/bin/yarn",
            "/usr/bin/yarnpkg",
            "/usr/local/bin/yarn",
            "/usr/local/bin/yarnpkg",
        ]

        for path in old_yarn_paths:
            try:
                if os.path.lexists(path):
                    os.remove(path)
                    if DEBUG:
                        print(f" Removed: {path}")
            except Exception as e:
                if DEBUG:
                    print(f" Could not remove {path}: {e}")

        for pattern in ["/usr/bin/*yarn*", "/usr/local/bin/*yarn*"]:
            for path in glob(pattern):
                try:
                    if os.path.islink(path):
                        os.remove(path)
                        if DEBUG:
                            print(f" Removed symlink: {path}")
                except Exception:
                    pass

    with step("Installing yarn via npm"):
        result = subprocess.run(
            ["npm", "install", "-g", "yarn@latest"],
            stdout=subprocess.DEVNULL if not DEBUG else None,
            stderr=subprocess.DEVNULL if not DEBUG else None,
        )

        if result.returncode != 0:
            print(" ⚠ Standard install failed, trying with --force flag")
            run(["npm", "install", "-g", "--force", "yarn@latest"], check=False)

    with step("Enabling corepack"):
        try:
            # BUG FIX: the original passed both input="" and
            # stdin=subprocess.PIPE, which makes subprocess.run() raise
            # ValueError ("stdin and input arguments may not both be used").
            # That error was silently swallowed below, so `corepack enable`
            # never actually ran. input="" alone already wires an empty
            # stdin pipe.
            subprocess.run(
                ["corepack", "enable"],
                input="",
                stdout=subprocess.DEVNULL if not DEBUG else None,
                stderr=subprocess.DEVNULL if not DEBUG else None,
                timeout=10,
                text=True,
                check=False
            )
        except Exception as e:
            if DEBUG:
                print(f" ⚠ corepack enable: {e}")

    if shutil.which("yarn"):
        try:
            yarn_ver = run_out(["yarn", "--version"], check=False).strip()
            print(f" yarn installed: {yarn_ver}")
            return True
        except Exception:
            print(" yarn installed (version check failed, but executable found)")
            return True
    else:
        print(" ⚠ yarn installation may have failed - executable not found in PATH")
        return False
|
|
|
|
|
|
def _is_ubuntu_wo_distutils() -> bool:
    """Return True on Ubuntu >= 24.04 (Python there ships without distutils,
    which node-sass needs to build; callers switch to dart-sass instead).

    Reads the module-level OSREL cache; any parse failure yields False.
    """
    try:
        dist = (OSREL.get("ID", "") or "").lower()
        ver = (OSREL.get("VERSION_ID", "") or "").strip()

        def _vers(version_str: str) -> tuple[int, int]:
            # BUG FIX: the original _vers(t) ignored its argument and read
            # the closure variable `ver` instead. Same result here, but the
            # helper now actually uses what it is given.
            # "24.04" -> (24, 4); missing components default to 0.
            parts = (version_str.split(".") + ["0", "0"])[:2]
            return (int(parts[0]), int(parts[1]))

        return dist == "ubuntu" and _vers(ver) >= (24, 4)
    except Exception:
        return False
|
|
|
|
|
|
def _prepare_sass(frontend_dir: Path):
    """Adapt the frontend's package.json so its sass dependency can build.

    Two strategies:
      * dart-sass: drop node-sass, add "sass", rewrite npm scripts
        (forced via USE_DART_SASS=1, or auto-selected on Ubuntu >= 24.04);
      * node-sass: pin node-sass/node-gyp versions and point npm at the
        official prebuilt-binary mirror.

    No-op when package.json is missing, unparsable, or has no node-sass.
    """
    package_json = frontend_dir / "package.json"
    if not package_json.exists():
        return

    try:
        manifest = json.loads(package_json.read_text(encoding="utf-8"))
    except Exception:
        return

    runtime_deps = manifest.get("dependencies", {}) or {}
    dev_deps = manifest.get("devDependencies", {}) or {}
    if "node-sass" not in runtime_deps and "node-sass" not in dev_deps:
        return

    env_flag = os.environ.get("USE_DART_SASS", "").strip()
    use_dart = env_flag == "1" or (env_flag == "" and _is_ubuntu_wo_distutils())

    manifest.setdefault("dependencies", {})
    manifest.setdefault("devDependencies", {})

    if use_dart:
        # Swap node-sass for dart-sass and update build scripts to match.
        manifest["dependencies"].pop("node-sass", None)
        manifest["devDependencies"].pop("node-sass", None)
        have_sass = (
            "sass" in manifest["dependencies"]
            or "sass" in manifest["devDependencies"]
        )
        if not have_sass:
            manifest["devDependencies"]["sass"] = "^1.77.0"

        scripts = manifest.get("scripts") or {}
        manifest["scripts"] = {
            name: re.sub(r"\bnode-sass\b", "sass", cmd or "")
            for name, cmd in scripts.items()
        }

        # Remember the auto-detected decision for subsequent calls.
        if env_flag == "":
            os.environ["USE_DART_SASS"] = "1"
    else:
        # Keep node-sass, pinned to a version with modern Node support.
        bucket = (
            "dependencies"
            if "node-sass" in manifest["dependencies"]
            else "devDependencies"
        )
        manifest[bucket]["node-sass"] = "^9.0.0"

        resolutions = manifest.get("resolutions") or {}
        resolutions["node-gyp"] = "^10.0.0"
        resolutions["node-sass"] = "^9.0.0"
        manifest["resolutions"] = resolutions

        os.environ["npm_config_node_sass_binary_site"] = (
            "https://github.com/sass/node-sass/releases/download"
        )

    package_json.write_text(
        json.dumps(manifest, indent=2, ensure_ascii=False) + "\n", encoding="utf-8"
    )
|
|
|
|
|
|
def _build_frontend(src_frontend: Path, dest_frontend: Path):
    """Install dependencies and build the NPM frontend, then copy artifacts.

    Locates (or installs) a working Yarn, runs ``yarn install`` and
    ``yarn build`` inside *src_frontend*, and copies the resulting ``dist``
    output (plus the optional ``app-images`` directory) into *dest_frontend*.

    Args:
        src_frontend: Extracted source tree's ``frontend`` directory.
        dest_frontend: Destination directory (e.g. ``/opt/npm/frontend``).

    Raises:
        RuntimeError: If no usable Yarn can be detected or installed.
    """

    def _semver(s: str) -> bool:
        # Accept plain dotted version strings like "1.22.19" (2-4 components).
        return bool(re.match(r"^\d+(?:\.\d+){1,3}$", (s or "").strip()))

    def _good_yarn(argv: list[str]) -> bool:
        # A candidate command is "good" when `<argv> --version` prints a semver.
        try:
            v = (run_out(argv + ["--version"], check=False) or "").strip()
            return _semver(v)
        except Exception:
            return False

    def _pick_yarn_cmd() -> list[str] | None:
        # Return the argv prefix used to invoke yarn, or None if unavailable.
        # Try direct yarn/yarnpkg first
        for c in (["yarn"], ["yarnpkg"]):
            if shutil.which(c[0]) and _good_yarn(c):
                return c

        # If npm exists, try to use it to run yarn
        if shutil.which("npm"):
            npm_ver = (run_out(["npm", "--version"], check=False) or "").strip()
            if npm_ver:
                # Try npm exec yarn@stable
                if _good_yarn(["npm", "exec", "--yes", "yarn@stable", "--"]):
                    return ["npm", "exec", "--yes", "yarn@stable", "--"]

        # Try npx as fallback
        if shutil.which("npx"):
            npx_ver = (run_out(["npx", "--version"], check=False) or "").strip()
            if npx_ver:
                if _good_yarn(["npx", "-y", "yarn@stable"]):
                    return ["npx", "-y", "yarn@stable"]

        return None

    def _ensure_yarn_installed():
        """Install yarn globally using npm or corepack."""
        with step("Installing yarn globally"):
            if not shutil.which("npm"):
                try:
                    apt_try_install(["npm"])
                except Exception:
                    run(["apt-get", "update"], check=False)
                    run(["apt-get", "install", "-y", "npm"])

            # Try corepack first (modern way)
            if shutil.which("corepack"):
                try:
                    run(["corepack", "enable"])
                    run(["corepack", "prepare", "yarn@stable", "--activate"])
                    if shutil.which("yarn"):
                        return
                except Exception:
                    pass

            # Fallback to npm install
            try:
                run(["npm", "install", "-g", "yarn@latest"])
            except Exception:
                # Last resort - try with --force
                run(["npm", "install", "-g", "--force", "yarn@latest"], check=False)

    yarn_cmd = _pick_yarn_cmd()
    if not yarn_cmd:
        _ensure_yarn_installed()
        yarn_cmd = _pick_yarn_cmd()

    if not yarn_cmd:
        raise RuntimeError(
            "Unable to detect or install a valid Yarn.\n"
            "Try manually: npm install -g yarn@latest"
        )

    with step("Installing frontend dependencies (yarn)"):
        # NODE_ENV=development so devDependencies (build tooling) get installed.
        os.environ["NODE_ENV"] = "development"
        os.chdir(src_frontend)
        _prepare_sass(src_frontend)

        # Get and create cache directory
        try:
            cache_dir = (
                run_out(yarn_cmd + ["cache", "dir"], check=False) or ""
            ).strip()
            if cache_dir and not Path(cache_dir).exists():
                Path(cache_dir).mkdir(parents=True, exist_ok=True)
        except Exception:
            pass

        # Clean cache
        try:
            run(yarn_cmd + ["cache", "clean"], check=False)
        except Exception:
            pass

        install_cmd = yarn_cmd + ["install"]

        # "npm exec ... --" style prefixes end with "--"; drop it so yarn's
        # own subcommand is not treated as a passthrough argument.
        if install_cmd[-1] == "--":
            install_cmd = install_cmd[:-1]

        if DEBUG:
            print(f"Running: {' '.join(install_cmd)}")

        try:
            run(install_cmd)
        except subprocess.CalledProcessError as e:
            print(
                f"\n Retrying yarn install with compatibility flags..."
            )
            retry_cmd = install_cmd + [
                "--network-timeout",
                "100000",
                "--ignore-engines",
            ]
            run(retry_cmd)

    with step("Building frontend (yarn build)"):
        env = os.environ.copy()
        # Older webpack builds need the legacy OpenSSL provider on Node >= 17;
        # retried without it below in case the Node build lacks the option.
        env["NODE_OPTIONS"] = "--openssl-legacy-provider"

        build_cmd = yarn_cmd + ["build"]
        if build_cmd[-1] == "--":
            build_cmd = build_cmd[:-1]

        try:
            run(build_cmd, env=env)
        except subprocess.CalledProcessError:
            print("\n⚠ Build failed with legacy provider, retrying without...")
            env.pop("NODE_OPTIONS", None)
            run(build_cmd, env=env)

    with step("Copying frontend artifacts"):
        shutil.copytree(src_frontend / "dist", dest_frontend, dirs_exist_ok=True)
        if (src_frontend / "app-images").exists():
            shutil.copytree(
                src_frontend / "app-images",
                dest_frontend / "images",
                dirs_exist_ok=True,
            )
|
|
|
|
|
|
def patch_npm_backend_commands():
|
|
candidates = [
|
|
Path("/opt/npm/lib/utils.js"),
|
|
Path("/opt/npm/utils.js"),
|
|
Path("/opt/npm/lib/commands.js"),
|
|
]
|
|
for p in candidates:
|
|
if not p.exists():
|
|
continue
|
|
try:
|
|
txt = p.read_text(encoding="utf-8")
|
|
except Exception:
|
|
continue
|
|
new = re.sub(r"\blogrotate\b", "/usr/local/bin/logrotate-npm", txt)
|
|
new = re.sub(r"(?<!/usr/sbin/)\bnginx\b", "/usr/sbin/nginx", new)
|
|
if new != txt:
|
|
p.write_text(new, encoding="utf-8")
|
|
|
|
# ========== DEPLOY FUNCTIONS ==========
|
|
|
|
def deploy_npm_app(npm_version_override: str | None):
    """Deploy Nginx Proxy Manager from a GitHub release tag (/etc/nginx layout).

    Downloads the release tarball (the latest release when
    *npm_version_override* is None), installs the web root and configs under
    /etc/nginx, the backend under /opt/npm, builds the frontend, installs
    backend dependencies and normalizes ownership/permissions.

    Returns:
        str: The deployed version string.
    """
    version = github_latest_release_tag(
        "NginxProxyManager/nginx-proxy-manager", npm_version_override
    )
    url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
    tmp = Path(tempfile.mkdtemp(prefix="npm-angie-"))
    src = download_extract_tar_gz(url, tmp)

    with step("Setting version numbers in package.json"):
        # Upstream ships "0.0.0"; stamp the real release version for UI/API.
        for pkg in ["backend/package.json", "frontend/package.json"]:
            pj = src / pkg
            txt = pj.read_text(encoding="utf-8")
            txt = re.sub(r'"version":\s*"0\.0\.0"', f'"version": "{version}"', txt)
            pj.write_text(txt, encoding="utf-8")

    with step("Fixing include paths / nginx.conf"):
        adjust_nginx_like_paths_in_tree(src)

    with step("Copying web root and configs to /etc/nginx"):
        Path("/var/www/html").mkdir(parents=True, exist_ok=True)
        shutil.copytree(
            src / "docker" / "rootfs" / "var" / "www" / "html",
            "/var/www/html",
            dirs_exist_ok=True,
        )
        shutil.copytree(
            src / "docker" / "rootfs" / "etc" / "nginx",
            "/etc/nginx",
            dirs_exist_ok=True,
        )
        # The Docker-only development vhost is not wanted on a real host.
        devconf = Path("/etc/nginx/conf.d/dev.conf")
        if devconf.exists():
            devconf.unlink()
        shutil.copy2(
            src / "docker" / "rootfs" / "etc" / "logrotate.d" / "nginx-proxy-manager",
            "/etc/logrotate.d/nginx-proxy-manager",
        )
        Path("/etc/nginx/conf").mkdir(parents=True, exist_ok=True)
        # NPM expects a nginx.conf here; point it at the Angie config instead.
        if not Path("/etc/nginx/conf/nginx.conf").exists():
            os.symlink("/etc/angie/angie.conf", "/etc/nginx/conf/nginx.conf")

    with step("Copying backend/global to /opt/npm"):
        shutil.copytree(src / "backend", "/opt/npm", dirs_exist_ok=True)
        Path("/opt/npm/frontend/images").mkdir(parents=True, exist_ok=True)
        shutil.copytree(src / "global", "/opt/npm/global", dirs_exist_ok=True)

    with step("Creating SQLite config if missing"):
        # Default to SQLite; an existing config (e.g. MySQL) is preserved.
        cfg = Path("/opt/npm/config/production.json")
        if not cfg.exists():
            write_file(
                cfg,
                json.dumps(
                    {
                        "database": {
                            "engine": "knex-native",
                            "knex": {
                                "client": "sqlite3",
                                "connection": {"filename": "/data/database.sqlite"},
                            },
                        }
                    },
                    indent=2,
                ),
            )

    _build_frontend(src / "frontend", Path("/opt/npm/frontend"))

    with step("Installing backend dependencies (yarn)"):
        os.chdir("/opt/npm")
        run(["yarn", "install"])

    with step("Normalizing directories ownership"):
        run(["chown", "-R", "npm:npm", "/opt/npm", "/data"])

    with step("Preparing include/ip_ranges.conf (owned by npm)"):
        include_dir = Path("/etc/nginx/conf.d/include")
        include_dir.mkdir(parents=True, exist_ok=True)
        ipranges = include_dir / "ip_ranges.conf"
        if not ipranges.exists():
            write_file(ipranges, "# populated by NPM (IPv4 only)\n")
        try:
            # The backend rewrites this file at runtime, so npm must own it.
            run(["chown", "npm:npm", str(include_dir), str(ipranges)])
        except Exception:
            pass
        os.chmod(ipranges, 0o664)

    patch_npm_backend_commands()

    return version
|
|
|
|
|
|
def inject_footer_link(src: Path) -> None:
    """
    Inject custom footer link to the auto-installer in SiteFooter.tsx.

    Adds it to the end of the first <ul> list (GitHub fork section).
    A regex match against the fork <li> is tried first; if that fails, the
    item is inserted right after the closing </li> of the fork entry.
    Idempotent: a footer that already contains the installer link is left
    unchanged (previously a second run injected the link again).
    """
    footer_path = src / "frontend" / "src" / "components" / "SiteFooter.tsx"

    if not footer_path.exists():
        return

    try:
        content = footer_path.read_text(encoding="utf-8")

        # Already injected on a previous run -> nothing to do (idempotency).
        if "npm-angie-auto-install" in content:
            return

        # Without the fork entry there is no anchor point to inject after.
        if '<T id="footer.github-fork" />' not in content:
            return

        installer_item = '''<li className="list-inline-item">
<a href="https://gitea.linuxiarz.pl/gru/npm-angie-auto-install"
target="_blank" className="link-secondary" rel="noopener"> Deployed by Auto Installer | linuxiarz.pl</a>
</li>'''

        pattern = r'(<li className="list-inline-item">\s*<a[^>]*>\s*<T id="footer\.github-fork"[^<]*</a>\s*</li>)(\s*</ul>)'

        if re.search(pattern, content, re.DOTALL):
            # Preferred path: splice between the fork <li> and the </ul>.
            new_content = re.sub(
                pattern,
                f'\\1\n{installer_item}\\2',
                content,
                flags=re.DOTALL
            )

            footer_path.write_text(new_content, encoding="utf-8")
            print("\n✓ Injected installer link to SiteFooter.tsx")
            return

        # Fallback: insert right after the </li> that closes the fork entry.
        search_str = '<T id="footer.github-fork" />'
        if search_str in content:
            idx = content.find(search_str)
            close_li_idx = content.find('</li>', idx)

            if close_li_idx > 0:
                insert_pos = close_li_idx + 5  # len("</li>")
                new_content = (
                    content[:insert_pos] +
                    '\n' + installer_item +
                    content[insert_pos:]
                )

                footer_path.write_text(new_content, encoding="utf-8")
                print("\n✓ Injected installer link to SiteFooter.tsx")

    except Exception as e:
        # Non-fatal: a missing footer link must not abort the deployment.
        print(f" ⚠ Warning: Failed to inject footer link: {e}")
|
|
|
|
|
|
def deploy_npm_app_from_git(ref: str) -> str:
    """Deploy NPM straight from a git ref tarball (Angie layout, /etc/angie).

    Accepts "refs/heads/<branch>", "refs/tags/<tag>" or a bare branch name.
    Branch deployments get a synthetic "<branch>-dev-<timestamp>" version;
    tag deployments use the tag with any leading "v" stripped.

    Returns:
        str: The version string stamped into the deployed package.json files.
    """
    if ref.startswith("refs/heads/"):
        ref_type = "branch"  # kept for readability; not otherwise used
        branch_name = ref.replace("refs/heads/", "")
        timestamp = datetime.now().strftime("%Y%m%d-%H%M")
        version = f"{branch_name}-dev-{timestamp}"
        git_ref = branch_name
    elif ref.startswith("refs/tags/"):
        ref_type = "tag"
        # "refs/tags/v2.13.1" -> version "2.13.1"; the git ref keeps the "v".
        version = ref.replace("refs/tags/v", "").replace("refs/tags/", "")
        tag_name = ref.replace("refs/tags/", "")
        git_ref = tag_name
    else:
        # Bare ref: treat it as a branch name.
        ref_type = "branch"
        branch_name = ref
        timestamp = datetime.now().strftime("%Y%m%d-%H%M")
        version = f"{branch_name}-dev-{timestamp}"
        git_ref = branch_name

    url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/{git_ref}"

    tmp = Path(tempfile.mkdtemp(prefix="npm-angie-"))
    src = download_extract_tar_gz(url, tmp)

    # Set version numbers in package.json files
    with step("Setting version numbers in package.json"):
        for pkg in ["backend/package.json", "frontend/package.json"]:
            pj = src / pkg
            if not pj.exists():
                continue

            try:
                data = json.loads(pj.read_text(encoding="utf-8"))
                data["version"] = version
                pj.write_text(
                    json.dumps(data, indent=2, ensure_ascii=False) + "\n",
                    encoding="utf-8"
                )
                if DEBUG:
                    print(f" ✓ Updated {pkg} -> version {version}")
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning: Could not update {pkg}: {e}")

    # Fix nginx-like include paths in configuration files
    with step("Fixing include paths / nginx.conf"):
        adjust_nginx_like_paths_in_tree(src)

    with step("Customizing frontend components"):
        inject_footer_link(src)

    # Copy web root and configuration to /etc/angie
    with step("Copying web root and configs to /etc/angie"):
        Path("/var/www/html").mkdir(parents=True, exist_ok=True)
        shutil.copytree(
            src / "docker" / "rootfs" / "var" / "www" / "html",
            "/var/www/html",
            dirs_exist_ok=True,
        )
        shutil.copytree(
            src / "docker" / "rootfs" / "etc" / "nginx",
            "/etc/angie",
            dirs_exist_ok=True,
        )
        # Remove development config file if present
        devconf = Path("/etc/angie/conf.d/dev.conf")
        if devconf.exists():
            devconf.unlink()
        # Copy logrotate configuration
        shutil.copy2(
            src / "docker" / "rootfs" / "etc" / "logrotate.d" / "nginx-proxy-manager",
            "/etc/logrotate.d/nginx-proxy-manager",
        )
        # Create symlink to /etc/nginx if it doesn't exist, so tooling that
        # hard-codes the nginx path keeps working against the Angie tree.
        if not Path("/etc/nginx").exists():
            os.symlink("/etc/angie", "/etc/nginx")

    # Copy backend and global directories to /opt/npm
    with step("Copying backend to /opt/npm"):
        shutil.copytree(src / "backend", "/opt/npm", dirs_exist_ok=True)
        Path("/opt/npm/frontend/images").mkdir(parents=True, exist_ok=True)

        # Copy /global if it exists (git always has it)
        global_src = src / "global"
        if global_src.exists():
            shutil.copytree(global_src, "/opt/npm/global", dirs_exist_ok=True)
            print(f" ✓ Directory 'global' copied")

    # Create SQLite database configuration if missing
    with step("Creating SQLite config if missing"):
        cfg = Path("/opt/npm/config/production.json")
        if not cfg.exists():
            write_file(
                cfg,
                json.dumps(
                    {
                        "database": {
                            "engine": "knex-native",
                            "knex": {
                                "client": "sqlite3",
                                "connection": {"filename": "/data/database.sqlite"},
                            },
                        }
                    },
                    indent=2,
                ),
            )

    # Build frontend application
    _build_frontend(src / "frontend", Path("/opt/npm/frontend"))

    # Install backend Node.js dependencies via yarn
    with step("Installing backend dependencies (yarn)"):
        os.chdir("/opt/npm")
        run(["yarn", "install"])

    # Fix ownership of NPM directories
    with step("Normalizing directories ownership"):
        run(["chown", "-R", "npm:npm", "/opt/npm", "/data"])

    # Prepare and set permissions for IP ranges configuration
    with step("Preparing include/ip_ranges.conf (owned by npm)"):
        include_dir = Path("/etc/nginx/conf.d/include")
        include_dir.mkdir(parents=True, exist_ok=True)
        ipranges = include_dir / "ip_ranges.conf"
        if not ipranges.exists():
            write_file(ipranges, "# populated by NPM (IPv4 only)\n")
        try:
            # The backend rewrites this file at runtime, so npm must own it.
            run(["chown", "npm:npm", str(include_dir), str(ipranges)])
        except Exception:
            pass
        os.chmod(ipranges, 0o664)

    # Apply patches to NPM backend
    patch_npm_backend_commands()

    return version
|
|
|
|
|
|
def copy_tree_safe(src: Path, dst: Path) -> None:
    """Best-effort copy of *src*'s contents into *dst*.

    Subdirectories in *dst* that collide with ones in *src* are replaced
    wholesale (removed, then copied); plain files are overwritten with
    ``shutil.copy2``.  Entries that disappear mid-copy or otherwise fail are
    skipped (logged only when DEBUG) instead of aborting the deployment.

    Args:
        src: Existing source directory.
        dst: Destination directory; created (with parents) if missing.
    """
    dst.mkdir(parents=True, exist_ok=True)

    # iterdir() already yields full child paths; the old code needlessly
    # rebuilt them as `src / item.name`.
    for src_item in src.iterdir():
        dst_item = dst / src_item.name

        try:
            if src_item.is_dir():
                # Replace the whole subtree so stale files do not survive.
                if dst_item.exists():
                    shutil.rmtree(dst_item)
                shutil.copytree(src_item, dst_item)
            else:
                shutil.copy2(src_item, dst_item)
        except FileNotFoundError:
            # Skip entries that vanished between listing and copying.
            if DEBUG:
                print(f" ⊘ Skipped missing: {src_item.name}")
        except Exception as e:
            if DEBUG:
                print(f" ⚠ Error copying {src_item.name}: {e}")
|
|
|
|
|
|
def deploy_npm_app_from_release(version: str | None) -> str:
    """
    Deploy NPM from GitHub release tag.

    For versions >= 2.13.0, automatically falls back to git source (missing
    /global in releases).

    Args:
        version (str | None): Release tag version (e.g., "2.13.1"). If None,
            fetches latest.

    Returns:
        str: Installed version string
    """
    # Get latest version if not specified
    if not version:
        version = github_latest_release_tag(
            "NginxProxyManager/nginx-proxy-manager",
            override=None
        )
        print(f"✓ Latest stable version: {version}")

    # Check if version >= 2.13.0 - if so, use git instead (releases missing /global)
    version_parsed = parse_version(version)
    if version_parsed >= (2, 13, 0):
        print(f" Version {version} >= 2.13.0: using git source (release archive incomplete)")
        return deploy_npm_app_from_git(f"refs/tags/v{version}")

    # For versions < 2.13.0, download from release archive
    url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
    tmp = Path(tempfile.mkdtemp(prefix="npm-angie-"))
    src = download_extract_tar_gz(url, tmp)

    with step(f"Preparing NPM app from release v{version}"):
        Path("/opt/npm").mkdir(parents=True, exist_ok=True)

        # 1. Unpack backend/ contents directly into /opt/npm
        backend_src = src / "backend"
        if backend_src.exists():
            if DEBUG:
                print(f" Unpacking backend contents to /opt/npm/")

            try:
                for item in backend_src.iterdir():
                    src_item = backend_src / item.name
                    dst_item = Path(f"/opt/npm/{item.name}")

                    if src_item.is_dir():
                        # Replace existing subtrees so stale files don't survive.
                        if dst_item.exists():
                            shutil.rmtree(dst_item)
                        copy_tree_safe(src_item, dst_item)
                    else:
                        shutil.copy2(src_item, dst_item)

                if DEBUG:
                    print(f" ✓ Backend contents unpacked")
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning unpacking backend: {e}")

        # 2. Copy frontend/
        frontend_src = src / "frontend"
        frontend_dst = Path("/opt/npm/frontend")
        if frontend_src.exists():
            if frontend_dst.exists():
                shutil.rmtree(frontend_dst)
            try:
                copy_tree_safe(frontend_src, frontend_dst)
                if DEBUG:
                    print(f" ✓ Copied frontend")
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning copying frontend: {e}")

        # 3. Copy global/
        global_src = src / "global"
        global_dst = Path("/opt/npm/global")
        if global_src.exists():
            if global_dst.exists():
                shutil.rmtree(global_dst)
            try:
                copy_tree_safe(global_src, global_dst)
                if DEBUG:
                    print(f" ✓ Copied global")
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning copying global: {e}")
        else:
            # Create empty /global if missing
            global_dst.mkdir(parents=True, exist_ok=True)
            if DEBUG:
                print(f" ⊘ Directory 'global' not in archive (created empty)")

    # Set version numbers in package.json files
    with step("Setting version numbers in package.json"):
        for pkg_path in ["/opt/npm/package.json", "/opt/npm/frontend/package.json"]:
            pj = Path(pkg_path)
            if not pj.exists():
                if DEBUG:
                    print(f" ⚠ {pkg_path} not found, skipping")
                continue

            try:
                data = json.loads(pj.read_text(encoding="utf-8"))
                data["version"] = version
                pj.write_text(
                    json.dumps(data, indent=2, ensure_ascii=False) + "\n",
                    encoding="utf-8"
                )
                if DEBUG:
                    print(f" ✓ Updated {pkg_path} -> version {version}")
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning: Could not update {pkg_path}: {e}")

    with step("Fixing include paths / nginx.conf"):
        adjust_nginx_like_paths_in_tree(src)

    with step("Customizing frontend components"):
        inject_footer_link(src)

    with step("Copying web root and configs to /etc/angie"):
        Path("/var/www/html").mkdir(parents=True, exist_ok=True)

        docker_rootfs = src / "docker" / "rootfs"

        if (docker_rootfs / "var" / "www" / "html").exists():
            try:
                shutil.copytree(
                    docker_rootfs / "var" / "www" / "html",
                    "/var/www/html",
                    dirs_exist_ok=True,
                )
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning copying web root: {e}")

        if (docker_rootfs / "etc" / "nginx").exists():
            try:
                shutil.copytree(
                    docker_rootfs / "etc" / "nginx",
                    "/etc/angie",
                    dirs_exist_ok=True,
                )
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning copying nginx config: {e}")

    # Build frontend application
    _build_frontend(src / "frontend", Path("/opt/npm/frontend"))

    # Install backend dependencies
    with step("Installing backend dependencies (yarn)"):
        os.chdir("/opt/npm")
        run(["yarn", "install"])

    # Fix ownership
    with step("Normalizing directories ownership"):
        run(["chown", "-R", "npm:npm", "/opt/npm", "/data"])

    # Prepare IP ranges configuration
    with step("Preparing include/ip_ranges.conf"):
        include_dir = Path("/etc/nginx/conf.d/include")
        include_dir.mkdir(parents=True, exist_ok=True)
        ipranges = include_dir / "ip_ranges.conf"
        if not ipranges.exists():
            write_file(ipranges, "# populated by NPM (IPv4 only)\n")

    # Apply patches
    patch_npm_backend_commands()

    return version
|
|
|
|
|
|
def strip_ipv6_listens(paths):
    """Drop IPv6 ``listen [::]:<port> …;`` lines from every *.conf file.

    Walks each path in *paths* recursively, removes whole-line IPv6 listen
    directives, collapses any resulting runs of blank lines, and rewrites
    only the files that actually changed.  Unreadable files are skipped.
    """
    ipv6_listen = re.compile(r"(?m)^\s*listen\s+\[::\]:\d+[^;]*;\s*$")
    blank_runs = re.compile(r"\n{3,}")

    with step("Removing IPv6 listen entries from configs (--enable-ipv6 not set)"):
        conf_files = [c for root in paths for c in Path(root).rglob("*.conf")]
        for conf in conf_files:
            try:
                original = conf.read_text(encoding="utf-8")
            except Exception:
                continue
            cleaned = blank_runs.sub("\n\n", ipv6_listen.sub("", original))
            if cleaned != original:
                conf.write_text(cleaned, encoding="utf-8")
|
|
|
|
|
|
def install_logrotate_for_data_logs():
    """Install a daily-rotation logrotate policy for /var/log/angie/*.log.

    Writes /etc/logrotate.d/angie, then runs logrotate in debug (dry-run)
    mode to validate the new policy; validation failures are ignored.
    """
    with step("Installing logrotate policy for /var/log/angie (*.log)"):
        conf_path = Path("/etc/logrotate.d/angie")
        # copytruncate rotates without needing Angie to release the file;
        # USR1 in postrotate additionally asks Angie to reopen its logs.
        content = """/var/log/angie/*.log {
    daily
    rotate 1
    compress
    missingok
    notifempty
    copytruncate
    create 0640 root root
    su root root
    postrotate
        if [ -f /run/angie/angie.pid ]; then
            kill -USR1 $(cat /run/angie/angie.pid)
        fi
    endscript
}
"""
        write_file(conf_path, content, 0o644)
        try:
            # "-d" = debug/dry-run: parses and reports, changes nothing.
            run(["/usr/sbin/logrotate", "-d", str(conf_path)], check=False)
        except Exception:
            pass
|
|
|
|
|
|
def fix_logrotate_permissions_and_wrapper():
    """Let the unprivileged npm user run logrotate.

    Grants npm write access to the system logrotate status file (ACL first,
    group permissions as fallback), creates an npm-owned state file under
    /opt/npm/var, installs the /usr/local/bin/logrotate-npm wrapper that uses
    that state file, and relaxes /var/lib/logrotate permissions via the adm
    group.  All steps are best effort.
    """
    with step("Fixing logrotate state-file permissions and helper"):
        system_status = Path("/var/lib/logrotate/status")
        if system_status.exists():
            try:
                # Preferred: a user ACL avoids changing group ownership.
                run(["setfacl", "-m", "u:npm:rw", str(system_status)], check=False)
            except FileNotFoundError:
                # setfacl not installed: fall back to group write access.
                try:
                    run(["chgrp", "npm", str(system_status)], check=False)
                    os.chmod(system_status, 0o664)
                except Exception:
                    pass

        # Dedicated, npm-writable state file so the wrapper never needs
        # the root-owned system status file.
        state_dir = Path("/opt/npm/var")
        state_dir.mkdir(parents=True, exist_ok=True)
        state_file = state_dir / "logrotate.state"
        if not state_file.exists():
            state_file.touch()
        os.chmod(state_file, 0o664)

        try:
            import pwd, grp

            uid = pwd.getpwnam("npm").pw_uid
            gid = grp.getgrnam("npm").gr_gid
            os.chown(state_dir, uid, gid)
            os.chown(state_file, uid, gid)
        except Exception:
            # npm user may not exist yet in some flows; non-fatal.
            pass

        # Wrapper the patched backend invokes instead of bare "logrotate".
        helper = Path("/usr/local/bin/logrotate-npm")
        helper_content = f"""#!/bin/sh
# Logrotate wrapper for npm user
exec /usr/sbin/logrotate -s {state_file} "$@"
"""
        write_file(helper, helper_content, 0o755)

        logrotate_dir = Path("/var/lib/logrotate")
        if logrotate_dir.exists():
            try:
                run(["usermod", "-aG", "adm", "npm"], check=False)

                run(["chgrp", "adm", str(logrotate_dir)], check=False)
                os.chmod(logrotate_dir, 0o775)
            except Exception as e:
                print(f"⚠ Warning: could not fix {logrotate_dir} permissions: {e}")
|
|
|
|
|
|
def create_systemd_units(ipv6_enabled: bool):
    """Write npm.service and angie.service units, then enable and start both.

    When *ipv6_enabled* is False the npm unit additionally exports
    DISABLE_IPV6=true so the backend skips IPv6 listeners.
    """
    with step("Creating and starting systemd services (angie, npm)"):
        ipv6_env = "" if ipv6_enabled else "Environment=DISABLE_IPV6=true\n"
        npm_unit = (
            "[Unit]\n"
            "Description=Nginx Proxy Manager (backend)\n"
            "After=network.target angie.service\n"
            "Wants=angie.service\n"
            "\n"
            "[Service]\n"
            "User=npm\n"
            "Group=npm\n"
            "WorkingDirectory=/opt/npm\n"
            "Environment=NODE_ENV=production\n"
            f"{ipv6_env}"
            "ExecStart=/usr/bin/node /opt/npm/index.js\n"
            "Restart=on-failure\n"
            "RestartSec=5\n"
            "\n"
            "[Install]\n"
            "WantedBy=multi-user.target\n"
        )
        write_file(Path("/etc/systemd/system/npm.service"), npm_unit, 0o644)
        write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644)

        run(["systemctl", "daemon-reload"])
        run(["systemctl", "enable", "--now", "angie.service"])
        # Syntax check is informational only; startup already happened above.
        run(["/usr/sbin/nginx", "-t"], check=False)

        run(["systemctl", "enable", "--now", "npm.service"])
        run(["angie", "-s", "reload"], check=False)
|
|
|
|
########### REPLACE CONFIGS ############
|
|
|
|
def update_config_file(filepath, newcontent, owner="npm:npm", mode=0o644, description=None):
    """Write *newcontent* to *filepath*, backing up any existing file first.

    Args:
        filepath: Destination path (str or Path).
        newcontent: Full new file contents.
        owner: "user:group" passed to chown; falsy skips the chown.
        mode: File mode applied to the new file.
        description: Unused; kept for backward compatibility with callers.

    Returns:
        Path | None: Path of the timestamped backup copy, or None when the
        file did not previously exist.
    """
    filepath = Path(filepath)
    backuppath = None

    if filepath.exists():
        # Keep a timestamped copy so a bad template can be rolled back.
        timestamp = time.strftime("%Y%m%d-%H%M%S")
        backuppath = filepath.parent / f"{filepath.name}.backup-{timestamp}"
        if DEBUG:
            print(f" Creating backup: {backuppath}")
        shutil.copy2(filepath, backuppath)

    filepath.parent.mkdir(parents=True, exist_ok=True)
    write_file(filepath, newcontent, mode)

    if DEBUG:
        print(f" Written to: {filepath}")

    if owner:
        try:
            # FIX: run() takes the command as a single argv list (as at every
            # other call site in this script); the old call passed bare
            # strings, so "owner" landed in run()'s second positional
            # parameter instead of the command line.
            run(["chown", owner, str(filepath)], check=False)
            if DEBUG:
                print(f" Owner set to: {owner}")
        except Exception as e:
            if DEBUG:
                print(f" Warning: Could not set owner: {e}")

    return backuppath
|
|
|
|
|
|
def update_npn_assets_config():
    """
    Update /etc/nginx/conf.d/include/assets.conf with optimized cache settings.

    Returns the backup path from update_config_file (None if the file is new).
    NOTE(review): the "npn" in the function name looks like a typo for "npm";
    kept as-is because callers elsewhere may use this name.
    """
    # Served verbatim as nginx/Angie config; do not reformat the directives.
    # Matches static-asset extensions and caches them in the shared
    # "public-cache" zone with stale-while-revalidate style behavior.
    content = """location ~* \\.(css|js|mjs|json|xml|txt|md|html|htm|pdf|doc|docx|xls|xlsx|ppt|pptx|jpg|jpeg|jpe|jfif|pjpeg|pjp|png|gif|webp|avif|apng|svg|svgz|ico|bmp|tif|tiff|jxl|heic|heif|woff|woff2|ttf|otf|eot|mp3|mp4|m4a|m4v|ogg|ogv|oga|opus|wav|webm|flac|aac|mov|avi|wmv|zip|gz|bz2|tar|rar|7z|css\\.map|js\\.map)$ {
    proxy_cache public-cache;
    proxy_cache_valid 200 30m;
    proxy_cache_revalidate on;
    proxy_cache_lock on;
    proxy_cache_lock_timeout 5s;
    proxy_cache_background_update on;
    proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
    proxy_connect_timeout 5s;
    proxy_read_timeout 15s;
    add_header X-Cache-Status $upstream_cache_status always;
    proxy_hide_header Age;
    proxy_hide_header X-Cache-Hits;
    proxy_hide_header X-Cache;
    access_log off;
    include /etc/nginx/conf.d/include/proxy.conf;
    status_zone cache_assets;
}
"""

    with step("Updating NPM assets cache configuration"):
        return update_config_file(
            filepath="/etc/nginx/conf.d/include/assets.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
|
|
def update_ssl_ciphers_config():
    """Write a modern TLS 1.2/1.3 cipher policy to include/ssl-ciphers.conf.

    Returns the backup path from update_config_file (None if the file is new).
    """
    # Served verbatim as nginx/Angie config; do not reformat the directives.
    content = """# Modern SSL/TLS Configuration
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384';
ssl_prefer_server_ciphers on;
ssl_conf_command Ciphersuites TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384;

"""

    with step("Updating NPM SSL/TLS cipher configuration"):
        return update_config_file(
            filepath="/etc/nginx/conf.d/include/ssl-ciphers.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
|
|
def update_npm_listen_template():
    """
    Update NPM listen template with HTTP/3 (QUIC) support for Angie.

    Returns the backup path from update_config_file (None if the file is new).
    """
    # Liquid template rendered by the NPM backend; keep the tag syntax intact.
    # NOTE(review): the "# HTTP listening" comment is duplicated in the
    # template output — harmless, but presumably unintentional.
    content = """# HTTP listening
# HTTP listening
listen 80;
{% if ipv6 -%}
listen [::]:80;
{% else -%}
#listen [::]:80;
{% endif %}

{% if certificate -%}
# HTTPS/TLS listening
# HTTP/3 (QUIC)
listen 443 quic;
{% if ipv6 -%}
listen [::]:443 quic;
{% endif %}

# HTTP/2 and HTTP/1.1 fallback - TCP port
listen 443 ssl;
{% if ipv6 -%}
listen [::]:443 ssl;
{% else -%}
#listen [::]:443 ssl;
{% endif %}
{% endif %}

server_name {{ domain_names | join: " " }};

{% if certificate -%}
# Enable HTTP/2 and HTTP/3 together
{% if http2_support == 1 or http2_support == true %}
http2 on;
http3 on;
http3_hq on;
{% else -%}
http2 off;
http3 off;
{% endif %}

# Advertise HTTP/3 availability to clients
add_header Alt-Svc 'h3=":443"; ma=86400' always;
{% endif %}

# Angie status for stats
status_zone {{ domain_names[0] | replace: "*.", "" | replace: ".", "_" }};
"""

    with step("Updating NPM listen template with HTTP/3 support"):
        return update_config_file(
            filepath="/opt/npm/templates/_listen.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
def update_npm_proxy_host_template():
    """
    Update /opt/npm/templates/proxy_host.conf with upstream keepalive
    configuration.

    Returns the backup path from update_config_file (None if the file is new).
    """
    # Liquid template rendered by the NPM backend; keep the tag syntax and
    # directive text intact ("BCKEND" typo included — it is part of the
    # generated config's comment and not worth a diff against live configs).
    content = """{% include "_header_comment.conf" %}


{% if enabled %}

#### BCKEND UPSTREAM ####
{% assign bname = domain_names[0] | replace: "*.", "" | replace: ".", "_" %}
upstream backend_{{ bname }} {
    zone {{ bname }} 1m;
    server {{ forward_host }}:{{ forward_port }};
    keepalive 16;
}

{% include "_hsts_map.conf" %}

server {
    set $forward_scheme {{ forward_scheme }};
    set $server "{{ forward_host }}";
    set $port {{ forward_port }};

{% include "_listen.conf" %}
{% include "_certificates.conf" %}
{% include "_assets.conf" %}
{% include "_exploits.conf" %}
{% include "_hsts.conf" %}
{% include "_forced_ssl.conf" %}

{% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $http_connection;
    proxy_http_version 1.1;
{% endif %}

    access_log /data/logs/proxy-host-{{ id }}_access.log proxy;
    error_log /data/logs/proxy-host-{{ id }}_error.log warn;

{{ advanced_config }}

{{ locations }}

{% if use_default_location %}

    location / {
{% include "_access.conf" %}
{% include "_hsts.conf" %}

        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-Scheme $scheme;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_http_version 1.1;
        proxy_set_header Connection "";
        proxy_pass {{ forward_scheme }}://backend_{{ bname }}$request_uri;
{% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $http_connection;
{% endif %}
    }
{% endif %}

    # Custom
    include /data/nginx/custom/server_proxy[.]conf;
}
{% endif %}
"""

    with step("Updating NPM proxy host template"):
        return update_config_file(
            filepath="/opt/npm/templates/proxy_host.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
|
|
def update_npm_location_template():
    """
    Update /opt/npm/templates/_location.conf with status_zone monitoring.

    Returns the backup path from update_config_file (None if the file is new).
    """
    # Liquid template rendered by the NPM backend; keep the tag syntax intact.
    content = """ location {{ path }} {
    {{ advanced_config }}

    status_zone location_{{ forward_host }}_{{ forward_port }}_{{ path }};

    proxy_set_header Host $host;
    proxy_set_header X-Forwarded-Scheme $scheme;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-For $remote_addr;
    proxy_set_header X-Real-IP $remote_addr;

    proxy_pass {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};

    {% include "_access.conf" %}
    {% include "_assets.conf" %}
    {% include "_exploits.conf" %}
    {% include "_forced_ssl.conf" %}
    {% include "_hsts.conf" %}

    {% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $http_connection;
    proxy_http_version 1.1;
    {% endif %}
}
"""

    with step("Updating NPM custom location template"):
        return update_config_file(
            filepath="/opt/npm/templates/_location.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
|
|
def generate_selfsigned_cert(cert_path=None, key_path=None, days=None):
    """Create a self-signed RSA cert/key pair for the NPM admin interface.

    Falls back to the NPM_ADMIN_* module defaults for any argument left as
    None.  When both files already exist nothing is generated.

    Returns:
        tuple[str, str]: (certificate path, private key path) as strings.
    """
    cert_path = Path(cert_path or NPM_ADMIN_CERT_PATH)
    key_path = Path(key_path or NPM_ADMIN_KEY_PATH)
    days = days or NPM_ADMIN_CERT_DAYS

    cert_path.parent.mkdir(parents=True, exist_ok=True)

    # Reuse an existing pair rather than rotating it on every run.
    if cert_path.exists() and key_path.exists():
        if DEBUG:
            print(f" Certificate already exists: {cert_path}")
        return (str(cert_path), str(key_path))

    if DEBUG:
        print(" Generating self-signed certificate...")

    run(
        [
            "openssl", "req", "-x509", "-nodes",
            "-days", str(days),
            "-newkey", "rsa:4096",
            "-keyout", str(key_path),
            "-out", str(cert_path),
            "-subj", "/C=US/ST=State/L=City/O=Organization/CN=nginxproxymanager",
        ],
        check=True,
    )

    # Cert is world-readable, key stays private; both owned by npm.
    for perm, target in (("644", cert_path), ("600", key_path)):
        run(["chmod", perm, str(target)], check=False)
    for target in (cert_path, key_path):
        run(["chown", "npm:npm", str(target)], check=False)

    if DEBUG:
        print(f" Certificate created: {cert_path}")
        print(f" Private key created: {key_path}")

    return (str(cert_path), str(key_path))
|
|
|
|
|
|
def update_npm_admin_interface(enable_ssl=None, http_port=None, https_port=None, root_path=None):
    """
    Update NPM admin interface configuration with SSL support and redirect.
    Uses global configuration if parameters not provided.

    Writes /etc/nginx/conf.d/production.conf via update_config_file() and
    returns its result. When SSL is enabled, a self-signed certificate is
    generated first and an HTTP->HTTPS redirect server block is emitted.
    """
    enable_ssl = NPM_ADMIN_ENABLE_SSL if enable_ssl is None else enable_ssl
    http_port = http_port or NPM_ADMIN_HTTP_PORT
    https_port = https_port or NPM_ADMIN_HTTPS_PORT
    root_path = root_path or NPM_ADMIN_ROOT_PATH
    cert_path = NPM_ADMIN_CERT_PATH
    key_path = NPM_ADMIN_KEY_PATH

    if enable_ssl:
        with step("Generating self-signed certificate for admin interface"):
            generate_selfsigned_cert()

        content = f"""# Admin Interface - HTTP (redirect to HTTPS)
server {{
    listen {http_port} default_server;
    server_name nginxproxymanager;

    add_header Alt-Svc 'h3=":{https_port}"; ma=60' always;

    # Redirect all HTTP traffic to HTTPS
    return 301 https://$host:{https_port}$request_uri;
}}

# Admin Interface - HTTPS
server {{
    listen {https_port} ssl;
    listen {https_port} quic reuseport;

    listen 443 ssl;
    listen 443 quic reuseport;

    add_header Alt-Svc 'h3=":{https_port}"; ma=60' always;
    http3 on;
    http2 on;

    server_name nginxproxymanager npm-admin;

    # SSL Configuration
    ssl_certificate {cert_path};
    ssl_certificate_key {key_path};
    include /etc/nginx/conf.d/include/ssl-ciphers.conf;
    status_zone npm_admin;

    root {root_path};
    access_log /dev/null;

    location /api {{
        return 302 /api/;
    }}

    location /api/ {{
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-Scheme $scheme;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-For $remote_addr;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_pass http://127.0.0.1:3000/;

        proxy_read_timeout 15m;
        proxy_send_timeout 15m;
    }}

    location / {{
        etag off;
        index index.html;
        if ($request_uri ~ ^/(.*)\\.html$) {{
            return 302 /$1;
        }}
        try_files $uri $uri.html $uri/ /index.html;
    }}
}}
"""
    else:
        # Configuration without SSL (original)
        # BUG FIX: the "if" directive below previously read "iif", which is
        # not a valid nginx directive and made the generated config fail the
        # syntax check (nginx/angie -t) whenever SSL was disabled.
        content = f"""# Admin Interface
server {{
    listen {http_port} default_server;
    server_name nginxproxymanager npm-admin;
    root {root_path};
    access_log /dev/null;
    status_zone npm_admin;

    location /api {{
        return 302 /api/;
    }}

    location /api/ {{
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-Scheme $scheme;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-For $remote_addr;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_pass http://127.0.0.1:3000/;

        proxy_read_timeout 15m;
        proxy_send_timeout 15m;
    }}

    location / {{
        etag off;
        index index.html;
        if ($request_uri ~ ^/(.*)\\.html$) {{
            return 302 /$1;
        }}
        try_files $uri $uri.html $uri/ /index.html;
    }}
}}
"""

    with step("Updating NPM admin interface configuration"):
        return update_config_file(
            filepath="/etc/nginx/conf.d/production.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
|
|
|
|
def update_npm_stream_template():
    """
    Update /opt/npm/templates/stream.conf with status_zone monitoring.

    Overwrites the Liquid-style template the NPM backend renders for each
    TCP/UDP stream host. Compared to the stock template, every generated
    server block also gets a ``status_zone`` directive so Angie's console
    can report per-stream metrics. Returns whatever update_config_file()
    returns (presumably whether the file changed — TODO confirm).
    """
    # NOTE: the {{ ... }} / {% ... %} markers are template syntax consumed by
    # the NPM backend at render time, not Python formatting — this is a plain
    # (non-f) string on purpose.
    content = """# ------------------------------------------------------------
# {{ incoming_port }} TCP: {{ tcp_forwarding }} UDP: {{ udp_forwarding }}
# ------------------------------------------------------------

{% if enabled %}
{% if tcp_forwarding == 1 or tcp_forwarding == true -%}
server {
    listen {{ incoming_port }} {%- if certificate %} ssl {%- endif %};
    {% unless ipv6 -%} # {%- endunless -%} listen [::]:{{ incoming_port }} {%- if certificate %} ssl {%- endif %};

    {%- include "_certificates_stream.conf" %}

    proxy_pass {{ forwarding_host }}:{{ forwarding_port }};

    status_zone stream_tcp_{{ incoming_port }}_{{ forwarding_port }};

    # Custom
    include /data/nginx/custom/server_stream[.]conf;
    include /data/nginx/custom/server_stream_tcp[.]conf;
}
{% endif %}

{% if udp_forwarding == 1 or udp_forwarding == true -%}
server {
    listen {{ incoming_port }} udp;
    {% unless ipv6 -%} # {%- endunless -%} listen [::]:{{ incoming_port }} udp;

    proxy_pass {{ forwarding_host }}:{{ forwarding_port }};

    status_zone stream_udp_{{ incoming_port }}_{{ forwarding_port }};

    # Custom
    include /data/nginx/custom/server_stream[.]conf;
    include /data/nginx/custom/server_stream_udp[.]conf;
}
{% endif %}
{% endif %}
"""

    with step("Updating NPM stream template"):
        return update_config_file(
            filepath="/opt/npm/templates/stream.conf",
            newcontent=content,
            owner="npm:npm",
            mode=0o644
        )
|
|
|
|
|
|
def gather_versions(npm_app_version: str):
    """Collect host IP and tool versions for the MOTD / summary output.

    Returns a 5-tuple: (ip, angie_version, node_version, yarn_version,
    npm_app_version). Every probe is best-effort: a missing tool yields
    an empty string rather than an exception.
    """
    def _probe(cmd):
        # Best-effort command output; empty string if the command fails.
        return run_out(cmd, check=False) or ""

    addresses = _probe(["hostname", "-I"]).split()
    ip = addresses[0] if addresses else ""

    # Angie prints its version banner on -v/-V; combine both outputs and
    # look for an "angie/X.Y.Z" marker.
    angie_out = _probe(["angie", "-v"]) + "\n" + _probe(["angie", "-V"])
    match = re.search(r"(?i)\bangie\s*/\s*([0-9]+(?:\.[0-9]+)+)\b", angie_out)
    if match is None:
        # Fall back to the installed package version from dpkg.
        pkg_version = _probe(
            ["dpkg-query", "-W", "-f=${Version}", "angie"]
        ).strip()
        match = re.search(r"([0-9]+(?:\.[0-9]+)+)", pkg_version)
    angie_v = match.group(1) if match else (angie_out.strip() or "")

    # Node prints "vX.Y.Z"; drop the leading "v".
    node_v = _probe(["node", "-v"]).strip().lstrip("v")

    # Prefer "yarn", fall back to Debian's "yarnpkg" alias.
    yarn_v = _probe(["yarn", "-v"]).strip() or _probe(["yarnpkg", "-v"]).strip()

    return ip, angie_v, node_v, yarn_v, npm_app_version
|
|
|
|
|
|
def update_motd(enabled: bool, info, ipv6_enabled: bool, npm_version: str = None, installed_from_branch: bool = False, tp_theme: str = None):
    """
    Updates MOTD with installation/configuration details.

    Conditionally shows:
    - Default credentials only for NPM < 2.13.0
    - First login message for NPM >= 2.13.0
    - Theme info only if not from branch
    - Branch detection

    Args:
        enabled: when False the function is a no-op.
        info: 5-tuple from gather_versions(): (ip, angie, node, yarn, npm).
        ipv6_enabled: whether IPv6 entries were kept in the configs.
        npm_version: installed NPM version string ("-dev-" marks a branch build).
        installed_from_branch: True when installed from a git branch.
        tp_theme: Theme.Park theme name, or None when themes are disabled.
    """
    if not enabled:
        return

    ip, angie_v, node_v, yarn_v, npm_v = info

    ipv6_line = (
        "IPv6: enabled (configs untouched)."
        if ipv6_enabled
        else "IPv6: disabled in resolvers and conf."
    )

    # Branch builds carry a "-dev-" marker in their version string.
    is_branch_version = "-dev-" in npm_version if npm_version else False

    # Parse "vX.Y.Z" / "X.Y.Z" into a comparable tuple; branch builds are
    # treated as (0, 0, 0) so they never match the >= 2.13.0 path below.
    npm_version_parsed = (0, 0, 0)
    if npm_version and not is_branch_version:
        clean_version = npm_version[1:] if npm_version.startswith("v") else npm_version
        npm_version_parsed = parse_version(clean_version)

    # NPM >= 2.13.0 (and branch builds) prompt for credentials on first
    # login; older releases ship the well-known default account.
    if is_branch_version or installed_from_branch:
        creds = "First login: Visit the panel above to set admin user and password"
    elif npm_version_parsed >= (2, 13, 0):
        creds = "First login: Visit the panel above to set admin user and password"
    else:
        creds = "Default login: admin@example.com / changeme (change immediately!)"

    # ========== THEME INFO ==========
    # Branch installs have no theme support, so they always show "Default".
    if not installed_from_branch:
        if tp_theme:
            theme_line = f"Theme: {tp_theme}"
        else:
            theme_line = "Theme: DISABLED"
    else:
        theme_line = "Theme: Default"

    # Panel URL reflects the compile-time SSL choice for the admin UI.
    protocol = "https" if NPM_ADMIN_ENABLE_SSL else "http"
    port = NPM_ADMIN_HTTPS_PORT if NPM_ADMIN_ENABLE_SSL else NPM_ADMIN_HTTP_PORT
    npm_line = f"Nginx Proxy Manager: {protocol}://{ip}:{port}"

    if is_branch_version:
        npm_source = f"Source: branch ({npm_version})"
    elif installed_from_branch:
        npm_source = "Source: master branch (development)"
    else:
        npm_source = f"Source: release {npm_version}"

    text = f"""
################################ NPM / ANGIE ################################
OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})
{npm_line}
Angie & Prometheus stats: http://{ip}:82/console | http://{ip}:82/p8s
Angie: {angie_v} (conf: /etc/angie -> /etc/nginx, reload: angie -s reload)
Node.js: v{node_v} Yarn: v{yarn_v}
NPM: {npm_v}
{npm_source}
{theme_line}
Paths: app=/opt/npm data=/data cache=/var/lib/angie/cache
{ipv6_line}
{creds}
###########################################################################
"""
    # Prefer a drop-in under /etc/motd.d (picked up by pam_motd on modern
    # systems); otherwise splice our banner into the monolithic /etc/motd,
    # replacing any previous banner between the two marker lines.
    motd_d = Path("/etc/motd.d")
    if motd_d.exists():
        write_file(motd_d / "10-npm-angie", text.strip() + "\n", 0o644)
    else:
        motd = Path("/etc/motd")
        existing = motd.read_text(encoding="utf-8") if motd.exists() else ""
        # Matches the full previous banner (re.S lets "." span newlines).
        pattern = re.compile(
            r"################################ NPM / ANGIE ################################.*?###########################################################################\n",
            re.S,
        )
        if pattern.search(existing):
            content = pattern.sub(text.strip() + "\n", existing)
        else:
            content = (
                (existing.rstrip() + "\n\n" + text.strip() + "\n")
                if existing
                else (text.strip() + "\n")
            )
        write_file(motd, content, 0o644)
|
|
|
|
|
|
def print_summary(info, ipv6_enabled, dark_mode_enabled, tp_theme, update_mode, npm_version=None, installed_from_branch=False):
    """
    Displays installation/update summary with conditionally shown credentials and theme.
    - For branch installations: First login setup (no presets)
    - For NPM >= 2.13.0: First login setup (credentials set by user)
    - For NPM < 2.13.0: Default credentials shown with warning
    - Theme is hidden if installed from branch (no theme support in dev)

    Args:
        info: 5-tuple from gather_versions(): (ip, angie, node, yarn, npm).
        ipv6_enabled: whether IPv6 entries were kept.
        dark_mode_enabled: whether a Theme.Park theme was applied.
        tp_theme: theme name (printed only for non-branch installs).
        update_mode: True for --update runs (suppresses the login section).
        npm_version: installed version string; "-dev-" marks a branch build.
        installed_from_branch: True when installed from a git branch.
    """
    ip, angie_v, node_v, yarn_v, npm_v = info

    print("\n====================== SUMMARY ======================")
    print(f"OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})")
    print(f"Mode: {'UPDATE' if update_mode else 'INSTALL'}")

    # Panel URL depends on the compile-time admin SSL setting.
    if NPM_ADMIN_ENABLE_SSL:
        print(f"NPM panel address: https://{ip}:{NPM_ADMIN_HTTPS_PORT}")
        print(f" (HTTP→HTTPS: http://{ip}:{NPM_ADMIN_HTTP_PORT})")
    else:
        print(f"NPM panel address: http://{ip}:{NPM_ADMIN_HTTP_PORT}")

    print(f"Angie & Prometheus stats: http://{ip}:82/console | http://{ip}:82/p8s")
    print(f"Angie: v{angie_v}")
    print(f"Node.js: v{node_v}")
    print(f"Yarn: v{yarn_v}")
    print(f"NPM: {npm_v}")
    print(
        f"IPv6: {'ENABLED' if ipv6_enabled else 'DISABLED'}"
    )

    # Branch installs have no theme support, so the theme line is skipped.
    if not installed_from_branch:
        if tp_theme:
            print(f"Custom theme: {tp_theme}")

    print(
        "Paths: /opt/npm (app), /data (data), /etc/angie (conf), /var/log/angie (logs)"
    )
    print("Services: systemctl status angie.service / npm.service")

    # Login instructions are only relevant for fresh installs.
    if not update_mode:
        npm_version_parsed = parse_version(npm_version) if npm_version else (0, 0, 0)
        is_branch_version = "-dev-" in npm_version if npm_version else False

        if is_branch_version or installed_from_branch:
            # Branch installation
            print(f"\n FIRST LOGIN (branch: {npm_version}):")
            print(f" URL: https://{ip}:{NPM_ADMIN_HTTPS_PORT}")
            print(f" Set admin user and password during first login")
        elif npm_version_parsed >= (2, 13, 0):
            # NPM >= 2.13.0
            print(f"\n FIRST LOGIN (NPM v{npm_version}):")
            print(f" URL: https://{ip}:{NPM_ADMIN_HTTPS_PORT}")
            print(f" Set admin user and password during first login")
        else:
            # NPM < 2.13.0
            print(f"\n DEFAULT LOGIN (NPM v{npm_version}):")
            print(f" Email: admin@example.com")
            print(f" Password: changeme")
            print(f" (Change immediately after first login)")

    print("Test config: /usr/sbin/angie -t")
    print("==========================================================\n")
|
|
|
|
|
|
# ========== UPDATE-ONLY ==========
|
|
|
|
def update_only(
    node_pkg: str,
    node_version: str | None,
    npm_version_override: str | None,
    apply_dark: bool,
    dark_env: dict,
    ipv6_enabled: bool,
):
    """Update an existing release-based NPM installation in place.

    Steps: upgrade OS packages, make sure Node.js/yarn exist, download the
    requested (or latest) release tarball, take a full backup, rebuild the
    frontend, replace the backend while preserving /opt/npm/config, reinstall
    backend dependencies, refresh systemd units and the certbot venv, and
    restart services.

    Args:
        node_pkg: APT Node.js package name, used when npm is missing.
        node_version: requested Node.js major version (stored in config only).
        npm_version_override: pin a specific NPM release; None = latest.
        apply_dark: apply the Theme.Park mod after the update.
        dark_env: keyword arguments for apply_dark_mode().
        ipv6_enabled: forwarded to create_systemd_units().

    Returns:
        The NPM release version string that was installed.
    """
    apt_update_upgrade()

    # Ensure npm exists before trying to install yarn
    if not shutil.which("npm"):
        ensure_minimum_nodejs(user_requested_version=node_pkg)
    install_yarn()

    version = github_latest_release_tag(
        "NginxProxyManager/nginx-proxy-manager", npm_version_override
    )
    url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
    tmp = Path(tempfile.mkdtemp(prefix="npm-update-"))
    src = download_extract_tar_gz(url, tmp)

    with step("Setting version in package.json (update)"):
        # Stamp the resolved release version into every package.json so the
        # UI reports the right version; best-effort per file.
        for pkg_path in ["package.json", "backend/package.json", "frontend/package.json"]:
            pj = src / pkg_path
            if not pj.exists():
                continue

            try:
                data = json.loads(pj.read_text(encoding="utf-8"))
                data["version"] = version
                pj.write_text(
                    json.dumps(data, indent=2, ensure_ascii=False) + "\n",
                    encoding="utf-8"
                )
                if DEBUG:
                    print(f" ✓ Updated {pkg_path} -> version {version}")
            except Exception as e:
                if DEBUG:
                    print(f" ⚠ Warning: Could not update {pkg_path}: {e}")

    # ========== BACKUP BEFORE UPDATE ==========
    timestamp = time.strftime("%Y%m%d-%H%M%S")
    backup_dir = Path(f"/data/backups/npm-backup-{timestamp}")
    removed_old_backups = 0  # set while pruning below, reported afterwards

    with step("Creating full backup before update"):
        backup_dir.parent.mkdir(parents=True, exist_ok=True)

        try:
            if Path("/opt/npm").exists():
                shutil.copytree("/opt/npm", backup_dir / "opt_npm", dirs_exist_ok=True)

            if Path("/data/database.sqlite").exists():
                shutil.copy2("/data/database.sqlite", backup_dir / "database.sqlite")
            if Path("/data/letsencrypt").exists():
                shutil.copytree("/data/letsencrypt", backup_dir / "letsencrypt", dirs_exist_ok=True)
            if Path("/data/nginx").exists():
                shutil.copytree("/data/nginx", backup_dir / "nginx", dirs_exist_ok=True)

            backup_info = {
                "backup_date": timestamp,
                "npm_version": "current",
                "update_to_version": version,
                "backup_path": str(backup_dir)
            }
            (backup_dir / "backup_info.json").write_text(json.dumps(backup_info, indent=2))

            # Retention: keep only the 3 most recent backups.
            backups = sorted(backup_dir.parent.glob("npm-backup-*"))
            if len(backups) > 3:
                for old_backup in backups[:-3]:
                    shutil.rmtree(old_backup, ignore_errors=True)
                    removed_old_backups += 1

        except Exception as e:
            # A failed backup is not fatal, but the operator must opt in.
            print(f"⚠ Warning: Backup failed: {e}")
            print(" Continue update anyway? [y/N]: ", end="", flush=True)
            response = input().strip().lower()
            if response not in ["y", "yes"]:
                print("Update cancelled.")
                sys.exit(1)

        print(f" Backup location: {backup_dir}")
        # BUG FIX: the count was previously re-globbed *after* pruning, so the
        # "Removed N old backup(s)" message was computed from the already
        # pruned listing and never matched reality.
        if removed_old_backups:
            print(f" Removed {removed_old_backups} old backup(s)")
    # ========== END BACKUP ==========

    # Customize frontend components (inject installer link)
    with step("Customizing frontend components"):
        inject_footer_link(src)

    _build_frontend(src / "frontend", Path("/opt/npm/frontend"))

    with step("Updating backend without overwriting config/"):
        # Park the live config aside so the wipe-and-replace below can't
        # destroy operator settings.
        backup_cfg = Path("/tmp/npm-config-backup")
        if backup_cfg.exists():
            shutil.rmtree(backup_cfg)
        if Path("/opt/npm/config").exists():
            shutil.copytree("/opt/npm/config", backup_cfg, dirs_exist_ok=True)

        backend_src = src / "backend"

        if backend_src.exists():
            # Old layout with a dedicated backend/ directory (NPM < 2.13.0).
            if DEBUG:
                print(f" Unpacking backend contents (version < 2.13.0)")

            # Wipe everything except the freshly built frontend and config.
            for item in Path("/opt/npm").glob("*"):
                if item.name in ("frontend", "config"):
                    continue
                if item.is_dir():
                    shutil.rmtree(item)
                else:
                    item.unlink()

            for src_item in backend_src.iterdir():
                dst_item = Path(f"/opt/npm/{src_item.name}")

                if src_item.is_dir():
                    if dst_item.exists():
                        shutil.rmtree(dst_item)
                    copy_tree_safe(src_item, dst_item)
                else:
                    shutil.copy2(src_item, dst_item)
        else:
            # Flat repository layout (NPM >= 2.13.0): backend lives at the
            # archive root.
            if DEBUG:
                print(f" Copying root contents (version >= 2.13.0)")

            for item in Path("/opt/npm").glob("*"):
                if item.name in ("frontend", "config"):
                    continue
                if item.is_dir():
                    shutil.rmtree(item)
                else:
                    item.unlink()

            for src_item in src.iterdir():
                if src_item.name in ("frontend", "config", "docker"):
                    continue

                dst_item = Path(f"/opt/npm/{src_item.name}")

                if src_item.is_dir():
                    if dst_item.exists():
                        shutil.rmtree(dst_item)
                    copy_tree_safe(src_item, dst_item)
                else:
                    shutil.copy2(src_item, dst_item)

            # The "global" directory is shared code in the flat layout;
            # make sure it exists even when missing from the archive.
            global_src = src / "global"
            if global_src.exists():
                global_dst = Path("/opt/npm/global")
                if global_dst.exists():
                    shutil.rmtree(global_dst)
                shutil.copytree(global_src, global_dst, dirs_exist_ok=True)
                if DEBUG:
                    print(f" ✓ Directory 'global' copied")
            else:
                Path("/opt/npm/global").mkdir(parents=True, exist_ok=True)
                if DEBUG:
                    print(f" ⊘ Directory 'global' not in archive (created empty)")

        # Restore everything from backup_cfg
        Path("/opt/npm/config").mkdir(parents=True, exist_ok=True)
        if backup_cfg.exists():
            for item in backup_cfg.iterdir():
                src_cfg = backup_cfg / item.name
                dst_cfg = Path(f"/opt/npm/config/{item.name}")

                if src_cfg.is_dir():
                    if dst_cfg.exists():
                        shutil.rmtree(dst_cfg)
                    shutil.copytree(src_cfg, dst_cfg)
                else:
                    shutil.copy2(src_cfg, dst_cfg)

            shutil.rmtree(backup_cfg, ignore_errors=True)

    with step("Installing backend dependencies after update"):
        os.chdir("/opt/npm")
        run(["yarn", "install"])

    patch_npm_backend_commands()
    create_systemd_units(ipv6_enabled=ipv6_enabled)

    with step("Setting owners"):
        run(["chown", "-R", "npm:npm", "/opt/npm"])

    # Cleanup development configuration
    with step("Cleaning up development configuration"):
        dev_conf = Path("/etc/nginx/conf.d/dev.conf")
        if dev_conf.exists():
            try:
                dev_conf.unlink()
                print(f" ✓ Removed development config")
            except Exception as e:
                print(f" ⚠ Warning: Could not remove dev.conf: {e}")

    if apply_dark:
        apply_dark_mode(**dark_env)

    save_installer_config({
        "ipv6_enabled": ipv6_enabled,
        "tp_theme": dark_env.get("TP_THEME") if apply_dark else None,
        "tp_domain": dark_env.get("TP_DOMAIN", TP_DOMAIN),
        "tp_scheme": dark_env.get("TP_SCHEME", TP_SCHEME),
        "tp_community_theme": dark_env.get("TP_COMMUNITY_THEME", TP_COMMUNITY_THEME),
        "node_version": node_version,
        "npm_version": version,
    })

    # Rebuild the certbot venv from scratch so its dependencies match the
    # updated stack.
    # BUG FIX: this previously tested "certbot_venv.exists" (the bound method
    # object, always truthy) instead of calling exists(), so the message was
    # printed even when the venv was absent.
    certbot_venv = Path('/opt/certbot')
    if certbot_venv.exists():
        print(f"♻ Removing stale certbot venv for rebuild...")
        shutil.rmtree(certbot_venv, ignore_errors=True)

    setup_certbot_venv()
    configure_letsencrypt()

    with step("Restarting services after update"):
        run(["systemctl", "restart", "angie.service"], check=False)
        run(["systemctl", "restart", "npm.service"], check=False)

    return version
|
|
|
|
|
|
# ========== CUSTOM THEME ==========
|
|
def apply_dark_mode(
    APP_FILEPATH="/opt/npm/frontend",
    TP_DOMAIN=None,
    TP_COMMUNITY_THEME=None,
    TP_SCHEME=None,
    TP_THEME=None,
):
    """Inject Theme.Park stylesheets into the NPM frontend HTML files.

    Adds two <link rel='stylesheet'> tags (base CSS + chosen theme) just
    before </head> in every *.html under APP_FILEPATH. Parameters default
    to None and fall back to the theme-park.dev defaults below; note they
    intentionally shadow the module-level TP_* constants.
    """
    if DEBUG:
        print("--------------------------------------")
        print("| Nginx Proxy Manager theme.park Mod |")
        print("--------------------------------------")

    # locate frontend: fall back to known alternative install locations
    # when the default path is missing.
    if not Path(APP_FILEPATH).exists():
        if Path("/app/frontend").exists():
            APP_FILEPATH = "/app/frontend"
        elif Path("/opt/nginx-proxy-manager/frontend").exists():
            APP_FILEPATH = "/opt/nginx-proxy-manager/frontend"

    # Normalise empty/None parameters to the Theme.Park defaults.
    if not TP_DOMAIN or TP_DOMAIN.strip() == "":
        if DEBUG:
            print("No domain set, defaulting to theme-park.dev")
        TP_DOMAIN = "theme-park.dev"
    if not TP_SCHEME or TP_SCHEME.strip() == "":
        TP_SCHEME = "https"
    # Community themes live under a different URL path on theme-park.dev.
    THEME_TYPE = (
        "community-theme-options"
        if (str(TP_COMMUNITY_THEME).lower() == "true")
        else "theme-options"
    )
    if not TP_THEME or TP_THEME.strip() == "":
        TP_THEME = "organizr"

    # GitHub Pages mirrors serve the CSS under a /theme.park prefix.
    if "github.io" in TP_DOMAIN:
        TP_DOMAIN = f"{TP_DOMAIN}/theme.park"

    if DEBUG:
        print(
            "Variables set:\n"
            f"'APP_FILEPATH'={APP_FILEPATH}\n"
            f"'TP_DOMAIN'={TP_DOMAIN}\n"
            f"'TP_COMMUNITY_THEME'={TP_COMMUNITY_THEME}\n"
            f"'TP_SCHEME'={TP_SCHEME}\n"
            f"'TP_THEME'={TP_THEME}\n"
        )

    base_href = f"{TP_SCHEME}://{TP_DOMAIN}/css/base/nginx-proxy-manager/nginx-proxy-manager-base.css"
    theme_href = f"{TP_SCHEME}://{TP_DOMAIN}/css/{THEME_TYPE}/{TP_THEME}.css"

    with step("Injecting Theme.Park CSS into HTML"):
        htmls = list(Path(APP_FILEPATH).rglob("*.html"))
        for path in htmls:
            html = path.read_text(encoding="utf-8")
            # Guard on base_href makes the injection idempotent: already
            # patched files are not patched twice.
            if base_href not in html:
                html = re.sub(
                    r"</head>",
                    f"<link rel='stylesheet' href='{base_href}'></head> ",
                    html,
                    flags=re.I,
                )
                html = re.sub(
                    r"</head>",
                    f"<link rel='stylesheet' href='{theme_href}'></head> ",
                    html,
                    flags=re.I,
                )
            # NOTE(review): the write appears to run for every file, patched
            # or not — harmless since unpatched content is unchanged, but
            # confirm against the original indentation.
            path.write_text(html, encoding="utf-8")
            if DEBUG:
                print(f"Patched: {path}")
|
|
|
|
|
|
# ========== MAIN ==========
|
|
def main():
|
|
global DEBUG
|
|
ensure_root()
|
|
parser = argparse.ArgumentParser(
|
|
description="Install/upgrade NPM on Angie (Debian 11 + / Ubuntu 20.04 +).",
|
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
|
|
)
|
|
parser.add_argument(
|
|
"--nodejs-pkg",
|
|
default="nodejs",
|
|
help="APT Node.js package name (e.g. nodejs, nodejs-18).",
|
|
)
|
|
parser.add_argument(
|
|
"--node-version",
|
|
default=None,
|
|
help=f"Install Node.js from NodeSource repo (e.g. 'latest', '21', '20', '18'). "
|
|
f"Maximum supported: v{MAX_NODEJS_VERSION}. Overrides --nodejs-pkg.",
|
|
)
|
|
parser.add_argument(
|
|
"--npm-version",
|
|
default=None,
|
|
help="Force NPM app version from release tag (e.g. 2.12.6). Default: master branch.",
|
|
)
|
|
parser.add_argument(
|
|
"--motd",
|
|
choices=["yes", "no"],
|
|
default="yes",
|
|
help="Update MOTD after completion.",
|
|
)
|
|
parser.add_argument(
|
|
"--enable-ipv6",
|
|
action="store_true",
|
|
help="Do not strip IPv6 from configs/resolvers (keep IPv6).",
|
|
)
|
|
parser.add_argument(
|
|
"--update",
|
|
action="store_true",
|
|
help="Update mode: upgrade packages + rebuild frontend/backend without reconfiguring Angie.",
|
|
)
|
|
parser.add_argument(
|
|
"--dark-mode",
|
|
action="store_true",
|
|
help=f"Enable dark theme (default: {TP_DEFAULT_THEME} from theme-park.dev). "
|
|
f"Only available for NPM version < 2.13.0",
|
|
)
|
|
parser.add_argument(
|
|
"--tp-theme",
|
|
default=None,
|
|
help="Enable dark theme with specific theme name (e.g. nord, dracula, plex). Implies --dark-mode. "
|
|
"Only available for NPM version < 2.13.0",
|
|
)
|
|
parser.add_argument(
|
|
"--branch",
|
|
type=str,
|
|
default=None,
|
|
metavar="BRANCH",
|
|
help="Install from specific git branch (e.g., master, dev). "
|
|
"Default: master branch (latest development). Cannot be used with --dark-mode or --tp-theme.",
|
|
)
|
|
parser.add_argument(
|
|
"--debug", action="store_true", help="Show detailed logs and progress."
|
|
)
|
|
|
|
args = parser.parse_args()
|
|
DEBUG = args.debug
|
|
|
|
# Check memory and create swap if needed
|
|
memory_info = check_memory_and_create_swap()
|
|
|
|
# Determine if any main parameters were provided
|
|
main_params_provided = any([
|
|
args.npm_version,
|
|
args.branch,
|
|
args.update,
|
|
args.dark_mode,
|
|
args.tp_theme,
|
|
args.node_version,
|
|
])
|
|
|
|
# ========== INTERACTIVE MODE ==========
|
|
if not main_params_provided:
|
|
print("\nNo installation parameters provided. Starting interactive mode...")
|
|
choices = interactive_install_mode()
|
|
args = apply_interactive_choices(args, choices)
|
|
|
|
print("\n" + "="*70)
|
|
print("INSTALLATION SUMMARY")
|
|
print("="*70)
|
|
if args.update:
|
|
print("Mode: UPDATE")
|
|
elif args.branch:
|
|
print(f"Mode: INSTALL from branch '{args.branch}'")
|
|
else:
|
|
print(f"Mode: INSTALL from release tag")
|
|
if args.npm_version:
|
|
print(f"Version: {args.npm_version}")
|
|
else:
|
|
print("Version: Latest stable")
|
|
if args.dark_mode:
|
|
print(f"Theme: {args.tp_theme or 'default (organizr)'}")
|
|
print("="*70 + "\n")
|
|
|
|
confirm = input("Proceed with installation? [Y/n]: ").strip().lower()
|
|
if confirm and confirm not in ['y', 'yes', '']:
|
|
cleanup_swap()
|
|
print("Installation cancelled.")
|
|
sys.exit(0)
|
|
|
|
# ========== WRAP INSTALLATION ==========
|
|
try:
|
|
# Initialize variables to prevent UnboundLocalError
|
|
npm_app_version = None
|
|
dark_mode_enabled = False
|
|
installed_from_branch = False
|
|
selected_theme = None
|
|
dark_mode_requested = False
|
|
|
|
# Display installation banner
|
|
print("\n================== NPM + ANGIE installer ==================")
|
|
print(f"Repository: https://gitea.linuxiarz.pl/gru/npm-angie-auto-install")
|
|
print(f"Script description: Auto-installer with Angie + Node.js auto-setup")
|
|
print(f"")
|
|
print(f"System Information:")
|
|
print(f" OS: {OSREL['PRETTY']}")
|
|
print(f" Distribution: {OSREL['ID']} {OSREL['VERSION_ID']}")
|
|
print(f" Codename: {OSREL.get('CODENAME', 'N/A')}")
|
|
print(f" Python: {sys.version.split()[0]}")
|
|
print(f"")
|
|
print(f"Installation Mode:")
|
|
print(f" Log Level: {'DEBUG (verbose)' if DEBUG else 'SIMPLE (progress only)'}")
|
|
print(f" Min Node.js: v{MIN_NODEJS_VERSION}+")
|
|
print(f" Max Node.js: v{MAX_NODEJS_VERSION}")
|
|
print(f"")
|
|
print(f"Author: @linuxiarz.pl (Mateusz Gruszczyński)")
|
|
print("===========================================================\n")
|
|
|
|
# Validate dark mode / theme conflicts
|
|
if (args.dark_mode or args.tp_theme) and args.branch:
|
|
print("⚠ ERROR: --dark-mode and --tp-theme are only available for tagged releases (< 2.13.0)")
|
|
print(" Branch installations do not support themes.")
|
|
sys.exit(1)
|
|
|
|
if (args.dark_mode or args.tp_theme) and not args.npm_version:
|
|
print("⚠ ERROR: --dark-mode and --tp-theme require --npm-version to be specified")
|
|
print(" Themes only work with releases < 2.13.0, not with branch installations.")
|
|
sys.exit(1)
|
|
|
|
# Setup theme preferences
|
|
if args.tp_theme:
|
|
selected_theme = args.tp_theme
|
|
dark_mode_requested = True
|
|
elif args.dark_mode:
|
|
selected_theme = TP_DEFAULT_THEME
|
|
dark_mode_requested = True
|
|
else:
|
|
selected_theme = None
|
|
dark_mode_requested = False
|
|
|
|
# ========== UPDATE MODE ==========
|
|
if args.update:
|
|
installer_config = load_installer_config()
|
|
|
|
if not args.tp_theme and installer_config.get("tp_theme"):
|
|
selected_theme = installer_config["tp_theme"]
|
|
print(f"✓ Using stored theme: {selected_theme}")
|
|
|
|
if not args.dark_mode and not args.tp_theme and installer_config.get("tp_theme"):
|
|
dark_mode_requested = True
|
|
print(f"✓ Using stored Theme-Park setting: enabled")
|
|
|
|
stored_ipv6 = installer_config.get("ipv6_enabled", args.enable_ipv6)
|
|
installed_from_branch = installer_config.get("installed_from_branch", False)
|
|
previous_branch = installer_config.get("branch", "master")
|
|
|
|
install_logrotate_for_data_logs()
|
|
fix_logrotate_permissions_and_wrapper()
|
|
|
|
if installed_from_branch:
|
|
print(f"Old installation: branch '{previous_branch}'")
|
|
with step(f"Updating NPM from branch: {previous_branch}"):
|
|
npm_app_version = deploy_npm_app_from_git(f"refs/heads/{previous_branch}")
|
|
print(f"✓ NPM updated to {npm_app_version} from branch {previous_branch}")
|
|
dark_mode_enabled = False
|
|
npm_version_parsed = parse_version(npm_app_version)
|
|
else:
|
|
print(f"✓ Old installation: release tag")
|
|
version = update_only(
|
|
node_pkg=args.nodejs_pkg,
|
|
node_version=args.node_version,
|
|
npm_version_override=args.npm_version,
|
|
apply_dark=False,
|
|
dark_env=dict(
|
|
APP_FILEPATH="/opt/npm/frontend",
|
|
TP_DOMAIN=TP_DOMAIN,
|
|
TP_COMMUNITY_THEME=TP_COMMUNITY_THEME,
|
|
TP_SCHEME=TP_SCHEME,
|
|
TP_THEME=selected_theme,
|
|
),
|
|
ipv6_enabled=stored_ipv6 if 'stored_ipv6' in locals() else args.enable_ipv6,
|
|
)
|
|
npm_app_version = version
|
|
npm_version_parsed = parse_version(npm_app_version)
|
|
|
|
if dark_mode_requested and npm_version_parsed < (2, 13, 0):
|
|
dark_mode_enabled = True
|
|
print(f"✓ Dark mode enabled for NPM {npm_app_version}")
|
|
else:
|
|
dark_mode_enabled = False
|
|
if dark_mode_requested:
|
|
print(f"⊘ Dark unavailable for NPM {npm_app_version} (requires < 2.13.0)")
|
|
|
|
if dark_mode_enabled and npm_version_parsed < (2, 13, 0):
|
|
with step(f"Applying dark mode for NPM {npm_app_version}"):
|
|
apply_dark_mode(
|
|
APP_FILEPATH="/opt/npm/frontend",
|
|
TP_DOMAIN=TP_DOMAIN,
|
|
TP_COMMUNITY_THEME=TP_COMMUNITY_THEME,
|
|
TP_SCHEME=TP_SCHEME,
|
|
TP_THEME=selected_theme,
|
|
)
|
|
|
|
comment_x_served_by_step()
|
|
set_file_ownership(["/etc/nginx/conf.d/include/ip_ranges.conf"], "npm:npm", 0o664)
|
|
|
|
update_ssl_ciphers_config()
|
|
update_npn_assets_config()
|
|
update_npm_admin_interface()
|
|
update_npm_proxy_host_template()
|
|
update_npm_location_template()
|
|
update_npm_listen_template()
|
|
update_npm_stream_template()
|
|
cleanup_dev_config()
|
|
|
|
info = gather_versions(npm_app_version)
|
|
update_motd(
|
|
args.motd == "yes", info, ipv6_enabled=args.enable_ipv6,
|
|
npm_version=npm_app_version,
|
|
installed_from_branch=installed_from_branch,
|
|
tp_theme=selected_theme if dark_mode_enabled else None
|
|
)
|
|
print_summary(
|
|
info, args.enable_ipv6, dark_mode_enabled, selected_theme,
|
|
update_mode=True, npm_version=npm_app_version,
|
|
installed_from_branch=installed_from_branch
|
|
)
|
|
|
|
# ========== FRESH INSTALL ==========
|
|
else:
|
|
validate_supported_os()
|
|
apt_update_upgrade()
|
|
apt_purge(
|
|
["nginx", "openresty", "nodejs", "npm", "yarn", "certbot", "rustc", "cargo"]
|
|
)
|
|
apt_install(
|
|
[
|
|
"ca-certificates",
|
|
"curl",
|
|
"gnupg",
|
|
"apt-transport-https",
|
|
"openssl",
|
|
"apache2-utils",
|
|
"logrotate",
|
|
"sudo",
|
|
"acl",
|
|
"python3",
|
|
"sqlite3",
|
|
"git",
|
|
"lsb-release",
|
|
"build-essential",
|
|
]
|
|
)
|
|
|
|
setup_angie(ipv6_enabled=args.enable_ipv6)
|
|
write_metrics_files()
|
|
ensure_minimum_nodejs(user_requested_version=args.node_version)
|
|
install_yarn()
|
|
ensure_user_and_dirs()
|
|
create_sudoers_for_npm()
|
|
setup_certbot_venv()
|
|
configure_letsencrypt()
|
|
|
|
# ========== INSTALLATION ==========
|
|
# Select the installation source. Priority: explicit --branch, then an
# explicit --npm-version, otherwise the latest stable GitHub release.
#
# BUGFIX: the `if args.branch is not None:` block was duplicated verbatim,
# which deployed the app from git twice and printed two completion banners
# for a branch install. Only one copy is kept here.
if args.branch is not None:
    # User explicitly provided --branch (development install).
    branch_name = args.branch

    with step(f"Installing NPM from branch: {branch_name}"):
        npm_app_version = deploy_npm_app_from_git(f"refs/heads/{branch_name}")

    print(f"\n{'='*70}")
    print(f"✓ NPM Installation Complete (from Branch)")
    print(f"{'='*70}")
    print(f"Source: Branch (development)")
    print(f"Branch: {branch_name}")
    print(f"NPM Version: {npm_app_version}")
    print(f"{'='*70}\n")

    # Branch installs never get the theme-park dark mode.
    dark_mode_enabled = False
    installed_from_branch = True

elif args.npm_version is not None:
    # Version explicitly specified - use that tag.
    version_parsed = parse_version(args.npm_version)

    if version_parsed >= (2, 13, 0):
        # NPM >= 2.13.0: use git (has full structure with /global)
        with step(f"Installing NPM v{args.npm_version} from git tag"):
            npm_app_version = deploy_npm_app_from_git(f"refs/tags/v{args.npm_version}")
    else:
        # NPM < 2.13.0: use release archive
        with step(f"Installing NPM v{args.npm_version} from release tag"):
            npm_app_version = deploy_npm_app_from_release(args.npm_version)

    print(f"\n{'='*70}")
    print(f"✓ NPM Installation Complete (from Release Tag)")
    print(f"{'='*70}")
    print(f"Source: Release tag (stable)")
    print(f"Requested: v{args.npm_version}")
    print(f"Installed: {npm_app_version}")
    print(f"{'='*70}\n")

    installed_from_branch = False

else:
    # No source requested - auto-detect the latest stable release on GitHub.
    with step("Detecting latest stable release"):
        latest_version = github_latest_release_tag(
            "NginxProxyManager/nginx-proxy-manager",
            override=None
        )
        print(f" Latest stable version: {latest_version}")

    version_parsed = parse_version(latest_version)

    if version_parsed >= (2, 13, 0):
        # NPM >= 2.13.0: use git (has full structure with /global)
        with step(f"Installing NPM v{latest_version} from git tag"):
            npm_app_version = deploy_npm_app_from_git(f"refs/tags/v{latest_version}")
    else:
        # NPM < 2.13.0: use release archive
        with step(f"Installing NPM v{latest_version} from release"):
            npm_app_version = deploy_npm_app_from_release(latest_version)

    print(f"\n{'='*70}")
    print(f"✓ NPM Installation Complete (Latest Stable)")
    print(f"{'='*70}")
    print(f"Source: Latest stable release (auto-detected)")
    print(f"Installed: {npm_app_version}")
    print(f"{'='*70}\n")

    installed_from_branch = False
|
|
|
|
# Handle IPv6 stripping: remove IPv6 listen entries from both possible
# web-server config roots unless the user asked to keep IPv6.
if not args.enable_ipv6:
    strip_ipv6_listens([Path("/etc/angie"), Path("/etc/nginx")])
else:
    print("IPv6: leaving entries (skipped IPv6 cleanup).")

# (major, minor, patch) tuple of the version actually installed above.
npm_version_parsed = parse_version(npm_app_version)
|
|
|
|
# Apply dark mode if applicable. Only versions below 2.13.0 get the
# theme-park injection — presumably newer NPM handles theming itself;
# NOTE(review): confirm that is the intent of the version gate.
if dark_mode_enabled and npm_version_parsed < (2, 13, 0):
    with step(f"Applying dark mode for NPM {npm_app_version}"):
        apply_dark_mode(
            APP_FILEPATH="/opt/npm/frontend",
            TP_DOMAIN=TP_DOMAIN,
            TP_COMMUNITY_THEME=TP_COMMUNITY_THEME,
            TP_SCHEME=TP_SCHEME,
            TP_THEME=selected_theme,
        )
|
|
|
|
# Save installation configuration so a later run (e.g. an update) can
# reproduce the same choices without re-asking the user.
save_installer_config({
    "ipv6_enabled": args.enable_ipv6,
    "tp_theme": selected_theme if dark_mode_enabled else None,
    "tp_domain": TP_DOMAIN,
    "tp_scheme": TP_SCHEME,
    "tp_community_theme": TP_COMMUNITY_THEME,
    "node_version": args.node_version,
    "npm_version": npm_app_version,
    "installed_from_branch": installed_from_branch,
    "branch": args.branch if installed_from_branch else None,
})

# Install/refresh the systemd unit files for the services.
create_systemd_units(ipv6_enabled=args.enable_ipv6)
|
|
|
|
# ---- Post-install file/config fixups ----
ensure_nginx_symlink()
install_logrotate_for_data_logs()
fix_logrotate_permissions_and_wrapper()
sync_backup_nginx_conf()
comment_x_served_by_step()
# ip_ranges.conf must be group-writable by the npm service user.
set_file_ownership(["/etc/nginx/conf.d/include/ip_ranges.conf"], "npm:npm", 0o664)

update_ssl_ciphers_config()
# NOTE(review): "npn" looks like a typo for "npm" — confirm the actual
# function name at its definition before renaming.
update_npn_assets_config()
update_npm_admin_interface()
update_npm_proxy_host_template()
update_npm_location_template()
update_npm_listen_template()
update_npm_stream_template()
cleanup_dev_config()
|
|
|
|
# Restart services so the freshly written configuration takes effect.
with step("Restarting services after installation"):
    # check=False: a failed restart is left to systemd/journal; the
    # installer still proceeds to print its summary.
    run(["systemctl", "restart", "angie.service"], check=False)
    run(["systemctl", "restart", "npm.service"], check=False)

# Collect component versions and emit the MOTD and final summary.
info = gather_versions(npm_app_version)
update_motd(
    args.motd == "yes", info, ipv6_enabled=args.enable_ipv6,
    npm_version=npm_app_version,
    installed_from_branch=installed_from_branch,
    tp_theme=selected_theme if dark_mode_enabled else None
)
print_summary(
    info, args.enable_ipv6, dark_mode_enabled, selected_theme,
    update_mode=False, npm_version=npm_app_version,
    installed_from_branch=installed_from_branch
)
|
|
|
|
finally:
    # Always cleanup swap at the end — presumably removes the temporary
    # swap file created earlier when RAM was below MIN_MEMORY_GB; confirm
    # against the swap-setup code near the start of this routine.
    cleanup_swap()
|
|
|
|
|
|
if __name__ == "__main__":
    # Translate Ctrl-C into the conventional exit status 130 (128 + SIGINT)
    # rather than spilling a KeyboardInterrupt traceback to the terminal.
    def _on_sigint(signum, frame):
        sys.exit(130)

    signal.signal(signal.SIGINT, _on_sigint)
    main()
|