#!/usr/bin/env python3
import argparse, os, sys, json, shutil, subprocess, tarfile, tempfile, urllib.request, re, time, threading, signal
from pathlib import Path
from glob import glob
DEBUG = False
# ========== UI / Spinner ==========
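# Spinner shows a lightweight terminal progress indicator: a daemon thread
# redraws a braille frame next to the step text every 0.12 s; in DEBUG mode it
# degrades to plain prints so command output stays readable.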
class Spinner:
FRAMES = ["","","","","","","","","",""]
def __init__(self, text):
self.text = text
self._stop = threading.Event()
self._th = threading.Thread(target=self._spin, daemon=True)
def _spin(self):
i = 0
while not self._stop.is_set():
frame = self.FRAMES[i % len(self.FRAMES)]
print(f"\r{frame} {self.text} ", end="", flush=True)
time.sleep(0.12)
i += 1
def start(self):
if not DEBUG:
self._th.start()
else:
print(f"{self.text} ...")
def stop_ok(self):
if not DEBUG:
self._stop.set()
self._th.join(timeout=0.2)
print(f"\r{self.text}{' ' * 20}")
else:
print(f"{self.text}")
def stop_fail(self):
if not DEBUG:
self._stop.set()
self._th.join(timeout=0.2)
print(f"\r{self.text}{' ' * 20}")
else:
print(f"{self.text}")
def step(text):
class _Ctx:
def __enter__(self_inner):
self_inner.spinner = Spinner(text)
self_inner.spinner.start()
return self_inner
def __exit__(self_inner, exc_type, exc, tb):
if exc is None:
self_inner.spinner.stop_ok()
else:
self_inner.spinner.stop_fail()
return _Ctx()
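# Subprocess helpers: run() executes a command and silences its output unless
# DEBUG is set; run_out() captures and returns the command's stdout as text.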
def _devnull():
return subprocess.DEVNULL if not DEBUG else None
def run(cmd, check=True, env=None):
if DEBUG:
print("+", " ".join(cmd))
return subprocess.run(cmd, check=check, env=env,
stdout=None if DEBUG else subprocess.DEVNULL,
stderr=None if DEBUG else subprocess.DEVNULL)
def run_out(cmd, check=True):
if DEBUG:
print("+", " ".join(cmd))
result = subprocess.run(cmd, check=check, capture_output=True, text=True)
return result.stdout
# ========== Utils ==========
def ensure_root():
if os.geteuid() != 0:
print("Run as root.", file=sys.stderr)
sys.exit(1)
def os_release():
data = {}
try:
for line in Path("/etc/os-release").read_text().splitlines():
if "=" in line:
k,v = line.split("=",1)
data[k] = v.strip().strip('"')
except Exception:
pass
pretty = data.get("PRETTY_NAME") or f"{data.get('ID','linux')} {data.get('VERSION_ID','')}".strip()
return {
"ID": data.get("ID",""),
"VERSION_ID": data.get("VERSION_ID",""),
"CODENAME": data.get("VERSION_CODENAME",""),
"PRETTY": pretty
}
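# APT helpers: apt_install() fails hard, apt_try_install() skips packages not
# present in the configured repositories, and apt_purge() removes conflicting
# packages without aborting the run.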
def apt_update_upgrade():
with step("Updating package lists and system"):
run(["apt-get", "update", "-y"])
run(["apt-get", "-y", "upgrade"])
def apt_install(pkgs):
if not pkgs: return
with step(f"Installing packages: {', '.join(pkgs)}"):
run(["apt-get", "install", "-y"] + pkgs)
def apt_try_install(pkgs):
if not pkgs: return
avail = []
for p in pkgs:
ok = subprocess.run(["apt-cache","show", p], stdout=_devnull(), stderr=_devnull())
if ok.returncode == 0:
avail.append(p)
elif DEBUG:
print(f"skip missing pkg: {p}")
if avail:
apt_install(avail)
def apt_purge(pkgs):
if not pkgs: return
with step(f"Removing conflicting packages: {', '.join(pkgs)}"):
run(["apt-get", "purge", "-y"] + pkgs, check=False)
run(["apt-get", "autoremove", "-y"], check=False)
def write_file(path: Path, content: str, mode=0o644):
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(content, encoding="utf-8")
os.chmod(path, mode)
def append_unique(path: Path, lines: str):
path.parent.mkdir(parents=True, exist_ok=True)
existing = path.read_text(encoding="utf-8") if path.exists() else ""
out = existing
for line in lines.splitlines():
if line.strip() and line not in existing:
out += ("" if out.endswith("\n") else "\n") + line + "\n"
path.write_text(out, encoding="utf-8")
def github_latest_release_tag(repo: str, override: str | None) -> str:
if override:
return override.lstrip("v")
url = f"https://api.github.com/repos/{repo}/releases/latest"
with step(f"Downloading from GitGub: {repo}"):
with urllib.request.urlopen(url) as r:
data = json.load(r)
tag = data["tag_name"]
return tag.lstrip("v")
def write_resolvers_conf(ipv6_enabled: bool):
ns_v4, ns_v6 = [], []
try:
for line in Path("/etc/resolv.conf").read_text().splitlines():
line = line.strip()
if not line.startswith("nameserver"):
continue
ip = line.split()[1].split("%")[0]
(ns_v6 if ":" in ip else ns_v4).append(ip)
except Exception:
pass
ips = ns_v4 + (ns_v6 if ipv6_enabled else [])
if not ips:
ips = ["1.1.1.1", "8.8.8.8"] + (["2606:4700:4700::1111", "2001:4860:4860::8888"] if ipv6_enabled else [])
ipv6_flag = " ipv6=on" if ipv6_enabled and any(":" in x for x in ips) else ""
content = f"resolver {' '.join(ips)} valid=10s{ipv6_flag};\n"
write_file(Path("/etc/angie/conf.d/include/resolvers.conf"), content, 0o644)
def download_extract_tar_gz(url: str, dest_dir: Path) -> Path:
dest_dir.mkdir(parents=True, exist_ok=True)
with step("Downloading and untaring"):
with urllib.request.urlopen(url) as r, tempfile.NamedTemporaryFile(delete=False) as tf:
shutil.copyfileobj(r, tf)
tf.flush()
tf_path = Path(tf.name)
with tarfile.open(tf_path, "r:gz") as t:
try:
t.extractall(dest_dir, filter="data")
except TypeError:
t.extractall(dest_dir)
top = t.getmembers()[0].name.split("/")[0]
os.unlink(tf_path)
return dest_dir / top
# Distro info (used in banners & repo setup)
OSREL = os_release()
# === extra sync ===
def sync_backup_nginx_conf():
from pathlib import Path
import shutil, filecmp
src = Path("/etc/nginx.bak/conf.d")
dst = Path("/etc/angie/conf.d")
if not src.exists():
return
with step("Sync /etc/nginx.bak/conf.d -> /etc/angie/conf.d"):
for p in src.rglob("*"):
if p.is_dir():
continue
rel = p.relative_to(src)
target = dst / rel
target.parent.mkdir(parents=True, exist_ok=True)
try:
if not target.exists() or not filecmp.cmp(p, target, shallow=False):
shutil.copy2(p, target)
except Exception as e:
print(f"Warning: sync failed for {p} -> {target}: {e}")
def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")):
PYENV_ROOT = Path("/opt/npm/.pyenv")
PYENV_OWNER = "npm"
PYTHON_VERSION = "3.11.11"
PYENV_BIN_CANDIDATES = ["pyenv", "/usr/bin/pyenv", "/usr/lib/pyenv/bin/pyenv"]
try:
apt_try_install([
"pyenv", "build-essential", "gcc", "make", "pkg-config",
"libssl-dev", "zlib1g-dev", "libbz2-dev", "libreadline-dev",
"libsqlite3-dev", "tk-dev", "libncursesw5-dev", "libgdbm-dev",
"libffi-dev", "uuid-dev", "liblzma-dev", "ca-certificates", "curl"
])
except Exception:
run(["apt-get", "update"], check=False)
run(["apt-get", "install", "-y",
"pyenv", "build-essential", "gcc", "make", "pkg-config",
"libssl-dev", "zlib1g-dev", "libbz2-dev", "libreadline-dev",
"libsqlite3-dev", "tk-dev", "libncursesw5-dev", "libgdbm-dev",
"libffi-dev", "uuid-dev", "liblzma-dev", "ca-certificates", "curl"
], check=False)
Path("/opt/npm").mkdir(parents=True, exist_ok=True)
PYENV_ROOT.mkdir(parents=True, exist_ok=True)
run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False)
pyenv_bin = next((c for c in PYENV_BIN_CANDIDATES if shutil.which(c)), None)
if not pyenv_bin:
raise RuntimeError("Nie znaleziono 'pyenv' (spróbuj /usr/bin/pyenv lub /usr/lib/pyenv/bin/pyenv).")
env_pyenv = os.environ.copy()
env_pyenv.update({
"HOME": "/opt/npm",
"PYENV_ROOT": str(PYENV_ROOT),
"PATH": "/usr/lib/pyenv/bin:/usr/bin:/bin"
})
with step(f"Installing Python {PYTHON_VERSION} via pyenv into {PYENV_ROOT}"):
# 1) Make sure PYENV_ROOT exists and is owned by 'npm'
run(["mkdir", "-p", str(PYENV_ROOT)])
run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False)
# 2) If the local pyenv does not exist, clone it (bypasses the Debian wrapper)
run([
"sudo", "-u", PYENV_OWNER, "bash", "-lc",
'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then '
' command -v git >/dev/null 2>&1 || sudo apt-get install -y git; '
' git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; '
"fi"
])
# 3) Install CPython with a very clean environment (env -i):
# no /etc/profile, no wrappers.
install_cmd = (
'export HOME=/opt/npm; '
'export PYENV_ROOT=/opt/npm/.pyenv; '
'export PATH="$PYENV_ROOT/bin:/usr/bin:/bin"; '
'mkdir -p "$PYENV_ROOT"; cd "$HOME"; '
f'pyenv install -s {PYTHON_VERSION}'
)
run([
"sudo", "-u", PYENV_OWNER, "env", "-i",
"HOME=/opt/npm",
f"PYENV_ROOT={PYENV_ROOT}",
f"PATH={PYENV_ROOT}/bin:/usr/bin:/bin",
"bash", "-lc", install_cmd
])
profile_snippet = f"""# Auto-generated by setup_certbot_venv
# pyenv settings for user '{PYENV_OWNER}'
if [ -d "{PYENV_ROOT}" ]; then
export PYENV_ROOT="{PYENV_ROOT}"
# Add the local pyenv binaries (git install) to PATH, idempotently
case ":$PATH:" in *":$PYENV_ROOT/bin:"*) ;; *) PATH="$PYENV_ROOT/bin:$PATH";; esac
# Add the system pyenv binaries from the Debian package, idempotently
case ":$PATH:" in *":/usr/lib/pyenv/bin:"*) ;; *) PATH="/usr/lib/pyenv/bin:$PATH";; esac
export PATH
# Initialize only for interactive shells of user '{PYENV_OWNER}'
case "$-" in *i*) _interactive=1 ;; *) _interactive=0 ;; esac
if [ "$_interactive" = 1 ] && {{ [ "${{USER:-}}" = "{PYENV_OWNER}" ] || [ "${{SUDO_USER:-}}" = "{PYENV_OWNER}" ]; }}; then
if command -v pyenv >/dev/null 2>&1; then
eval "$(pyenv init -)"
elif [ -x "{PYENV_ROOT}/bin/pyenv" ]; then
eval "$("{PYENV_ROOT}/bin/pyenv" init -)"
fi
fi
fi
"""
write_file(Path("/etc/profile.d/npm-pyenv.sh"), profile_snippet, 0o644)
python311 = PYENV_ROOT / "versions" / PYTHON_VERSION / "bin" / "python3.11"
if not python311.exists():
python311 = PYENV_ROOT / "versions" / PYTHON_VERSION / "bin" / "python3"
if not python311.exists():
raise RuntimeError(f"Nie znaleziono interpretera Pythona {PYTHON_VERSION} w {PYENV_ROOT}/versions/.")
venv_bin = venv_dir / "bin"
pip_path = venv_bin / "pip"
certbot_path = venv_bin / "certbot"
with step(f"Preparing Certbot venv at {venv_dir} (Python {PYTHON_VERSION})"):
venv_dir.mkdir(parents=True, exist_ok=True)
if not venv_dir.exists() or not pip_path.exists():
run([str(python311), "-m", "venv", str(venv_dir)])
env_build = os.environ.copy()
env_build["SETUPTOOLS_USE_DISTUTILS"] = "local"
run([str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], env=env_build)
run([str(pip_path), "install", "-U",
"cryptography", "cffi", "certbot", "tldextract"], env=env_build)
Path("/usr/local/bin").mkdir(parents=True, exist_ok=True)
target = Path("/usr/local/bin/certbot")
if target.exists() or target.is_symlink():
try:
target.unlink()
except Exception:
pass
target.symlink_to(certbot_path)
cb_ver = run_out([str(certbot_path), "--version"], check=False) or ""
pip_ver = run_out([str(pip_path), "--version"], check=False) or ""
print(f"Certbot: {cb_ver.strip()} | Pip: {pip_ver.strip()}")
run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", str(PYENV_ROOT)], check=False)
def configure_letsencrypt():
with step("configure letsencrypt"):
run(["chown", "-R", "npm:npm", "/opt/certbot"], check=False)
Path("/etc/letsencrypt").mkdir(parents=True, exist_ok=True)
run(["chown", "-R", "npm:npm", "/etc/letsencrypt"], check=False)
run(["apt-get", "install", "-y", "--no-install-recommends", "certbot"], check=False)
ini = """text = True
non-interactive = True
webroot-path = /data/letsencrypt-acme-challenge
key-type = ecdsa
elliptic-curve = secp384r1
preferred-chain = ISRG Root X1
"""
write_file(Path("/etc/letsencrypt.ini"), ini, 0o644)
run(["chown", "-R", "npm:npm", "/etc/letsencrypt"], check=False)
def ensure_nginx_symlink():
from pathlib import Path
target = Path("/etc/angie")
link = Path("/etc/nginx")
try:
if link.is_symlink() and link.resolve() == target:
print("✔ Created symlink /etc/nginx -> /etc/angie")
return
if link.exists() and not link.is_symlink():
backup = Path("/etc/nginx.bak")
try:
if backup.exists():
if backup.is_symlink() or backup.is_file():
backup.unlink()
link.rename(backup)
print("✔ Backed up /etc/nginx to /etc/nginx.bak")
except Exception as e:
print(f"Warning: could not backup /etc/nginx: {e}")
try:
if link.exists() or link.is_symlink():
link.unlink()
except Exception:
pass
try:
link.symlink_to(target)
print("✔ Created symlink /etc/nginx -> /etc/angie")
except Exception as e:
print(f"Warning: could not create /etc/nginx symlink: {e}")
except Exception as e:
print(f"Warning: symlink check failed: {e}")
# ========== Angie / NPM template ==========
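# Main angie.conf written by the installer: loads the headers-more, brotli and
# zstd dynamic modules and includes the NPM-managed config trees under
# /data/nginx as well as the metrics/console include.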
ANGIE_CONF_TEMPLATE = """# run nginx in foreground
#daemon off;
load_module /etc/angie/modules/ngx_http_headers_more_filter_module.so;
load_module /etc/angie/modules/ngx_http_brotli_filter_module.so;
load_module /etc/angie/modules/ngx_http_brotli_static_module.so;
load_module /etc/angie/modules/ngx_http_zstd_filter_module.so;
load_module /etc/angie/modules/ngx_http_zstd_static_module.so;
# other modules
include /data/nginx/custom/modules[.]conf;
pid /run/angie/angie.pid;
user root;
worker_processes auto;
pcre_jit on;
error_log /data/logs/fallback_error.log warn;
# Custom
include /data/nginx/custom/root_top[.]conf;
events {
include /data/nginx/custom/events[.]conf;
}
http {
include /etc/angie/mime.types;
default_type application/octet-stream;
sendfile on;
server_tokens off;
tcp_nopush on;
tcp_nodelay on;
client_body_temp_path /tmp/angie/body 1 2;
keepalive_timeout 90s;
proxy_connect_timeout 90s;
proxy_send_timeout 90s;
proxy_read_timeout 90s;
ssl_prefer_server_ciphers on;
gzip on;
proxy_ignore_client_abort off;
client_max_body_size 2000m;
server_names_hash_bucket_size 1024;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Accept-Encoding "";
proxy_cache off;
proxy_cache_path /var/lib/angie/cache/public levels=1:2 keys_zone=public-cache:30m max_size=192m;
proxy_cache_path /var/lib/angie/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
include /etc/angie/conf.d/include/log.conf;
include /etc/angie/conf.d/include/resolvers.conf;
map $host $forward_scheme { default http; }
# Real IP Determination (IPv4 only by default)
set_real_ip_from 10.0.0.0/8;
set_real_ip_from 172.16.0.0/12;
set_real_ip_from 192.168.0.0/16;
include /etc/angie/conf.d/include/ip_ranges.conf;
real_ip_header X-Real-IP;
real_ip_recursive on;
# custom
brotli off;
brotli_comp_level 6;
brotli_static on;
brotli_types *;
zstd on;
zstd_min_length 256;
zstd_comp_level 3;
more_clear_headers "Server";
more_set_headers 'X-by: linuxiarz.pl';
# npm
include /data/nginx/custom/http_top[.]conf;
include /etc/nginx/conf.d/*.conf;
include /data/nginx/default_host/*.conf;
include /data/nginx/proxy_host/*.conf;
include /data/nginx/redirection_host/*.conf;
include /data/nginx/dead_host/*.conf;
include /data/nginx/temp/*.conf;
include /data/nginx/custom/http[.]conf;
# metrics & console
include /etc/angie/metrics.conf;
}
stream {
# npm
include /data/nginx/stream/*.conf;
include /data/nginx/custom/stream[.]conf;
}
# npm
include /data/nginx/custom/root[.]conf;
"""
ANGIE_UNIT = """[Unit]
Description=Angie - high performance web server
Documentation=https://en.angie.software/angie/docs/
After=network-online.target remote-fs.target nss-lookup.target
Wants=network-online.target
[Service]
Type=forking
PIDFile=/run/angie/angie.pid
ExecStartPre=/bin/mkdir -p /run/angie
ExecStartPre=/bin/mkdir -p /tmp/angie/body
ExecStart=/usr/sbin/angie -c /etc/angie/angie.conf
ExecReload=/bin/sh -c "/bin/kill -s HUP $(/bin/cat /run/angie/angie.pid)"
ExecStop=/bin/sh -c "/bin/kill -s TERM $(/bin/cat /run/angie/angie.pid)"
Restart=on-failure
RestartSec=3s
[Install]
WantedBy=multi-user.target
"""
def lsb_info():
try:
apt_try_install(["lsb-release"])
dist = run_out(["bash","-lc","lsb_release -si"]).strip().lower().replace(" ", "")
rel = run_out(["bash","-lc","lsb_release -sr"]).strip()
code = run_out(["bash","-lc","lsb_release -sc"]).strip()
return {"ID": dist, "VERSION_ID": rel, "CODENAME": code, "PRETTY": f"{dist} {rel} ({code})"}
except Exception:
return os_release()
# ========== Angie ==========
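# Adds the official Angie APT repository for the detected distribution and
# codename, installs the base packages plus optional console/prometheus
# modules, writes angie.conf and a /usr/sbin/nginx wrapper that sudo-execs
# angie, and sets up resolvers, cache directories and the systemd unit.
# The generated sources.list entry should look roughly like this (assuming
# Debian 12 "bookworm"; illustrative, not captured from a real run):
#   deb https://download.angie.software/angie/debian/12 bookworm main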
def setup_angie(ipv6_enabled: bool):
def _norm(s: str, allow_dot: bool = False) -> str:
pat = r"[^a-z0-9+\-\.]" if allow_dot else r"[^a-z0-9+\-]"
return re.sub(pat, "", s.strip().lower())
with step("Adding Angie repo and installing Angie packages"):
apt_try_install([
"ca-certificates", "curl", "gnupg", "apt-transport-https",
"software-properties-common", "lsb-release"
])
run([
"curl", "-fsSL", "-o", "/etc/apt/trusted.gpg.d/angie-signing.gpg",
"https://angie.software/keys/angie-signing.gpg"
])
try:
dist = run_out(["lsb_release", "-si"])
rel = run_out(["lsb_release", "-sr"])
code = run_out(["lsb_release", "-sc"])
except Exception:
dist = run_out(["bash","-c",". /etc/os-release && printf %s \"$ID\""])
rel = run_out(["bash","-c",". /etc/os-release && printf %s \"$VERSION_ID\""])
code = run_out(["bash","-c",". /etc/os-release && printf %s \"$VERSION_CODENAME\""])
dist = _norm(dist)
rel = _norm(rel, allow_dot=True)
code = _norm(code)
os_id = f"{dist}/{rel}" if rel else dist
if code:
line = f"deb https://download.angie.software/angie/{os_id} {code} main\n"
else:
line = f"deb https://download.angie.software/angie/{os_id} main\n"
write_file(Path("/etc/apt/sources.list.d/angie.list"), line)
run(["apt-get", "update"])
base = ["angie", "angie-module-headers-more", "angie-module-brotli", "angie-module-zstd"]
optional = ["angie-module-prometheus", "angie-console-light"]
apt_install(base)
apt_try_install(optional)
with step("Configuring modules and main Angie config"):
modules_dir = Path("/etc/nginx/modules")
modules_dir.mkdir(parents=True, exist_ok=True)
write_file(Path("/etc/angie/angie.conf"), ANGIE_CONF_TEMPLATE, 0o644)
WRAP = """#!/bin/sh
exec sudo -n /usr/sbin/angie "$@"
"""
write_file(Path("/usr/sbin/nginx"), WRAP, 0o755)
Path("/etc/nginx/conf.d/include").mkdir(parents=True, exist_ok=True)
with step("Setting resolver(s) and cache directories"):
write_resolvers_conf(ipv6_enabled)
for p in ["/var/lib/angie/cache/public", "/var/lib/angie/cache/private"]:
Path(p).mkdir(parents=True, exist_ok=True)
os.chmod(p, 0o755)
with step("Installing corrected systemd unit for Angie"):
write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644)
def write_metrics_files():
"""Create /etc/angie/metrics.conf (port 82 with console & status)."""
with step("Adding Angie metrics & console on :82"):
metrics = """include /etc/angie/prometheus_all.conf;
server {
listen 82;
location /nginx_status {
stub_status on;
access_log off;
allow all;
}
auto_redirect on;
location /status/ {
api /status/;
api_config_files on;
}
location /console/ {
alias /usr/share/angie-console-light/html/;
index index.html;
}
location /console/api/ {
api /status/;
}
location =/p8s {
prometheus all;
}
}
"""
write_file(Path("/etc/angie/metrics.conf"), metrics, 0o644)
def ensure_angie_runtime_perms():
run_path = Path("/run/angie")
pid_file = run_path / "angie.pid"
run_path.mkdir(parents=True, exist_ok=True)
os.chmod(run_path, 0o2775)
try:
import grp
gid = grp.getgrnam("angie").gr_gid
os.chown(run_path, -1, gid)
except Exception:
pass
if not pid_file.exists():
pid_file.touch()
os.chmod(pid_file, 0o664)
try:
import grp, pwd
gid = grp.getgrnam("angie").gr_gid
uid = pwd.getpwnam("root").pw_uid
os.chown(pid_file, uid, gid)
except Exception:
pass
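# Creates the system user 'npm' and the 'angie' group, prepares the /data and
# /opt/npm directory trees plus log/runtime directories, and normalizes their
# ownership and permissions.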
def ensure_user_and_dirs():
with step("Creating npm user and app/log directories"):
try:
run(["id", "-u", "npm"])
except subprocess.CalledProcessError:
run(["useradd", "--system", "--home", "/opt/npm", "--create-home", "--shell", "/usr/sbin/nologin", "npm"])
rc = subprocess.run(["getent","group","angie"], stdout=_devnull(), stderr=_devnull()).returncode
if rc != 0:
run(["groupadd","angie"])
run(["usermod","-aG","angie","npm"], check=False)
dirs = [
"/data","/data/nginx","/data/custom_ssl","/data/logs","/data/access",
"/data/nginx/default_host","/data/nginx/default_www","/data/nginx/proxy_host",
"/data/nginx/redirection_host","/data/nginx/stream","/data/nginx/dead_host","/data/nginx/temp",
"/data/letsencrypt-acme-challenge","/opt/npm","/opt/npm/frontend","/opt/npm/global",
"/run/nginx","/run/angie","/tmp/angie/body"
]
for d in dirs:
Path(d).mkdir(parents=True, exist_ok=True)
run(["chgrp","-h","angie","/run/angie"], check=False)
os.chmod("/run/angie", 0o2775)
Path("/var/log/angie").mkdir(parents=True, exist_ok=True)
for f in ["access.log","error.log"]:
(Path("/var/log/angie")/f).touch(exist_ok=True)
paths = ["/var/log/angie"] + glob("/var/log/angie/*.log")
for pth in paths:
run(["chgrp","-h","angie", pth], check=False)
run(["chmod","775","/var/log/angie"], check=False)
for pth in glob("/var/log/angie/*.log"):
run(["chmod","664", pth], check=False)
Path("/var/log/nginx").mkdir(parents=True, exist_ok=True)
Path("/var/log/nginx/error.log").touch(exist_ok=True)
os.chmod("/var/log/nginx/error.log", 0o666)
run(["chown","-R","npm:npm","/opt/npm","/data"])
ensure_angie_runtime_perms()
def create_sudoers_for_npm():
with step("Configuring sudoers for npm -> angie"):
content = """User_Alias NPMUSERS = npm
NPMUSERS ALL=(root) NOPASSWD: /usr/sbin/angie
"""
path = Path("/etc/sudoers.d/npm")
write_file(path, content, 0o440)
if shutil.which("visudo"):
run(["visudo","-cf", str(path)], check=False)
def adjust_nginx_like_paths_in_tree(root: Path):
for p in root.rglob("*.conf"):
try:
txt = p.read_text(encoding="utf-8")
except Exception:
continue
txt2 = txt.replace("include conf.d", "include /etc/nginx/conf.d") \
.replace("include /etc/angie/conf.d", "include /etc/nginx/conf.d")
if txt2 != txt:
p.write_text(txt2, encoding="utf-8")
for cand in root.rglob("nginx.conf"):
try:
txt = cand.read_text(encoding="utf-8")
except Exception:
continue
txt = re.sub(r"^user\s+\S+.*", "user root;", txt, flags=re.M)
txt = re.sub(r"^pid\s+.*", "pid /run/angie/angie.pid;", txt, flags=re.M)
txt = txt.replace("daemon on;", "#daemon on;")
cand.write_text(txt, encoding="utf-8")
def install_node_and_yarn(node_pkg: str):
apt_install([node_pkg])
if shutil.which("yarn") or shutil.which("yarnpkg"):
return
apt_try_install(["yarn"])
if not shutil.which("yarn") and not shutil.which("yarnpkg"):
apt_try_install(["yarnpkg"])
if not Path("/usr/bin/yarn").exists() and Path("/usr/bin/yarnpkg").exists():
os.symlink("/usr/bin/yarnpkg","/usr/bin/yarn")
def _is_ubuntu_wo_distutils() -> bool:
try:
dist = (OSREL.get("ID","") or "").lower()
ver = (OSREL.get("VERSION_ID","") or "").strip()
def _vers(v):
parts = (v.split(".") + ["0","0"])[:2]
return (int(parts[0]), int(parts[1]))
return dist == "ubuntu" and _vers(ver) >= (24, 4)
except Exception:
return False
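# Adjusts the frontend's package.json when it still depends on node-sass:
# either switches the build to Dart Sass (forced via USE_DART_SASS=1, or
# automatically on Ubuntu 24.04+ where distutils is gone) or pins node-sass
# and node-gyp to versions that still compile.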
def _prepare_sass(frontend_dir: Path):
pj = frontend_dir / "package.json"
if not pj.exists():
return
import json, re, os
try:
data = json.loads(pj.read_text(encoding="utf-8"))
except Exception:
return
deps = data.get("dependencies", {}) or {}
dev = data.get("devDependencies", {}) or {}
has_node_sass = ("node-sass" in deps) or ("node-sass" in dev)
if not has_node_sass:
return
env_flag = (os.environ.get("USE_DART_SASS","").strip())
use_dart = (env_flag == "1") or (env_flag == "" and _is_ubuntu_wo_distutils())
data.setdefault("dependencies", {})
data.setdefault("devDependencies", {})
if use_dart:
data["dependencies"].pop("node-sass", None)
data["devDependencies"].pop("node-sass", None)
if "sass" not in data["dependencies"] and "sass" not in data["devDependencies"]:
data["devDependencies"]["sass"] = "^1.77.0"
scripts = (data.get("scripts") or {})
data["scripts"] = {k: re.sub(r"\bnode-sass\b", "sass", v or "") for k, v in scripts.items()}
if env_flag == "":
os.environ["USE_DART_SASS"] = "1"
else:
target = "^9.0.0"
if "node-sass" in data["dependencies"]:
data["dependencies"]["node-sass"] = target
else:
data["devDependencies"]["node-sass"] = target
res = (data.get("resolutions") or {})
res["node-gyp"] = "^10.0.0"
res["node-sass"] = "^9.0.0"
data["resolutions"] = res
os.environ["npm_config_node_sass_binary_site"] = "https://github.com/sass/node-sass/releases/download"
pj.write_text(json.dumps(data, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")
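# Builds the NPM frontend: picks a working Yarn (yarn, yarnpkg, or yarn via
# npm/npx, installing npm plus yarn if needed), runs `yarn install` and
# `yarn build` with the legacy OpenSSL provider enabled, then copies the dist
# output into the target frontend directory.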
def _build_frontend(src_frontend: Path, dest_frontend: Path):
def _semver(s: str) -> bool:
return bool(re.match(r"^\d+(?:\.\d+){1,3}$", (s or "").strip()))
def _good_yarn(argv: list[str]) -> bool:
v = (run_out(argv + ["--version"], check=False) or "").strip()
return _semver(v)
def _pick_yarn_cmd() -> list[str] | None:
for c in (["yarn"], ["yarnpkg"]):
if shutil.which(c[0]) and _good_yarn(c):
return c
if shutil.which("npm") and (run_out(["npm", "--version"], check=False) or "").strip():
if _good_yarn(["npm", "exec", "--yes", "yarn@stable"]):
return ["npm", "exec", "--yes", "yarn@stable"]
if shutil.which("npx") and (run_out(["npx", "--version"], check=False) or "").strip():
if _good_yarn(["npx", "-y", "yarn@stable"]):
return ["npx", "-y", "yarn@stable"]
return None
def _ensure_yarn_installed():
if not shutil.which("npm"):
try:
apt_try_install(["npm"])
except Exception:
run(["apt-get", "update"], check=False)
run(["apt-get", "install", "-y", "npm"])
run(["npm", "install", "-g", "yarn"], check=False)
yarn_cmd = _pick_yarn_cmd()
if not yarn_cmd:
_ensure_yarn_installed()
yarn_cmd = _pick_yarn_cmd()
if not yarn_cmd:
raise RuntimeError("Unable to detect or install a valid Yarn. Try: apt-get install -y npm && npm i -g yarn.")
with step("Installing frontend dependencies (yarn)"):
os.environ["NODE_ENV"] = "development"
os.chdir(src_frontend)
_prepare_sass(src_frontend)
cache_dir = (run_out(yarn_cmd + ["cache", "dir"], check=False) or "").strip()
if cache_dir and not Path(cache_dir).exists():
Path(cache_dir).mkdir(parents=True, exist_ok=True)
run(yarn_cmd + ["cache", "clean"], check=False)
run(yarn_cmd + ["install"])
with step("Building frontend (yarn build)"):
env = os.environ.copy()
env["NODE_OPTIONS"] = "--openssl-legacy-provider"
run(yarn_cmd + ["build"], env=env)
with step("Copying frontend artifacts"):
shutil.copytree(src_frontend / "dist", dest_frontend, dirs_exist_ok=True)
if (src_frontend / "app-images").exists():
shutil.copytree(src_frontend / "app-images", dest_frontend / "images", dirs_exist_ok=True)
def patch_npm_backend_commands():
candidates = [
Path("/opt/npm/lib/utils.js"),
Path("/opt/npm/utils.js"),
Path("/opt/npm/lib/commands.js"),
]
for p in candidates:
if not p.exists():
continue
try:
txt = p.read_text(encoding="utf-8")
except Exception:
continue
new = re.sub(r'\blogrotate\b', '/usr/local/bin/logrotate-npm', txt)
new = re.sub(r'(?<!/usr/sbin/)\bnginx\b', '/usr/sbin/nginx', new)
if new != txt:
p.write_text(new, encoding="utf-8")
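# Full NPM deployment: downloads the requested (or latest) release tarball,
# stamps the release version into both package.json files, copies the web
# root, nginx configs and backend into place, builds the frontend, installs
# backend dependencies and writes a default SQLite production.json.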
def deploy_npm_app(npm_version_override: str | None):
version = github_latest_release_tag("NginxProxyManager/nginx-proxy-manager", npm_version_override)
url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
tmp = Path(tempfile.mkdtemp(prefix="npm-angie-"))
src = download_extract_tar_gz(url, tmp)
with step("Setting version numbers in package.json"):
for pkg in ["backend/package.json", "frontend/package.json"]:
pj = src / pkg
txt = pj.read_text(encoding="utf-8")
txt = re.sub(r'"version":\s*"0\.0\.0"', f'"version": "{version}"', txt)
pj.write_text(txt, encoding="utf-8")
with step("Fixing include paths / nginx.conf"):
adjust_nginx_like_paths_in_tree(src)
with step("Copying web root and configs to /etc/nginx"):
Path("/var/www/html").mkdir(parents=True, exist_ok=True)
shutil.copytree(src / "docker" / "rootfs" / "var" / "www" / "html", "/var/www/html", dirs_exist_ok=True)
shutil.copytree(src / "docker" / "rootfs" / "etc" / "nginx", "/etc/nginx", dirs_exist_ok=True)
devconf = Path("/etc/nginx/conf.d/dev.conf")
if devconf.exists():
devconf.unlink()
shutil.copy2(src / "docker" / "rootfs" / "etc" / "logrotate.d" / "nginx-proxy-manager",
"/etc/logrotate.d/nginx-proxy-manager")
Path("/etc/nginx/conf").mkdir(parents=True, exist_ok=True)
if not Path("/etc/nginx/conf/nginx.conf").exists():
os.symlink("/etc/angie/angie.conf", "/etc/nginx/conf/nginx.conf")
with step("Copying backend/global to /opt/npm"):
shutil.copytree(src / "backend", "/opt/npm", dirs_exist_ok=True)
Path("/opt/npm/frontend/images").mkdir(parents=True, exist_ok=True)
shutil.copytree(src / "global", "/opt/npm/global", dirs_exist_ok=True)
with step("Creating SQLite config if missing"):
cfg = Path("/opt/npm/config/production.json")
if not cfg.exists():
write_file(cfg, json.dumps({
"database": {
"engine": "knex-native",
"knex": {
"client": "sqlite3",
"connection": {"filename": "/data/database.sqlite"}
}
}
}, indent=2))
_build_frontend(src / "frontend", Path("/opt/npm/frontend"))
with step("Installing backend dependencies (yarn)"):
os.chdir("/opt/npm")
run(["yarn", "install"])
with step("Normalizing directories ownership"):
run(["chown","-R","npm:npm","/opt/npm","/data"])
with step("Fixing root in production.conf if needed"):
for p in [Path("/etc/angie/conf.d/production.conf"), Path("/etc/nginx/conf.d/production.conf")]:
if p.exists():
txt = p.read_text(encoding="utf-8")
txt2 = re.sub(r"^\s*root\s+.*?;\s*$", " root /opt/npm/frontend;", txt, flags=re.M)
if txt2 != txt:
p.write_text(txt2, encoding="utf-8")
with step("Preparing include/ip_ranges.conf (owned by npm)"):
include_dir = Path("/etc/nginx/conf.d/include")
include_dir.mkdir(parents=True, exist_ok=True)
ipranges = include_dir / "ip_ranges.conf"
if not ipranges.exists():
write_file(ipranges, "# populated by NPM (IPv4 only)\n")
try:
run(["chown", "npm:npm", str(include_dir), str(ipranges)])
except Exception:
pass
os.chmod(ipranges, 0o664)
patch_npm_backend_commands()
return version
def strip_ipv6_listens(paths):
with step("Removing IPv6 listen entries from configs (--enable-ipv6 not set)"):
confs = []
for p in paths:
confs.extend(Path(p).rglob("*.conf"))
for f in confs:
try:
txt = f.read_text(encoding="utf-8")
except Exception:
continue
new = re.sub(r'(?m)^\s*listen\s+\[::\]:\d+[^;]*;\s*$', '', txt)
new = re.sub(r'\n{3,}', '\n\n', new)
if new != txt:
f.write_text(new, encoding="utf-8")
def install_logrotate_for_data_logs():
with step("Installing logrotate policy for /data/logs (*.log), keep 7 rotations"):
conf_path = Path("/etc/logrotate.d/npm-data-logs")
content = """/data/logs/*.log {
daily
rotate 7
compress
delaycompress
missingok
notifempty
copytruncate
create 0640 root root
}
"""
write_file(conf_path, content, 0o644)
try:
run(["/usr/sbin/logrotate", "-d", str(conf_path)], check=False)
except Exception:
pass
def fix_logrotate_permissions_and_wrapper():
with step("Fixing logrotate state-file permissions and helper"):
status = Path("/var/lib/logrotate/status")
try:
run(["setfacl", "-m", "u:npm:rw", str(status)], check=False)
except FileNotFoundError:
pass
state_dir = Path("/opt/npm/var"); state_dir.mkdir(parents=True, exist_ok=True)
state_file = state_dir / "logrotate.state"
if not state_file.exists():
state_file.touch()
os.chmod(state_file, 0o664)
try:
import pwd, grp
uid = pwd.getpwnam("npm").pw_uid
gid = grp.getgrnam("npm").gr_gid
os.chown(state_dir, uid, gid)
os.chown(state_file, uid, gid)
except Exception:
pass
helper = Path("/usr/local/bin/logrotate-npm")
helper_content = f"""#!/bin/sh
exec /usr/sbin/logrotate -s {state_file} "$@"
"""
write_file(helper, helper_content, 0o755)
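# Writes the npm.service unit (Node backend running as user 'npm', optionally
# with DISABLE_IPV6=true) together with the Angie unit, then enables and
# starts both services.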
def create_systemd_units(ipv6_enabled: bool):
with step("Creating and starting systemd services (angie, npm)"):
unit_lines = [
"[Unit]",
"Description=Nginx Proxy Manager (backend)",
"After=network.target angie.service",
"Wants=angie.service",
"",
"[Service]",
"User=npm",
"Group=npm",
"WorkingDirectory=/opt/npm",
"Environment=NODE_ENV=production",
]
if not ipv6_enabled:
unit_lines.append("Environment=DISABLE_IPV6=true")
unit_lines += [
"ExecStart=/usr/bin/node /opt/npm/index.js",
"Restart=on-failure",
"RestartSec=5",
"",
"[Install]",
"WantedBy=multi-user.target",
""
]
write_file(Path("/etc/systemd/system/npm.service"), "\n".join(unit_lines), 0o644)
write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644)
run(["systemctl","daemon-reload"])
run(["systemctl","enable","--now","angie.service"])
run(["/usr/sbin/nginx","-t"], check=False)
run(["systemctl","enable","--now","npm.service"])
run(["angie","-s","reload"], check=False)
def gather_versions(npm_app_version: str):
_ips = run_out(["hostname", "-I"], check=False) or ""
ip = (_ips.split() or [""])[0]
angie_out = (
(run_out(["angie", "-v"], check=False) or "") + "\n" +
(run_out(["angie", "-V"], check=False) or "")
)
m = re.search(r"(?i)\bangie\s*/\s*([0-9]+(?:\.[0-9]+)+)\b", angie_out)
if not m:
dp = (run_out(["dpkg-query", "-W", "-f=${Version}", "angie"], check=False) or "").strip()
m = re.search(r"([0-9]+(?:\.[0-9]+)+)", dp)
angie_v = m.group(1) if m else (angie_out.strip() or "")
node_v = (run_out(["node", "-v"], check=False) or "").strip().lstrip("v")
yarn_v = (run_out(["yarn", "-v"], check=False) or "").strip()
if not yarn_v:
yarn_v = (run_out(["yarnpkg", "-v"], check=False) or "").strip()
return ip, angie_v, node_v, yarn_v, npm_app_version
def update_motd(enabled: bool, info, ipv6_enabled: bool):
if not enabled:
return
ip, angie_v, node_v, yarn_v, npm_v = info
ipv6_line = "IPv6: enabled (configs untouched)." if ipv6_enabled else "IPv6: disabled in resolvers and conf."
creds = "Default login: admin@example.com / changeme"
text = f"""
################################ NPM / ANGIE ################################
OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})
Nginx Proxy Manager: http://{ip}:81
Angie & Prometheus stats: http://{ip}:82/console | http://{ip}:82/p8s
Angie: {angie_v} (conf: /etc/angie -> /etc/nginx, reload: angie -s reload)
Node.js: v{node_v} Yarn: v{yarn_v}
NPM app: v{npm_v}
Paths: app=/opt/npm data=/data cache=/var/lib/angie/cache
{ipv6_line}
{creds}
###########################################################################
"""
motd_d = Path("/etc/motd.d")
if motd_d.exists():
write_file(motd_d / "10-npm-angie", text.strip() + "\n", 0o644)
else:
motd = Path("/etc/motd")
existing = motd.read_text(encoding="utf-8") if motd.exists() else ""
pattern = re.compile(r"################################ NPM / ANGIE ################################.*?###########################################################################\n", re.S)
if pattern.search(existing):
content = pattern.sub(text.strip()+"\n", existing)
else:
content = (existing.rstrip()+"\n\n"+text.strip()+"\n") if existing else (text.strip()+"\n")
write_file(motd, content, 0o644)
def print_summary(info, ipv6_enabled, dark_enabled, update_mode):
ip, angie_v, node_v, yarn_v, npm_v = info
print("\n====================== SUMMARY ======================")
print(f"OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})")
print(f"Mode: {'UPDATE' if update_mode else 'INSTALL'}")
print(f"NPM panel address: http://{ip}:81")
print(f"Angie & Prometheus stats: http://{ip}:82/console | http://{ip}:82/p8s ")
print(f"Angie: v{angie_v} (unit: angie.service, PID: /run/angie/angie.pid)")
print(f"Node.js: v{node_v}")
print(f"Yarn: v{yarn_v}")
print(f"NPM (app): v{npm_v}")
print(f"IPv6: {'ENABLED' if ipv6_enabled else 'DISABLED (in configs too)'}")
print(f"Dark mode (TP): {'YES' if dark_enabled else 'NO'}")
print("Paths: /opt/npm (app), /data (data), /etc/angie (conf), /var/log/angie (logs)")
print("Services: systemctl status angie.service / npm.service")
print("Default login: Email: admin@example.com Password: changeme")
print("Test config: /usr/sbin/angie -t")
print("==========================================================\n")
# ========== UPDATE-ONLY ==========
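# Update-only path: upgrades system packages, rebuilds the frontend from the
# latest (or pinned) release, refreshes the backend in place while keeping
# /opt/npm/config, reinstalls backend dependencies and restarts the services.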
def update_only(node_pkg: str, npm_version_override: str | None, apply_dark: bool, dark_env: dict, ipv6_enabled: bool):
apt_update_upgrade()
install_node_and_yarn(node_pkg)
version = github_latest_release_tag("NginxProxyManager/nginx-proxy-manager", npm_version_override)
url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
tmp = Path(tempfile.mkdtemp(prefix="npm-update-"))
src = download_extract_tar_gz(url, tmp)
with step("Setting version in package.json (update)"):
for pkg in ["backend/package.json", "frontend/package.json"]:
pj = src / pkg
txt = pj.read_text(encoding="utf-8")
txt = re.sub(r'"version":\s*"0\.0\.0"', f'"version": "{version}"', txt)
pj.write_text(txt, encoding="utf-8")
_build_frontend(src / "frontend", Path("/opt/npm/frontend"))
with step("Updating backend without overwriting config/"):
backup_cfg = Path("/tmp/npm-config-backup")
if backup_cfg.exists():
shutil.rmtree(backup_cfg)
if Path("/opt/npm/config").exists():
shutil.copytree("/opt/npm/config", backup_cfg, dirs_exist_ok=True)
for item in Path("/opt/npm").glob("*"):
if item.name in ("frontend","config"):
continue
if item.is_dir():
shutil.rmtree(item)
else:
item.unlink()
shutil.copytree(src / "backend", "/opt/npm", dirs_exist_ok=True)
Path("/opt/npm/config").mkdir(parents=True, exist_ok=True)
if backup_cfg.exists():
shutil.copytree(backup_cfg, "/opt/npm/config", dirs_exist_ok=True)
shutil.rmtree(backup_cfg, ignore_errors=True)
with step("Installing backend dependencies after update"):
os.chdir("/opt/npm")
run(["yarn", "install"])
patch_npm_backend_commands()
create_systemd_units(ipv6_enabled=ipv6_enabled)
with step("Setting owners"):
run(["chown","-R","npm:npm","/opt/npm"])
if apply_dark:
apply_dark_mode(**dark_env)
with step("Restarting services after update"):
run(["systemctl","restart","angie.service"], check=False)
run(["systemctl","restart","npm.service"], check=False)
return version
# ========== DARK MODE ==========
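# Optional Theme.Park integration: injects the base and selected theme CSS
# <link> tags before </head> in every frontend HTML file, defaulting to the
# theme-park.dev domain and the 'organizr' theme.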
def apply_dark_mode(APP_FILEPATH="/opt/npm/frontend",
TP_DOMAIN=None, TP_COMMUNITY_THEME=None, TP_SCHEME=None, TP_THEME=None):
if DEBUG:
print('--------------------------------------')
print('| Nginx Proxy Manager theme.park Mod |')
print('--------------------------------------')
# locate frontend
if not Path(APP_FILEPATH).exists():
if Path("/app/frontend").exists():
APP_FILEPATH = "/app/frontend"
elif Path("/opt/nginx-proxy-manager/frontend").exists():
APP_FILEPATH = "/opt/nginx-proxy-manager/frontend"
if not TP_DOMAIN or TP_DOMAIN.strip() == "":
if DEBUG: print("No domain set, defaulting to theme-park.dev")
TP_DOMAIN = "theme-park.dev"
if not TP_SCHEME or TP_SCHEME.strip() == "":
TP_SCHEME = "https"
THEME_TYPE = "community-theme-options" if (str(TP_COMMUNITY_THEME).lower() == "true") else "theme-options"
if not TP_THEME or TP_THEME.strip() == "":
TP_THEME = "organizr"
if "github.io" in TP_DOMAIN:
TP_DOMAIN = f"{TP_DOMAIN}/theme.park"
if DEBUG:
print("Variables set:\n"
f"'APP_FILEPATH'={APP_FILEPATH}\n"
f"'TP_DOMAIN'={TP_DOMAIN}\n"
f"'TP_COMMUNITY_THEME'={TP_COMMUNITY_THEME}\n"
f"'TP_SCHEME'={TP_SCHEME}\n"
f"'TP_THEME'={TP_THEME}\n")
base_href = f"{TP_SCHEME}://{TP_DOMAIN}/css/base/nginx-proxy-manager/nginx-proxy-manager-base.css"
theme_href = f"{TP_SCHEME}://{TP_DOMAIN}/css/{THEME_TYPE}/{TP_THEME}.css"
with step("Injecting Theme.Park CSS into HTML"):
htmls = list(Path(APP_FILEPATH).rglob("*.html"))
for path in htmls:
html = path.read_text(encoding="utf-8")
if base_href not in html:
html = re.sub(r"</head>", f"<link rel='stylesheet' href='{base_href}'></head> ", html, flags=re.I)
html = re.sub(r"</head>", f"<link rel='stylesheet' href='{theme_href}'></head> ", html, flags=re.I)
path.write_text(html, encoding="utf-8")
if DEBUG:
print(f"Patched: {path}")
# ========== MAIN ==========
def main():
global DEBUG
ensure_root()
parser = argparse.ArgumentParser(
description="Install/upgrade NPM on Angie (Debian/Ubuntu) with step animation.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--nodejs-pkg", default="nodejs", help="APT Node.js package name (e.g. nodejs, nodejs-18).")
parser.add_argument("--npm-version", default=None, help="Force NPM app version (e.g. 2.12.6). Default: latest release.")
parser.add_argument("--motd", choices=["yes","no"], default="yes", help="Update MOTD after completion.")
parser.add_argument("--enable-ipv6", action="store_true",
help="Do not strip IPv6 from configs/resolvers (keep IPv6).")
parser.add_argument("--update", action="store_true",
help="Update mode: upgrade packages + rebuild frontend/backend without reconfiguring Angie.")
parser.add_argument("--dark-mode", action="store_true",
help="Inject Theme.Park CSS into frontend (see TP_* vars).")
parser.add_argument("--tp-domain", default=os.environ.get("TP_DOMAIN", ""),
help="Theme.Park domain (e.g. theme-park.dev or *.github.io).")
parser.add_argument("--tp-community-theme", default=os.environ.get("TP_COMMUNITY_THEME", "false"),
help="true = community-theme-options; false = theme-options.")
parser.add_argument("--tp-scheme", default=os.environ.get("TP_SCHEME", "https"),
help="URL scheme (http/https).")
parser.add_argument("--tp-theme", default=os.environ.get("TP_THEME", "organizr"),
help="Theme.Park theme name (e.g. organizr, catppuccin).")
parser.add_argument("--debug", action="store_true",
help="Show detailed logs and progress.")
args = parser.parse_args()
DEBUG = args.debug
print("\n================== NPM + ANGIE installer ( https://gitea.linuxiarz.pl/gru/npm-angie-auto-install ) ==================")
print("Log mode:", "DEBUG" if DEBUG else "SIMPLE")
print(f"Detected OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})")
print("\n@linuxiarz.pl\n")
if args.update:
install_logrotate_for_data_logs()
fix_logrotate_permissions_and_wrapper()
version = update_only(
node_pkg=args.nodejs_pkg,
npm_version_override=args.npm_version,
apply_dark=args.dark_mode,
dark_env=dict(
APP_FILEPATH="/opt/npm/frontend",
TP_DOMAIN=args.tp_domain,
TP_COMMUNITY_THEME=args.tp_community_theme,
TP_SCHEME=args.tp_scheme,
TP_THEME=args.tp_theme,
),
ipv6_enabled=args.enable_ipv6,
)
info = gather_versions(version)
update_motd(args.motd == "yes", info, ipv6_enabled=args.enable_ipv6)
print_summary(info, args.enable_ipv6, args.dark_mode, update_mode=True)
return
apt_update_upgrade()
apt_purge(["nginx","openresty","nodejs","npm","yarn","certbot","rustc","cargo"])
apt_install(["ca-certificates","curl","gnupg","openssl","apache2-utils","logrotate","sudo","acl",
"python3","sqlite3", "git", "lsb-release", "build-essential"])
setup_angie(ipv6_enabled=args.enable_ipv6)
write_metrics_files()
install_node_and_yarn(args.nodejs_pkg)
ensure_user_and_dirs()
create_sudoers_for_npm()
setup_certbot_venv()
configure_letsencrypt()
npm_app_version = deploy_npm_app(args.npm_version)
if not args.enable_ipv6:
strip_ipv6_listens([Path("/etc/angie"), Path("/etc/nginx")])
else:
print("IPv6: leaving entries (skipped IPv6 cleanup).")
if args.dark_mode:
apply_dark_mode(APP_FILEPATH="/opt/npm/frontend",
TP_DOMAIN=args.tp_domain,
TP_COMMUNITY_THEME=args.tp_community_theme,
TP_SCHEME=args.tp_scheme,
TP_THEME=args.tp_theme)
create_systemd_units(ipv6_enabled=args.enable_ipv6)
ensure_nginx_symlink()
install_logrotate_for_data_logs()
fix_logrotate_permissions_and_wrapper()
sync_backup_nginx_conf()
with step("Restarting services after installation"):
run(["systemctl","restart","angie.service"], check=False)
run(["systemctl","restart","npm.service"], check=False)
info = gather_versions(npm_app_version)
update_motd(args.motd == "yes", info, ipv6_enabled=args.enable_ipv6)
print_summary(info, args.enable_ipv6, args.dark_mode, update_mode=False)
if __name__ == "__main__":
signal.signal(signal.SIGINT, lambda s, f: sys.exit(130))
main()
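# Example invocations (illustrative; flags as defined in main() above):
#   ./npm_install.py                                  # fresh install, latest NPM release
#   ./npm_install.py --debug --enable-ipv6
#   ./npm_install.py --update --npm-version 2.12.6
#   ./npm_install.py --dark-mode --tp-theme catppuccin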