Files
npm-angie-auto-install/npm_install.py
Mateusz Gruszczyński 940d3ffac6 backup before update
2025-10-26 21:58:58 +01:00

2419 lines
81 KiB
Python

#!/usr/bin/env python3
from __future__ import annotations
import argparse, os, sys, json, shutil, subprocess, tarfile, tempfile, urllib.request, re, time, threading, signal
from pathlib import Path
from glob import glob
# Global debug switch: when True, subprocess output is shown and spinners are
# replaced by plain prints (see Spinner, run, run_out, _devnull below).
DEBUG = False
# ========== Configuration ==========
# Minimum required Node.js version for NPM 2.12.6+
MIN_NODEJS_VERSION = 18
# Maximum supported Node.js version
MAX_NODEJS_VERSION = 21
# Theme.Park settings (for --dark-mode or --tp-theme)
# Popular themes: organizr, dark, plex, nord, dracula, space-gray, hotline, aquamarine
TP_DOMAIN = "theme-park.dev"
TP_SCHEME = "https"
TP_COMMUNITY_THEME = "false"
TP_DEFAULT_THEME = "organizr"
# ========== UI / Spinner ==========
class Spinner:
    """Small terminal spinner driven by a daemon thread.

    In DEBUG mode no thread is started; plain lines are printed instead.
    """

    FRAMES = ["", "", "", "", "", "", "", "", "", ""]

    def __init__(self, text):
        self.text = text
        self._stop = threading.Event()
        self._th = threading.Thread(target=self._spin, daemon=True)

    def _spin(self):
        # Worker loop: redraw the current frame roughly 8x per second.
        tick = 0
        while not self._stop.is_set():
            glyph = self.FRAMES[tick % len(self.FRAMES)]
            print(f"\r{glyph} {self.text} ", end="", flush=True)
            time.sleep(0.12)
            tick += 1

    def start(self):
        if DEBUG:
            print(f"{self.text} ...")
        else:
            self._th.start()

    def _halt(self):
        # Stop the worker and blank out the spinner glyph with padding.
        self._stop.set()
        self._th.join(timeout=0.2)
        print(f"\r{self.text}{' ' * 20}")

    def stop_ok(self):
        if DEBUG:
            print(f"{self.text}")
        else:
            self._halt()

    def stop_fail(self):
        if DEBUG:
            print(f"{self.text}")
        else:
            self._halt()
def step(text):
    """Return a context manager that shows a Spinner for the step's duration.

    On a clean exit the spinner stops with stop_ok(); if the body raised,
    stop_fail() is used instead. Exceptions are not suppressed.
    """
    class _StepCtx:
        def __enter__(ctx):
            ctx.spinner = Spinner(text)
            ctx.spinner.start()
            return ctx

        def __exit__(ctx, exc_type, exc, tb):
            # Pick the closing frame based on whether the body raised.
            if exc is None:
                ctx.spinner.stop_ok()
            else:
                ctx.spinner.stop_fail()

    return _StepCtx()
def _devnull():
    """Return subprocess.DEVNULL, or None (inherit stdio) when DEBUG is on."""
    if DEBUG:
        return None
    return subprocess.DEVNULL
def run(cmd, check=True, env=None):
    """Execute *cmd*, silencing output unless DEBUG.

    Echoes the command in DEBUG mode. Returns the CompletedProcess;
    raises CalledProcessError when check=True and the command fails.
    """
    if DEBUG:
        print("+", " ".join(cmd))
    sink = None if DEBUG else subprocess.DEVNULL
    return subprocess.run(cmd, check=check, env=env, stdout=sink, stderr=sink)
def run_out(cmd, check=True):
    """Execute *cmd* and return its captured stdout as text."""
    if DEBUG:
        print("+", " ".join(cmd))
    proc = subprocess.run(cmd, check=check, capture_output=True, text=True)
    return proc.stdout
# ========== Utils ==========
def ensure_root():
    """Exit with status 1 unless the process runs with effective UID 0."""
    if os.geteuid() == 0:
        return
    print("Run as root.", file=sys.stderr)
    sys.exit(1)
def os_release():
    """Parse /etc/os-release into {ID, VERSION_ID, CODENAME, PRETTY}.

    Missing file or parse errors yield empty fields; PRETTY falls back to
    "<ID> <VERSION_ID>" (defaulting to "linux").
    """
    fields = {}
    try:
        for raw in Path("/etc/os-release").read_text().splitlines():
            if "=" not in raw:
                continue
            key, _, val = raw.partition("=")
            fields[key] = val.strip().strip('"')
    except Exception:
        # Best-effort: unreadable file just means empty metadata.
        pass
    pretty = (
        fields.get("PRETTY_NAME")
        or f"{fields.get('ID','linux')} {fields.get('VERSION_ID','')}".strip()
    )
    return {
        "ID": fields.get("ID", ""),
        "VERSION_ID": fields.get("VERSION_ID", ""),
        "CODENAME": fields.get("VERSION_CODENAME", ""),
        "PRETTY": pretty,
    }
def apt_update_upgrade():
    """Refresh apt package lists and upgrade installed packages."""
    with step("Updating package lists and system"):
        for cmd in (["apt-get", "update", "-y"], ["apt-get", "-y", "upgrade"]):
            run(cmd)
def apt_install(pkgs):
    """Install *pkgs* via apt-get; a no-op on an empty list."""
    if not pkgs:
        return
    with step(f"Installing packages: {', '.join(pkgs)}"):
        run(["apt-get", "install", "-y", *pkgs])
def apt_try_install(pkgs):
    """Install only those *pkgs* that apt-cache actually knows about.

    Unknown packages are skipped silently (logged in DEBUG mode).
    """
    if not pkgs:
        return
    installable = []
    for name in pkgs:
        probe = subprocess.run(
            ["apt-cache", "show", name], stdout=_devnull(), stderr=_devnull()
        )
        if probe.returncode == 0:
            installable.append(name)
        elif DEBUG:
            print(f"skip missing pkg: {name}")
    if installable:
        apt_install(installable)
def apt_purge(pkgs):
    """Best-effort purge of *pkgs* followed by autoremove (failures ignored)."""
    if not pkgs:
        return
    with step(f"Removing conflicting packages: {', '.join(pkgs)}"):
        run(["apt-get", "purge", "-y", *pkgs], check=False)
        run(["apt-get", "autoremove", "-y"], check=False)
def write_file(path: Path, content: str, mode=0o644):
    """Write *content* (UTF-8) to *path*, creating parents, then chmod *mode*."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
    os.chmod(path, mode)
def append_unique(path: Path, lines: str):
    """Append each non-blank line of *lines* to *path*, skipping duplicates.

    Fixes over the previous version:
    - uniqueness is whole-line, not substring (appending "proxy" is no longer
      suppressed by an unrelated "proxy_cache on;" line);
    - lines appended earlier in the SAME call are also counted as present,
      so duplicated input lines are written only once;
    - an empty/missing file no longer receives a spurious leading newline.
    The file is created (with parents) when missing.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    existing = path.read_text(encoding="utf-8") if path.exists() else ""
    seen = set(existing.splitlines())
    out = existing
    for line in lines.splitlines():
        if line.strip() and line not in seen:
            # Separate from previous content unless it already ends cleanly.
            out += ("" if (not out or out.endswith("\n")) else "\n") + line + "\n"
            seen.add(line)
    path.write_text(out, encoding="utf-8")
def github_latest_release_tag(repo: str, override: str | None) -> str:
if override:
return override.lstrip("v")
url = f"https://api.github.com/repos/{repo}/releases/latest"
with step(f"Downloading from GitGub: {repo}"):
with urllib.request.urlopen(url) as r:
data = json.load(r)
tag = data["tag_name"]
return tag.lstrip("v")
def write_resolvers_conf(ipv6_enabled: bool):
    """Write Angie's resolvers.conf from the host's /etc/resolv.conf.

    IPv6 nameservers are only used when *ipv6_enabled*; with no usable
    entries, public Cloudflare/Google resolvers are substituted.
    """
    v4, v6 = [], []
    try:
        for raw in Path("/etc/resolv.conf").read_text().splitlines():
            raw = raw.strip()
            if not raw.startswith("nameserver"):
                continue
            # Drop any zone index (fe80::1%eth0 -> fe80::1).
            addr = raw.split()[1].split("%")[0]
            (v6 if ":" in addr else v4).append(addr)
    except Exception:
        pass
    ips = v4 + (v6 if ipv6_enabled else [])
    if not ips:
        fallback_v6 = ["2606:4700:4700::1111", "2001:4860:4860::8888"]
        ips = ["1.1.1.1", "8.8.8.8"] + (fallback_v6 if ipv6_enabled else [])
    flag = " ipv6=on" if ipv6_enabled and any(":" in x for x in ips) else ""
    body = f"resolver {' '.join(ips)} valid=10s{flag};\n"
    write_file(Path("/etc/angie/conf.d/include/resolvers.conf"), body, 0o644)
def validate_nodejs_version(version: str) -> tuple[bool, str, str | None]:
    """Resolve Node.js version aliases and clamp to the supported maximum.

    Returns (ok, resolved_version, warning). ok is False when the format is
    invalid or the requested major exceeds MAX_NODEJS_VERSION (in which case
    the maximum is returned together with a warning message).
    """
    aliases = {"latest": "21", "lts": "18", "current": "21"}
    resolved = aliases.get(version.lower(), version)
    m = re.match(r"(\d+)", resolved)
    if m is None:
        return False, resolved, f"Invalid version format: {version}"
    major_version = int(m.group(1))
    if major_version <= MAX_NODEJS_VERSION:
        return True, resolved, None
    warning = (
        f"⚠ WARNING: Requested Node.js v{major_version} exceeds maximum tested version (v{MAX_NODEJS_VERSION}).\n"
        f" NPM may not be compatible with Node.js v{major_version}.\n"
        f" Falling back to Node.js v{MAX_NODEJS_VERSION}."
    )
    return False, str(MAX_NODEJS_VERSION), warning
def validate_supported_os():
    """Check the module-level OSREL snapshot against supported distros.

    Hard-exits (status 1) on an unsupported distribution; for a supported
    distribution at an untested version, prompts the user and exits unless
    they confirm with 'y'/'yes'.
    """
    distro_id = OSREL.get("ID", "").lower()
    version_id = OSREL.get("VERSION_ID", "").strip()
    # Officially tested releases per distribution.
    SUPPORTED = {"debian": ["11", "12", "13"], "ubuntu": ["20.04", "22.04", "24.04"]}
    if distro_id not in SUPPORTED:
        print(f"\n ⚠ ERROR: Unsupported distribution: {distro_id}")
        print(f" Detected: {OSREL.get('PRETTY', 'Unknown')}")
        print(f"\n Supported distributions:")
        print(f" • Debian 11 (Bullseye), 12 (Bookworm), 13 (Trixie)")
        print(f" • Ubuntu 20.04 LTS, 22.04 LTS, 24.04 LTS")
        print(f" • Debian derivatives: Proxmox, armbian")
        print(f"\n Your distribution may work but is not tested.")
        print(f" Continue at your own risk or install on a supported system.\n")
        sys.exit(1)
    supported_versions = SUPPORTED[distro_id]
    # Prefix match so e.g. "12.5" counts as supported "12".
    version_match = False
    for supported_ver in supported_versions:
        if version_id.startswith(supported_ver):
            version_match = True
            break
    if not version_match:
        print(f"\n ⚠ WARNING: Unsupported version of {distro_id}: {version_id}")
        print(f" Detected: {OSREL.get('PRETTY', 'Unknown')}")
        print(f" Supported versions: {', '.join(supported_versions)}")
        print(f"\n This version is not officially tested.")
        print(f" Prerequisites:")
        print(f" • Angie packages must be available for your distribution")
        print(
            f" • Check: https://en.angie.software/angie/docs/installation/oss_packages/"
        )
        print(f" • Your system should be Debian/Ubuntu compatible (apt-based)")
        response = input("\n Continue anyway? [y/N]: ").strip().lower()
        if response not in ["y", "yes"]:
            print("\n Installation cancelled.\n")
            sys.exit(1)
        print()
    else:
        print(f"✓ Supported OS detected: {OSREL.get('PRETTY', 'Unknown')}\n")
def save_installer_config(config: dict):
    """Persist *config* (stamped with last_modified) to /data/installer.json.

    Write failures are reported as a warning, never raised.
    """
    config_path = Path("/data/installer.json")
    config_path.parent.mkdir(parents=True, exist_ok=True)
    # NOTE: mutates the caller's dict by adding the timestamp.
    config["last_modified"] = time.strftime("%Y-%m-%d %H:%M:%S")
    try:
        config_path.write_text(json.dumps(config, indent=2), encoding="utf-8")
    except Exception as e:
        print(f"⚠ Warning: Could not save installer config: {e}")
    else:
        if DEBUG:
            print(f"✓ Saved installer config to {config_path}")
def load_installer_config() -> dict:
    """Load /data/installer.json; return {} when absent or unreadable."""
    config_path = Path("/data/installer.json")
    if not config_path.exists():
        if DEBUG:
            print(f"No installer config found at {config_path}")
        return {}
    try:
        parsed = json.loads(config_path.read_text(encoding="utf-8"))
    except Exception as e:
        print(f"⚠ Warning: Could not load installer config: {e}")
        return {}
    if DEBUG:
        print(f"✓ Loaded installer config from {config_path}")
    return parsed
def comment_x_served_by_step(path="/etc/angie/conf.d/include/proxy.conf"):
    """Comment out active 'add_header X-Served-By $host;' lines in *path*.

    Creates a .bak copy beside the file, rewrites via a temp file plus
    os.replace (atomic on the same filesystem), and preserves the file mode.
    Returns the number of lines commented; raises FileNotFoundError when
    *path* does not exist.
    """
    conf = Path(path)
    if not conf.exists():
        raise FileNotFoundError(path)
    text = conf.read_text()
    # (?!#) skips lines that are already commented out.
    rx = re.compile(
        r"^(?P<ws>\s*)(?!#)\s*add_header\s+X-Served-By\s+\$host\s*;\s*$", re.MULTILINE
    )
    hits = rx.findall(text)
    if not hits:
        return 0
    backup = conf.with_suffix(conf.suffix + ".bak")
    shutil.copy2(conf, backup)
    rewritten = rx.sub(
        lambda m: f"{m.group('ws')}# add_header X-Served-By $host;", text
    )
    fd, scratch = tempfile.mkstemp(dir=str(conf.parent))
    os.close(fd)
    Path(scratch).write_text(rewritten)
    shutil.copymode(conf, scratch)
    os.replace(scratch, conf)
    print(f"✔ Hide X-Served-by header | backup: {backup}")
    return len(hits)
def set_file_ownership(files: list[str | Path], owner: str, mode: int | None = None):
    """chown each existing file to *owner* (optionally chmod to *mode*).

    Prints a summary of successes and failures; returns True only when
    every file was handled without error.
    """
    done = []
    errors = []
    for entry in files:
        p = Path(entry)
        if not p.exists():
            errors.append((str(p), "File not found"))
            continue
        try:
            run(["chown", owner, str(p)])
            if mode is not None:
                os.chmod(p, mode)
        except Exception as exc:
            errors.append((str(p), str(exc)))
        else:
            done.append(str(p))
    if done:
        print(f"✔ Set ownership '{owner}' for {len(done)} file(s)")
        if DEBUG:
            for f in done:
                print(f" - {f}")
    if errors:
        print(f"⚠ Failed to set ownership for {len(errors)} file(s):")
        for f, err in errors:
            print(f" - {f}: {err}")
    return len(errors) == 0
def check_distro_nodejs_available():
    """Probe apt-cache for a 'nodejs' package.

    Returns (available, major_version, version_string); any failure or a
    missing package yields (False, None, None).
    """
    try:
        probe = subprocess.run(
            ["apt-cache", "show", "nodejs"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        if probe.returncode == 0:
            for line in probe.stdout.splitlines():
                if not line.startswith("Version:"):
                    continue
                version_str = line.split(":", 1)[1].strip()
                m = re.match(r"(\d+)", version_str)
                if m:
                    major = int(m.group(1))
                    if DEBUG:
                        print(
                            f"✓ Distro has nodejs v{version_str} (major: {major})"
                        )
                    return True, major, version_str
        return False, None, None
    except Exception as e:
        if DEBUG:
            print(f"Failed to check distro nodejs: {e}")
        return False, None, None
def install_nodejs_from_distro():
    """Install Node.js (plus npm if missing) from the distro repositories.

    Returns True when a 'node' binary is present afterwards.
    """
    with step("Installing Node.js from distribution repositories"):
        apt_install(["nodejs"])
        if shutil.which("npm") is None:
            apt_try_install(["npm"])
        if shutil.which("node") is None:
            return False
        node_ver = run_out(["node", "--version"], check=False).strip()
        print(f" Node.js: {node_ver}")
        if shutil.which("npm"):
            npm_ver = run_out(["npm", "--version"], check=False).strip()
            print(f" npm: {npm_ver}")
        return True
def ensure_minimum_nodejs(min_version=MIN_NODEJS_VERSION, user_requested_version=None):
    """Ensure a suitable Node.js is installed; return True on success.

    Order of operations:
    1. If 'node' already exists and matches the (clamped) requested major —
       or, with no request, is at least *min_version* — keep it.
    2. Otherwise install from NodeSource (requested major, clamped into
       [MIN_NODEJS_VERSION, MAX_NODEJS_VERSION]), or — with no request —
       prefer the distro package when it is new enough, falling back to
       NodeSource.
    3. Finally re-check that a 'node' binary exists.
    """
    with step("Checking Node.js version requirements"):
        try:
            node_ver = run_out(["node", "--version"], check=False).strip()
            match = re.match(r"v?(\d+)", node_ver)
            if match:
                current_major = int(match.group(1))
                if user_requested_version:
                    requested_match = re.match(r"(\d+)", str(user_requested_version))
                    if requested_match:
                        requested_major = int(requested_match.group(1))
                        # Clamp the request into the supported window.
                        if requested_major < MIN_NODEJS_VERSION:
                            requested_major = MIN_NODEJS_VERSION
                        elif requested_major > MAX_NODEJS_VERSION:
                            requested_major = MAX_NODEJS_VERSION
                        if current_major == requested_major:
                            # Already at the requested major — nothing to do.
                            if shutil.which("npm"):
                                npm_ver = run_out(
                                    ["npm", "--version"], check=False
                                ).strip()
                                print(f" Node.js: {node_ver}")
                                print(f" npm: {npm_ver}")
                            else:
                                print(f" Node.js: {node_ver}")
                            return True
                else:
                    if current_major >= min_version:
                        # Existing install already satisfies the minimum.
                        if shutil.which("npm"):
                            npm_ver = run_out(["npm", "--version"], check=False).strip()
                            print(f" Node.js: {node_ver}")
                            print(f" npm: {npm_ver}")
                        else:
                            print(f" Node.js: {node_ver}")
                        return True
        except FileNotFoundError:
            # 'node' not installed yet — fall through to installation.
            pass
        except Exception:
            pass
        if user_requested_version:
            requested_match = re.match(r"(\d+)", str(user_requested_version))
            if requested_match:
                requested_major = int(requested_match.group(1))
                if requested_major < MIN_NODEJS_VERSION:
                    print(
                        f"⚠ Requested version {requested_major} < minimum {MIN_NODEJS_VERSION}"
                    )
                    print(f" Installing minimum version: v{MIN_NODEJS_VERSION}")
                    install_node_from_nodesource(str(MIN_NODEJS_VERSION))
                elif requested_major > MAX_NODEJS_VERSION:
                    print(
                        f"⚠ Requested version {requested_major} > maximum {MAX_NODEJS_VERSION}"
                    )
                    print(f" Installing maximum version: v{MAX_NODEJS_VERSION}")
                    install_node_from_nodesource(str(MAX_NODEJS_VERSION))
                else:
                    install_node_from_nodesource(str(requested_major))
            else:
                # Unparseable request — fall back to the minimum supported.
                install_node_from_nodesource(str(MIN_NODEJS_VERSION))
        else:
            # No explicit request: try the distro package first.
            has_nodejs, major, version_str = check_distro_nodejs_available()
            if has_nodejs and major and major >= min_version:
                print(f"✓ Distribution provides Node.js v{version_str} (>= v{min_version})")
                if install_nodejs_from_distro():
                    return True
                else:
                    print(f"⚠ Failed to install from distro, falling back to NodeSource")
                    install_node_from_nodesource(str(min_version))
            else:
                if has_nodejs:
                    print(f"⚠ Distribution Node.js v{version_str} < minimum v{min_version}")
                else:
                    print(f"✓ Distribution doesn't provide Node.js package")
                print(f" Installing from NodeSource: v{min_version}")
                install_node_from_nodesource(str(min_version))
        # Final sanity check after whichever install path ran.
        if shutil.which("node"):
            node_ver = run_out(["node", "--version"], check=False).strip()
            if shutil.which("npm"):
                npm_ver = run_out(["npm", "--version"], check=False).strip()
            return True
        return False
def download_extract_tar_gz(url: str, dest_dir: Path) -> Path:
    """Download a .tar.gz from *url* and extract it into *dest_dir*.

    Returns the path of the archive's top-level directory (derived from
    the first archive member).
    """
    dest_dir.mkdir(parents=True, exist_ok=True)
    with step("Downloading and untaring"):
        # Spool the download to a named temp file so tarfile can seek it.
        with urllib.request.urlopen(url) as resp, tempfile.NamedTemporaryFile(
            delete=False
        ) as spool:
            shutil.copyfileobj(resp, spool)
            spool.flush()
            archive = Path(spool.name)
        with tarfile.open(archive, "r:gz") as tar:
            try:
                # Python 3.12+ sanitized extraction.
                tar.extractall(dest_dir, filter="data")
            except TypeError:
                # Older Pythons do not accept the 'filter' kwarg.
                tar.extractall(dest_dir)
            except Exception as exc:
                # Retry plainly when the data filter rejects archive links.
                if "LinkOutsideDestinationError" in str(type(exc).__name__):
                    tar.extractall(dest_dir)
                else:
                    raise
            top_entry = tar.getmembers()[0].name.split("/")[0]
        os.unlink(archive)
        return dest_dir / top_entry
# Distro info (used in banners & repo setup)
# NOTE: captured once at import time; validate_supported_os() reads this
# module-level snapshot instead of re-parsing /etc/os-release.
OSREL = os_release()
# === extra sync ===
def sync_backup_nginx_conf():
    """Copy files from /etc/nginx.bak/conf.d into /etc/angie/conf.d.

    Only files that are missing or differ (byte compare) are copied;
    individual copy failures are reported and skipped.
    """
    from pathlib import Path
    import shutil, filecmp

    src = Path("/etc/nginx.bak/conf.d")
    dst = Path("/etc/angie/conf.d")
    if not src.exists():
        return
    with step("Sync /etc/nginx.bak/conf.d -> /etc/angie/conf.d"):
        for entry in src.rglob("*"):
            if entry.is_dir():
                continue
            target = dst / entry.relative_to(src)
            target.parent.mkdir(parents=True, exist_ok=True)
            try:
                same = target.exists() and filecmp.cmp(entry, target, shallow=False)
                if not same:
                    shutil.copy2(entry, target)
            except Exception as e:
                print(f"Warning: sync failed for {entry} -> {target}: {e}")
def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")):
    """Provision a certbot virtualenv at *venv_dir* on Python 3.11.

    Resolution order:
      1. an already-present system python3.11;
      2. on Ubuntu, python3.11 from the deadsnakes PPA;
      3. otherwise (Debian path), Python 3.11 built via pyenv under
         /opt/npm/.pyenv as the 'npm' user.
    In every branch the venv gets pip/setuptools/wheel plus certbot and its
    crypto dependencies, and /usr/local/bin/certbot is symlinked into it.
    """
    info = os_release()
    distro_id = (info.get("ID") or "").lower()
    # ============================================================
    # STEP 1: Check if Python 3.11 is already available
    # ============================================================
    python311_available = False
    if shutil.which("python3.11"):
        try:
            ver_output = run_out(["python3.11", "--version"], check=False).strip()
            match = re.search(r"Python (\d+)\.(\d+)", ver_output)
            if match:
                major, minor = int(match.group(1)), int(match.group(2))
                if major == 3 and minor == 11:
                    python311_available = True
                    if DEBUG:
                        print(f"✔ Found system Python 3.11: {ver_output}")
        except Exception:
            pass
    # ============================================================
    # STEP 2: Use system Python 3.11 if available
    # ============================================================
    if python311_available:
        with step(f"Using system Python 3.11 for certbot venv"):
            # Ensure python3.11-venv is installed
            apt_try_install(["python3.11-venv", "python3-pip"])
            venv_dir.mkdir(parents=True, exist_ok=True)
            run(["python3.11", "-m", "venv", str(venv_dir)])
            venv_bin = venv_dir / "bin"
            pip_path = venv_bin / "pip"
            certbot_path = venv_bin / "certbot"
            env_build = os.environ.copy()
            # Work around distutils removal when building wheels.
            env_build["SETUPTOOLS_USE_DISTUTILS"] = "local"
            run(
                [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"],
                env=env_build,
            )
            run(
                [
                    str(pip_path),
                    "install",
                    "-U",
                    "cryptography",
                    "cffi",
                    "certbot",
                    "tldextract",
                ],
                env=env_build,
            )
            # Expose the venv's certbot on PATH.
            Path("/usr/local/bin").mkdir(parents=True, exist_ok=True)
            target = Path("/usr/local/bin/certbot")
            if target.exists() or target.is_symlink():
                try:
                    target.unlink()
                except Exception:
                    pass
            target.symlink_to(certbot_path)
            cb_ver = run_out([str(certbot_path), "--version"], check=False) or ""
            pip_ver = run_out([str(pip_path), "--version"], check=False) or ""
            print(f" Python: {ver_output}")
            print(f" Certbot: {cb_ver.strip()}")
            print(f" Pip: {pip_ver.strip().split(' from ')[0]}")
        return
    # ============================================================
    # STEP 3: Ubuntu - install Python 3.11 from deadsnakes PPA
    # ============================================================
    if distro_id == "ubuntu":
        with step(
            f"Ubuntu detected: {info.get('PRETTY','Ubuntu')}. Install Python 3.11 via deadsnakes"
        ):
            try:
                run(["apt-get", "update", "-y"], check=False)
                apt_try_install(["software-properties-common"])
            except Exception:
                run(
                    ["apt-get", "install", "-y", "software-properties-common"],
                    check=False,
                )
            run(["add-apt-repository", "-y", "ppa:deadsnakes/ppa"])
            run(["apt-get", "update", "-y"], check=False)
            run(["apt-get", "install", "-y", "python3.11", "python3.11-venv"])
        with step(f"Create venv at {venv_dir} using python3.11"):
            venv_dir.mkdir(parents=True, exist_ok=True)
            run(["python3.11", "-m", "venv", str(venv_dir)])
            venv_bin = venv_dir / "bin"
            pip_path = venv_bin / "pip"
            certbot_path = venv_bin / "certbot"
            env_build = os.environ.copy()
            env_build["SETUPTOOLS_USE_DISTUTILS"] = "local"
            run(
                [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"],
                env=env_build,
            )
            run(
                [
                    str(pip_path),
                    "install",
                    "-U",
                    "cryptography",
                    "cffi",
                    "certbot",
                    "tldextract",
                ],
                env=env_build,
            )
            Path("/usr/local/bin").mkdir(parents=True, exist_ok=True)
            target = Path("/usr/local/bin/certbot")
            if target.exists() or target.is_symlink():
                try:
                    target.unlink()
                except Exception:
                    pass
            target.symlink_to(certbot_path)
            cb_ver = run_out([str(certbot_path), "--version"], check=False) or ""
            pip_ver = run_out([str(pip_path), "--version"], check=False) or ""
            print(f" Python: Python 3.11 (deadsnakes)")
            print(f" Certbot: {cb_ver.strip()}")
            print(f" Pip: {pip_ver.strip().split(' from ')[0]}")
        return
    # ============================================================
    # STEP 4: Debian - install Python 3.11 via pyenv
    # ============================================================
    PYENV_ROOT = Path("/opt/npm/.pyenv")
    PYENV_OWNER = "npm"
    PYTHON_VERSION = "3.11.14"
    try:
        # Build toolchain + headers needed to compile CPython via pyenv.
        apt_try_install(
            [
                "pyenv",
                "build-essential",
                "gcc",
                "make",
                "pkg-config",
                "libssl-dev",
                "zlib1g-dev",
                "libbz2-dev",
                "libreadline-dev",
                "libsqlite3-dev",
                "tk-dev",
                "libncursesw5-dev",
                "libgdbm-dev",
                "libffi-dev",
                "uuid-dev",
                "liblzma-dev",
                "ca-certificates",
                "curl",
            ]
        )
    except Exception:
        run(["apt-get", "update"], check=False)
        run(
            [
                "apt-get",
                "install",
                "-y",
                "build-essential",
                "gcc",
                "make",
                "pkg-config",
                "libssl-dev",
                "zlib1g-dev",
                "libbz2-dev",
                "libreadline-dev",
                "libsqlite3-dev",
                "tk-dev",
                "libncursesw5-dev",
                "libgdbm-dev",
                "libffi-dev",
                "uuid-dev",
                "liblzma-dev",
                "ca-certificates",
                "curl",
                "git",
            ],
            check=False,
        )
    Path("/opt/npm").mkdir(parents=True, exist_ok=True)
    PYENV_ROOT.mkdir(parents=True, exist_ok=True)
    run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False)
    with step(f"Ensuring pyenv is available at {PYENV_ROOT}"):
        pyenv_bin_path = PYENV_ROOT / "bin" / "pyenv"
        if not pyenv_bin_path.exists():
            # Clone pyenv as the npm user so the tree is owned correctly.
            run(
                [
                    "sudo",
                    "-u",
                    PYENV_OWNER,
                    "bash",
                    "-lc",
                    'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then '
                    " command -v git >/dev/null 2>&1 || sudo apt-get install -y git; "
                    " git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; "
                    "fi",
                ]
            )
    # Accept a distro-packaged pyenv as well as the cloned one.
    PYENV_BIN_CANDIDATES = [
        str(PYENV_ROOT / "bin" / "pyenv"),
        "pyenv",
        "/usr/bin/pyenv",
        "/usr/lib/pyenv/bin/pyenv",
    ]
    pyenv_bin = next(
        (c for c in PYENV_BIN_CANDIDATES if shutil.which(c) or Path(c).exists()), None
    )
    if not pyenv_bin:
        raise RuntimeError("No 'pyenv' found even after git clone attempt.")
    with step(f"Installing Python {PYTHON_VERSION} via pyenv into {PYENV_ROOT}"):
        run(["mkdir", "-p", str(PYENV_ROOT)])
        run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False)
        run(
            [
                "sudo",
                "-u",
                PYENV_OWNER,
                "bash",
                "-lc",
                'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then '
                " command -v git >/dev/null 2>&1 || sudo apt-get install -y git; "
                " git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; "
                "fi",
            ]
        )
        # 'pyenv install -s' skips the build when the version already exists.
        install_cmd = (
            "export HOME=/opt/npm; "
            "export PYENV_ROOT=/opt/npm/.pyenv; "
            'export PATH="$PYENV_ROOT/bin:/usr/bin:/bin"; '
            'mkdir -p "$PYENV_ROOT"; cd "$HOME"; '
            f"pyenv install -s {PYTHON_VERSION}"
        )
        # env -i gives the npm user a minimal, predictable environment.
        run(
            [
                "sudo",
                "-u",
                PYENV_OWNER,
                "env",
                "-i",
                "HOME=/opt/npm",
                f"PYENV_ROOT={PYENV_ROOT}",
                f"PATH={PYENV_ROOT}/bin:/usr/bin:/bin",
                "bash",
                "-lc",
                install_cmd,
            ]
        )
    # Shell profile hook so interactive npm sessions can use pyenv.
    profile_snippet = f"""# Auto-generated by npm-angie-auto-install
# pyenv for '{PYENV_OWNER}'
if [ -d "{PYENV_ROOT}" ]; then
export PYENV_ROOT="{PYENV_ROOT}"
case ":$PATH:" in *":{PYENV_ROOT}/bin:"*) ;; *) PATH="{PYENV_ROOT}/bin:$PATH";; esac
case ":$PATH:" in *":/usr/lib/pyenv/bin:"*) ;; *) PATH="/usr/lib/pyenv/bin:$PATH";; esac
export PATH
case "$-" in *i*) _interactive=1 ;; *) _interactive=0 ;; esac
if [ "$_interactive" = 1 ] && {{ [ "${{USER:-}}" = "{PYENV_OWNER}" ] || [ "${{SUDO_USER:-}}" = "{PYENV_OWNER}" ]; }}; then
if command -v pyenv >/dev/null 2>&1; then
eval "$(pyenv init -)"
elif [ -x "{PYENV_ROOT}/bin/pyenv" ]; then
eval "$("{PYENV_ROOT}/bin/pyenv" init -)"
fi
fi
fi
"""
    write_file(Path("/etc/profile.d/npm-pyenv.sh"), profile_snippet, 0o644)
    python311 = PYENV_ROOT / "versions" / PYTHON_VERSION / "bin" / "python3.11"
    if not python311.exists():
        python311 = PYENV_ROOT / "versions" / PYTHON_VERSION / "bin" / "python3"
    if not python311.exists():
        raise RuntimeError(f"No python {PYTHON_VERSION} in {PYENV_ROOT}/versions/.")
    venv_bin = venv_dir / "bin"
    pip_path = venv_bin / "pip"
    certbot_path = venv_bin / "certbot"
    with step(f"Preparing Certbot venv at {venv_dir} (Python {PYTHON_VERSION})"):
        venv_dir.mkdir(parents=True, exist_ok=True)
        if not venv_dir.exists() or not pip_path.exists():
            run([str(python311), "-m", "venv", str(venv_dir)])
        env_build = os.environ.copy()
        env_build["SETUPTOOLS_USE_DISTUTILS"] = "local"
        run(
            [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"],
            env=env_build,
        )
        run(
            [
                str(pip_path),
                "install",
                "-U",
                "cryptography",
                "cffi",
                "certbot",
                "tldextract",
            ],
            env=env_build,
        )
        Path("/usr/local/bin").mkdir(parents=True, exist_ok=True)
        target = Path("/usr/local/bin/certbot")
        if target.exists() or target.is_symlink():
            try:
                target.unlink()
            except Exception:
                pass
        target.symlink_to(certbot_path)
        cb_ver = run_out([str(certbot_path), "--version"], check=False) or ""
        pip_ver = run_out([str(pip_path), "--version"], check=False) or ""
        print(f" Python: {PYTHON_VERSION} (pyenv)")
        print(f" Certbot: {cb_ver.strip()}")
        print(f" Pip: {pip_ver.strip().split(' from ')[0]}")
    run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", str(PYENV_ROOT)], check=False)
def configure_letsencrypt():
    """Set certbot/letsencrypt ownership and write /etc/letsencrypt.ini."""
    with step("configure letsencrypt"):
        run(["chown", "-R", "npm:npm", "/opt/certbot"], check=False)
        Path("/etc/letsencrypt").mkdir(parents=True, exist_ok=True)
        run(["chown", "-R", "npm:npm", "/etc/letsencrypt"], check=False)
        # Best-effort distro certbot install (venv certbot is the primary).
        run(
            ["apt-get", "install", "-y", "--no-install-recommends", "certbot"],
            check=False,
        )
        defaults = """text = True
non-interactive = True
webroot-path = /data/letsencrypt-acme-challenge
key-type = ecdsa
elliptic-curve = secp384r1
preferred-chain = ISRG Root X1
"""
        write_file(Path("/etc/letsencrypt.ini"), defaults, 0o644)
        run(["chown", "-R", "npm:npm", "/etc/letsencrypt"], check=False)
def ensure_nginx_symlink():
    """Make /etc/nginx a symlink to /etc/angie.

    A pre-existing real /etc/nginx directory is moved to /etc/nginx.bak
    first (replacing a stale file/symlink backup). All failures are
    reported as warnings, never raised.
    """
    from pathlib import Path

    target = Path("/etc/angie")
    link = Path("/etc/nginx")
    try:
        if link.is_symlink() and link.resolve() == target:
            # Already pointing at Angie.
            print("✔ Created symlink /etc/nginx -> /etc/angie")
            return
        if link.exists() and not link.is_symlink():
            backup = Path("/etc/nginx.bak")
            try:
                if backup.exists() and (backup.is_symlink() or backup.is_file()):
                    backup.unlink()
                link.rename(backup)
                print("✔ Backed up /etc/nginx to /etc/nginx.bak")
            except Exception as e:
                print(f"Warning: could not backup /etc/nginx: {e}")
        try:
            if link.exists() or link.is_symlink():
                link.unlink()
        except Exception:
            pass
        try:
            link.symlink_to(target)
            print("✔ Created symlink /etc/nginx -> /etc/angie")
        except Exception as e:
            print(f"Warning: could not create /etc/nginx symlink: {e}")
    except Exception as e:
        print(f"Warning: symlink check failed: {e}")
# ========== Angie / NPM template ==========
# Main Angie configuration written to /etc/angie/angie.conf by setup_angie().
# Loads the headers-more/brotli/zstd modules and includes the NPM-managed
# vhost directories under /data/nginx/.
ANGIE_CONF_TEMPLATE = """# run nginx in foreground
#daemon off;
load_module /etc/angie/modules/ngx_http_headers_more_filter_module.so;
load_module /etc/angie/modules/ngx_http_brotli_filter_module.so;
load_module /etc/angie/modules/ngx_http_brotli_static_module.so;
load_module /etc/angie/modules/ngx_http_zstd_filter_module.so;
load_module /etc/angie/modules/ngx_http_zstd_static_module.so;
# other modules
include /data/nginx/custom/modules[.]conf;
pid /run/angie/angie.pid;
user root;
worker_processes auto;
pcre_jit on;
error_log /data/logs/fallback_error.log warn;
# Custom
include /data/nginx/custom/root_top[.]conf;
events {
include /data/nginx/custom/events[.]conf;
}
http {
include /etc/angie/mime.types;
default_type application/octet-stream;
sendfile on;
server_tokens off;
tcp_nopush on;
tcp_nodelay on;
client_body_temp_path /tmp/angie/body 1 2;
keepalive_timeout 90s;
proxy_connect_timeout 90s;
proxy_send_timeout 90s;
proxy_read_timeout 90s;
ssl_prefer_server_ciphers on;
gzip on;
proxy_ignore_client_abort off;
client_max_body_size 2000m;
server_names_hash_bucket_size 1024;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Accept-Encoding "";
proxy_cache off;
proxy_cache_path /var/lib/angie/cache/public levels=1:2 keys_zone=public-cache:30m max_size=192m;
proxy_cache_path /var/lib/angie/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
include /etc/angie/conf.d/include/log.conf;
include /etc/angie/conf.d/include/resolvers.conf;
map $host $forward_scheme { default http; }
# Real IP Determination (IPv4 only by default)
set_real_ip_from 10.0.0.0/8;
set_real_ip_from 172.16.0.0/12;
set_real_ip_from 192.168.0.0/16;
include /etc/angie/conf.d/include/ip_ranges.conf;
real_ip_header X-Real-IP;
real_ip_recursive on;
# custom
brotli off;
brotli_comp_level 6;
brotli_static on;
brotli_types *;
zstd on;
zstd_min_length 256;
zstd_comp_level 3;
more_clear_headers "Server";
more_set_headers 'X-by: linuxiarz.pl';
# npm
include /data/nginx/custom/http_top[.]conf;
include /etc/nginx/conf.d/*.conf;
include /data/nginx/default_host/*.conf;
include /data/nginx/proxy_host/*.conf;
include /data/nginx/redirection_host/*.conf;
include /data/nginx/dead_host/*.conf;
include /data/nginx/temp/*.conf;
include /data/nginx/custom/http[.]conf;
# metrics & console
include /etc/angie/metrics.conf;
}
stream {
# npm
include /data/nginx/stream/*.conf;
include /data/nginx/custom/stream[.]conf;
}
# npm
include /data/nginx/custom/root[.]conf;
"""
# systemd unit installed to /etc/systemd/system/angie.service by setup_angie();
# forking service with a PID file under /run/angie.
ANGIE_UNIT = """[Unit]
Description=Angie - high performance web server
Documentation=https://en.angie.software/angie/docs/
After=network-online.target remote-fs.target nss-lookup.target
Wants=network-online.target
[Service]
Type=forking
PIDFile=/run/angie/angie.pid
ExecStartPre=/bin/mkdir -p /run/angie
ExecStartPre=/bin/mkdir -p /tmp/angie/body
ExecStart=/usr/sbin/angie -c /etc/angie/angie.conf
ExecReload=/bin/sh -c "/bin/kill -s HUP $(/bin/cat /run/angie/angie.pid)"
ExecStop=/bin/sh -c "/bin/kill -s TERM $(/bin/cat /run/angie/angie.pid)"
Restart=on-failure
RestartSec=3s
[Install]
WantedBy=multi-user.target
"""
def lsb_info():
    """Distro info via lsb_release, falling back to os_release() on failure."""
    def _lsb(flag):
        return run_out(["bash", "-lc", f"lsb_release -{flag}"]).strip()

    try:
        apt_try_install(["lsb-release"])
        dist = _lsb("si").lower().replace(" ", "")
        rel = _lsb("sr")
        code = _lsb("sc")
        return {
            "ID": dist,
            "VERSION_ID": rel,
            "CODENAME": code,
            "PRETTY": f"{dist} {rel} ({code})",
        }
    except Exception:
        return os_release()
# ========== Angie ==========
def setup_angie(ipv6_enabled: bool):
    """Add the Angie apt repository, install Angie + modules, and configure it.

    Writes /etc/angie/angie.conf from ANGIE_CONF_TEMPLATE, installs an
    nginx wrapper at /usr/sbin/nginx (sudo exec of angie), resolver config,
    cache directories, and a corrected systemd unit.
    """
    def _norm(s: str, allow_dot: bool = False) -> str:
        # Sanitize lsb/os-release fields for use in the apt repo URL.
        pat = r"[^a-z0-9+\-\.]" if allow_dot else r"[^a-z0-9+\-]"
        return re.sub(pat, "", s.strip().lower())

    with step("Adding Angie repo and installing Angie packages"):
        apt_try_install(
            [
                "ca-certificates",
                "curl",
                "gnupg",
                "apt-transport-https",
                "software-properties-common",
                "lsb-release",
            ]
        )
        # Trust key for download.angie.software.
        run(
            [
                "curl",
                "-fsSL",
                "-o",
                "/etc/apt/trusted.gpg.d/angie-signing.gpg",
                "https://angie.software/keys/angie-signing.gpg",
            ]
        )
        try:
            dist = run_out(["lsb_release", "-si"])
            rel = run_out(["lsb_release", "-sr"])
            code = run_out(["lsb_release", "-sc"])
        except Exception:
            # lsb_release missing — read the same fields from os-release.
            dist = run_out(["bash", "-c", '. /etc/os-release && printf %s "$ID"'])
            rel = run_out(
                ["bash", "-c", '. /etc/os-release && printf %s "$VERSION_ID"']
            )
            code = run_out(
                ["bash", "-c", '. /etc/os-release && printf %s "$VERSION_CODENAME"']
            )
        dist = _norm(dist)
        rel = _norm(rel, allow_dot=True)
        code = _norm(code)
        os_id = f"{dist}/{rel}" if rel else dist
        if code:
            line = f"deb https://download.angie.software/angie/{os_id} {code} main\n"
        else:
            line = f"deb https://download.angie.software/angie/{os_id} main\n"
        write_file(Path("/etc/apt/sources.list.d/angie.list"), line)
        run(["apt-get", "update"])
        base = [
            "angie",
            "angie-module-headers-more",
            "angie-module-brotli",
            "angie-module-zstd",
        ]
        optional = ["angie-module-prometheus", "angie-console-light"]
        apt_install(base)
        apt_try_install(optional)
    with step("Configuring modules and main Angie config"):
        modules_dir = Path("/etc/nginx/modules")
        modules_dir.mkdir(parents=True, exist_ok=True)
        write_file(Path("/etc/angie/angie.conf"), ANGIE_CONF_TEMPLATE, 0o644)
        # NPM invokes 'nginx'; this wrapper forwards to angie via sudo -n.
        WRAP = """#!/bin/sh
exec sudo -n /usr/sbin/angie "$@"
"""
        write_file(Path("/usr/sbin/nginx"), WRAP, 0o755)
        Path("/etc/nginx/conf.d/include").mkdir(parents=True, exist_ok=True)
    with step("Setting resolver(s) and cache directories"):
        write_resolvers_conf(ipv6_enabled)
        for p in ["/var/lib/angie/cache/public", "/var/lib/angie/cache/private"]:
            Path(p).mkdir(parents=True, exist_ok=True)
            os.chmod(p, 0o755)
    with step("Installing corrected systemd unit for Angie"):
        write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644)
def write_metrics_files():
    """Create /etc/angie/metrics.conf (port 82 with console & status).

    Exposes stub_status, the Angie /status API, the angie-console-light UI,
    and a Prometheus endpoint at /p8s. Included from ANGIE_CONF_TEMPLATE.
    """
    with step("Adding Angie metrics & console on :82"):
        metrics = """include /etc/angie/prometheus_all.conf;
server {
listen 82;
location /nginx_status {
stub_status on;
access_log off;
allow all;
}
auto_redirect on;
location /status/ {
api /status/;
api_config_files on;
}
location /console/ {
alias /usr/share/angie-console-light/html/;
index index.html;
}
location /console/api/ {
api /status/;
}
location =/p8s {
prometheus all;
}
}
"""
        write_file(Path("/etc/angie/metrics.conf"), metrics, 0o644)
def ensure_angie_runtime_perms():
    """Prepare /run/angie and its PID file with group-writable permissions.

    chown/chgrp to the 'angie' group is best-effort (the group may not
    exist yet); permission bits are always applied.
    """
    run_path = Path("/run/angie")
    pid_file = run_path / "angie.pid"
    run_path.mkdir(parents=True, exist_ok=True)
    # setgid bit so new files inherit the directory group.
    os.chmod(run_path, 0o2775)
    try:
        import grp

        os.chown(run_path, -1, grp.getgrnam("angie").gr_gid)
    except Exception:
        pass
    if not pid_file.exists():
        pid_file.touch()
    os.chmod(pid_file, 0o664)
    try:
        import grp, pwd

        angie_gid = grp.getgrnam("angie").gr_gid
        root_uid = pwd.getpwnam("root").pw_uid
        os.chown(pid_file, root_uid, angie_gid)
    except Exception:
        pass
def ensure_user_and_dirs():
    """Create the npm system user, the angie group, and all runtime directories.

    Sets up /data, /opt/npm, /run/angie and the Angie/nginx log trees, then
    hands ownership of the app/data trees to npm:npm.
    """
    with step("Creating npm user and app/log directories"):
        # `id -u npm` fails (CalledProcessError) when the user is missing.
        try:
            run(["id", "-u", "npm"])
        except subprocess.CalledProcessError:
            run(
                [
                    "useradd",
                    "--system",
                    "--home",
                    "/opt/npm",
                    "--create-home",
                    "--shell",
                    "/usr/sbin/nologin",
                    "npm",
                ]
            )
        # Create the angie group if absent, then add npm to it.
        rc = subprocess.run(
            ["getent", "group", "angie"], stdout=_devnull(), stderr=_devnull()
        ).returncode
        if rc != 0:
            run(["groupadd", "angie"])
        run(["usermod", "-aG", "angie", "npm"], check=False)
        dirs = [
            "/data",
            "/data/nginx",
            "/data/custom_ssl",
            "/data/logs",
            "/data/access",
            "/data/nginx/default_host",
            "/data/nginx/default_www",
            "/data/nginx/proxy_host",
            "/data/nginx/redirection_host",
            "/data/nginx/stream",
            "/data/nginx/dead_host",
            "/data/nginx/temp",
            "/data/letsencrypt-acme-challenge",
            "/opt/npm",
            "/opt/npm/frontend",
            "/opt/npm/global",
            "/run/nginx",
            "/run/angie",
            "/tmp/angie/body",
        ]
        for d in dirs:
            Path(d).mkdir(parents=True, exist_ok=True)
        # setgid dir so angie group members can manage runtime files.
        run(["chgrp", "-h", "angie", "/run/angie"], check=False)
        os.chmod("/run/angie", 0o2775)
        # Pre-create log files so permissions can be fixed before first start.
        Path("/var/log/angie").mkdir(parents=True, exist_ok=True)
        for f in ["access.log", "error.log"]:
            (Path("/var/log/angie") / f).touch(exist_ok=True)
        paths = ["/var/log/angie"] + glob("/var/log/angie/*.log")
        for pth in paths:
            run(["chgrp", "-h", "angie", pth], check=False)
        run(["chmod", "775", "/var/log/angie"], check=False)
        for pth in glob("/var/log/angie/*.log"):
            run(["chmod", "664", pth], check=False)
        Path("/var/log/nginx").mkdir(parents=True, exist_ok=True)
        Path("/var/log/nginx/error.log").touch(exist_ok=True)
        os.chmod("/var/log/nginx/error.log", 0o666)
        run(["chown", "-R", "npm:npm", "/opt/npm", "/data"])
    ensure_angie_runtime_perms()
def create_sudoers_for_npm():
    """Allow the npm service user to run Angie as root without a password."""
    with step("Configuring sudoers for npm -> angie"):
        content = """User_Alias NPMUSERS = npm
NPMUSERS ALL=(root) NOPASSWD: /usr/sbin/angie
"""
        path = Path("/etc/sudoers.d/npm")
        # 0440 is the mode sudo expects for drop-in files.
        write_file(path, content, 0o440)
        if shutil.which("visudo"):
            # Syntax-check the drop-in; best effort only (check=False).
            run(["visudo", "-cf", str(path)], check=False)
def adjust_nginx_like_paths_in_tree(root: Path):
    """Rewrite include paths and nginx.conf directives under *root* for Angie.

    Unreadable files are skipped; files are only rewritten when changed
    (except nginx.conf, which is always written back).
    """
    # Point bare and Angie-style include directives at /etc/nginx/conf.d.
    for conf in root.rglob("*.conf"):
        try:
            original = conf.read_text(encoding="utf-8")
        except Exception:
            continue
        updated = original.replace("include conf.d", "include /etc/nginx/conf.d")
        updated = updated.replace(
            "include /etc/angie/conf.d", "include /etc/nginx/conf.d"
        )
        if updated != original:
            conf.write_text(updated, encoding="utf-8")
    # Force root user, the Angie pid file, and comment out `daemon on;`.
    for nginx_conf in root.rglob("nginx.conf"):
        try:
            body = nginx_conf.read_text(encoding="utf-8")
        except Exception:
            continue
        body = re.sub(r"^user\s+\S+.*", "user root;", body, flags=re.M)
        body = re.sub(r"^pid\s+.*", "pid /run/angie/angie.pid;", body, flags=re.M)
        body = body.replace("daemon on;", "#daemon on;")
        nginx_conf.write_text(body, encoding="utf-8")
def install_node_from_nodesource(version: str):
    """Install Node.js from the NodeSource APT repository.

    Removes any distro-packaged Node.js first, runs the NodeSource setup
    script for the requested major version, installs nodejs, then verifies
    the installed version and ensures an npm binary exists.

    Raises:
        ValueError: when *version* does not resolve to a numeric major.
        RuntimeError: when no `node` binary is present after installation.
    """
    is_valid, resolved_version, warning = validate_nodejs_version(version)
    if warning:
        print(warning)
    # Only the major version matters for the NodeSource setup_NN.x script.
    match = re.match(r"(\d+)", resolved_version)
    if not match:
        raise ValueError(f"Invalid Node.js version: {version}")
    major_version = match.group(1)
    with step("Removing old Node.js installations"):
        run(
            ["apt-get", "remove", "-y", "nodejs", "npm", "libnode-dev", "libnode72"],
            check=False,
        )
        run(
            ["apt-get", "purge", "-y", "nodejs", "npm", "libnode-dev", "libnode72"],
            check=False,
        )
        run(["apt-get", "autoremove", "-y"], check=False)
        # Drop stale NodeSource repo/keyring files from previous installs.
        for f in [
            "/etc/apt/sources.list.d/nodesource.list",
            "/etc/apt/keyrings/nodesource.gpg",
            "/usr/share/keyrings/nodesource.gpg",
            "/etc/apt/trusted.gpg.d/nodesource.gpg",
        ]:
            if Path(f).exists():
                Path(f).unlink()
    with step(f"Installing Node.js v{major_version}.x from NodeSource repository"):
        apt_try_install(["ca-certificates", "curl", "gnupg", "apt-transport-https"])
        setup_url = f"https://deb.nodesource.com/setup_{major_version}.x"
        # delete=False: we only want the temp file's name; curl writes into it.
        with tempfile.NamedTemporaryFile(mode="w", suffix=".sh", delete=False) as tf:
            script_path = tf.name
        try:
            run(["curl", "-fsSL", setup_url, "-o", script_path])
            os.chmod(script_path, 0o755)
            if DEBUG:
                subprocess.run(["bash", script_path], check=True)
            else:
                run(["bash", script_path])
            run(["apt-get", "update", "-y"])
            run(["apt-get", "install", "-y", "nodejs"])
        finally:
            # Always clean up the downloaded setup script.
            if Path(script_path).exists():
                os.unlink(script_path)
    if shutil.which("node"):
        node_ver = run_out(["node", "--version"], check=False).strip()
        installed_major = re.match(r"v?(\d+)", node_ver)
        # NodeSource may silently fall back to the distro version when the
        # requested major is not packaged for this distribution.
        if installed_major and installed_major.group(1) != major_version:
            print(f"⚠ WARNING: Requested Node.js v{major_version}.x but got {node_ver}")
            print(
                f" This likely means NodeSource doesn't support your distribution yet."
            )
        if shutil.which("npm"):
            npm_ver = run_out(["npm", "--version"], check=False).strip()
            print(f" Node.js: {node_ver}")
            print(f" npm: {npm_ver}")
        else:
            print(f"Node.js: {node_ver}")
            # npm missing: try APT, then corepack as a last resort.
            apt_try_install(["npm"])
            if shutil.which("npm"):
                npm_ver = run_out(["npm", "--version"], check=False).strip()
                print(f"npm: {npm_ver}")
            if not shutil.which("npm"):
                run(["corepack", "enable"], check=False)
                if shutil.which("npm"):
                    npm_ver = run_out(["npm", "--version"], check=False).strip()
                    print(f"\n✔ npm {npm_ver} installed successfully")
                else:
                    print(f"✖ npm could not be installed - manual intervention required")
    else:
        print("✖ Node.js installation failed")
        raise RuntimeError("Node.js installation failed")
def install_yarn():
    """Ensure a yarn (or yarnpkg) binary is available, installing via APT if needed."""
    # Already present under either name -> nothing to do.
    if shutil.which("yarn") or shutil.which("yarnpkg"):
        return
    apt_try_install(["yarn"])
    # Debian ships the tool as "yarnpkg"; try that when "yarn" did not appear.
    if not (shutil.which("yarn") or shutil.which("yarnpkg")):
        apt_try_install(["yarnpkg"])
    # Expose yarnpkg under the canonical "yarn" name.
    yarn_bin = Path("/usr/bin/yarn")
    yarnpkg_bin = Path("/usr/bin/yarnpkg")
    if yarnpkg_bin.exists() and not yarn_bin.exists():
        os.symlink(yarnpkg_bin, yarn_bin)
def _is_ubuntu_wo_distutils() -> bool:
    """Return True on Ubuntu 24.04 or newer (whose Python ships without distutils).

    Reads the module-level OSREL mapping; any parsing failure is treated as
    "not affected" and yields False.
    """
    try:
        dist = (OSREL.get("ID", "") or "").lower()
        ver = (OSREL.get("VERSION_ID", "") or "").strip()

        def _vers(v: str) -> tuple[int, int]:
            # BUGFIX: use the parameter instead of closing over `ver`
            # (the old helper ignored its argument entirely).
            # "24.04" -> (24, 4); pad so a bare "24" still yields two parts.
            parts = (v.split(".") + ["0", "0"])[:2]
            return (int(parts[0]), int(parts[1]))

        return dist == "ubuntu" and _vers(ver) >= (24, 4)
    except Exception:
        return False
def _prepare_sass(frontend_dir: Path):
    """Adjust package.json so the sass toolchain builds on this host.

    Either swaps node-sass for dart-sass (USE_DART_SASS=1, or unset on
    Ubuntu 24.04+) or pins node-sass/node-gyp to versions that compile
    with modern Node. No-op when package.json is missing, unparsable,
    or does not reference node-sass.
    """
    import json, re, os

    package_json = frontend_dir / "package.json"
    if not package_json.exists():
        return
    try:
        data = json.loads(package_json.read_text(encoding="utf-8"))
    except Exception:
        return

    runtime_deps = data.get("dependencies", {}) or {}
    dev_deps = data.get("devDependencies", {}) or {}
    if "node-sass" not in runtime_deps and "node-sass" not in dev_deps:
        return  # nothing sass-related to fix

    env_flag = os.environ.get("USE_DART_SASS", "").strip()
    use_dart = env_flag == "1" or (env_flag == "" and _is_ubuntu_wo_distutils())

    data.setdefault("dependencies", {})
    data.setdefault("devDependencies", {})
    if use_dart:
        # Replace native node-sass with the pure-JS dart-sass package.
        for section in ("dependencies", "devDependencies"):
            data[section].pop("node-sass", None)
        already_has_sass = (
            "sass" in data["dependencies"] or "sass" in data["devDependencies"]
        )
        if not already_has_sass:
            data["devDependencies"]["sass"] = "^1.77.0"
        # Rewrite npm scripts to invoke the dart-sass CLI instead.
        data["scripts"] = {
            name: re.sub(r"\bnode-sass\b", "sass", cmd or "")
            for name, cmd in (data.get("scripts") or {}).items()
        }
        if env_flag == "":
            os.environ["USE_DART_SASS"] = "1"
    else:
        # Pin node-sass (and node-gyp via resolutions) to buildable releases.
        pinned = "^9.0.0"
        if "node-sass" in data["dependencies"]:
            data["dependencies"]["node-sass"] = pinned
        else:
            data["devDependencies"]["node-sass"] = pinned
        resolutions = data.get("resolutions") or {}
        resolutions["node-gyp"] = "^10.0.0"
        resolutions["node-sass"] = "^9.0.0"
        data["resolutions"] = resolutions
        # Point node-sass at upstream prebuilt binaries.
        os.environ["npm_config_node_sass_binary_site"] = (
            "https://github.com/sass/node-sass/releases/download"
        )

    package_json.write_text(
        json.dumps(data, indent=2, ensure_ascii=False) + "\n", encoding="utf-8"
    )
def _build_frontend(src_frontend: Path, dest_frontend: Path):
    """Build the NPM frontend with Yarn and copy the artifacts into place.

    Locates (or installs) a working Yarn, runs `yarn install` / `yarn build`
    in *src_frontend*, and copies dist/ (plus app-images/ if present) into
    *dest_frontend*.

    Raises:
        RuntimeError: when no usable Yarn can be found or installed.
    """

    def _semver(s: str) -> bool:
        # A "good" yarn answers --version with a plain dotted version string.
        return bool(re.match(r"^\d+(?:\.\d+){1,3}$", (s or "").strip()))

    def _good_yarn(argv: list[str]) -> bool:
        # True when `argv --version` produces a semver-looking string.
        try:
            v = (run_out(argv + ["--version"], check=False) or "").strip()
            return _semver(v)
        except Exception:
            return False

    def _pick_yarn_cmd() -> list[str] | None:
        # Returns the argv prefix to invoke yarn with, or None if unavailable.
        # Try direct yarn/yarnpkg first
        for c in (["yarn"], ["yarnpkg"]):
            if shutil.which(c[0]) and _good_yarn(c):
                return c
        # If npm exists, try to use it to run yarn
        if shutil.which("npm"):
            npm_ver = (run_out(["npm", "--version"], check=False) or "").strip()
            if npm_ver:
                # Try npm exec yarn@stable
                if _good_yarn(["npm", "exec", "--yes", "yarn@stable", "--"]):
                    return ["npm", "exec", "--yes", "yarn@stable", "--"]
        # Try npx as fallback
        if shutil.which("npx"):
            npx_ver = (run_out(["npx", "--version"], check=False) or "").strip()
            if npx_ver:
                if _good_yarn(["npx", "-y", "yarn@stable"]):
                    return ["npx", "-y", "yarn@stable"]
        return None

    def _ensure_yarn_installed():
        """Install yarn globally using npm or corepack."""
        with step("Installing yarn globally"):
            if not shutil.which("npm"):
                try:
                    apt_try_install(["npm"])
                except Exception:
                    run(["apt-get", "update"], check=False)
                    run(["apt-get", "install", "-y", "npm"])
            # Try corepack first (modern way)
            if shutil.which("corepack"):
                try:
                    run(["corepack", "enable"])
                    run(["corepack", "prepare", "yarn@stable", "--activate"])
                    if shutil.which("yarn"):
                        return
                except Exception:
                    pass
            # Fallback to npm install
            try:
                run(["npm", "install", "-g", "yarn@latest"])
            except Exception:
                # Last resort - try with --force
                run(["npm", "install", "-g", "--force", "yarn@latest"], check=False)

    yarn_cmd = _pick_yarn_cmd()
    if not yarn_cmd:
        _ensure_yarn_installed()
        yarn_cmd = _pick_yarn_cmd()
    if not yarn_cmd:
        raise RuntimeError(
            "Unable to detect or install a valid Yarn.\n"
            "Try manually: npm install -g yarn@latest"
        )
    with step("Installing frontend dependencies (yarn)"):
        # Dev deps are needed for the build toolchain.
        os.environ["NODE_ENV"] = "development"
        os.chdir(src_frontend)
        _prepare_sass(src_frontend)
        # Get and create cache directory
        try:
            cache_dir = (
                run_out(yarn_cmd + ["cache", "dir"], check=False) or ""
            ).strip()
            if cache_dir and not Path(cache_dir).exists():
                Path(cache_dir).mkdir(parents=True, exist_ok=True)
        except Exception:
            pass
        # Clean cache
        try:
            run(yarn_cmd + ["cache", "clean"], check=False)
        except Exception:
            pass
        install_cmd = yarn_cmd + ["install"]
        # Strip a trailing "--" separator (left over from npm exec wrappers).
        if install_cmd[-1] == "--":
            install_cmd = install_cmd[:-1]
        if DEBUG:
            print(f"Running: {' '.join(install_cmd)}")
        try:
            run(install_cmd)
        except subprocess.CalledProcessError as e:
            # Retry once with relaxed network/engine constraints.
            print(
                f"\n✖ Yarn install failed. Trying with --network-timeout and --ignore-engines..."
            )
            retry_cmd = install_cmd + [
                "--network-timeout",
                "100000",
                "--ignore-engines",
            ]
            run(retry_cmd)
    with step("Building frontend (yarn build)"):
        env = os.environ.copy()
        # Older webpack needs the legacy OpenSSL provider on modern Node.
        env["NODE_OPTIONS"] = "--openssl-legacy-provider"
        build_cmd = yarn_cmd + ["build"]
        if build_cmd[-1] == "--":
            build_cmd = build_cmd[:-1]
        try:
            run(build_cmd, env=env)
        except subprocess.CalledProcessError:
            print("\n⚠ Build failed with legacy provider, retrying without...")
            env.pop("NODE_OPTIONS", None)
            run(build_cmd, env=env)
    with step("Copying frontend artifacts"):
        shutil.copytree(src_frontend / "dist", dest_frontend, dirs_exist_ok=True)
        if (src_frontend / "app-images").exists():
            shutil.copytree(
                src_frontend / "app-images",
                dest_frontend / "images",
                dirs_exist_ok=True,
            )
def patch_npm_backend_commands():
    """Rewrite bare `logrotate`/`nginx` command invocations in the NPM backend
    sources to their absolute (wrapper) paths.

    Best-effort: missing or unreadable candidate files are skipped, and files
    are only written back when something actually changed.
    """
    candidates = [
        Path("/opt/npm/lib/utils.js"),
        Path("/opt/npm/utils.js"),
        Path("/opt/npm/lib/commands.js"),
    ]
    for p in candidates:
        if not p.exists():
            continue
        try:
            txt = p.read_text(encoding="utf-8")
        except Exception:
            continue
        # BUGFIX: only rewrite names that are NOT already part of a path.
        # The previous patterns (bare \blogrotate\b and (?<!/usr/sbin/)\bnginx\b)
        # also mangled path strings, e.g. "/etc/nginx/..." became
        # "/etc//usr/sbin/nginx/..." and "/etc/logrotate.d" was corrupted.
        new = re.sub(r"(?<!/)\blogrotate\b", "/usr/local/bin/logrotate-npm", txt)
        new = re.sub(r"(?<!/)\bnginx\b", "/usr/sbin/nginx", new)
        if new != txt:
            p.write_text(new, encoding="utf-8")
def deploy_npm_app(npm_version_override: str | None):
    """Download an NPM release tarball and deploy it onto this host.

    Fetches the requested (or latest) release, rewrites its configs for
    Angie, installs web root/backend/frontend, and builds the frontend.

    Args:
        npm_version_override: exact release to install, or None for latest.

    Returns:
        The deployed NPM version string.
    """
    version = github_latest_release_tag(
        "NginxProxyManager/nginx-proxy-manager", npm_version_override
    )
    url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
    tmp = Path(tempfile.mkdtemp(prefix="npm-angie-"))
    src = download_extract_tar_gz(url, tmp)
    with step("Setting version numbers in package.json"):
        # Upstream tarballs ship with a 0.0.0 placeholder version.
        for pkg in ["backend/package.json", "frontend/package.json"]:
            pj = src / pkg
            txt = pj.read_text(encoding="utf-8")
            txt = re.sub(r'"version":\s*"0\.0\.0"', f'"version": "{version}"', txt)
            pj.write_text(txt, encoding="utf-8")
    with step("Fixing include paths / nginx.conf"):
        adjust_nginx_like_paths_in_tree(src)
    with step("Copying web root and configs to /etc/nginx"):
        Path("/var/www/html").mkdir(parents=True, exist_ok=True)
        shutil.copytree(
            src / "docker" / "rootfs" / "var" / "www" / "html",
            "/var/www/html",
            dirs_exist_ok=True,
        )
        shutil.copytree(
            src / "docker" / "rootfs" / "etc" / "nginx",
            "/etc/nginx",
            dirs_exist_ok=True,
        )
        # dev.conf is for the upstream docker dev setup; drop it here.
        devconf = Path("/etc/nginx/conf.d/dev.conf")
        if devconf.exists():
            devconf.unlink()
        shutil.copy2(
            src / "docker" / "rootfs" / "etc" / "logrotate.d" / "nginx-proxy-manager",
            "/etc/logrotate.d/nginx-proxy-manager",
        )
        # The backend expects an nginx-style conf path; point it at Angie's.
        Path("/etc/nginx/conf").mkdir(parents=True, exist_ok=True)
        if not Path("/etc/nginx/conf/nginx.conf").exists():
            os.symlink("/etc/angie/angie.conf", "/etc/nginx/conf/nginx.conf")
    with step("Copying backend/global to /opt/npm"):
        shutil.copytree(src / "backend", "/opt/npm", dirs_exist_ok=True)
        Path("/opt/npm/frontend/images").mkdir(parents=True, exist_ok=True)
        shutil.copytree(src / "global", "/opt/npm/global", dirs_exist_ok=True)
    with step("Creating SQLite config if missing"):
        # Only seed the DB config on first install; never clobber user config.
        cfg = Path("/opt/npm/config/production.json")
        if not cfg.exists():
            write_file(
                cfg,
                json.dumps(
                    {
                        "database": {
                            "engine": "knex-native",
                            "knex": {
                                "client": "sqlite3",
                                "connection": {"filename": "/data/database.sqlite"},
                            },
                        }
                    },
                    indent=2,
                ),
            )
    _build_frontend(src / "frontend", Path("/opt/npm/frontend"))
    with step("Installing backend dependencies (yarn)"):
        os.chdir("/opt/npm")
        run(["yarn", "install"])
    with step("Normalizing directories ownership"):
        run(["chown", "-R", "npm:npm", "/opt/npm", "/data"])
    with step("Fixing root in production.conf if needed"):
        for p in [
            Path("/etc/angie/conf.d/production.conf"),
            Path("/etc/nginx/conf.d/production.conf"),
        ]:
            if p.exists():
                txt = p.read_text(encoding="utf-8")
                txt2 = re.sub(
                    r"^\s*root\s+.*?;\s*$",
                    " root /opt/npm/frontend;",
                    txt,
                    flags=re.M,
                )
                if txt2 != txt:
                    p.write_text(txt2, encoding="utf-8")
    with step("Preparing include/ip_ranges.conf (owned by npm)"):
        # The backend rewrites this file at runtime, so npm must own it.
        include_dir = Path("/etc/nginx/conf.d/include")
        include_dir.mkdir(parents=True, exist_ok=True)
        ipranges = include_dir / "ip_ranges.conf"
        if not ipranges.exists():
            write_file(ipranges, "# populated by NPM (IPv4 only)\n")
        try:
            run(["chown", "npm:npm", str(include_dir), str(ipranges)])
        except Exception:
            pass
        os.chmod(ipranges, 0o664)
    patch_npm_backend_commands()
    return version
def strip_ipv6_listens(paths):
    """Drop `listen [::]:PORT ...;` lines from every *.conf under *paths*."""
    with step("Removing IPv6 listen entries from configs (--enable-ipv6 not set)"):
        conf_files = [f for p in paths for f in Path(p).rglob("*.conf")]
        for conf in conf_files:
            try:
                original = conf.read_text(encoding="utf-8")
            except Exception:
                continue
            # Remove whole IPv6 listen lines, then collapse leftover blank runs.
            cleaned = re.sub(r"(?m)^\s*listen\s+\[::\]:\d+[^;]*;\s*$", "", original)
            cleaned = re.sub(r"\n{3,}", "\n\n", cleaned)
            if cleaned != original:
                conf.write_text(cleaned, encoding="utf-8")
def install_logrotate_for_data_logs():
    """Install a daily logrotate policy for the Angie log files."""
    with step("Installing logrotate policy for /var/log/angie (*.log)"):
        conf_path = Path("/etc/logrotate.d/angie")
        # copytruncate + USR1 keeps Angie writing to a fresh file after rotation.
        content = """/var/log/angie/*.log {
daily
rotate 1
compress
missingok
notifempty
copytruncate
create 0640 root root
su root root
postrotate
if [ -f /run/angie/angie.pid ]; then
kill -USR1 $(cat /run/angie/angie.pid)
fi
endscript
}
"""
        write_file(conf_path, content, 0o644)
        # Dry-run (-d) to surface syntax errors early; best effort only.
        try:
            run(["/usr/sbin/logrotate", "-d", str(conf_path)], check=False)
        except Exception:
            pass
def fix_logrotate_permissions_and_wrapper():
    """Let the npm user run logrotate: fix state-file access and install a wrapper.

    Creates a dedicated state file under /opt/npm/var and a
    /usr/local/bin/logrotate-npm wrapper that uses it, so the backend does
    not need write access to the system-wide logrotate state.
    """
    with step("Fixing logrotate state-file permissions and helper"):
        system_status = Path("/var/lib/logrotate/status")
        if system_status.exists():
            try:
                # Prefer an ACL grant; fall back to group+mode when setfacl
                # is not installed (FileNotFoundError from run()).
                run(["setfacl", "-m", "u:npm:rw", str(system_status)], check=False)
            except FileNotFoundError:
                try:
                    run(["chgrp", "npm", str(system_status)], check=False)
                    os.chmod(system_status, 0o664)
                except Exception:
                    pass
        # npm-owned private state file used by the wrapper below.
        state_dir = Path("/opt/npm/var")
        state_dir.mkdir(parents=True, exist_ok=True)
        state_file = state_dir / "logrotate.state"
        if not state_file.exists():
            state_file.touch()
        os.chmod(state_file, 0o664)
        try:
            import pwd, grp

            uid = pwd.getpwnam("npm").pw_uid
            gid = grp.getgrnam("npm").gr_gid
            os.chown(state_dir, uid, gid)
            os.chown(state_file, uid, gid)
        except Exception:
            pass
        helper = Path("/usr/local/bin/logrotate-npm")
        helper_content = f"""#!/bin/sh
# Logrotate wrapper for npm user
exec /usr/sbin/logrotate -s {state_file} "$@"
"""
        write_file(helper, helper_content, 0o755)
        # Group access to /var/lib/logrotate via the adm group (best effort).
        logrotate_dir = Path("/var/lib/logrotate")
        if logrotate_dir.exists():
            try:
                run(["usermod", "-aG", "adm", "npm"], check=False)
                run(["chgrp", "adm", str(logrotate_dir)], check=False)
                os.chmod(logrotate_dir, 0o775)
            except Exception as e:
                print(f"⚠ Warning: could not fix {logrotate_dir} permissions: {e}")
def create_systemd_units(ipv6_enabled: bool):
    """Write systemd units for Angie and the NPM backend, then enable/start them.

    Args:
        ipv6_enabled: when False, DISABLE_IPV6=true is exported to the backend.
    """
    with step("Creating and starting systemd services (angie, npm)"):
        unit_lines = [
            "[Unit]",
            "Description=Nginx Proxy Manager (backend)",
            "After=network.target angie.service",
            "Wants=angie.service",
            "",
            "[Service]",
            "User=npm",
            "Group=npm",
            "WorkingDirectory=/opt/npm",
            "Environment=NODE_ENV=production",
        ]
        if not ipv6_enabled:
            unit_lines.append("Environment=DISABLE_IPV6=true")
        unit_lines += [
            "ExecStart=/usr/bin/node /opt/npm/index.js",
            "Restart=on-failure",
            "RestartSec=5",
            "",
            "[Install]",
            "WantedBy=multi-user.target",
            "",
        ]
        write_file(
            Path("/etc/systemd/system/npm.service"), "\n".join(unit_lines), 0o644
        )
        write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644)
        run(["systemctl", "daemon-reload"])
        run(["systemctl", "enable", "--now", "angie.service"])
        # Config test and reload are best effort; failures are reported by
        # the services themselves.
        run(["/usr/sbin/nginx", "-t"], check=False)
        run(["systemctl", "enable", "--now", "npm.service"])
        run(["angie", "-s", "reload"], check=False)
def gather_versions(npm_app_version: str):
    """Collect (ip, angie, node, yarn, npm-app) version strings for reporting."""

    def _out(cmd):
        # Best-effort command output; empty string on failure.
        return run_out(cmd, check=False) or ""

    # First address reported by `hostname -I`, or "" when none is available.
    addresses = _out(["hostname", "-I"]).split()
    ip = addresses[0] if addresses else ""

    # Prefer the version parsed from the `angie -v` / `angie -V` banner;
    # fall back to the dpkg package version, then to the raw banner text.
    banner = _out(["angie", "-v"]) + "\n" + _out(["angie", "-V"])
    found = re.search(r"(?i)\bangie\s*/\s*([0-9]+(?:\.[0-9]+)+)\b", banner)
    if not found:
        pkg_ver = _out(["dpkg-query", "-W", "-f=${Version}", "angie"]).strip()
        found = re.search(r"([0-9]+(?:\.[0-9]+)+)", pkg_ver)
    angie_v = found.group(1) if found else (banner.strip() or "")

    node_v = _out(["node", "-v"]).strip().lstrip("v")
    # yarnpkg is only consulted when plain yarn yields nothing.
    yarn_v = _out(["yarn", "-v"]).strip() or _out(["yarnpkg", "-v"]).strip()
    return ip, angie_v, node_v, yarn_v, npm_app_version
def update_motd(enabled: bool, info, ipv6_enabled: bool):
    """Write (or refresh) the NPM/Angie banner in the system MOTD.

    Args:
        enabled: no-op when False (--motd no).
        info: (ip, angie, node, yarn, npm) version tuple from gather_versions().
        ipv6_enabled: selects the IPv6 status line in the banner.
    """
    if not enabled:
        return
    ip, angie_v, node_v, yarn_v, npm_v = info
    ipv6_line = (
        "IPv6: enabled (configs untouched)."
        if ipv6_enabled
        else "IPv6: disabled in resolvers and conf."
    )
    creds = "Default login: admin@example.com / changeme"
    text = f"""
################################ NPM / ANGIE ################################
OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})
Nginx Proxy Manager: http://{ip}:81
Angie & Prometheus stats: http://{ip}:82/console | http://{ip}:82/p8s
Angie: {angie_v} (conf: /etc/angie -> /etc/nginx, reload: angie -s reload)
Node.js: v{node_v} Yarn: v{yarn_v}
NPM app: v{npm_v}
Paths: app=/opt/npm data=/data cache=/var/lib/angie/cache
{ipv6_line}
{creds}
###########################################################################
"""
    # Prefer a drop-in under /etc/motd.d when the distro supports it.
    motd_d = Path("/etc/motd.d")
    if motd_d.exists():
        write_file(motd_d / "10-npm-angie", text.strip() + "\n", 0o644)
    else:
        # Otherwise replace (or append) our banner inside /etc/motd in place.
        motd = Path("/etc/motd")
        existing = motd.read_text(encoding="utf-8") if motd.exists() else ""
        pattern = re.compile(
            r"################################ NPM / ANGIE ################################.*?###########################################################################\n",
            re.S,
        )
        if pattern.search(existing):
            content = pattern.sub(text.strip() + "\n", existing)
        else:
            content = (
                (existing.rstrip() + "\n\n" + text.strip() + "\n")
                if existing
                else (text.strip() + "\n")
            )
        write_file(motd, content, 0o644)
def print_summary(info, ipv6_enabled, dark_enabled, tp_theme, update_mode):
    """Print the end-of-run summary box to stdout.

    Args:
        info: (ip, angie, node, yarn, npm) version tuple from gather_versions().
        ipv6_enabled: whether IPv6 entries were kept in the configs.
        dark_enabled: currently unused here; kept for call-site compatibility.
        tp_theme: Theme.Park theme name, or None when no theme was applied.
        update_mode: True for --update runs (hides the default credentials).
    """
    ip, angie_v, node_v, yarn_v, npm_v = info
    print("\n====================== SUMMARY ======================")
    print(f"OS: {OSREL['PRETTY']} ({OSREL['ID']} {OSREL['VERSION_ID']})")
    print(f"Mode: {'UPDATE' if update_mode else 'INSTALL'}")
    print(f"NPM panel address: http://{ip}:81")
    print(f"Angie & Prometheus stats: http://{ip}:82/console | http://{ip}:82/p8s ")
    print(f"Angie: v{angie_v}")
    print(f"Node.js: v{node_v}")
    print(f"Yarn: v{yarn_v}")
    print(f"NPM (app): v{npm_v}")
    print(
        f"IPv6: {'ENABLED' if ipv6_enabled else 'DISABLED (in configs too)'}"
    )
    if tp_theme:
        print(f"Custom theme: {tp_theme}")
    else:
        print(f"Custom theme: DISABLED")
    print(
        "Paths: /opt/npm (app), /data (data), /etc/angie (conf), /var/log/angie (logs)"
    )
    print("Services: systemctl status angie.service / npm.service")
    # Credentials only matter on a fresh install.
    if not update_mode:
        print("Default login: Email: admin@example.com Password: changeme")
    print("Test config: /usr/sbin/angie -t")
    print("==========================================================\n")
# ========== UPDATE-ONLY ==========
def update_only(
    node_pkg: str,
    node_version: str | None,
    npm_version_override: str | None,
    apply_dark: bool,
    dark_env: dict,
    ipv6_enabled: bool,
):
    """Upgrade an existing installation without reconfiguring Angie.

    Refreshes packages, backs up the current state, rebuilds the NPM
    frontend/backend from the requested release, and restarts services.

    Args:
        node_pkg: APT Node.js package name (kept for interface compatibility).
        node_version: requested Node.js version, persisted for future runs.
        npm_version_override: force a specific NPM release instead of latest.
        apply_dark: re-apply the Theme.Park CSS after rebuilding.
        dark_env: keyword arguments forwarded to apply_dark_mode().
        ipv6_enabled: propagated to the systemd units and saved config.

    Returns:
        The NPM release version that was installed.
    """
    apt_update_upgrade()
    install_yarn()
    version = github_latest_release_tag(
        "NginxProxyManager/nginx-proxy-manager", npm_version_override
    )
    url = f"https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/refs/tags/v{version}"
    tmp = Path(tempfile.mkdtemp(prefix="npm-update-"))
    src = download_extract_tar_gz(url, tmp)
    with step("Setting version in package.json (update)"):
        # Upstream tarballs ship with a 0.0.0 placeholder version.
        for pkg in ["backend/package.json", "frontend/package.json"]:
            pj = src / pkg
            txt = pj.read_text(encoding="utf-8")
            txt = re.sub(r'"version":\s*"0\.0\.0"', f'"version": "{version}"', txt)
            pj.write_text(txt, encoding="utf-8")
    # ========== BACKUP BEFORE UPDATE ==========
    timestamp = time.strftime("%Y%m%d-%H%M%S")
    backup_dir = Path(f"/data/backups/npm-backup-{timestamp}")
    # BUGFIX: initialize before the try block. Previously `backups` was only
    # assigned inside the try, so a backup failure followed by "continue
    # anyway" raised NameError at the `len(backups) > 3` check below.
    backups = []
    with step("Creating full backup before update"):
        backup_dir.parent.mkdir(parents=True, exist_ok=True)
        try:
            if Path("/opt/npm").exists():
                shutil.copytree("/opt/npm", backup_dir / "opt_npm", dirs_exist_ok=True)
            if Path("/data/database.sqlite").exists():
                shutil.copy2("/data/database.sqlite", backup_dir / "database.sqlite")
            if Path("/data/letsencrypt").exists():
                shutil.copytree("/data/letsencrypt", backup_dir / "letsencrypt", dirs_exist_ok=True)
            if Path("/data/nginx").exists():
                shutil.copytree("/data/nginx", backup_dir / "nginx", dirs_exist_ok=True)
            backup_info = {
                "backup_date": timestamp,
                "npm_version": "current",
                "update_to_version": version,
                "backup_path": str(backup_dir),
            }
            (backup_dir / "backup_info.json").write_text(json.dumps(backup_info, indent=2))
            # Keep only the three most recent backups.
            backups = sorted(backup_dir.parent.glob("npm-backup-*"))
            if len(backups) > 3:
                for old_backup in backups[:-3]:
                    shutil.rmtree(old_backup, ignore_errors=True)
        except Exception as e:
            # Backup failure is not fatal, but ask the operator first.
            print(f"⚠ Warning: Backup failed: {e}")
            print(" Continue update anyway? [y/N]: ", end="", flush=True)
            response = input().strip().lower()
            if response not in ["y", "yes"]:
                print("Update cancelled.")
                sys.exit(1)
        print(f" Backup location: {backup_dir}")
        if len(backups) > 3:
            print(f" Removed {len(backups) - 3} old backup(s)")
    # ========== END BACKUP ==========
    _build_frontend(src / "frontend", Path("/opt/npm/frontend"))
    with step("Updating backend without overwriting config/"):
        backup_cfg = Path("/tmp/npm-config-backup")
        if backup_cfg.exists():
            shutil.rmtree(backup_cfg)
        if Path("/opt/npm/config").exists():
            shutil.copytree("/opt/npm/config", backup_cfg, dirs_exist_ok=True)
        # Wipe everything except the freshly built frontend and user config.
        for item in Path("/opt/npm").glob("*"):
            if item.name in ("frontend", "config"):
                continue
            if item.is_dir():
                shutil.rmtree(item)
            else:
                item.unlink()
        shutil.copytree(src / "backend", "/opt/npm", dirs_exist_ok=True)
        shutil.copytree(src / "global", "/opt/npm/global", dirs_exist_ok=True)
        Path("/opt/npm/config").mkdir(parents=True, exist_ok=True)
        if backup_cfg.exists():
            shutil.copytree(backup_cfg, "/opt/npm/config", dirs_exist_ok=True)
            shutil.rmtree(backup_cfg, ignore_errors=True)
    with step("Installing backend dependencies after update"):
        os.chdir("/opt/npm")
        run(["yarn", "install"])
    patch_npm_backend_commands()
    create_systemd_units(ipv6_enabled=ipv6_enabled)
    with step("Setting owners"):
        run(["chown", "-R", "npm:npm", "/opt/npm"])
    if apply_dark:
        apply_dark_mode(**dark_env)
    # Persist the effective settings so the next --update run reuses them.
    save_installer_config({
        "ipv6_enabled": ipv6_enabled,
        "tp_theme": dark_env.get("TP_THEME") if apply_dark else None,
        "tp_domain": dark_env.get("TP_DOMAIN", TP_DOMAIN),
        "tp_scheme": dark_env.get("TP_SCHEME", TP_SCHEME),
        "tp_community_theme": dark_env.get("TP_COMMUNITY_THEME", TP_COMMUNITY_THEME),
        "node_version": node_version,
        "npm_version": version,
    })
    with step("Restarting services after update"):
        run(["systemctl", "restart", "angie.service"], check=False)
        run(["systemctl", "restart", "npm.service"], check=False)
    return version
# ========== CUSTOM THEME ==========
def apply_dark_mode(
    APP_FILEPATH="/opt/npm/frontend",
    TP_DOMAIN=None,
    TP_COMMUNITY_THEME=None,
    TP_SCHEME=None,
    TP_THEME=None,
):
    """Inject Theme.Park CSS links into the frontend HTML files.

    Args:
        APP_FILEPATH: frontend directory; common alternates are probed when
            it does not exist.
        TP_DOMAIN: Theme.Park host (defaults to theme-park.dev).
        TP_COMMUNITY_THEME: "true" selects the community-theme CSS path.
        TP_SCHEME: http/https (defaults to https).
        TP_THEME: theme name (defaults to organizr).
    """
    if DEBUG:
        print("--------------------------------------")
        print("| Nginx Proxy Manager theme.park Mod |")
        print("--------------------------------------")
    # locate frontend
    if not Path(APP_FILEPATH).exists():
        if Path("/app/frontend").exists():
            APP_FILEPATH = "/app/frontend"
        elif Path("/opt/nginx-proxy-manager/frontend").exists():
            APP_FILEPATH = "/opt/nginx-proxy-manager/frontend"
    if not TP_DOMAIN or TP_DOMAIN.strip() == "":
        if DEBUG:
            print("No domain set, defaulting to theme-park.dev")
        TP_DOMAIN = "theme-park.dev"
    if not TP_SCHEME or TP_SCHEME.strip() == "":
        TP_SCHEME = "https"
    THEME_TYPE = (
        "community-theme-options"
        if (str(TP_COMMUNITY_THEME).lower() == "true")
        else "theme-options"
    )
    if not TP_THEME or TP_THEME.strip() == "":
        TP_THEME = "organizr"
    # github.io mirrors serve the assets under a /theme.park subpath.
    if "github.io" in TP_DOMAIN:
        TP_DOMAIN = f"{TP_DOMAIN}/theme.park"
    if DEBUG:
        print(
            "Variables set:\n"
            f"'APP_FILEPATH'={APP_FILEPATH}\n"
            f"'TP_DOMAIN'={TP_DOMAIN}\n"
            f"'TP_COMMUNITY_THEME'={TP_COMMUNITY_THEME}\n"
            f"'TP_SCHEME'={TP_SCHEME}\n"
            f"'TP_THEME'={TP_THEME}\n"
        )
    base_href = f"{TP_SCHEME}://{TP_DOMAIN}/css/base/nginx-proxy-manager/nginx-proxy-manager-base.css"
    theme_href = f"{TP_SCHEME}://{TP_DOMAIN}/css/{THEME_TYPE}/{TP_THEME}.css"
    with step("Injecting Theme.Park CSS into HTML"):
        htmls = list(Path(APP_FILEPATH).rglob("*.html"))
        for path in htmls:
            html = path.read_text(encoding="utf-8")
            # The base-CSS link doubles as the "already patched" marker.
            # NOTE(review): re-running with a different TP_THEME therefore
            # leaves the old theme link in place — confirm this is intended.
            if base_href not in html:
                html = re.sub(
                    r"</head>",
                    f"<link rel='stylesheet' href='{base_href}'></head> ",
                    html,
                    flags=re.I,
                )
                html = re.sub(
                    r"</head>",
                    f"<link rel='stylesheet' href='{theme_href}'></head> ",
                    html,
                    flags=re.I,
                )
                path.write_text(html, encoding="utf-8")
                if DEBUG:
                    print(f"Patched: {path}")
# ========== MAIN ==========
def main():
    """CLI entry point: parse arguments, then run the install or update flow."""
    global DEBUG
    ensure_root()
    parser = argparse.ArgumentParser(
        description="Install/upgrade NPM on Angie (Debian 11 + / Ubuntu 20.04 +).",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "--nodejs-pkg",
        default="nodejs",
        help="APT Node.js package name (e.g. nodejs, nodejs-18).",
    )
    parser.add_argument(
        "--node-version",
        default=None,
        help=f"Install Node.js from NodeSource repo (e.g. 'latest', '21', '20', '18'). "
        f"Maximum supported: v{MAX_NODEJS_VERSION}. Overrides --nodejs-pkg.",
    )
    parser.add_argument(
        "--npm-version",
        default=None,
        help="Force NPM app version (e.g. 2.12.6). Default: latest release.",
    )
    parser.add_argument(
        "--motd",
        choices=["yes", "no"],
        default="yes",
        help="Update MOTD after completion.",
    )
    parser.add_argument(
        "--enable-ipv6",
        action="store_true",
        help="Do not strip IPv6 from configs/resolvers (keep IPv6).",
    )
    parser.add_argument(
        "--update",
        action="store_true",
        help="Update mode: upgrade packages + rebuild frontend/backend without reconfiguring Angie.",
    )
    parser.add_argument(
        "--dark-mode",
        action="store_true",
        help=f"Enable dark theme (default: {TP_DEFAULT_THEME} from theme-park.dev)",
    )
    parser.add_argument(
        "--tp-theme",
        default=None,
        help="Enable dark theme with specific theme name (e.g. nord, dracula, plex). Implies --dark-mode.",
    )
    parser.add_argument(
        "--debug", action="store_true", help="Show detailed logs and progress."
    )
    args = parser.parse_args()
    DEBUG = args.debug
    print("\n================== NPM + ANGIE installer ==================")
    print(f"Repository: https://gitea.linuxiarz.pl/gru/npm-angie-auto-install")
    print(f"Script description: Auto-installer with Angie + Node.js auto-setup")
    print(f"")
    print(f"System Information:")
    print(f" OS: {OSREL['PRETTY']}")
    print(f" Distribution: {OSREL['ID']} {OSREL['VERSION_ID']}")
    print(f" Codename: {OSREL.get('CODENAME', 'N/A')}")
    print(f" Python: {sys.version.split()[0]}")
    print(f"")
    print(f"Installation Mode:")
    print(f" Log Level: {'DEBUG (verbose)' if DEBUG else 'SIMPLE (progress only)'}")
    print(f" Min Node.js: v{MIN_NODEJS_VERSION}+ (auto-installed if needed)")
    print(f" Max Node.js: v{MAX_NODEJS_VERSION} (tested)")
    print(f"")
    print(f"Author: @linuxiarz.pl (Mateusz Gruszczyński)")
    print("===========================================================\n")
    # Resolve theme selection: --tp-theme implies dark mode with that theme.
    if args.tp_theme:
        dark_mode_enabled = True
        selected_theme = args.tp_theme
    elif args.dark_mode:
        dark_mode_enabled = True
        selected_theme = TP_DEFAULT_THEME
    else:
        dark_mode_enabled = False
        selected_theme = None
    if args.update:
        # Update flow: reuse settings saved by the previous install/update
        # unless overridden on the command line.
        installer_config = load_installer_config()
        if not args.tp_theme and installer_config.get("tp_theme"):
            selected_theme = installer_config["tp_theme"]
            print(f"✓ Using stored theme: {selected_theme}")
        if not args.dark_mode and not args.tp_theme and installer_config.get("tp_theme"):
            args.dark_mode = True
            print(f"✓ Using stored Theme-Park setting: enabled")
        stored_ipv6 = installer_config.get("ipv6_enabled", args.enable_ipv6)
        install_logrotate_for_data_logs()
        fix_logrotate_permissions_and_wrapper()
        version = update_only(
            node_pkg=args.nodejs_pkg,
            node_version=args.node_version,
            npm_version_override=args.npm_version,
            apply_dark=args.dark_mode or bool(selected_theme),
            dark_env=dict(
                APP_FILEPATH="/opt/npm/frontend",
                TP_DOMAIN=TP_DOMAIN,
                TP_COMMUNITY_THEME=TP_COMMUNITY_THEME,
                TP_SCHEME=TP_SCHEME,
                TP_THEME=selected_theme,
            ),
            # stored_ipv6 is always bound in this branch; locals() check is
            # purely defensive.
            ipv6_enabled=stored_ipv6 if 'stored_ipv6' in locals() else args.enable_ipv6,
        )
        info = gather_versions(version)
        update_motd(args.motd == "yes", info, ipv6_enabled=args.enable_ipv6)
        print_summary(
            info, args.enable_ipv6, args.dark_mode, selected_theme, update_mode=True
        )
        return
    # ---- Fresh install flow ----
    validate_supported_os()
    apt_update_upgrade()
    # Remove conflicting web servers / runtimes before installing Angie.
    apt_purge(
        ["nginx", "openresty", "nodejs", "npm", "yarn", "certbot", "rustc", "cargo"]
    )
    apt_install(
        [
            "ca-certificates",
            "curl",
            "gnupg",
            "openssl",
            "apache2-utils",
            "logrotate",
            "sudo",
            "acl",
            "python3",
            "sqlite3",
            "git",
            "lsb-release",
            "build-essential",
        ]
    )
    setup_angie(ipv6_enabled=args.enable_ipv6)
    write_metrics_files()
    ensure_minimum_nodejs(user_requested_version=args.node_version)
    install_yarn()
    ensure_user_and_dirs()
    create_sudoers_for_npm()
    setup_certbot_venv()
    configure_letsencrypt()
    npm_app_version = deploy_npm_app(args.npm_version)
    if not args.enable_ipv6:
        strip_ipv6_listens([Path("/etc/angie"), Path("/etc/nginx")])
    else:
        print("IPv6: leaving entries (skipped IPv6 cleanup).")
    if dark_mode_enabled:
        apply_dark_mode(
            APP_FILEPATH="/opt/npm/frontend",
            TP_DOMAIN=TP_DOMAIN,
            TP_COMMUNITY_THEME=TP_COMMUNITY_THEME,
            TP_SCHEME=TP_SCHEME,
            TP_THEME=selected_theme,
        )
    # Save installation configuration for future updates
    save_installer_config({
        "ipv6_enabled": args.enable_ipv6,
        "tp_theme": selected_theme,
        "tp_domain": TP_DOMAIN,
        "tp_scheme": TP_SCHEME,
        "tp_community_theme": TP_COMMUNITY_THEME,
        "node_version": args.node_version,
        "npm_version": npm_app_version,
    })
    create_systemd_units(ipv6_enabled=args.enable_ipv6)
    ensure_nginx_symlink()
    install_logrotate_for_data_logs()
    fix_logrotate_permissions_and_wrapper()
    sync_backup_nginx_conf()
    comment_x_served_by_step()
    set_file_ownership(["/etc/nginx/conf.d/include/ip_ranges.conf"], "npm:npm", 0o664)
    with step("Restarting services after installation"):
        run(["systemctl", "restart", "angie.service"], check=False)
        run(["systemctl", "restart", "npm.service"], check=False)
    info = gather_versions(npm_app_version)
    update_motd(args.motd == "yes", info, ipv6_enabled=args.enable_ipv6)
    print_summary(
        info, args.enable_ipv6, args.dark_mode, selected_theme, update_mode=False
    )
# Entry point: translate Ctrl-C into the conventional exit code 130
# instead of dumping a KeyboardInterrupt traceback.
if __name__ == "__main__":
    signal.signal(signal.SIGINT, lambda s, f: sys.exit(130))
    main()