From e30d54dd92dcf8b7d27e6f7127b8cc34e00ac268 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateusz=20Gruszczy=C5=84ski?= Date: Sun, 26 Oct 2025 18:45:14 +0100 Subject: [PATCH] node version logic --- npm_install.py | 1148 ++++++++++++++++++++++++++++++++++-------------- 1 file changed, 823 insertions(+), 325 deletions(-) diff --git a/npm_install.py b/npm_install.py index 86e0c3c..7baf2ba 100644 --- a/npm_install.py +++ b/npm_install.py @@ -11,7 +11,7 @@ DEBUG = False # ========== Configuration ========== # Minimum required Node.js version for NPM 2.12.6+ MIN_NODEJS_VERSION = 18 -# Maximum supported Node.js version +# Maximum supported Node.js version MAX_NODEJS_VERSION = 21 # Theme.Park settings (for --dark-mode or --tp-theme) @@ -23,8 +23,10 @@ TP_DEFAULT_THEME = "organizr" # ========== UI / Spinner ========== + class Spinner: - FRAMES = ["⠋","⠙","⠹","⠸","⠼","⠴","⠦","⠧","⠇","⠏"] + FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"] + def __init__(self, text): self.text = text self._stop = threading.Event() @@ -60,28 +62,38 @@ class Spinner: else: print(f"✖ {self.text}") + def step(text): class _Ctx: def __enter__(self_inner): self_inner.spinner = Spinner(text) self_inner.spinner.start() return self_inner + def __exit__(self_inner, exc_type, exc, tb): if exc is None: self_inner.spinner.stop_ok() else: self_inner.spinner.stop_fail() + return _Ctx() + def _devnull(): return subprocess.DEVNULL if not DEBUG else None + def run(cmd, check=True, env=None): if DEBUG: print("+", " ".join(cmd)) - return subprocess.run(cmd, check=check, env=env, - stdout=None if DEBUG else subprocess.DEVNULL, - stderr=None if DEBUG else subprocess.DEVNULL) + return subprocess.run( + cmd, + check=check, + env=env, + stdout=None if DEBUG else subprocess.DEVNULL, + stderr=None if DEBUG else subprocess.DEVNULL, + ) + def run_out(cmd, check=True): if DEBUG: @@ -89,45 +101,58 @@ def run_out(cmd, check=True): result = subprocess.run(cmd, check=check, capture_output=True, text=True) return result.stdout + # ========== Utils ========== + def ensure_root(): if os.geteuid() != 0: print("Run as root.", file=sys.stderr) sys.exit(1) + def os_release(): data = {} try: for line in Path("/etc/os-release").read_text().splitlines(): if "=" in line: - k,v = line.split("=",1) + k, v = line.split("=", 1) data[k] = v.strip().strip('"') except Exception: pass - pretty = data.get("PRETTY_NAME") or f"{data.get('ID','linux')} {data.get('VERSION_ID','')}".strip() + pretty = ( + data.get("PRETTY_NAME") + or f"{data.get('ID','linux')} {data.get('VERSION_ID','')}".strip() + ) return { - "ID": data.get("ID",""), - "VERSION_ID": data.get("VERSION_ID",""), - "CODENAME": data.get("VERSION_CODENAME",""), - "PRETTY": pretty + "ID": data.get("ID", ""), + "VERSION_ID": data.get("VERSION_ID", ""), + "CODENAME": data.get("VERSION_CODENAME", ""), + "PRETTY": pretty, } + def apt_update_upgrade(): with step("Updating package lists and system"): run(["apt-get", "update", "-y"]) run(["apt-get", "-y", "upgrade"]) + def apt_install(pkgs): - if not pkgs: return + if not pkgs: + return with step(f"Installing packages: {', '.join(pkgs)}"): run(["apt-get", "install", "-y"] + pkgs) + def apt_try_install(pkgs): - if not pkgs: return + if not pkgs: + return avail = [] for p in pkgs: - ok = subprocess.run(["apt-cache","show", p], stdout=_devnull(), stderr=_devnull()) + ok = subprocess.run( + ["apt-cache", "show", p], stdout=_devnull(), stderr=_devnull() + ) if ok.returncode == 0: avail.append(p) elif DEBUG: @@ -135,17 +160,21 @@ def apt_try_install(pkgs): if avail: 
apt_install(avail) + def apt_purge(pkgs): - if not pkgs: return + if not pkgs: + return with step(f"Removing conflicting packages: {', '.join(pkgs)}"): run(["apt-get", "purge", "-y"] + pkgs, check=False) run(["apt-get", "autoremove", "-y"], check=False) + def write_file(path: Path, content: str, mode=0o644): path.parent.mkdir(parents=True, exist_ok=True) path.write_text(content, encoding="utf-8") os.chmod(path, mode) + def append_unique(path: Path, lines: str): path.parent.mkdir(parents=True, exist_ok=True) existing = path.read_text(encoding="utf-8") if path.exists() else "" @@ -155,6 +184,7 @@ def append_unique(path: Path, lines: str): out += ("" if out.endswith("\n") else "\n") + line + "\n" path.write_text(out, encoding="utf-8") + def github_latest_release_tag(repo: str, override: str | None) -> str: if override: return override.lstrip("v") @@ -165,6 +195,7 @@ def github_latest_release_tag(repo: str, override: str | None) -> str: tag = data["tag_name"] return tag.lstrip("v") + def write_resolvers_conf(ipv6_enabled: bool): ns_v4, ns_v6 = [], [] try: @@ -179,27 +210,26 @@ def write_resolvers_conf(ipv6_enabled: bool): ips = ns_v4 + (ns_v6 if ipv6_enabled else []) if not ips: - ips = ["1.1.1.1", "8.8.8.8"] + (["2606:4700:4700::1111", "2001:4860:4860::8888"] if ipv6_enabled else []) + ips = ["1.1.1.1", "8.8.8.8"] + ( + ["2606:4700:4700::1111", "2001:4860:4860::8888"] if ipv6_enabled else [] + ) ipv6_flag = " ipv6=on" if ipv6_enabled and any(":" in x for x in ips) else "" content = f"resolver {' '.join(ips)} valid=10s{ipv6_flag};\n" write_file(Path("/etc/angie/conf.d/include/resolvers.conf"), content, 0o644) + def validate_nodejs_version(version: str) -> tuple[bool, str, str | None]: - version_map = { - 'latest': '21', - 'lts': '18', - 'current': '21' - } - + version_map = {"latest": "21", "lts": "18", "current": "21"} + resolved = version_map.get(version.lower(), version) - - match = re.match(r'(\d+)', resolved) + + match = re.match(r"(\d+)", resolved) if not match: return False, resolved, f"Invalid version format: {version}" - + major_version = int(match.group(1)) - + if major_version > MAX_NODEJS_VERSION: warning = ( f"⚠ WARNING: Requested Node.js v{major_version} exceeds maximum tested version (v{MAX_NODEJS_VERSION}).\n" @@ -207,18 +237,16 @@ def validate_nodejs_version(version: str) -> tuple[bool, str, str | None]: f" Falling back to Node.js v{MAX_NODEJS_VERSION}." 
) return False, str(MAX_NODEJS_VERSION), warning - + return True, resolved, None + def validate_supported_os(): - distro_id = OSREL.get('ID', '').lower() - version_id = OSREL.get('VERSION_ID', '').strip() - - SUPPORTED = { - 'debian': ['11', '12', '13'], - 'ubuntu': ['20.04', '22.04', '24.04'] - } - + distro_id = OSREL.get("ID", "").lower() + version_id = OSREL.get("VERSION_ID", "").strip() + + SUPPORTED = {"debian": ["11", "12", "13"], "ubuntu": ["20.04", "22.04", "24.04"]} + if distro_id not in SUPPORTED: print(f"\n ⚠ ERROR: Unsupported distribution: {distro_id}") print(f" Detected: {OSREL.get('PRETTY', 'Unknown')}") @@ -229,15 +257,15 @@ def validate_supported_os(): print(f"\n Your distribution may work but is not tested.") print(f" Continue at your own risk or install on a supported system.\n") sys.exit(1) - + supported_versions = SUPPORTED[distro_id] version_match = False - + for supported_ver in supported_versions: if version_id.startswith(supported_ver): version_match = True break - + if not version_match: print(f"\n ⚠ WARNING: Unsupported version of {distro_id}: {version_id}") print(f" Detected: {OSREL.get('PRETTY', 'Unknown')}") @@ -245,29 +273,36 @@ def validate_supported_os(): print(f"\n This version is not officially tested.") print(f" Prerequisites:") print(f" • Angie packages must be available for your distribution") - print(f" • Check: https://en.angie.software/angie/docs/installation/oss_packages/") + print( + f" • Check: https://en.angie.software/angie/docs/installation/oss_packages/" + ) print(f" • Your system should be Debian/Ubuntu compatible (apt-based)") - + response = input("\n Continue anyway? [y/N]: ").strip().lower() - if response not in ['y', 'yes']: + if response not in ["y", "yes"]: print("\n Installation cancelled.\n") sys.exit(1) print() else: print(f"✓ Supported OS detected: {OSREL.get('PRETTY', 'Unknown')}\n") + def comment_x_served_by_step(path="/etc/angie/conf.d/include/proxy.conf"): p = Path(path) if not p.exists(): raise FileNotFoundError(path) src = p.read_text() - pattern = re.compile(r'^(?P\s*)(?!#)\s*add_header\s+X-Served-By\s+\$host\s*;\s*$', re.MULTILINE) + pattern = re.compile( + r"^(?P\s*)(?!#)\s*add_header\s+X-Served-By\s+\$host\s*;\s*$", re.MULTILINE + ) count = len(pattern.findall(src)) if count == 0: return 0 backup = p.with_suffix(p.suffix + ".bak") shutil.copy2(p, backup) - out = pattern.sub(lambda m: f"{m.group('ws')}# add_header X-Served-By $host;", src) + out = pattern.sub( + lambda m: f"{m.group('ws')}# add_header X-Served-By $host;", src + ) fd, tmp = tempfile.mkstemp(dir=str(p.parent)) os.close(fd) Path(tmp).write_text(out) @@ -276,65 +311,171 @@ def comment_x_served_by_step(path="/etc/angie/conf.d/include/proxy.conf"): print(f"✔ Hide X-Served-by header | backup: {backup}") return count + def set_file_ownership(files: list[str | Path], owner: str, mode: int | None = None): success = [] failed = [] - + for file_path in files: path = Path(file_path) - + if not path.exists(): failed.append((str(path), "File not found")) continue - + try: run(["chown", owner, str(path)]) - + if mode is not None: os.chmod(path, mode) - + success.append(str(path)) - + except Exception as e: failed.append((str(path), str(e))) - + if success: print(f"✔ Set ownership '{owner}' for {len(success)} file(s)") if DEBUG: for f in success: print(f" - {f}") - + if failed: print(f"⚠ Failed to set ownership for {len(failed)} file(s):") for f, err in failed: print(f" - {f}: {err}") - + return len(failed) == 0 -def ensure_minimum_nodejs(min_version=MIN_NODEJS_VERSION): + 
+def check_distro_nodejs_available(): + try: + result = subprocess.run( + ["apt-cache", "show", "nodejs"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + ) + if result.returncode == 0: + for line in result.stdout.splitlines(): + if line.startswith("Version:"): + version_str = line.split(":", 1)[1].strip() + match = re.match(r"(\d+)", version_str) + if match: + major = int(match.group(1)) + if DEBUG: + print( + f"✓ Distro has nodejs v{version_str} (major: {major})" + ) + return True, major, version_str + return False, None, None + except Exception as e: + if DEBUG: + print(f"Failed to check distro nodejs: {e}") + return False, None, None + + +def install_nodejs_from_distro(): + with step("Installing Node.js from distribution repositories"): + apt_install(["nodejs"]) + + if shutil.which("node"): + node_ver = run_out(["node", "--version"], check=False).strip() + if shutil.which("npm"): + npm_ver = run_out(["npm", "--version"], check=False).strip() + print(f" Node.js: {node_ver} (from distro)") + print(f" npm: {npm_ver}") + else: + print(f" Node.js: {node_ver} (from distro)") + print(f"⚠ npm not found, installing...") + apt_try_install(["npm"]) + if shutil.which("npm"): + npm_ver = run_out(["npm", "--version"], check=False).strip() + print(f"✔ npm {npm_ver} installed successfully") + return True + return False + + +def ensure_minimum_nodejs(min_version=MIN_NODEJS_VERSION, user_requested_version=None): with step("Checking Node.js version requirements"): try: node_ver = run_out(["node", "--version"], check=False).strip() - - match = re.match(r'v?(\d+)', node_ver) + match = re.match(r"v?(\d+)", node_ver) if match: current_major = int(match.group(1)) - if current_major >= min_version: - if shutil.which("npm"): - npm_ver = run_out(["npm", "--version"], check=False).strip() - print(f"Node.js: {node_ver}") - print(f" npm: {npm_ver}") - else: - print(f"Node.js: {node_ver}") - return True + if user_requested_version: + requested_match = re.match(r"(\d+)", str(user_requested_version)) + if requested_match: + requested_major = int(requested_match.group(1)) + if requested_major < MIN_NODEJS_VERSION: + requested_major = MIN_NODEJS_VERSION + elif requested_major > MAX_NODEJS_VERSION: + requested_major = MAX_NODEJS_VERSION + + if current_major == requested_major: + if shutil.which("npm"): + npm_ver = run_out( + ["npm", "--version"], check=False + ).strip() + print(f"Node.js: {node_ver}") + print(f" npm: {npm_ver}") + else: + print(f"Node.js: {node_ver}") + return True + else: + if current_major >= min_version: + if shutil.which("npm"): + npm_ver = run_out(["npm", "--version"], check=False).strip() + print(f"Node.js: {node_ver}") + print(f" npm: {npm_ver}") + else: + print(f"Node.js: {node_ver}") + return True except FileNotFoundError: pass except Exception: pass - - install_node_from_nodesource(str(min_version)) - + + if user_requested_version: + requested_match = re.match(r"(\d+)", str(user_requested_version)) + if requested_match: + requested_major = int(requested_match.group(1)) + + if requested_major < MIN_NODEJS_VERSION: + print( + f"⚠ Requested version {requested_major} < minimum {MIN_NODEJS_VERSION}" + ) + print(f" Installing minimum version: v{MIN_NODEJS_VERSION}") + install_node_from_nodesource(str(MIN_NODEJS_VERSION)) + elif requested_major > MAX_NODEJS_VERSION: + print( + f"⚠ Requested version {requested_major} > maximum {MAX_NODEJS_VERSION}" + ) + print(f" Installing maximum version: v{MAX_NODEJS_VERSION}") + install_node_from_nodesource(str(MAX_NODEJS_VERSION)) + else: + 
install_node_from_nodesource(str(requested_major)) + else: + install_node_from_nodesource(str(MIN_NODEJS_VERSION)) + else: + has_nodejs, major, version_str = check_distro_nodejs_available() + + if has_nodejs and major and major >= min_version: + print(f"✓ Distribution provides Node.js v{version_str} (>= v{min_version})") + if install_nodejs_from_distro(): + return True + else: + print(f"⚠ Failed to install from distro, falling back to NodeSource") + install_node_from_nodesource(str(min_version)) + else: + if has_nodejs: + print(f"⚠ Distribution Node.js v{version_str} < minimum v{min_version}") + else: + print(f"✓ Distribution doesn't provide Node.js package") + print(f" Installing from NodeSource: v{min_version}") + install_node_from_nodesource(str(min_version)) + if shutil.which("node"): node_ver = run_out(["node", "--version"], check=False).strip() if shutil.which("npm"): @@ -343,13 +484,17 @@ def ensure_minimum_nodejs(min_version=MIN_NODEJS_VERSION): print(f" npm: {npm_ver}") else: print(f"Node.js: {node_ver}") - - return True + return True + + return False + def download_extract_tar_gz(url: str, dest_dir: Path) -> Path: dest_dir.mkdir(parents=True, exist_ok=True) with step("Downloading and untaring"): - with urllib.request.urlopen(url) as r, tempfile.NamedTemporaryFile(delete=False) as tf: + with urllib.request.urlopen(url) as r, tempfile.NamedTemporaryFile( + delete=False + ) as tf: shutil.copyfileobj(r, tf) tf.flush() tf_path = Path(tf.name) @@ -359,7 +504,7 @@ def download_extract_tar_gz(url: str, dest_dir: Path) -> Path: except TypeError: t.extractall(dest_dir) except Exception as e: - if 'LinkOutsideDestinationError' in str(type(e).__name__): + if "LinkOutsideDestinationError" in str(type(e).__name__): t.extractall(dest_dir) else: raise @@ -367,13 +512,16 @@ def download_extract_tar_gz(url: str, dest_dir: Path) -> Path: os.unlink(tf_path) return dest_dir / top + # Distro info (used in banners & repo setup) OSREL = os_release() + # === extra sync === def sync_backup_nginx_conf(): from pathlib import Path import shutil, filecmp + src = Path("/etc/nginx.bak/conf.d") dst = Path("/etc/angie/conf.d") if not src.exists(): @@ -395,7 +543,7 @@ def sync_backup_nginx_conf(): def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): info = os_release() distro_id = (info.get("ID") or "").lower() - + # ============================================================ # STEP 1: Check if Python 3.11 is already available # ============================================================ @@ -403,7 +551,7 @@ def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): if shutil.which("python3.11"): try: ver_output = run_out(["python3.11", "--version"], check=False).strip() - match = re.search(r'Python (\d+)\.(\d+)', ver_output) + match = re.search(r"Python (\d+)\.(\d+)", ver_output) if match: major, minor = int(match.group(1)), int(match.group(2)) if major == 3 and minor == 11: @@ -412,7 +560,7 @@ def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): print(f"✔ Found system Python 3.11: {ver_output}") except Exception: pass - + # ============================================================ # STEP 2: Use system Python 3.11 if available # ============================================================ @@ -420,43 +568,64 @@ def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): with step(f"Using system Python 3.11 for certbot venv"): # Ensure python3.11-venv is installed apt_try_install(["python3.11-venv", "python3-pip"]) - + venv_dir.mkdir(parents=True, exist_ok=True) run(["python3.11", 
"-m", "venv", str(venv_dir)]) - + venv_bin = venv_dir / "bin" pip_path = venv_bin / "pip" certbot_path = venv_bin / "certbot" env_build = os.environ.copy() env_build["SETUPTOOLS_USE_DISTUTILS"] = "local" - - run([str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], env=env_build) - run([str(pip_path), "install", "-U", "cryptography", "cffi", "certbot", "tldextract"], env=env_build) - + + run( + [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], + env=env_build, + ) + run( + [ + str(pip_path), + "install", + "-U", + "cryptography", + "cffi", + "certbot", + "tldextract", + ], + env=env_build, + ) + Path("/usr/local/bin").mkdir(parents=True, exist_ok=True) target = Path("/usr/local/bin/certbot") if target.exists() or target.is_symlink(): - try: target.unlink() - except Exception: pass + try: + target.unlink() + except Exception: + pass target.symlink_to(certbot_path) - + cb_ver = run_out([str(certbot_path), "--version"], check=False) or "" pip_ver = run_out([str(pip_path), "--version"], check=False) or "" print(f" Python: {ver_output}") print(f" Certbot: {cb_ver.strip()}") print(f" Pip: {pip_ver.strip().split(' from ')[0]}") return - + # ============================================================ # STEP 3: Ubuntu - install Python 3.11 from deadsnakes PPA # ============================================================ if distro_id == "ubuntu": - with step(f"Ubuntu detected: {info.get('PRETTY','Ubuntu')}. Install Python 3.11 via deadsnakes"): + with step( + f"Ubuntu detected: {info.get('PRETTY','Ubuntu')}. Install Python 3.11 via deadsnakes" + ): try: run(["apt-get", "update", "-y"], check=False) apt_try_install(["software-properties-common"]) except Exception: - run(["apt-get", "install", "-y", "software-properties-common"], check=False) + run( + ["apt-get", "install", "-y", "software-properties-common"], + check=False, + ) run(["add-apt-repository", "-y", "ppa:deadsnakes/ppa"]) run(["apt-get", "update", "-y"], check=False) @@ -472,14 +641,30 @@ def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): env_build = os.environ.copy() env_build["SETUPTOOLS_USE_DISTUTILS"] = "local" - run([str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], env=env_build) - run([str(pip_path), "install", "-U", "cryptography", "cffi", "certbot", "tldextract"], env=env_build) + run( + [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], + env=env_build, + ) + run( + [ + str(pip_path), + "install", + "-U", + "cryptography", + "cffi", + "certbot", + "tldextract", + ], + env=env_build, + ) Path("/usr/local/bin").mkdir(parents=True, exist_ok=True) target = Path("/usr/local/bin/certbot") if target.exists() or target.is_symlink(): - try: target.unlink() - except Exception: pass + try: + target.unlink() + except Exception: + pass target.symlink_to(certbot_path) cb_ver = run_out([str(certbot_path), "--version"], check=False) or "" @@ -497,20 +682,56 @@ def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): PYTHON_VERSION = "3.11.14" try: - apt_try_install([ - "pyenv", "build-essential", "gcc", "make", "pkg-config", - "libssl-dev", "zlib1g-dev", "libbz2-dev", "libreadline-dev", - "libsqlite3-dev", "tk-dev", "libncursesw5-dev", "libgdbm-dev", - "libffi-dev", "uuid-dev", "liblzma-dev", "ca-certificates", "curl" - ]) + apt_try_install( + [ + "pyenv", + "build-essential", + "gcc", + "make", + "pkg-config", + "libssl-dev", + "zlib1g-dev", + "libbz2-dev", + "libreadline-dev", + "libsqlite3-dev", + "tk-dev", + "libncursesw5-dev", + "libgdbm-dev", + "libffi-dev", + 
"uuid-dev", + "liblzma-dev", + "ca-certificates", + "curl", + ] + ) except Exception: run(["apt-get", "update"], check=False) - run(["apt-get", "install", "-y", - "build-essential", "gcc", "make", "pkg-config", - "libssl-dev", "zlib1g-dev", "libbz2-dev", "libreadline-dev", - "libsqlite3-dev", "tk-dev", "libncursesw5-dev", "libgdbm-dev", - "libffi-dev", "uuid-dev", "liblzma-dev", "ca-certificates", "curl", "git" - ], check=False) + run( + [ + "apt-get", + "install", + "-y", + "build-essential", + "gcc", + "make", + "pkg-config", + "libssl-dev", + "zlib1g-dev", + "libbz2-dev", + "libreadline-dev", + "libsqlite3-dev", + "tk-dev", + "libncursesw5-dev", + "libgdbm-dev", + "libffi-dev", + "uuid-dev", + "liblzma-dev", + "ca-certificates", + "curl", + "git", + ], + check=False, + ) Path("/opt/npm").mkdir(parents=True, exist_ok=True) PYENV_ROOT.mkdir(parents=True, exist_ok=True) @@ -518,51 +739,73 @@ def setup_certbot_venv(venv_dir: Path = Path("/opt/certbot")): with step(f"Ensuring pyenv is available at {PYENV_ROOT}"): pyenv_bin_path = PYENV_ROOT / "bin" / "pyenv" - + if not pyenv_bin_path.exists(): - run([ - "sudo", "-u", PYENV_OWNER, "bash", "-lc", - 'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then ' - ' command -v git >/dev/null 2>&1 || sudo apt-get install -y git; ' - ' git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; ' - "fi" - ]) + run( + [ + "sudo", + "-u", + PYENV_OWNER, + "bash", + "-lc", + 'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then ' + " command -v git >/dev/null 2>&1 || sudo apt-get install -y git; " + " git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; " + "fi", + ] + ) PYENV_BIN_CANDIDATES = [ str(PYENV_ROOT / "bin" / "pyenv"), - "pyenv", - "/usr/bin/pyenv", - "/usr/lib/pyenv/bin/pyenv" + "pyenv", + "/usr/bin/pyenv", + "/usr/lib/pyenv/bin/pyenv", ] - pyenv_bin = next((c for c in PYENV_BIN_CANDIDATES if shutil.which(c) or Path(c).exists()), None) + pyenv_bin = next( + (c for c in PYENV_BIN_CANDIDATES if shutil.which(c) or Path(c).exists()), None + ) if not pyenv_bin: raise RuntimeError("No 'pyenv' found even after git clone attempt.") with step(f"Installing Python {PYTHON_VERSION} via pyenv into {PYENV_ROOT}"): run(["mkdir", "-p", str(PYENV_ROOT)]) run(["chown", "-R", f"{PYENV_OWNER}:{PYENV_OWNER}", "/opt/npm"], check=False) - run([ - "sudo", "-u", PYENV_OWNER, "bash", "-lc", - 'if [ ! -x "/opt/npm/.pyenv/bin/pyenv" ]; then ' - ' command -v git >/dev/null 2>&1 || sudo apt-get install -y git; ' - ' git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; ' - "fi" - ]) + run( + [ + "sudo", + "-u", + PYENV_OWNER, + "bash", + "-lc", + 'if [ ! 
-x "/opt/npm/.pyenv/bin/pyenv" ]; then ' + " command -v git >/dev/null 2>&1 || sudo apt-get install -y git; " + " git clone --depth=1 https://github.com/pyenv/pyenv.git /opt/npm/.pyenv; " + "fi", + ] + ) install_cmd = ( - 'export HOME=/opt/npm; ' - 'export PYENV_ROOT=/opt/npm/.pyenv; ' + "export HOME=/opt/npm; " + "export PYENV_ROOT=/opt/npm/.pyenv; " 'export PATH="$PYENV_ROOT/bin:/usr/bin:/bin"; ' 'mkdir -p "$PYENV_ROOT"; cd "$HOME"; ' - f'pyenv install -s {PYTHON_VERSION}' + f"pyenv install -s {PYTHON_VERSION}" + ) + run( + [ + "sudo", + "-u", + PYENV_OWNER, + "env", + "-i", + "HOME=/opt/npm", + f"PYENV_ROOT={PYENV_ROOT}", + f"PATH={PYENV_ROOT}/bin:/usr/bin:/bin", + "bash", + "-lc", + install_cmd, + ] ) - run([ - "sudo", "-u", PYENV_OWNER, "env", "-i", - "HOME=/opt/npm", - f"PYENV_ROOT={PYENV_ROOT}", - f"PATH={PYENV_ROOT}/bin:/usr/bin:/bin", - "bash", "-lc", install_cmd - ]) profile_snippet = f"""# Auto-generated by npm-angie-auto-install # pyenv for '{PYENV_OWNER}' @@ -601,14 +844,30 @@ fi env_build = os.environ.copy() env_build["SETUPTOOLS_USE_DISTUTILS"] = "local" - run([str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], env=env_build) - run([str(pip_path), "install", "-U", "cryptography", "cffi", "certbot", "tldextract"], env=env_build) + run( + [str(pip_path), "install", "-U", "pip", "setuptools", "wheel"], + env=env_build, + ) + run( + [ + str(pip_path), + "install", + "-U", + "cryptography", + "cffi", + "certbot", + "tldextract", + ], + env=env_build, + ) Path("/usr/local/bin").mkdir(parents=True, exist_ok=True) target = Path("/usr/local/bin/certbot") if target.exists() or target.is_symlink(): - try: target.unlink() - except Exception: pass + try: + target.unlink() + except Exception: + pass target.symlink_to(certbot_path) cb_ver = run_out([str(certbot_path), "--version"], check=False) or "" @@ -625,7 +884,10 @@ def configure_letsencrypt(): run(["chown", "-R", "npm:npm", "/opt/certbot"], check=False) Path("/etc/letsencrypt").mkdir(parents=True, exist_ok=True) run(["chown", "-R", "npm:npm", "/etc/letsencrypt"], check=False) - run(["apt-get", "install", "-y", "--no-install-recommends", "certbot"], check=False) + run( + ["apt-get", "install", "-y", "--no-install-recommends", "certbot"], + check=False, + ) ini = """text = True non-interactive = True webroot-path = /data/letsencrypt-acme-challenge @@ -636,8 +898,10 @@ preferred-chain = ISRG Root X1 write_file(Path("/etc/letsencrypt.ini"), ini, 0o644) run(["chown", "-R", "npm:npm", "/etc/letsencrypt"], check=False) + def ensure_nginx_symlink(): from pathlib import Path + target = Path("/etc/angie") link = Path("/etc/nginx") try: @@ -670,6 +934,7 @@ def ensure_nginx_symlink(): except Exception as e: print(f"Warning: symlink check failed: {e}") + # ========== Angie / NPM template ========== ANGIE_CONF_TEMPLATE = """# run nginx in foreground @@ -792,13 +1057,21 @@ RestartSec=3s WantedBy=multi-user.target """ + def lsb_info(): try: apt_try_install(["lsb-release"]) - dist = run_out(["bash","-lc","lsb_release -si"]).strip().lower().replace(" ", "") - rel = run_out(["bash","-lc","lsb_release -sr"]).strip() - code = run_out(["bash","-lc","lsb_release -sc"]).strip() - return {"ID": dist, "VERSION_ID": rel, "CODENAME": code, "PRETTY": f"{dist} {rel} ({code})"} + dist = ( + run_out(["bash", "-lc", "lsb_release -si"]).strip().lower().replace(" ", "") + ) + rel = run_out(["bash", "-lc", "lsb_release -sr"]).strip() + code = run_out(["bash", "-lc", "lsb_release -sc"]).strip() + return { + "ID": dist, + "VERSION_ID": rel, + "CODENAME": 
code, + "PRETTY": f"{dist} {rel} ({code})", + } except Exception: return os_release() @@ -810,27 +1083,42 @@ def setup_angie(ipv6_enabled: bool): return re.sub(pat, "", s.strip().lower()) with step("Adding Angie repo and installing Angie packages"): - apt_try_install([ - "ca-certificates", "curl", "gnupg", "apt-transport-https", - "software-properties-common", "lsb-release" - ]) + apt_try_install( + [ + "ca-certificates", + "curl", + "gnupg", + "apt-transport-https", + "software-properties-common", + "lsb-release", + ] + ) - run([ - "curl", "-fsSL", "-o", "/etc/apt/trusted.gpg.d/angie-signing.gpg", - "https://angie.software/keys/angie-signing.gpg" - ]) + run( + [ + "curl", + "-fsSL", + "-o", + "/etc/apt/trusted.gpg.d/angie-signing.gpg", + "https://angie.software/keys/angie-signing.gpg", + ] + ) try: dist = run_out(["lsb_release", "-si"]) - rel = run_out(["lsb_release", "-sr"]) + rel = run_out(["lsb_release", "-sr"]) code = run_out(["lsb_release", "-sc"]) except Exception: - dist = run_out(["bash","-c",". /etc/os-release && printf %s \"$ID\""]) - rel = run_out(["bash","-c",". /etc/os-release && printf %s \"$VERSION_ID\""]) - code = run_out(["bash","-c",". /etc/os-release && printf %s \"$VERSION_CODENAME\""]) + dist = run_out(["bash", "-c", '. /etc/os-release && printf %s "$ID"']) + rel = run_out( + ["bash", "-c", '. /etc/os-release && printf %s "$VERSION_ID"'] + ) + code = run_out( + ["bash", "-c", '. /etc/os-release && printf %s "$VERSION_CODENAME"'] + ) dist = _norm(dist) - rel = _norm(rel, allow_dot=True) + rel = _norm(rel, allow_dot=True) code = _norm(code) os_id = f"{dist}/{rel}" if rel else dist @@ -842,7 +1130,12 @@ def setup_angie(ipv6_enabled: bool): write_file(Path("/etc/apt/sources.list.d/angie.list"), line) run(["apt-get", "update"]) - base = ["angie", "angie-module-headers-more", "angie-module-brotli", "angie-module-zstd"] + base = [ + "angie", + "angie-module-headers-more", + "angie-module-brotli", + "angie-module-zstd", + ] optional = ["angie-module-prometheus", "angie-console-light"] apt_install(base) apt_try_install(optional) @@ -867,6 +1160,7 @@ exec sudo -n /usr/sbin/angie "$@" with step("Installing corrected systemd unit for Angie"): write_file(Path("/etc/systemd/system/angie.service"), ANGIE_UNIT, 0o644) + def write_metrics_files(): """Create /etc/angie/metrics.conf (port 82 with console & status).""" with step("Adding Angie metrics & console on :82"): @@ -903,6 +1197,7 @@ server { """ write_file(Path("/etc/angie/metrics.conf"), metrics, 0o644) + def ensure_angie_runtime_perms(): run_path = Path("/run/angie") pid_file = run_path / "angie.pid" @@ -910,6 +1205,7 @@ def ensure_angie_runtime_perms(): os.chmod(run_path, 0o2775) try: import grp + gid = grp.getgrnam("angie").gr_gid os.chown(run_path, -1, gid) except Exception: @@ -919,49 +1215,79 @@ def ensure_angie_runtime_perms(): os.chmod(pid_file, 0o664) try: import grp, pwd + gid = grp.getgrnam("angie").gr_gid uid = pwd.getpwnam("root").pw_uid os.chown(pid_file, uid, gid) except Exception: pass + def ensure_user_and_dirs(): with step("Creating npm user and app/log directories"): try: run(["id", "-u", "npm"]) except subprocess.CalledProcessError: - run(["useradd", "--system", "--home", "/opt/npm", "--create-home", "--shell", "/usr/sbin/nologin", "npm"]) - rc = subprocess.run(["getent","group","angie"], stdout=_devnull(), stderr=_devnull()).returncode + run( + [ + "useradd", + "--system", + "--home", + "/opt/npm", + "--create-home", + "--shell", + "/usr/sbin/nologin", + "npm", + ] + ) + rc = subprocess.run( + ["getent", 
"group", "angie"], stdout=_devnull(), stderr=_devnull() + ).returncode if rc != 0: - run(["groupadd","angie"]) - run(["usermod","-aG","angie","npm"], check=False) + run(["groupadd", "angie"]) + run(["usermod", "-aG", "angie", "npm"], check=False) dirs = [ - "/data","/data/nginx","/data/custom_ssl","/data/logs","/data/access", - "/data/nginx/default_host","/data/nginx/default_www","/data/nginx/proxy_host", - "/data/nginx/redirection_host","/data/nginx/stream","/data/nginx/dead_host","/data/nginx/temp", - "/data/letsencrypt-acme-challenge","/opt/npm","/opt/npm/frontend","/opt/npm/global", - "/run/nginx","/run/angie","/tmp/angie/body" + "/data", + "/data/nginx", + "/data/custom_ssl", + "/data/logs", + "/data/access", + "/data/nginx/default_host", + "/data/nginx/default_www", + "/data/nginx/proxy_host", + "/data/nginx/redirection_host", + "/data/nginx/stream", + "/data/nginx/dead_host", + "/data/nginx/temp", + "/data/letsencrypt-acme-challenge", + "/opt/npm", + "/opt/npm/frontend", + "/opt/npm/global", + "/run/nginx", + "/run/angie", + "/tmp/angie/body", ] for d in dirs: Path(d).mkdir(parents=True, exist_ok=True) - run(["chgrp","-h","angie","/run/angie"], check=False) + run(["chgrp", "-h", "angie", "/run/angie"], check=False) os.chmod("/run/angie", 0o2775) Path("/var/log/angie").mkdir(parents=True, exist_ok=True) - for f in ["access.log","error.log"]: - (Path("/var/log/angie")/f).touch(exist_ok=True) + for f in ["access.log", "error.log"]: + (Path("/var/log/angie") / f).touch(exist_ok=True) paths = ["/var/log/angie"] + glob("/var/log/angie/*.log") for pth in paths: - run(["chgrp","-h","angie", pth], check=False) - run(["chmod","775","/var/log/angie"], check=False) + run(["chgrp", "-h", "angie", pth], check=False) + run(["chmod", "775", "/var/log/angie"], check=False) for pth in glob("/var/log/angie/*.log"): - run(["chmod","664", pth], check=False) + run(["chmod", "664", pth], check=False) Path("/var/log/nginx").mkdir(parents=True, exist_ok=True) Path("/var/log/nginx/error.log").touch(exist_ok=True) os.chmod("/var/log/nginx/error.log", 0o666) - run(["chown","-R","npm:npm","/opt/npm","/data"]) + run(["chown", "-R", "npm:npm", "/opt/npm", "/data"]) ensure_angie_runtime_perms() + def create_sudoers_for_npm(): with step("Configuring sudoers for npm -> angie"): content = """User_Alias NPMUSERS = npm @@ -970,7 +1296,8 @@ NPMUSERS ALL=(root) NOPASSWD: /usr/sbin/angie path = Path("/etc/sudoers.d/npm") write_file(path, content, 0o440) if shutil.which("visudo"): - run(["visudo","-cf", str(path)], check=False) + run(["visudo", "-cf", str(path)], check=False) + def adjust_nginx_like_paths_in_tree(root: Path): for p in root.rglob("*.conf"): @@ -978,8 +1305,9 @@ def adjust_nginx_like_paths_in_tree(root: Path): txt = p.read_text(encoding="utf-8") except Exception: continue - txt2 = txt.replace("include conf.d", "include /etc/nginx/conf.d") \ - .replace("include /etc/angie/conf.d", "include /etc/nginx/conf.d") + txt2 = txt.replace("include conf.d", "include /etc/nginx/conf.d").replace( + "include /etc/angie/conf.d", "include /etc/nginx/conf.d" + ) if txt2 != txt: p.write_text(txt2, encoding="utf-8") for cand in root.rglob("nginx.conf"): @@ -992,63 +1320,74 @@ def adjust_nginx_like_paths_in_tree(root: Path): txt = txt.replace("daemon on;", "#daemon on;") cand.write_text(txt, encoding="utf-8") -def install_node_from_nodesource(version: str): + +def install_node_from_nodesource(version: str): is_valid, resolved_version, warning = validate_nodejs_version(version) - + if warning: print(warning) - - match = 
re.match(r'(\d+)', resolved_version) + + match = re.match(r"(\d+)", resolved_version) if not match: raise ValueError(f"Invalid Node.js version: {version}") - + major_version = match.group(1) - + with step("Removing old Node.js installations"): - run(["apt-get", "remove", "-y", "nodejs", "npm", "libnode-dev", "libnode72"], check=False) - run(["apt-get", "purge", "-y", "nodejs", "npm", "libnode-dev", "libnode72"], check=False) + run( + ["apt-get", "remove", "-y", "nodejs", "npm", "libnode-dev", "libnode72"], + check=False, + ) + run( + ["apt-get", "purge", "-y", "nodejs", "npm", "libnode-dev", "libnode72"], + check=False, + ) run(["apt-get", "autoremove", "-y"], check=False) - - for f in ["/etc/apt/sources.list.d/nodesource.list", - "/etc/apt/keyrings/nodesource.gpg", - "/usr/share/keyrings/nodesource.gpg", - "/etc/apt/trusted.gpg.d/nodesource.gpg"]: + + for f in [ + "/etc/apt/sources.list.d/nodesource.list", + "/etc/apt/keyrings/nodesource.gpg", + "/usr/share/keyrings/nodesource.gpg", + "/etc/apt/trusted.gpg.d/nodesource.gpg", + ]: if Path(f).exists(): Path(f).unlink() - + with step(f"Installing Node.js v{major_version}.x from NodeSource repository"): apt_try_install(["ca-certificates", "curl", "gnupg", "apt-transport-https"]) - + setup_url = f"https://deb.nodesource.com/setup_{major_version}.x" - - with tempfile.NamedTemporaryFile(mode='w', suffix='.sh', delete=False) as tf: + + with tempfile.NamedTemporaryFile(mode="w", suffix=".sh", delete=False) as tf: script_path = tf.name - + try: run(["curl", "-fsSL", setup_url, "-o", script_path]) - - os.chmod(script_path, 0o755) + + os.chmod(script_path, 0o755) if DEBUG: subprocess.run(["bash", script_path], check=True) else: run(["bash", script_path]) - - run(["apt-get", "update", "-y"]) + + run(["apt-get", "update", "-y"]) run(["apt-get", "install", "-y", "nodejs"]) - + finally: if Path(script_path).exists(): os.unlink(script_path) - + if shutil.which("node"): node_ver = run_out(["node", "--version"], check=False).strip() - - installed_major = re.match(r'v?(\d+)', node_ver) + + installed_major = re.match(r"v?(\d+)", node_ver) if installed_major and installed_major.group(1) != major_version: print(f"⚠ WARNING: Requested Node.js v{major_version}.x but got {node_ver}") - print(f" This likely means NodeSource doesn't support your distribution yet.") - + print( + f" This likely means NodeSource doesn't support your distribution yet." 
+ ) + if shutil.which("npm"): npm_ver = run_out(["npm", "--version"], check=False).strip() print(f" Node.js: {node_ver}") @@ -1056,12 +1395,12 @@ def install_node_from_nodesource(version: str): else: print(f" Node.js: {node_ver}") print(f"⚠ npm not found, installing...") - + run(["apt-get", "install", "-y", "npm"], check=False) - + if not shutil.which("npm"): run(["corepack", "enable"], check=False) - + if shutil.which("npm"): npm_ver = run_out(["npm", "--version"], check=False).strip() print(f"\n✔ npm {npm_ver} installed successfully") @@ -1071,6 +1410,7 @@ def install_node_from_nodesource(version: str): print("✖ Node.js installation failed") raise RuntimeError("Node.js installation failed") + def install_node_and_yarn(node_pkg: str = None, node_version: str = None): if node_version: install_node_from_nodesource(node_version) @@ -1083,37 +1423,42 @@ def install_node_and_yarn(node_pkg: str = None, node_version: str = None): if not shutil.which("yarn") and not shutil.which("yarnpkg"): apt_try_install(["yarnpkg"]) if not Path("/usr/bin/yarn").exists() and Path("/usr/bin/yarnpkg").exists(): - os.symlink("/usr/bin/yarnpkg","/usr/bin/yarn") + os.symlink("/usr/bin/yarnpkg", "/usr/bin/yarn") + def _is_ubuntu_wo_distutils() -> bool: try: - dist = (OSREL.get("ID","") or "").lower() - ver = (OSREL.get("VERSION_ID","") or "").strip() - def _vers(t): - parts = (ver.split(".") + ["0","0"])[:2] + dist = (OSREL.get("ID", "") or "").lower() + ver = (OSREL.get("VERSION_ID", "") or "").strip() + + def _vers(t): + parts = (ver.split(".") + ["0", "0"])[:2] return (int(parts[0]), int(parts[1])) + return dist == "ubuntu" and _vers(ver) >= (24, 4) except Exception: return False + def _prepare_sass(frontend_dir: Path): pj = frontend_dir / "package.json" if not pj.exists(): return import json, re, os + try: data = json.loads(pj.read_text(encoding="utf-8")) except Exception: return deps = data.get("dependencies", {}) or {} - dev = data.get("devDependencies", {}) or {} + dev = data.get("devDependencies", {}) or {} has_node_sass = ("node-sass" in deps) or ("node-sass" in dev) if not has_node_sass: - return + return - env_flag = (os.environ.get("USE_DART_SASS","").strip()) + env_flag = os.environ.get("USE_DART_SASS", "").strip() use_dart = (env_flag == "1") or (env_flag == "" and _is_ubuntu_wo_distutils()) data.setdefault("dependencies", {}) @@ -1125,8 +1470,10 @@ def _prepare_sass(frontend_dir: Path): if "sass" not in data["dependencies"] and "sass" not in data["devDependencies"]: data["devDependencies"]["sass"] = "^1.77.0" - scripts = (data.get("scripts") or {}) - data["scripts"] = {k: re.sub(r"\bnode-sass\b", "sass", v or "") for k, v in scripts.items()} + scripts = data.get("scripts") or {} + data["scripts"] = { + k: re.sub(r"\bnode-sass\b", "sass", v or "") for k, v in scripts.items() + } if env_flag == "": os.environ["USE_DART_SASS"] = "1" @@ -1137,14 +1484,18 @@ def _prepare_sass(frontend_dir: Path): else: data["devDependencies"]["node-sass"] = target - res = (data.get("resolutions") or {}) - res["node-gyp"] = "^10.0.0" - res["node-sass"] = "^9.0.0" + res = data.get("resolutions") or {} + res["node-gyp"] = "^10.0.0" + res["node-sass"] = "^9.0.0" data["resolutions"] = res - os.environ["npm_config_node_sass_binary_site"] = "https://github.com/sass/node-sass/releases/download" + os.environ["npm_config_node_sass_binary_site"] = ( + "https://github.com/sass/node-sass/releases/download" + ) - pj.write_text(json.dumps(data, indent=2, ensure_ascii=False) + "\n", encoding="utf-8") + pj.write_text( + json.dumps(data, 
indent=2, ensure_ascii=False) + "\n", encoding="utf-8" + ) def _build_frontend(src_frontend: Path, dest_frontend: Path): @@ -1164,7 +1515,7 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): for c in (["yarn"], ["yarnpkg"]): if shutil.which(c[0]) and _good_yarn(c): return c - + # If npm exists, try to use it to run yarn if shutil.which("npm"): npm_ver = (run_out(["npm", "--version"], check=False) or "").strip() @@ -1172,14 +1523,14 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): # Try npm exec yarn@stable if _good_yarn(["npm", "exec", "--yes", "yarn@stable", "--"]): return ["npm", "exec", "--yes", "yarn@stable", "--"] - + # Try npx as fallback if shutil.which("npx"): npx_ver = (run_out(["npx", "--version"], check=False) or "").strip() if npx_ver: if _good_yarn(["npx", "-y", "yarn@stable"]): return ["npx", "-y", "yarn@stable"] - + return None def _ensure_yarn_installed(): @@ -1191,7 +1542,7 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): except Exception: run(["apt-get", "update"], check=False) run(["apt-get", "install", "-y", "npm"]) - + # Try corepack first (modern way) if shutil.which("corepack"): try: @@ -1201,7 +1552,7 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): return except Exception: pass - + # Fallback to npm install try: run(["npm", "install", "-g", "yarn@latest"]) @@ -1213,7 +1564,7 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): if not yarn_cmd: _ensure_yarn_installed() yarn_cmd = _pick_yarn_cmd() - + if not yarn_cmd: raise RuntimeError( "Unable to detect or install a valid Yarn.\n" @@ -1227,7 +1578,9 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): # Get and create cache directory try: - cache_dir = (run_out(yarn_cmd + ["cache", "dir"], check=False) or "").strip() + cache_dir = ( + run_out(yarn_cmd + ["cache", "dir"], check=False) or "" + ).strip() if cache_dir and not Path(cache_dir).exists(): Path(cache_dir).mkdir(parents=True, exist_ok=True) except Exception: @@ -1240,28 +1593,34 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): pass install_cmd = yarn_cmd + ["install"] - + if install_cmd[-1] == "--": install_cmd = install_cmd[:-1] - + if DEBUG: print(f"Running: {' '.join(install_cmd)}") - + try: run(install_cmd) except subprocess.CalledProcessError as e: - print(f"\n✖ Yarn install failed. Trying with --network-timeout and --ignore-engines...") - retry_cmd = install_cmd + ["--network-timeout", "100000", "--ignore-engines"] + print( + f"\n✖ Yarn install failed. Trying with --network-timeout and --ignore-engines..." 
+ ) + retry_cmd = install_cmd + [ + "--network-timeout", + "100000", + "--ignore-engines", + ] run(retry_cmd) with step("Building frontend (yarn build)"): env = os.environ.copy() env["NODE_OPTIONS"] = "--openssl-legacy-provider" - + build_cmd = yarn_cmd + ["build"] if build_cmd[-1] == "--": build_cmd = build_cmd[:-1] - + try: run(build_cmd, env=env) except subprocess.CalledProcessError: @@ -1272,7 +1631,12 @@ def _build_frontend(src_frontend: Path, dest_frontend: Path): with step("Copying frontend artifacts"): shutil.copytree(src_frontend / "dist", dest_frontend, dirs_exist_ok=True) if (src_frontend / "app-images").exists(): - shutil.copytree(src_frontend / "app-images", dest_frontend / "images", dirs_exist_ok=True) + shutil.copytree( + src_frontend / "app-images", + dest_frontend / "images", + dirs_exist_ok=True, + ) + def patch_npm_backend_commands(): candidates = [ @@ -1287,13 +1651,16 @@ def patch_npm_backend_commands(): txt = p.read_text(encoding="utf-8") except Exception: continue - new = re.sub(r'\blogrotate\b', '/usr/local/bin/logrotate-npm', txt) - new = re.sub(r'(?", f" ", html, flags=re.I) - html = re.sub(r"", f" ", html, flags=re.I) + html = re.sub( + r"", + f" ", + html, + flags=re.I, + ) + html = re.sub( + r"", + f" ", + html, + flags=re.I, + ) path.write_text(html, encoding="utf-8") if DEBUG: print(f"Patched: {path}") + # ========== MAIN ========== def main(): global DEBUG ensure_root() parser = argparse.ArgumentParser( description="Install/upgrade NPM on Angie (Debian 11 + / Ubuntu 20.04 +).", - formatter_class=argparse.ArgumentDefaultsHelpFormatter + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "--nodejs-pkg", + default="nodejs", + help="APT Node.js package name (e.g. nodejs, nodejs-18).", + ) + parser.add_argument( + "--node-version", + default=None, + help=f"Install Node.js from NodeSource repo (e.g. 'latest', '21', '20', '18'). " + f"Maximum supported: v{MAX_NODEJS_VERSION}. Overrides --nodejs-pkg.", + ) + parser.add_argument( + "--npm-version", + default=None, + help="Force NPM app version (e.g. 2.12.6). Default: latest release.", + ) + parser.add_argument( + "--motd", + choices=["yes", "no"], + default="yes", + help="Update MOTD after completion.", + ) + parser.add_argument( + "--enable-ipv6", + action="store_true", + help="Do not strip IPv6 from configs/resolvers (keep IPv6).", + ) + parser.add_argument( + "--update", + action="store_true", + help="Update mode: upgrade packages + rebuild frontend/backend without reconfiguring Angie.", + ) + parser.add_argument( + "--dark-mode", + action="store_true", + help=f"Enable dark theme (default: {TP_DEFAULT_THEME} from theme-park.dev)", + ) + parser.add_argument( + "--tp-theme", + default=None, + help="Enable dark theme with specific theme name (e.g. nord, dracula, plex). Implies --dark-mode.", + ) + parser.add_argument( + "--debug", action="store_true", help="Show detailed logs and progress." ) - parser.add_argument("--nodejs-pkg", default="nodejs", help="APT Node.js package name (e.g. nodejs, nodejs-18).") - parser.add_argument("--node-version", default=None, - help=f"Install Node.js from NodeSource repo (e.g. 'latest', '21', '20', '18'). " - f"Maximum supported: v{MAX_NODEJS_VERSION}. Overrides --nodejs-pkg.") - parser.add_argument("--npm-version", default=None, help="Force NPM app version (e.g. 2.12.6). 
Default: latest release.") - parser.add_argument("--motd", choices=["yes","no"], default="yes", help="Update MOTD after completion.") - parser.add_argument("--enable-ipv6", action="store_true", - help="Do not strip IPv6 from configs/resolvers (keep IPv6).") - parser.add_argument("--update", action="store_true", - help="Update mode: upgrade packages + rebuild frontend/backend without reconfiguring Angie.") - parser.add_argument("--dark-mode", action="store_true", - help=f"Enable dark theme (default: {TP_DEFAULT_THEME} from theme-park.dev)") - parser.add_argument("--tp-theme", default=None, - help="Enable dark theme with specific theme name (e.g. nord, dracula, plex). Implies --dark-mode.") - parser.add_argument("--debug", action="store_true", - help="Show detailed logs and progress.") args = parser.parse_args() DEBUG = args.debug @@ -1743,13 +2227,30 @@ def main(): validate_supported_os() apt_update_upgrade() - apt_purge(["nginx","openresty","nodejs","npm","yarn","certbot","rustc","cargo"]) - apt_install(["ca-certificates","curl","gnupg","openssl","apache2-utils","logrotate","sudo","acl", - "python3","sqlite3", "git", "lsb-release", "build-essential"]) + apt_purge( + ["nginx", "openresty", "nodejs", "npm", "yarn", "certbot", "rustc", "cargo"] + ) + apt_install( + [ + "ca-certificates", + "curl", + "gnupg", + "openssl", + "apache2-utils", + "logrotate", + "sudo", + "acl", + "python3", + "sqlite3", + "git", + "lsb-release", + "build-essential", + ] + ) setup_angie(ipv6_enabled=args.enable_ipv6) write_metrics_files() - ensure_minimum_nodejs() + ensure_minimum_nodejs(user_requested_version=args.node_version) install_node_and_yarn(node_pkg=args.nodejs_pkg, node_version=args.node_version) ensure_user_and_dirs() create_sudoers_for_npm() @@ -1779,7 +2280,7 @@ def main(): TP_DOMAIN=TP_DOMAIN, TP_COMMUNITY_THEME=TP_COMMUNITY_THEME, TP_SCHEME=TP_SCHEME, - TP_THEME=selected_theme + TP_THEME=selected_theme, ) create_systemd_units(ipv6_enabled=args.enable_ipv6) @@ -1789,20 +2290,17 @@ def main(): fix_logrotate_permissions_and_wrapper() sync_backup_nginx_conf() comment_x_served_by_step() - set_file_ownership( - ["/etc/nginx/conf.d/include/ip_ranges.conf"], - "npm:npm", - 0o664 - ) + set_file_ownership(["/etc/nginx/conf.d/include/ip_ranges.conf"], "npm:npm", 0o664) with step("Restarting services after installation"): - run(["systemctl","restart","angie.service"], check=False) - run(["systemctl","restart","npm.service"], check=False) + run(["systemctl", "restart", "angie.service"], check=False) + run(["systemctl", "restart", "npm.service"], check=False) info = gather_versions(npm_app_version) update_motd(args.motd == "yes", info, ipv6_enabled=args.enable_ipv6) print_summary(info, args.enable_ipv6, args.dark_mode, update_mode=False) + if __name__ == "__main__": signal.signal(signal.SIGINT, lambda s, f: sys.exit(130)) main()
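
Illustrative note (appended after the diff, not part of npm_install.py): a minimal, self-contained sketch of the Node.js version-resolution rule this patch introduces. The helper name resolve_node_major is hypothetical; the alias map and the bounds are taken from the patch itself (version_map in validate_nodejs_version, MIN_NODEJS_VERSION = 18, MAX_NODEJS_VERSION = 21), and the clamping mirrors how ensure_minimum_nodejs() handles a --node-version request that falls outside the supported window.

    # Hypothetical illustration only -- mirrors the patch's version_map and
    # clamping behaviour; it is not code from npm_install.py itself.
    import re

    MIN_NODEJS_VERSION = 18
    MAX_NODEJS_VERSION = 21

    def resolve_node_major(requested: str) -> int:
        """Return the Node.js major version the installer would pick for `requested`."""
        aliases = {"latest": "21", "lts": "18", "current": "21"}
        resolved = aliases.get(requested.lower(), requested)
        match = re.match(r"(\d+)", resolved)
        if not match:
            raise ValueError(f"Invalid version format: {requested}")
        major = int(match.group(1))
        # Clamp to the supported window, as ensure_minimum_nodejs() does
        # for user-requested versions.
        return min(max(major, MIN_NODEJS_VERSION), MAX_NODEJS_VERSION)

    if __name__ == "__main__":
        for v in ("lts", "latest", "16", "20", "24"):
            print(v, "->", resolve_node_major(v))
        # lts -> 18, latest -> 21, 16 -> 18, 20 -> 20, 24 -> 21

Under these assumptions, `--node-version 24` would be reduced to 21 with a warning, `--node-version 16` raised to 18, and omitting the flag lets the script prefer the distribution's nodejs package when its major version already meets the minimum, falling back to NodeSource otherwise.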