diff --git a/.gitignore b/.gitignore index 56392ab..3267812 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ .DS_Store /target +result +nix/releases.json.bak diff --git a/Cargo.toml b/Cargo.toml index f51f1d0..5a38032 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,6 +30,9 @@ tokio = { version = "1.46.1", features = ["full"] } tracing = { version = "0.1.41", features = ["log"] } whoami = "1.6.0" +[features] +nix-patchelf = [] + [dev-dependencies] assert_cmd = "2.0" mockall = "0.13.1" diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..bd62e9d --- /dev/null +++ b/flake.lock @@ -0,0 +1,82 @@ +{ + "nodes": { + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1759733170, + "narHash": "sha256-TXnlsVb5Z8HXZ6mZoeOAIwxmvGHp1g4Dw89eLvIwKVI=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "8913c168d1c56dc49a7718685968f38752171c3b", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1759804383, + "narHash": "sha256-jPz0K8xsT2eNSratkw8bfPwSlTuOXGeUvz+bd9wq/vY=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "dec08d5dfeca099b0058f0cc61264b04f33db42c", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": 
"sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..d282f68 --- /dev/null +++ b/flake.nix @@ -0,0 +1,325 @@ +{ + description = "Sui Tooling Version Manager"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + }; + + outputs = + { + self, + nixpkgs, + flake-utils, + rust-overlay, + }: + flake-utils.lib.eachDefaultSystem ( + system: + let + overlays = [ (import rust-overlay) ]; + pkgs = import nixpkgs { + inherit system overlays; + }; + + # Standalone releases: version -> hash mapping + # These are pre-built binaries that will be patched with Nix dependencies + # Update with: nix run .#update-releases + standaloneReleases = builtins.fromJSON (builtins.readFile ./nix/releases.json); + + rustToolchain = pkgs.rust-bin.stable.latest.default.override { + extensions = [ "rust-src" ]; + }; + + buildInputs = + with pkgs; + [ + openssl + pkg-config + ] + ++ lib.optionals stdenv.isDarwin [ + darwin.apple_sdk.frameworks.Security + darwin.apple_sdk.frameworks.SystemConfiguration + ]; + + nativeBuildInputs = with pkgs; [ + rustToolchain + pkg-config + ]; + + # These libraries will be added to the RPATH of the patched binary + runtimeLibs = with pkgs; [ + stdenv.cc.cc.lib # libstdc++.so.6, libgcc_s.so.1 + glibc # libc.so.6, libm.so.6, libpthread.so.0, libdl.so.2 + openssl # libssl.so, libcrypto.so (for reqwest with rustls-tls) + zlib # libz.so.1 (for flate2) + ]; + in + let + + # Build the library path string + patchData = (builtins.toJSON { + lib_path = "${(pkgs.lib.makeLibraryPath 
runtimeLibs)}"; + interpreter = "${pkgs.glibc}/lib/ld-linux-x86-64.so.2"; + }); + + # Import runtime dependencies configuration + #runtimeDeps = import ./nix-runtime-deps.nix { inherit pkgs; }; + + # Function to build suiup with optional patchelf + mkSuiup = + { + enablePatchelf ? false, + }: + pkgs.rustPlatform.buildRustPackage { + pname = "suiup"; + version = "0.0.4"; + + inherit buildInputs patchData; + + src = ./.; + # passAsFile = [ "patchData"]; + cargoLock = { + lockFile = ./Cargo.lock; + }; + + nativeBuildInputs = + nativeBuildInputs + ++ pkgs.lib.optionals enablePatchelf [ + pkgs.patchelf + ]; + + doCheck = false; + + passAsFile = [ "patchData" ]; + + # Enable the nix-patchelf feature when building with patchelf + buildFeatures = pkgs.lib.optionals enablePatchelf [ "nix-patchelf" ]; + + postPatch = pkgs.lib.optionalString enablePatchelf '' + substituteInPlace src/patchelf.rs \ + --replace-fail '"patchelf"' '"${pkgs.patchelf}/bin/patchelf"' \ + --replace-fail '/usr/share/suiup/nix-runtime-deps.json' "$out/share/suiup/nix-runtime-deps.json" + ''; + + # Install the runtime dependencies JSON file and patch suiup binary + postInstall = pkgs.lib.optionalString enablePatchelf '' + echo "Setting up Nix patchelf support..." 
+ + # Create the data directory for runtime deps config + mkdir -p $out/share/suiup + cp $patchDataPath $out/share/suiup/nix-runtime-deps.json; + ''; + + meta = with pkgs.lib; { + description = "Sui Tooling Version Manager"; + homepage = "https://github.com/Mystenlabs/suiup"; + license = licenses.asl20; + maintainers = [ ]; + mainProgram = "suiup"; + }; + }; + + # Function to create a patched standalone binary package + # This downloads a pre-built binary or .tgz and patches it using suiup's patchelf process + mkStandaloneBinary = + { binaryName + , version + , hash + , url + }: + let + # Determine if this is a .tgz archive + isTgz = pkgs.lib.hasSuffix ".tgz" url; + # Map package name to actual binary name in archive + # walrus-sites package contains site-builder binary + actualBinaryName = if binaryName == "walrus-sites" then "site-builder" else binaryName; + in + pkgs.stdenv.mkDerivation { + pname = binaryName; + inherit version; + + src = pkgs.fetchurl { + inherit url hash; + }; + + nativeBuildInputs = [ pkgs.patchelf ] + ++ pkgs.lib.optionals isTgz [ pkgs.gnutar pkgs.gzip ]; + + buildInputs = runtimeLibs; + + unpackPhase = if isTgz then '' + runHook preUnpack + tar -xzf $src + runHook postUnpack + '' else '' + runHook preUnpack + # For direct binaries, just copy the file + cp $src binary + runHook postUnpack + ''; + + dontBuild = true; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + + # Find the binary file + ${if isTgz then '' + # For .tgz archives, find and extract the binary + # The binary is typically at the root or in a bin directory + if [ -f ${actualBinaryName} ]; then + BINARY_PATH=${actualBinaryName} + elif [ -f bin/${actualBinaryName} ]; then + BINARY_PATH=bin/${actualBinaryName} + else + echo "Error: Could not find binary ${actualBinaryName} in archive" + find . 
-type f + exit 1 + fi + install -D -m755 "$BINARY_PATH" $out/bin/${binaryName} + '' else '' + # For direct binaries + install -D -m755 binary $out/bin/${binaryName} + ''} + + # Apply the same patching that suiup does + echo "Patching ${binaryName} binary..." + patchelf \ + --set-interpreter ${pkgs.glibc}/lib/ld-linux-x86-64.so.2 \ + --set-rpath ${pkgs.lib.makeLibraryPath runtimeLibs} \ + $out/bin/${binaryName} + + runHook postInstall + ''; + + meta = with pkgs.lib; { + description = "Patched ${binaryName} standalone binary"; + platforms = [ "x86_64-linux" ]; + mainProgram = binaryName; + }; + }; + + # Generate all standalone binary packages + standalonePackages = pkgs.lib.flatten ( + pkgs.lib.mapAttrsToList ( + binaryName: versions: + pkgs.lib.mapAttrsToList ( + version: releaseInfo: + let + # Handle both old format (string hash) and new format ({hash, url}) + hash = if builtins.isString releaseInfo then releaseInfo else releaseInfo.hash; + url = if builtins.isString releaseInfo + then "https://github.com/MystenLabs/${binaryName}/releases/download/${version}/${binaryName}-ubuntu-x86_64" + else releaseInfo.url; + in + pkgs.lib.nameValuePair "${binaryName}-${version}" ( + mkStandaloneBinary { + inherit binaryName version hash url; + } + ) + ) versions + ) standaloneReleases + ); + + # Helper function to get the latest mainnet release for a binary + # For tools with network prefixes (sui, walrus, walrus-sites), get mainnet version + # For tools without network prefixes (mvr), get the latest version + getLatestMainnet = binaryName: + let + versions = standaloneReleases.${binaryName} or {}; + # Try to get mainnet-prefixed versions first + mainnetVersions = pkgs.lib.filterAttrs (version: _: pkgs.lib.hasPrefix "mainnet-" version) versions; + # If no mainnet versions, use all versions (for tools like mvr) + candidateVersions = if mainnetVersions == {} then versions else mainnetVersions; + sortedVersions = builtins.sort (a: b: a > b) (builtins.attrNames candidateVersions); 
+      in
+      if sortedVersions == [] then null else builtins.head sortedVersions;
+
+      # Create standalone packages as an attrset first
+      standalonePackagesAttrs = builtins.listToAttrs standalonePackages;
+
+    in
+    {
+      packages =
+        {
+          # Default build with patchelf enabled
+          default = mkSuiup { enablePatchelf = true; };
+
+          # Aliases to latest mainnet releases
+          sui =
+            let latest = getLatestMainnet "sui";
+            in if latest != null then standalonePackagesAttrs."sui-${latest}" else throw "No mainnet sui release found";
+
+          mvr =
+            let latest = getLatestMainnet "mvr";
+            in if latest != null then standalonePackagesAttrs."mvr-${latest}" else throw "No mvr release found";
+
+          walrus =
+            let latest = getLatestMainnet "walrus";
+            in if latest != null then standalonePackagesAttrs."walrus-${latest}" else throw "No mainnet walrus release found";
+
+          walrus-sites =
+            let latest = getLatestMainnet "walrus-sites";
+            in if latest != null then standalonePackagesAttrs."walrus-sites-${latest}" else throw "No mainnet walrus-sites release found";
+        }
+        // standalonePackagesAttrs;
+
+      devShells.default = pkgs.mkShell {
+        inherit buildInputs;
+
+        nativeBuildInputs =
+          nativeBuildInputs
+          ++ (with pkgs; [
+            cargo-watch
+            rust-analyzer
+            patchelf
+          ]);
+
+        RUST_SRC_PATH = "${rustToolchain}/lib/rustlib/src/rust/library";
+
+        # Set up XDG_DATA_HOME to point to a local directory for development
+        shellHook = ''
+          export XDG_DATA_HOME="''${XDG_DATA_HOME:-$HOME/.local/share}"
+          echo "Nix development shell for suiup"
+          echo "XDG_DATA_HOME: $XDG_DATA_HOME"
+        '';
+      };
+
+      apps = {
+        default = {
+          type = "app";
+          program = "${self.packages.${system}.default}/bin/suiup";
+        };
+
+        update-releases = {
+          type = "app";
+          program = toString (pkgs.writeShellScript "update-releases" ''
+            set -e
+            export PATH="${pkgs.lib.makeBinPath [ pkgs.python3 pkgs.nix pkgs.git ]}:$PATH"
+
+            # Check if we're in a git repository
+            if ! 
${pkgs.git}/bin/git rev-parse --git-dir > /dev/null 2>&1; then + echo "Error: This command must be run from within the suiup git repository" + exit 1 + fi + + # Find the script in the nix directory + if [ -f "./nix/update-standalone-releases.py" ]; then + # Pass nix/releases.json as the file to update, forward any additional arguments (like --force) + exec ${pkgs.python3}/bin/python3 ./nix/update-standalone-releases.py nix/releases.json "$@" + else + echo "Error: nix/update-standalone-releases.py not found" + exit 1 + fi + ''); + }; + }; + } + ); +} diff --git a/nix/releases.json b/nix/releases.json new file mode 100644 index 0000000..33cea7f --- /dev/null +++ b/nix/releases.json @@ -0,0 +1,50 @@ +{ + "mvr": { + "v0.0.14": { + "hash": "sha256-qOmBBUCMp6hkhqFULoELT7Zn8SS4x6kD+YIt4WpvZJQ=", + "url": "https://github.com/MystenLabs/mvr/releases/download/v0.0.14/mvr-ubuntu-x86_64" + }, + "v0.0.13": { + "hash": "sha256-v+nIHxu6l0RYlJaDApMV33OjxX97zAe94EYEzgnqBlw=", + "url": "https://github.com/MystenLabs/mvr/releases/download/v0.0.13/mvr-ubuntu-x86_64" + }, + "v0.0.12": { + "hash": "sha256-wX12NzDGoKOoNfGBdIX9MJSnlmqbfBfyBWJJZoaRpTI=", + "url": "https://github.com/MystenLabs/mvr/releases/download/v0.0.12/mvr-ubuntu-x86_64" + } + }, + "sui": { + "testnet-v1.59.0": { + "hash": "sha256-fAsPS5ZuuM3EB5wRpIk3t/va3t77/KhpaVJC2iR2mAc=", + "url": "https://github.com/MystenLabs/sui/releases/download/testnet-v1.59.0/sui-testnet-v1.59.0-ubuntu-x86_64.tgz" + }, + "devnet-v1.59.0": { + "hash": "sha256-fAsPS5ZuuM3EB5wRpIk3t/va3t77/KhpaVJC2iR2mAc=", + "url": "https://github.com/MystenLabs/sui/releases/download/devnet-v1.59.0/sui-devnet-v1.59.0-ubuntu-x86_64.tgz" + }, + "mainnet-v1.58.3": { + "hash": "sha256-FzexVaaaB8SqgzczPxiziFaf2xVI5Ovc+iuLdck0APA=", + "url": "https://github.com/MystenLabs/sui/releases/download/mainnet-v1.58.3/sui-mainnet-v1.58.3-ubuntu-x86_64.tgz" + } + }, + "walrus": { + "testnet-v1.35.0": { + "hash": "sha256-/nI+o9RL9ellBLjGtA7EcpWVNQYLJnq4aNJ7puJGrGw=", + "url": 
"https://github.com/MystenLabs/walrus/releases/download/testnet-v1.35.0/walrus-testnet-v1.35.0-ubuntu-x86_64.tgz"
+    },
+    "mainnet-v1.34.2": {
+      "hash": "sha256-W+xE/zinHPnUND8Ag/SOW/QzGxJbGtKR0+Mg3XIKg3U=",
+      "url": "https://github.com/MystenLabs/walrus/releases/download/mainnet-v1.34.2/walrus-mainnet-v1.34.2-ubuntu-x86_64.tgz"
+    },
+    "devnet-v1.34.0": {
+      "hash": "sha256-bIA7a7VgKpnsur38KIAfA8+Ny+Tx1UR9qAxgy5sMBzU=",
+      "url": "https://github.com/MystenLabs/walrus/releases/download/devnet-v1.34.0/walrus-devnet-v1.34.0-ubuntu-x86_64.tgz"
+    }
+  },
+  "walrus-sites": {
+    "mainnet-v1.3.0": {
+      "hash": "sha256-4opJhaTqbJBVmWpkQKJxD2M0CWg1TfifIQF3Yh1dSpo=",
+      "url": "https://github.com/MystenLabs/walrus-sites/releases/download/mainnet-v1.3.0/site-builder-mainnet-v1.3.0-ubuntu-x86_64.tgz"
+    }
+  }
+}
diff --git a/nix/update-standalone-releases.py b/nix/update-standalone-releases.py
new file mode 100755
index 0000000..8304253
--- /dev/null
+++ b/nix/update-standalone-releases.py
@@ -0,0 +1,485 @@
+#!/usr/bin/env python3
+"""
+Update script to fetch latest standalone releases and update releases.json
+"""
+
+import json
+import subprocess
+import sys
+import tempfile
+import re
+import argparse
+import os
+from pathlib import Path
+from typing import Dict, List, Optional
+import urllib.request
+import urllib.error
+
+# Configuration
+# Note: Currently only mvr has standalone binaries with the naming pattern `mvr-ubuntu-x86_64`
+# Other tools use .tgz archives with different naming patterns:
+#   - walrus: walrus-testnet-vX.Y.Z-ubuntu-x86_64.tgz
+#   - walrus-sites: site-builder-mainnet-vX.Y.Z-ubuntu-x86_64.tgz
+#   - sui: sui-<network>-vX.Y.Z-ubuntu-x86_64.tgz
+REPOS = {
+    "mvr": "MystenLabs/mvr",
+    "sui": "MystenLabs/sui",
+    "walrus": "MystenLabs/walrus",
+    "walrus-sites": "MystenLabs/walrus-sites",
+}
+
+NUM_RELEASES = 1 # Number of releases to fetch per network/type
+
+
+# Global flag for color output (will be set by main())
+USE_COLOR = True
+
+
+# Colors for output 
+class Colors: + @staticmethod + def _color(code: str) -> str: + """Return color code if colors are enabled, empty string otherwise.""" + return code if USE_COLOR else "" + + @property + def RED(self) -> str: + return self._color("\033[0;31m") + + @property + def GREEN(self) -> str: + return self._color("\033[0;32m") + + @property + def YELLOW(self) -> str: + return self._color("\033[1;33m") + + @property + def BLUE(self) -> str: + return self._color("\033[0;34m") + + @property + def NC(self) -> str: + return self._color("\033[0m") + + +# Create a singleton instance +Colors = Colors() + + +def get_filename_pattern(binary: str) -> str: + """Get the filename pattern for a binary. + + Returns a pattern that will be matched against asset names. + For .tgz archives, we match the pattern within the filename. + """ + patterns = { + "mvr": "mvr-ubuntu-x86_64", # Direct binary: mvr-ubuntu-x86_64 + "sui": "sui-.*-ubuntu-x86_64.tgz", # Archive: sui-testnet-v1.59.0-ubuntu-x86_64.tgz + "walrus": "walrus-.*-ubuntu-x86_64.tgz", # Archive: walrus-testnet-v1.35.0-ubuntu-x86_64.tgz + "walrus-sites": "site-builder-.*-ubuntu-x86_64.tgz", # Archive: site-builder-mainnet-v1.3.0-ubuntu-x86_64.tgz + } + return patterns.get(binary, f"{binary}-ubuntu-x86_64") + + +def fetch_releases(binary: str, repo: str) -> List[Dict[str, str]]: + """Fetch releases from GitHub API.""" + print( + f"{Colors.BLUE}Fetching latest {NUM_RELEASES} releases for {binary}...{Colors.NC}", + file=sys.stderr, + ) + + url = f"https://api.github.com/repos/{repo}/releases" + filename_pattern = get_filename_pattern(binary) + + try: + req = urllib.request.Request(url) + req.add_header("User-Agent", "suiup-update-script") + + with urllib.request.urlopen(req) as response: + releases_data = json.loads(response.read()) + except urllib.error.URLError as e: + print( + f"{Colors.RED}Failed to fetch releases from GitHub: {e}{Colors.NC}", + file=sys.stderr, + ) + return [] + + # Filter releases that have the required asset + # 
Compile pattern as regex + pattern_re = re.compile(filename_pattern) + + filtered_releases = [] + + # For tools with network variants (sui, walrus, walrus-sites), try to get diverse networks + # Track which network types we've seen + network_counts = {} + + for release in releases_data[ + : NUM_RELEASES * 10 + ]: # Search more releases to find diversity + assets = release.get("assets", []) + matching_asset = None + + for asset in assets: + # Use regex match for .tgz patterns, exact match for direct binaries + if pattern_re.fullmatch(asset["name"]): + matching_asset = asset + break + + if matching_asset: + tag = release["tag_name"] + + # Extract network type from tag (e.g., "mainnet-v1.58.3" -> "mainnet") + network = tag.split("-")[0] if "-" in tag else "default" + + # For network-based releases, limit to NUM_RELEASES per network + if network != "default": + network_counts[network] = network_counts.get(network, 0) + 1 + if network_counts[network] > NUM_RELEASES: + continue + + filtered_releases.append( + { + "tag": tag, + "url": matching_asset["browser_download_url"], + "filename": matching_asset["name"], + } + ) + + # Stop if we have enough overall or enough per network + total_desired = NUM_RELEASES * 3 # Get up to 3 networks worth + if len(filtered_releases) >= total_desired: + break + + if not filtered_releases: + print( + f"{Colors.YELLOW}Warning: No releases found for {binary}{Colors.NC}", + file=sys.stderr, + ) + + return filtered_releases + + +def compute_hash(url: str) -> Optional[str]: + """Download a file and compute its Nix SRI hash.""" + try: + with tempfile.NamedTemporaryFile(delete=False) as tmp_file: + tmp_path = tmp_file.name + + # Download the file + req = urllib.request.Request(url) + req.add_header("User-Agent", "suiup-update-script") + + with urllib.request.urlopen(req) as response: + tmp_file.write(response.read()) + + # Compute SHA256 hash + result = subprocess.run( + ["nix-hash", "--type", "sha256", "--flat", tmp_path], + capture_output=True, 
+ text=True, + check=True, + ) + hash_hex = result.stdout.strip() + + # Convert to SRI format + result = subprocess.run( + [ + "nix", + "hash", + "convert", + "--hash-algo", + "sha256", + "--to", + "sri", + hash_hex, + ], + capture_output=True, + text=True, + check=True, + ) + sri_hash = result.stdout.strip() + + # Clean up + Path(tmp_path).unlink() + + return sri_hash + except Exception as e: + print(f"{Colors.RED}Failed to compute hash: {e}{Colors.NC}", file=sys.stderr) + return None + + +def generate_releases_for_binary(binary: str, repo: str) -> Dict[str, Dict[str, str]]: + """Generate version -> {hash, url} mapping for a binary.""" + releases = fetch_releases(binary, repo) + + if not releases: + return {} + + result = {} + for release in releases: + tag = release["tag"] + url = release["url"] + + print( + f"{Colors.GREEN} Processing {binary} {tag}...{Colors.NC}", file=sys.stderr + ) + + hash_value = compute_hash(url) + if not hash_value: + print( + f"{Colors.RED} Failed to compute hash for {tag}{Colors.NC}", + file=sys.stderr, + ) + continue + + print(f"{Colors.GREEN} Hash: {hash_value}{Colors.NC}", file=sys.stderr) + result[tag] = { + "hash": hash_value, + "url": url, + } + + return result + + +def cleanup_old_releases( + releases: Dict[str, Dict[str, str]], max_per_network: int = 10 +) -> tuple[Dict[str, Dict[str, str]], List[str]]: + """Keep only the latest N releases per network type. 
+ + Args: + releases: Dict of version -> release info + max_per_network: Maximum number of releases to keep per network (default: 10) + + Returns: + Tuple of (cleaned up releases dict, list of removed versions) + """ + # Group releases by network + network_groups: Dict[str, List[str]] = {} + for version in releases.keys(): + # Extract network from version (e.g., "mainnet-v1.58.3" -> "mainnet") + network = version.split("-")[0] if "-" in version else "default" + if network not in network_groups: + network_groups[network] = [] + network_groups[network].append(version) + + # Keep only latest releases per network + versions_to_keep = set() + removed_versions = [] + for network, versions in network_groups.items(): + # Sort versions in descending order (latest first) + sorted_versions = sorted(versions, reverse=True) + # Keep only the latest max_per_network + kept = sorted_versions[:max_per_network] + removed = sorted_versions[max_per_network:] + versions_to_keep.update(kept) + removed_versions.extend(removed) + + # Return filtered releases and list of removed versions + return { + v: r for v, r in releases.items() if v in versions_to_keep + }, removed_versions + + +def main(): + """Main script execution.""" + parser = argparse.ArgumentParser( + description="Update standalone releases JSON with latest GitHub releases", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + %(prog)s # Update releases.json in current directory + %(prog)s nix/releases.json # Update specific file + %(prog)s --force # Re-download all releases + %(prog)s --max-releases 5 # Keep only 5 latest per network + %(prog)s --no-color # Disable colored output + """, + ) + parser.add_argument( + "file", + nargs="?", + default="releases.json", + help="Path to releases.json file to update (default: %(default)s)", + ) + parser.add_argument( + "--force", + action="store_true", + help="Re-download and recompute hashes for existing releases", + ) + parser.add_argument( + 
"--max-releases", + type=int, + default=10, + metavar="N", + help="Maximum number of releases to keep per network (default: %(default)s)", + ) + parser.add_argument( + "--no-color", + action="store_true", + help="Disable colored output (also honors NO_COLOR environment variable)", + ) + args = parser.parse_args() + + # Set color output based on --no-color flag or NO_COLOR environment variable + global USE_COLOR + USE_COLOR = not args.no_color and not os.environ.get("NO_COLOR") + + print( + f"{Colors.GREEN}=== Updating Standalone Releases ==={Colors.NC}", + file=sys.stderr, + ) + print("", file=sys.stderr) + + releases_json_path = Path(args.file) + + # Create parent directory if it doesn't exist + releases_json_path.parent.mkdir(parents=True, exist_ok=True) + + # Load existing releases + existing_releases = {} + if releases_json_path.exists(): + # Create backup + backup_path = releases_json_path.with_suffix(".json.bak") + backup_path.write_text(releases_json_path.read_text()) + print( + f"{Colors.YELLOW} Backup saved to {backup_path}{Colors.NC}", + file=sys.stderr, + ) + + try: + existing_releases = json.loads(releases_json_path.read_text()) + except json.JSONDecodeError: + print( + f"{Colors.YELLOW}Warning: Could not parse existing {releases_json_path}{Colors.NC}", + file=sys.stderr, + ) + + print("", file=sys.stderr) + + # Build updated releases structure + new_releases = {} + # Track changes for summary + changes: Dict[str, Dict[str, List[str]]] = {} + + for binary in ["mvr", "sui", "walrus", "walrus-sites"]: + if binary not in REPOS: + continue + + # Start with existing releases for this binary + existing_binary_releases = existing_releases.get(binary, {}) + changes[binary] = {"added": [], "removed": []} + + if not args.force and existing_binary_releases: + print( + f"{Colors.BLUE}Using existing {binary} releases (use --force to re-download){Colors.NC}", + file=sys.stderr, + ) + # Still apply cleanup to existing releases + cleaned_releases, removed = 
cleanup_old_releases( + existing_binary_releases, args.max_releases + ) + new_releases[binary] = cleaned_releases + changes[binary]["removed"] = removed + else: + # Fetch new releases and merge with existing + fetched_releases = generate_releases_for_binary(binary, REPOS[binary]) + + # Track newly added versions + for version in fetched_releases.keys(): + if version not in existing_binary_releases: + changes[binary]["added"].append(version) + + # Merge: prefer fetched (new) releases, but keep old ones not in new list + merged_releases = existing_binary_releases.copy() + merged_releases.update(fetched_releases) + + # Cleanup: keep only latest N per network + cleaned_releases, removed = cleanup_old_releases( + merged_releases, args.max_releases + ) + new_releases[binary] = cleaned_releases + changes[binary]["removed"] = removed + + # Write the updated JSON + releases_json_path.write_text(json.dumps(new_releases, indent=2) + "\n") + + print("", file=sys.stderr) + print( + f"{Colors.GREEN}✓ Updated {releases_json_path} successfully{Colors.NC}", + file=sys.stderr, + ) + if releases_json_path.with_suffix(".json.bak").exists(): + print( + f"{Colors.YELLOW} Backup saved to {releases_json_path.with_suffix('.json.bak')}{Colors.NC}", + file=sys.stderr, + ) + + # Display changes summary + print("", file=sys.stderr) + print(f"{Colors.GREEN}=== Changes Summary ==={Colors.NC}", file=sys.stderr) + has_changes = False + for binary, change_info in changes.items(): + if change_info["added"] or change_info["removed"]: + has_changes = True + print(f"\n{Colors.BLUE}{binary}:{Colors.NC}", file=sys.stderr) + if change_info["added"]: + print(f" {Colors.GREEN}Added:{Colors.NC}", file=sys.stderr) + for version in sorted(change_info["added"]): + print(f" + {version}", file=sys.stderr) + if change_info["removed"]: + print(f" {Colors.YELLOW}Removed:{Colors.NC}", file=sys.stderr) + for version in sorted(change_info["removed"]): + print(f" - {version}", file=sys.stderr) + + if not has_changes: + 
print(f" {Colors.YELLOW}No changes{Colors.NC}", file=sys.stderr) + + # Display all available versions per component + print("", file=sys.stderr) + print(f"{Colors.GREEN}=== Available Versions ==={Colors.NC}", file=sys.stderr) + for binary in ["mvr", "sui", "walrus", "walrus-sites"]: + versions = new_releases.get(binary, {}) + if versions: + print( + f"\n{Colors.BLUE}{binary}:{Colors.NC} ({len(versions)} versions)", + file=sys.stderr, + ) + # Group by network + network_groups: Dict[str, List[str]] = {} + for version in versions.keys(): + network = version.split("-")[0] if "-" in version else "default" + if network not in network_groups: + network_groups[network] = [] + network_groups[network].append(version) + + # Display grouped by network + for network in sorted(network_groups.keys()): + network_versions = sorted(network_groups[network], reverse=True) + print(f" {network}: {', '.join(network_versions)}", file=sys.stderr) + + print("", file=sys.stderr) + print(f"{Colors.GREEN}Example commands:{Colors.NC}", file=sys.stderr) + print( + f"{Colors.BLUE} nix build '.#sui' # Build latest mainnet sui{Colors.NC}", + file=sys.stderr, + ) + print( + f"{Colors.BLUE} nix build '.#walrus' # Build latest mainnet walrus{Colors.NC}", + file=sys.stderr, + ) + print( + f"{Colors.BLUE} nix run .#update-releases # Update releases{Colors.NC}", + file=sys.stderr, + ) + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print(f"\n{Colors.YELLOW}Interrupted by user{Colors.NC}", file=sys.stderr) + sys.exit(1) + except Exception as e: + print(f"{Colors.RED}Error: {e}{Colors.NC}", file=sys.stderr) + sys.exit(1) diff --git a/src/commands/mod.rs b/src/commands/mod.rs index 45af5a3..00d21e2 100644 --- a/src/commands/mod.rs +++ b/src/commands/mod.rs @@ -6,6 +6,8 @@ mod default; mod doctor; mod install; mod list; +#[cfg(feature = "nix-patchelf")] +mod patch; mod remove; mod self_; mod show; @@ -51,6 +53,8 @@ pub enum Commands { Update(update::Command), 
Which(which::Command), Cleanup(cleanup::Command), + #[cfg(feature = "nix-patchelf")] + Patch(patch::Command), } impl Command { @@ -72,6 +76,8 @@ impl Command { Commands::Update(cmd) => cmd.exec(&self.github_token).await, Commands::Which(cmd) => cmd.exec(), Commands::Cleanup(cmd) => cmd.exec(&self.github_token).await, + #[cfg(feature = "nix-patchelf")] + Commands::Patch(cmd) => cmd.exec(), } } } diff --git a/src/commands/patch.rs b/src/commands/patch.rs new file mode 100644 index 0000000..247ab58 --- /dev/null +++ b/src/commands/patch.rs @@ -0,0 +1,27 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::{anyhow, Result}; +use clap::Args; +use std::path::PathBuf; + +/// Patch a binary with Nix runtime dependencies (Linux only). +#[derive(Args, Debug)] +pub struct Command { + /// Path to the binary to patch + #[arg(value_name = "BINARY")] + pub binary: PathBuf, +} + +impl Command { + pub fn exec(&self) -> Result<()> { + use crate::patchelf::patch_binary; + + if !self.binary.exists() { + return Err(anyhow!("Binary not found: {}", self.binary.display())); + } + + patch_binary(&self.binary)?; + Ok(()) + } +} diff --git a/src/handlers/install.rs b/src/handlers/install.rs index af50f14..efb3a36 100644 --- a/src/handlers/install.rs +++ b/src/handlers/install.rs @@ -1,7 +1,7 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 -use std::path::PathBuf; +use std::path::Path; use std::process::{Command, Stdio}; use super::check_if_binaries_exist; @@ -23,7 +23,7 @@ pub fn install_binary( network: String, version: &str, debug: bool, - binary_path: PathBuf, + binary_path: &Path, yes: bool, ) -> Result<(), Error> { let mut installed_binaries = InstalledBinaries::new()?; @@ -72,7 +72,14 @@ pub async fn install_from_release( let binary_filename = format!("{}.exe", binary_filename); let binary_path = binaries_dir().join(network).join(binary_filename); - install_binary(name, network.to_string(), &version, debug, binary_path, yes)?; + install_binary( + name, + network.to_string(), + &version, + debug, + &binary_path, + yes, + )?; } else { println!("Binary {name}-{version} already installed. Use `suiup default set` to change the default binary."); } @@ -156,7 +163,7 @@ pub async fn install_from_nightly( branch.to_string(), "nightly", debug, - dst, + &dst, yes, )?; @@ -183,12 +190,21 @@ pub async fn install_standalone( let binary_path = binaries_dir() .join(&network) .join(format!("{}-{}", binary_name, installed_version)); + #[cfg(feature = "nix-patchelf")] + { + if let Err(e) = crate::patchelf::patch_binary(&binary_path) { + println!("Warning: Failed to patch binary with patchelf: {}", e); + println!( + "The binary may not work correctly. Ensure nix-runtime-deps.json is installed." 
+ ); + } + } install_binary( &binary_name, network, &installed_version, false, - binary_path, + &binary_path, yes, )?; } else { diff --git a/src/handlers/mod.rs b/src/handlers/mod.rs index 889b0cd..33f7b14 100644 --- a/src/handlers/mod.rs +++ b/src/handlers/mod.rs @@ -314,7 +314,7 @@ fn extract_component(orig_binary: &str, network: String, filename: &str) -> Resu { // Retrieve and apply the original file permissions on Unix-like systems if let Ok(permissions) = f.header().mode() { - set_permissions(output_path, PermissionsExt::from_mode(permissions)).map_err( + set_permissions(&output_path, PermissionsExt::from_mode(permissions)).map_err( |e| { anyhow!( "Cannot apply the original file permissions in a unix system: {e}" @@ -323,6 +323,16 @@ fn extract_component(orig_binary: &str, network: String, filename: &str) -> Resu )?; } } + + // Apply patchelf if the feature is enabled + #[cfg(feature = "nix-patchelf")] + { + if let Err(e) = crate::patchelf::patch_binary(&output_path) { + println!("Warning: Failed to patch binary with patchelf: {}", e); + println!("The binary may not work correctly. Ensure nix-runtime-deps.json is installed."); + } + } + break; } } diff --git a/src/lib.rs b/src/lib.rs index a1fb9de..2004b64 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -8,3 +8,6 @@ pub mod handlers; pub mod paths; pub mod standalone; pub mod types; + +#[cfg(feature = "nix-patchelf")] +pub mod patchelf; diff --git a/src/patchelf.rs b/src/patchelf.rs new file mode 100644 index 0000000..627f9d6 --- /dev/null +++ b/src/patchelf.rs @@ -0,0 +1,113 @@ +// Copyright (c) Mysten Labs, Inc. 
+// SPDX-License-Identifier: Apache-2.0
+
+use anyhow::{anyhow, Result};
+use serde::Deserialize;
+use std::path::Path;
+use std::process::Command;
+
+/// Default path to the nix-runtime-deps.json file
+const DEFAULT_PATCHELF_CONFIG: &str = "/usr/share/suiup/nix-runtime-deps.json";
+
+/// Patchelf executable name
+const PATCHELF_EXECUTABLE: &str = "patchelf";
+
+#[derive(Debug, Deserialize)]
+pub struct NixRuntimeDeps {
+    pub interpreter: String,
+    pub lib_path: String,
+}
+
+/// Load the Nix runtime dependencies from a JSON file
+/// This file path is specified via the SUIUP_PATCHELF_CONFIG environment variable,
+/// or falls back to the default path
+pub fn load_nix_runtime_deps() -> Result<NixRuntimeDeps> {
+    let config_path = std::env::var("SUIUP_PATCHELF_CONFIG")
+        .unwrap_or_else(|_| DEFAULT_PATCHELF_CONFIG.to_string());
+
+    let config_path = Path::new(&config_path);
+
+    if !config_path.exists() {
+        return Err(anyhow!(
+            "Nix runtime dependencies config not found at {}. Set SUIUP_PATCHELF_CONFIG environment variable or ensure the file exists at the default location.",
+            config_path.display()
+        ));
+    }
+
+    let content = std::fs::read_to_string(config_path).map_err(|e| {
+        anyhow!(
+            "Failed to read json dependencies file: {} {}",
+            config_path.display(),
+            e
+        )
+    })?;
+    let deps: NixRuntimeDeps = serde_json::from_str(&content)?;
+    Ok(deps)
+}
+
+/// Patch a binary with patchelf using the Nix runtime dependencies
+pub fn patch_binary(binary_path: &Path) -> Result<()> {
+    #[cfg(not(target_os = "linux"))]
+    {
+        // patchelf is only relevant on Linux
+        return Ok(());
+    }
+
+    #[cfg(target_os = "linux")]
+    {
+        if !binary_path.exists() {
+            return Err(anyhow!("Binary not found: {}", binary_path.display()));
+        }
+
+        let deps = load_nix_runtime_deps()?;
+
+        println!("Patching binary: {}", binary_path.display());
+
+        // Set interpreter and rpath
+        let status = Command::new(PATCHELF_EXECUTABLE)
+            .arg("--set-interpreter")
+            .arg(&deps.interpreter)
+            .arg("--set-rpath")
+            .arg(&deps.lib_path)
+            
.arg(binary_path) + .status() + .map_err(|e| { + anyhow!( + "Failed to run {} (is it installed?): {}", + PATCHELF_EXECUTABLE, + e + ) + })?; + + if !status.success() { + return Err(anyhow!( + "Failed to set interpreter / rpath with {}", + PATCHELF_EXECUTABLE + )); + } + + println!("✓ Binary patched successfully"); + println!(" Interpreter: {}", deps.interpreter); + println!(" RPATH: {}", deps.lib_path); + + Ok(()) + } +} + +/// Check if patchelf is available in the system +#[allow(dead_code)] +pub fn is_patchelf_available() -> bool { + #[cfg(not(target_os = "linux"))] + { + false + } + + #[cfg(target_os = "linux")] + { + Command::new(PATCHELF_EXECUTABLE) + .arg("--version") + .output() + .map(|output| output.status.success()) + .unwrap_or(false) + } +}