diff --git a/.beads/ai_cli_preparation.db b/.beads/ai_cli_preparation.db deleted file mode 100644 index aaa667e..0000000 Binary files a/.beads/ai_cli_preparation.db and /dev/null differ diff --git a/.env.default b/.env.default index 51ba118..504277f 100644 --- a/.env.default +++ b/.env.default @@ -34,10 +34,3 @@ CLI_AUDIT_SORT=order # Docker info detection (disable if it hangs in your environment) CLI_AUDIT_DOCKER_INFO=1 - -# Installation strategy for tools -# CURRENT: Keep tools where they are currently installed (default) -# USER: Install to ~/.local/bin (user-local, no sudo) -# GLOBAL: Install to /usr/local/bin (system-wide, requires sudo) -# PROJECT: Install to ./.local/bin (project-local) -INSTALL_STRATEGY=USER diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6372d18..9572580 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,7 +70,7 @@ jobs: pytest tests/integration -v --cov=cli_audit --cov-append --cov-report=xml --cov-report=term - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: file: ./coverage.xml flags: unittests diff --git a/.gitignore b/.gitignore index 85f0227..0bf8bf3 100644 --- a/.gitignore +++ b/.gitignore @@ -8,9 +8,6 @@ __pycache__/ .env/ venv/ -# Environment variables (local config) -.env - # Packaging / build build/ dist/ @@ -37,7 +34,3 @@ node_modules/ # AI agent session context (not committed) claudedocs/ - -# Runtime directories -config/ -logs/ diff --git a/.python-version b/.python-version index f982feb..6324d40 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.14.0 +3.14 diff --git a/Makefile b/Makefile index fd9e0e6..6f25d5d 100644 --- a/Makefile +++ b/Makefile @@ -4,68 +4,93 @@ PYTHON ?= python3 -include .env.default -include .env -# Export all loaded Make variables to environment for subprocesses -export - -.PHONY: user-help help audit audit-offline audit-% audit-offline-% update upgrade guide \ - test test-unit test-integration test-coverage test-watch test-failed \ - lint lint-code lint-types lint-security format format-check \ - install install-dev install-core install-python install-node install-go \ - install-aws install-kubectl install-terraform install-ansible install-docker \ - install-brew install-rust upgrade-% uninstall-% reconcile-% \ - build build-dist build-wheel check-dist publish publish-test publish-prod \ - clean clean-build clean-test clean-pyc clean-all \ - scripts-perms audit-auto detect-managers upgrade-managed upgrade-dry-run \ - upgrade-managed-system upgrade-managed-user upgrade-project-deps upgrade-managed-all \ - upgrade-managed-system-only upgrade-managed-skip-system bootstrap init \ - upgrade-all upgrade-all-dry-run check-path fix-path check-python-managers check-node-managers - -# ============================================================================ -# HELP & OVERVIEW -# ============================================================================ - -.DEFAULT_GOAL := user-help - -user-help: ## Show user commands only (default) - @echo "" - @echo "AI CLI Preparation - User Commands" - @echo "===================================" - @echo "" - @awk 'BEGIN{FS=":.*##"; section=""} \ - /^## / {section=$$0; gsub(/^## /, "", section)} \ - /^[a-zA-Z0-9_-]+:.*##/ && section=="USER" {printf " \033[36m%-22s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" - @echo "\033[90mRun '\033[0m\033[1mmake help\033[0m\033[90m' for development and maintenance commands.\033[0m" - @echo "" - -help: ## Show complete help with all 
commands
-	@echo ""
-	@echo "AI CLI Preparation - Makefile Commands"
-	@echo "======================================"
-	@echo ""
-	@echo "USER COMMANDS (Application Functionality):"
-	@echo "-------------------------------------------"
-	@awk 'BEGIN{FS=":.*##"; section=""} \
-		/^## / {section=$$0; gsub(/^## /, "", section)} \
-		/^[a-zA-Z0-9_-]+:.*##/ && section=="USER" {printf "  \033[36m%-22s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
-	@echo ""
-	@echo "DEVELOPMENT COMMANDS (Build, Test, Quality):"
-	@echo "---------------------------------------------"
-	@awk 'BEGIN{FS=":.*##"; section=""} \
-		/^## / {section=$$0; gsub(/^## /, "", section)} \
-		/^[a-zA-Z0-9_-]+:.*##/ && section=="DEV" {printf "  \033[33m%-22s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
-	@echo ""
-	@echo "MAINTENANCE COMMANDS (Package, Deploy, Clean):"
-	@echo "-----------------------------------------------"
-	@awk 'BEGIN{FS=":.*##"; section=""} \
-		/^## / {section=$$0; gsub(/^## /, "", section)} \
-		/^[a-zA-Z0-9_-]+:.*##/ && section=="MAINT" {printf "  \033[35m%-22s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
-	@echo ""
-
-# ============================================================================
-# MODULAR INCLUDES
-# ============================================================================
-
-include Makefile.d/user.mk
-include Makefile.d/dev.mk
-include Makefile.d/maint.mk
+.PHONY: audit audit-offline audit-% audit-offline-% lint fmt help update audit-auto upgrade guide scripts-perms
+
+help: ## Show available targets
+	@awk 'BEGIN{FS=":.*##";print "\nUsage: make <target>\n"} /^[a-zA-Z0-9_.-]+:.*##/ {printf "  %-22s %s\n", $$1, $$2}' $(MAKEFILE_LIST)
+
+audit: ## Render audit from snapshot (no network, <100ms)
+	@bash -c 'set -o pipefail; CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py | \
+		$(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true
+
+audit-offline: ## Offline audit with hints (fast local scan)
+	@bash -c 'set -o pipefail; CLI_AUDIT_OFFLINE=1 CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py | \
+		$(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true
+
+audit-%: scripts-perms ## Audit single tool (e.g., make audit-ripgrep)
+	@bash -c 'set -o pipefail; CLI_AUDIT_RENDER=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py --only $* | \
+		$(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true
+
+audit-offline-%: scripts-perms ## Offline audit subset (e.g., make audit-offline-python-core)
+	@bash -c 'set -o pipefail; CLI_AUDIT_OFFLINE=1 CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py --only $* | \
+		$(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true
+
+update: ## Collect fresh data and write snapshot (~10s)
+	@bash -c 'set -o pipefail; CLI_AUDIT_COLLECT=1 CLI_AUDIT_DEBUG=1 CLI_AUDIT_PROGRESS=1 $(PYTHON) cli_audit.py' || true
+
+SNAP_FILE?=$(shell python3 -c "import os;print(os.environ.get('CLI_AUDIT_SNAPSHOT_FILE','tools_snapshot.json'))")
+
+audit-auto: ## Update snapshot if missing, then render
+	@if [ ! -f "$(SNAP_FILE)" ]; then \
-f "$(SNAP_FILE)" ]; then \ + echo "# snapshot missing: $(SNAP_FILE); running update..."; \ + CLI_AUDIT_COLLECT=1 CLI_AUDIT_DEBUG=1 CLI_AUDIT_PROGRESS=1 $(PYTHON) cli_audit.py || true; \ + fi; \ + CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py | \ + $(PYTHON) smart_column.py -s "|" -t --right 3,5 --header || true + +upgrade: scripts-perms ## Run interactive upgrade guide + @bash scripts/guide.sh + +guide: upgrade ## Alias for upgrade (deprecated) + +lint: ## Run pyflakes lint checks + @command -v pyflakes >/dev/null 2>&1 && pyflakes cli_audit.py || echo "pyflakes not installed; skipping" + +fmt: ## Format code (placeholder) + @echo "Nothing to format" + +scripts-perms: ## Ensure scripts are executable + chmod +x scripts/*.sh || true + chmod +x scripts/lib/*.sh || true + +install-core: scripts-perms ## Install core tools (fd, fzf, ripgrep, jq, yq, bat, delta, just) + ./scripts/install_core.sh + +install-python: scripts-perms ## Install Python toolchain via uv + ./scripts/install_python.sh + +install-node: scripts-perms ## Install Node.js via nvm + ./scripts/install_node.sh + +install-go: scripts-perms ## Install Go runtime + ./scripts/install_go.sh + +install-aws: scripts-perms ## Install AWS CLI + ./scripts/install_aws.sh + +install-kubectl: scripts-perms ## Install Kubernetes CLI + ./scripts/install_kubectl.sh + +install-terraform: scripts-perms ## Install Terraform + ./scripts/install_terraform.sh + +install-ansible: scripts-perms ## Install Ansible + ./scripts/install_ansible.sh + +install-docker: scripts-perms ## Install Docker + ./scripts/install_docker.sh + +install-brew: scripts-perms ## Install Homebrew (macOS/Linux) + ./scripts/install_brew.sh + +install-rust: scripts-perms ## Install Rust via rustup + ./scripts/install_rust.sh + +update-%: scripts-perms ## Update tool (e.g., make update-python) + ./scripts/install_$*.sh update + +uninstall-%: scripts-perms ## Uninstall tool (e.g., make uninstall-python) + ./scripts/install_$*.sh uninstall + +reconcile-%: scripts-perms ## Reconcile tool installation (e.g., make reconcile-node) + ./scripts/install_$*.sh reconcile diff --git a/Makefile.d/dev.mk b/Makefile.d/dev.mk deleted file mode 100644 index 07aa396..0000000 --- a/Makefile.d/dev.mk +++ /dev/null @@ -1,63 +0,0 @@ -# ============================================================================ -# DEVELOPMENT COMMANDS - Testing, Linting, Formatting -# ============================================================================ -## DEV - -test: ## Run all tests - $(PYTHON) -m pytest - -test-unit: ## Run unit tests only - $(PYTHON) -m pytest tests/ -k "not integration" - -test-integration: ## Run integration tests only - $(PYTHON) -m pytest tests/integration/ - -test-coverage: ## Run tests with coverage report - $(PYTHON) -m pytest --cov=cli_audit --cov-report=term --cov-report=html - -test-coverage-xml: ## Run tests with XML coverage (for CI) - $(PYTHON) -m pytest --cov=cli_audit --cov-report=xml --cov-report=term - -test-watch: ## Run tests in watch mode (requires pytest-watch) - $(PYTHON) -m pytest_watch - -test-failed: ## Re-run only failed tests - $(PYTHON) -m pytest --lf - -test-verbose: ## Run tests with verbose output - $(PYTHON) -m pytest -vv -s - -test-parallel: ## Run tests in parallel (requires pytest-xdist) - $(PYTHON) -m pytest -n auto - -lint: lint-code lint-types lint-security ## Run all linting checks - -lint-code: ## Run flake8 code linting - @echo "→ Running flake8..." 
- @$(PYTHON) -m flake8 cli_audit tests || echo "flake8 checks failed" - -lint-types: ## Run mypy type checking - @echo "→ Running mypy..." - @$(PYTHON) -m mypy cli_audit || echo "mypy checks failed" - -lint-security: ## Run bandit security checks - @echo "→ Running bandit..." - @$(PYTHON) -m bandit -r cli_audit -ll || echo "bandit checks failed" - -format: ## Format code with black and isort - @echo "→ Running black..." - @$(PYTHON) -m black cli_audit tests - @echo "→ Running isort..." - @$(PYTHON) -m isort cli_audit tests - -format-check: ## Check code formatting without changes - @echo "→ Checking black..." - @$(PYTHON) -m black --check cli_audit tests - @echo "→ Checking isort..." - @$(PYTHON) -m isort --check-only cli_audit tests - -install: ## Install package in editable mode - $(PYTHON) -m pip install -e . - -install-dev: ## Install package with development dependencies - $(PYTHON) -m pip install -e ".[dev]" diff --git a/Makefile.d/maint.mk b/Makefile.d/maint.mk deleted file mode 100644 index c9dd4f6..0000000 --- a/Makefile.d/maint.mk +++ /dev/null @@ -1,50 +0,0 @@ -# ============================================================================ -# MAINTENANCE COMMANDS - Build, Package, Deploy, Clean -# ============================================================================ -## MAINT - -build: clean-build ## Build source and wheel distributions - $(PYTHON) -m build - -build-dist: build ## Alias for build - -build-wheel: clean-build ## Build wheel distribution only - $(PYTHON) -m build --wheel - -check-dist: build ## Check distribution for PyPI compatibility - $(PYTHON) -m twine check dist/* - -publish-test: check-dist ## Publish to TestPyPI - $(PYTHON) -m twine upload --repository testpypi dist/* - -publish-prod: check-dist ## Publish to production PyPI - $(PYTHON) -m twine upload dist/* - -clean: clean-build clean-test clean-pyc ## Remove all build, test, and Python artifacts - -clean-build: ## Remove build artifacts - rm -rf build/ - rm -rf dist/ - rm -rf *.egg-info - rm -rf .eggs/ - -clean-test: ## Remove test and coverage artifacts - rm -rf .pytest_cache/ - rm -rf .coverage - rm -rf htmlcov/ - rm -rf .mypy_cache/ - rm -rf coverage.xml - -clean-pyc: ## Remove Python file artifacts - find . -type f -name '*.py[co]' -delete - find . -type d -name '__pycache__' -delete - find . -type d -name '*.egg-info' -exec rm -rf {} + || true - -clean-all: clean ## Remove all artifacts including virtual environments - rm -rf .venv/ - rm -rf venv/ - rm -rf .tox/ - -scripts-perms: ## Ensure scripts are executable - @chmod +x scripts/*.sh 2>/dev/null || true - @chmod +x scripts/lib/*.sh 2>/dev/null || true diff --git a/Makefile.d/user.mk b/Makefile.d/user.mk deleted file mode 100644 index bff40b3..0000000 --- a/Makefile.d/user.mk +++ /dev/null @@ -1,172 +0,0 @@ -# ============================================================================ -# USER COMMANDS - Application Functionality -# ============================================================================ -## USER - -audit: ## Render audit from snapshot (no network, <100ms) - @bash -c ' \ - SNAP_FILE=$${CLI_AUDIT_SNAPSHOT_FILE:-tools_snapshot.json}; \ - CACHE_MAX_AGE_HOURS=$${CACHE_MAX_AGE_HOURS:-24}; \ - if [ ! -f "$$SNAP_FILE" ]; then \ - echo "⚠️ Warning: Snapshot cache missing ($$SNAP_FILE)" >&2; \ - echo " Run '\''make update'\'' first to populate the cache." 
>&2; \ - else \ - now=$$(date +%s); \ - snap_time=$$(stat -c %Y "$$SNAP_FILE" 2>/dev/null || stat -f %m "$$SNAP_FILE" 2>/dev/null || echo 0); \ - age_seconds=$$((now - snap_time)); \ - age_hours=$$((age_seconds / 3600)); \ - if [ $$age_hours -gt $$CACHE_MAX_AGE_HOURS ]; then \ - echo "⚠️ Warning: Snapshot cache is $${age_hours} hours old (threshold: $${CACHE_MAX_AGE_HOURS}h)" >&2; \ - echo " Consider running '\''make update'\'' for fresh version data." >&2; \ - fi; \ - fi; \ - set -o pipefail; CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py | \ - $(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true - -audit-offline: ## Offline audit with hints (fast local scan) - @bash -c 'set -o pipefail; CLI_AUDIT_OFFLINE=1 CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py | \ - $(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true - -audit-%: scripts-perms ## Audit single tool (e.g., make audit-ripgrep) - @bash -c 'set -o pipefail; CLI_AUDIT_RENDER=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py --only $* | \ - $(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true - -audit-offline-%: scripts-perms ## Offline audit subset (e.g., make audit-offline-python-core) - @bash -c 'set -o pipefail; CLI_AUDIT_OFFLINE=1 CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py --only $* | \ - $(PYTHON) smart_column.py -s "|" -t --right 3,5 --header' || true - -SNAP_FILE?=$(shell python3 -c "import os;print(os.environ.get('CLI_AUDIT_SNAPSHOT_FILE','tools_snapshot.json'))") - -audit-auto: ## Update snapshot if missing, then render - @if [ ! -f "$(SNAP_FILE)" ]; then \ - echo "# snapshot missing: $(SNAP_FILE); running update..."; \ - CLI_AUDIT_COLLECT=1 CLI_AUDIT_DEBUG=1 CLI_AUDIT_PROGRESS=1 $(PYTHON) cli_audit.py || true; \ - fi; \ - CLI_AUDIT_RENDER=1 CLI_AUDIT_GROUP=0 CLI_AUDIT_HINTS=1 CLI_AUDIT_LINKS=1 CLI_AUDIT_EMOJI=1 $(PYTHON) cli_audit.py | \ - $(PYTHON) smart_column.py -s "|" -t --right 3,5 --header || true - -update: ## Collect fresh version data with network calls and update snapshot (~10s) - @echo "→ Collecting fresh version data from upstream sources..." >&2 - @bash -c 'set -o pipefail; CLI_AUDIT_COLLECT=1 CLI_AUDIT_TIMINGS=1 $(PYTHON) cli_audit.py' || true - @echo "✓ Snapshot updated. Run 'make audit' or 'make upgrade' to use it." >&2 - @echo "" >&2 - @echo "→ Running system health checks..." 
>&2 - @$(MAKE) check-path || true - @$(MAKE) check-python-managers || true - @$(MAKE) check-node-managers || true - -update-debug: ## Collect with verbose debug output (shows network calls) - @bash -c 'set -o pipefail; CLI_AUDIT_COLLECT=1 CLI_AUDIT_DEBUG=1 CLI_AUDIT_TIMINGS=1 $(PYTHON) cli_audit.py' || true - -upgrade: scripts-perms ## Run interactive upgrade guide (uses snapshot, no network calls) - @bash scripts/guide.sh - -guide: upgrade ## Alias for upgrade (deprecated) - -install-core: scripts-perms ## Install core tools (fd, fzf, ripgrep, jq, yq, bat, delta, just) - ./scripts/install_core.sh - -install-python: scripts-perms ## Install Python toolchain via uv - ./scripts/install_python.sh - -install-node: scripts-perms ## Install Node.js via nvm - ./scripts/install_node.sh - -install-go: scripts-perms ## Install Go runtime - ./scripts/install_go.sh - -install-aws: scripts-perms ## Install AWS CLI - ./scripts/install_aws.sh - -install-kubectl: scripts-perms ## Install Kubernetes CLI - ./scripts/install_kubectl.sh - -install-terraform: scripts-perms ## Install Terraform - ./scripts/install_terraform.sh - -install-ansible: scripts-perms ## Install Ansible - ./scripts/install_ansible.sh - -install-docker: scripts-perms ## Install Docker - ./scripts/install_docker.sh - -install-brew: scripts-perms ## Install Homebrew (macOS/Linux) - ./scripts/install_brew.sh - -install-rust: scripts-perms ## Install Rust via rustup - ./scripts/install_rust.sh - -upgrade-%: scripts-perms ## Upgrade tool (e.g., make upgrade-python) - ./scripts/install_$*.sh update - -uninstall-%: scripts-perms ## Uninstall tool (e.g., make uninstall-python) - ./scripts/install_$*.sh uninstall - -reconcile-pip-to-uv: scripts-perms ## Migrate user pip packages to UV tools - @./scripts/reconcile_pip_to_uv.sh - -reconcile-pipx-to-uv: scripts-perms ## Migrate pipx tools to UV - @./scripts/reconcile_pipx_to_uv.sh - -reconcile-%: scripts-perms ## Reconcile tool installation (e.g., make reconcile-node) - ./scripts/install_$*.sh reconcile - -detect-managers: scripts-perms ## Detect all installed package managers - ./scripts/auto_update.sh detect - -upgrade-managed: scripts-perms ## Upgrade all package managers and their packages - SCOPE=all ./scripts/auto_update.sh update - -upgrade-dry-run: scripts-perms ## Preview what would be upgraded without making changes - SCOPE=all ./scripts/auto_update.sh --dry-run update - -upgrade-managed-system-only: scripts-perms ## Upgrade only system package managers (apt, brew, snap, flatpak) - @bash -c './scripts/auto_update.sh apt && ./scripts/auto_update.sh brew && ./scripts/auto_update.sh snap && ./scripts/auto_update.sh flatpak' || true - -upgrade-managed-skip-system: scripts-perms ## Upgrade all package managers except system ones - ./scripts/auto_update.sh --skip-system update - -upgrade-managed-system: scripts-perms ## Upgrade only system-scoped packages (requires sudo) - SCOPE=system ./scripts/auto_update.sh update - -upgrade-managed-user: scripts-perms ## Upgrade only user-scoped packages (no sudo) - SCOPE=user ./scripts/auto_update.sh update - -upgrade-project-deps: scripts-perms ## Upgrade project dependencies (with confirmation) - SCOPE=project ./scripts/auto_update.sh update - -upgrade-managed-all: scripts-perms ## Upgrade system + user scopes (skip project) - SCOPE=all ./scripts/auto_update.sh update - -bootstrap: scripts-perms ## Initialize system (install Python if needed, setup environment) - @echo "→ Bootstrapping ai_cli_preparation..." >&2 - @bash -c ' \ - if ! 
command -v python3 >/dev/null 2>&1; then \ - echo "⚠️ Python not found. Installing..." >&2; \ - ./scripts/install_python.sh || exit 1; \ - fi; \ - py_version=$$(python3 --version 2>&1 | sed "s/Python //"); \ - echo "✓ Python $$py_version available" >&2; \ - $(MAKE) check-path || $(MAKE) fix-path; \ - $(MAKE) update; \ - echo "✓ Bootstrap complete. Run '\''make audit'\'' to see installed tools." >&2' - -init: bootstrap ## Alias for bootstrap - -upgrade-all: scripts-perms ## Complete system upgrade: update data → upgrade managers → upgrade tools - @bash scripts/upgrade_all.sh - -upgrade-all-dry-run: scripts-perms ## Preview complete system upgrade without making changes - @DRY_RUN=1 bash scripts/upgrade_all.sh - -check-path: scripts-perms ## Check PATH configuration for package managers - @bash -c "source scripts/lib/path_check.sh && check_all_paths" - -fix-path: scripts-perms ## Fix PATH configuration issues automatically - @bash -c "source scripts/lib/path_check.sh && fix_all_paths" - -check-python-managers: ## Check for multiple Python package managers and recommend consolidation - @bash scripts/check_python_package_managers.sh - -check-node-managers: ## Check for multiple Node.js package managers and recommend consolidation - @bash scripts/check_node_package_managers.sh diff --git a/README.md b/README.md index 44359ce..faf3288 100644 --- a/README.md +++ b/README.md @@ -534,202 +534,6 @@ make reconcile-node make reconcile-rust ``` -## Auto-Update: Package Manager Detection and Updates - -The `auto-update` feature automatically detects all installed package managers and runs their built-in update/upgrade tools. This is a comprehensive way to keep your entire development environment up-to-date. - -### Supported Package Managers - -**System Package Managers:** -- apt (Debian/Ubuntu) -- Homebrew (macOS/Linux) -- Snap -- Flatpak - -**Language-Specific Package Managers:** -- Cargo (Rust) + Rustup -- UV (Python) -- Pipx (Python) -- Pip (Python) -- NPM (Node.js) -- PNPM (Node.js) -- Yarn (Node.js) -- Go (binaries) -- RubyGems - -### Quick Start - -```bash -# Detect all installed package managers -make auto-update-detect - -# Update all package managers and their packages -make auto-update - -# Preview what would be updated (dry-run) -make auto-update-dry-run - -# Update only system package managers (apt, brew, snap, flatpak) -make auto-update-system-only - -# Update all except system package managers -make auto-update-skip-system -``` - -### Advanced Usage - -The `scripts/auto_update.sh` script can be called directly for fine-grained control: - -```bash -# Show detected package managers -./scripts/auto_update.sh detect - -# Update all package managers -./scripts/auto_update.sh update - -# Update specific package manager only -./scripts/auto_update.sh cargo -./scripts/auto_update.sh npm -./scripts/auto_update.sh brew - -# Dry-run mode (show what would be updated) -./scripts/auto_update.sh --dry-run update - -# Verbose output -./scripts/auto_update.sh --verbose update - -# Skip system package managers -./scripts/auto_update.sh --skip-system update - -# Environment variable control -DRY_RUN=1 ./scripts/auto_update.sh update -VERBOSE=1 ./scripts/auto_update.sh update -SKIP_SYSTEM=1 ./scripts/auto_update.sh update -``` - -### What Gets Updated - -Each package manager updates itself and all packages it manages: - -**APT:** Updates package lists and upgrades all installed packages -```bash -sudo apt-get update && sudo apt-get upgrade -y -``` - -**Homebrew:** Updates package index and upgrades all 
formulae/casks -```bash -brew update && brew upgrade && brew cleanup -``` - -**Cargo:** Updates Rust toolchain via rustup and upgrades all cargo-installed binaries -```bash -rustup update -cargo install-update -a # requires cargo-update -``` - -**UV:** Self-updates UV and upgrades all UV-managed tools -```bash -uv self update -uv tool upgrade -``` - -**Pipx:** Updates pipx itself and all pipx-installed packages -```bash -pip3 install --user --upgrade pipx -pipx upgrade-all -``` - -**NPM:** Updates npm itself and all global packages -```bash -npm install -g npm@latest -npm update -g -``` - -**PNPM:** Updates pnpm (via corepack) and global packages -```bash -corepack prepare pnpm@latest --activate -pnpm update -g -``` - -**Yarn:** Updates yarn (via corepack) -```bash -corepack prepare yarn@stable --activate -``` - -**RubyGems:** Updates gem system and all installed gems -```bash -gem update --system -gem update -gem cleanup -``` - -### Workflow Recommendations - -**Daily Development Workflow:** -```bash -# Quick check what's available -make auto-update-detect - -# Preview updates without making changes -make auto-update-dry-run - -# Apply updates to everything -make auto-update -``` - -**CI/CD or Scripting:** -```bash -# Silent updates with environment variables -VERBOSE=0 ./scripts/auto_update.sh update - -# Update only user-level tools (skip system packages) -SKIP_SYSTEM=1 ./scripts/auto_update.sh update -``` - -**Selective Updates:** -```bash -# Update only Rust ecosystem -./scripts/auto_update.sh cargo - -# Update only Node.js ecosystem -./scripts/auto_update.sh npm -./scripts/auto_update.sh pnpm -./scripts/auto_update.sh yarn - -# Update only Python ecosystem -./scripts/auto_update.sh uv -./scripts/auto_update.sh pipx -``` - -### Integration with Existing Workflow - -The auto-update feature complements the existing audit/upgrade workflow: - -```bash -# 1. Update version snapshot from upstream sources -make update - -# 2. Review what needs updating -make audit - -# 3. Run interactive upgrade for specific tools -make upgrade - -# 4. Auto-update all package managers and their packages -make auto-update - -# 5. Verify everything is up-to-date -make audit -``` - -### Notes - -- System package managers (apt, brew, snap, flatpak) require appropriate permissions (sudo) -- The auto-update process is designed to be safe and non-destructive -- Use `--dry-run` to preview changes before applying them -- Some package managers (like Go) don't have built-in bulk update mechanisms - manual updates are required -- The script gracefully handles missing package managers (skips them) - ## Caching - Manual baseline (committed): `latest_versions.json` in this repo (override with `CLI_AUDIT_MANUAL_FILE`). Used as the primary source in offline mode; also used as a fallback when online lookups fail. Example content: diff --git a/catalog/CATALOG_SUMMARY.md b/catalog/CATALOG_SUMMARY.md deleted file mode 100644 index 33bc589..0000000 --- a/catalog/CATALOG_SUMMARY.md +++ /dev/null @@ -1,95 +0,0 @@ -# Catalog Summary - -Total catalog files: 45 - -## Recently Added Tools (10) - -### GitHub Release Binary Tools (6) - -1. **git-lfs** - Git extension for versioning large files - - Download pattern: tar.gz with version in filename - - Architectures: x86_64 (amd64), aarch64 (arm64), armv7l (arm) - - URL: `https://github.com/git-lfs/git-lfs/releases/download/{version}/git-lfs-linux-{arch}-{version}.tar.gz` - -2. 
**git-absorb** - git commit --fixup, but automatic - - Download pattern: tar.gz with musl Linux build - - Architectures: x86_64 only (musl target) - - URL: `https://github.com/tummychow/git-absorb/releases/download/{version_nov}/git-absorb-{version_nov}-x86_64-unknown-linux-musl.tar.gz` - - Note: Limited ARM support with different target triple - -3. **git-branchless** - High-velocity, monorepo-scale workflow for Git - - Download pattern: tar.gz with musl Linux build - - Architectures: x86_64 only - - URL: `https://github.com/arxanas/git-branchless/releases/download/{version}/git-branchless-{version}-x86_64-unknown-linux-musl.tar.gz` - -4. **direnv** - Unclutter your .profile with environment switcher - - Download pattern: Direct binary (not an archive) - - Architectures: x86_64 (amd64), aarch64 (arm64), armv7l (arm) - - URL: `https://github.com/direnv/direnv/releases/download/{version}/direnv.linux-{arch}` - -5. **golangci-lint** - Fast linters runner for Go - - Download pattern: tar.gz with version in filename - - Architectures: x86_64 (amd64), aarch64 (arm64), armv7l (armv6) - - URL: `https://github.com/golangci/golangci-lint/releases/download/{version}/golangci-lint-{version_nov}-linux-{arch}.tar.gz` - -6. **ninja** - Small build system with a focus on speed - - Download pattern: zip files - - Architectures: x86_64 (no suffix), aarch64 (-aarch64 suffix) - - URL: `https://github.com/ninja-build/ninja/releases/download/{version}/ninja-linux{arch_suffix}.zip` - - Note: x86_64 uses `ninja-linux.zip`, arm64 uses `ninja-linux-aarch64.zip` - -### NPM Global Tool (1) - -7. **prettier** - Opinionated code formatter - - Install method: npm global - - Package: prettier - - Homepage: https://prettier.io - -### UV Tool (1) - -8. **ansible** - Radically simple IT automation - - Install method: uv tool - - Package: ansible - - Note: Also handled by install_ansible.sh script with fallback to pipx - -### Script-Based Installation (1) - -9. **parallel** - GNU Parallel - shell tool for executing jobs in parallel - - Install method: Custom script required - - Source: GNU FTP server (ftp://ftp.gnu.org/gnu/parallel/) - - Note: Not distributed via GitHub releases - -### Package Manager Installation (1) - -10. **entr** - Run arbitrary commands when files change - - Install method: System package manager - - Packages: entr (apt, dnf, pacman, brew) - - Homepage: http://eradman.com/entrproject/ - - Note: No GitHub releases; use system package manager - -## Installation Method Distribution - -- **github_release_binary**: 35 tools -- **uv_tool**: 6 tools -- **npm_global**: 1 tool -- **script**: 2 tools -- **package_manager**: 1 tool - -## Architecture Support Notes - -Most tools support: -- x86_64 (amd64) -- aarch64 (arm64) -- armv7l (arm/armv6) - -Exceptions: -- git-absorb: x86_64 only (musl build) -- git-branchless: x86_64 only -- Tools installed via uv/npm/package-manager: Architecture handled by installer - -## URL Template Variables - -- `{version}`: Full version string (e.g., v3.7.0) -- `{version_nov}`: Version without 'v' prefix (e.g., 3.7.0) -- `{arch}`: Architecture string from arch_map -- `{arch_suffix}`: Optional architecture suffix (ninja specific) diff --git a/catalog/COVERAGE.md b/catalog/COVERAGE.md deleted file mode 100644 index 7745162..0000000 --- a/catalog/COVERAGE.md +++ /dev/null @@ -1,65 +0,0 @@ -# Catalog Coverage - -This file documents which tools have catalog entries and which use dedicated install scripts. 
-
-## Tools with Catalog Entries (54)
-
-These tools use the catalog-based installation system with generic installers:
-
-- ansible, ast-grep, aws, bandit, bat, black, codex, composer, curlie, dasel
-- delta, direnv, dive, entr, fd, flake8, fx, fzf, gem, gh, git-absorb
-- git-branchless, git-lfs, gitleaks, glab, golangci-lint, httpie, isort, just
-- kubectl, ninja, npm, parallel, pip, pipx, pnpm, poetry, pre-commit, prettier
-- rga, ripgrep, ruff, sd, semgrep, shellcheck, shfmt, sponge, terraform, tfsec
-- trivy, watchexec, xsv, yarn, yq
-
-## Tools with Dedicated Install Scripts
-
-### Runtime Environments
-These have their own complex installers in `scripts/`:
-- **go** - `install_go.sh`
-- **rust** - `install_rust.sh`
-- **python** - `install_python.sh`
-- **node** - `install_node.sh`
-
-### Package Managers
-Most now in catalog, one dedicated script:
-- **uv** - `install_uv.sh` (special bootstrap installer)
-- All others (pip, pipx, npm, pnpm, yarn, gem, composer, poetry, sponge) - Now in catalog!
-
-### Docker Tools
-- **docker** - `install_docker.sh` (uses official Docker install script)
-- **docker-compose** - Typically installed with Docker
-
-### System Tools
-- **git** - System package (apt/dnf/brew)
-- **ctags** - System package
-- **sponge** - Part of moreutils package
-- **prename** - System package (Perl rename)
-- **rename.ul** - System package (util-linux rename)
-
-### Other
-- **gam** - Google Apps Manager (special installation)
-- **claude** - Claude CLI (special installation)
-- **ansible-core** - Subset of ansible package
-- **eslint** - Node.js package (installed via npm)
-
-## Installation Method Distribution
-
-- **github_release_binary**: 31 tools
-- **uv_tool**: 8 tools (Python CLI tools)
-- **package_manager**: 10 tools (pip, pipx, poetry, npm, pnpm, yarn, gem, composer, sponge, entr)
-- **hashicorp_zip**: 1 tool (terraform)
-- **aws_installer**: 1 tool (aws)
-- **npm_global**: 1 tool (prettier)
-- **script**: 1 tool (parallel)
-- **dedicated_script**: 10 tools (runtimes: go, rust, python, node; special: uv, docker, git, ctags, gam)
-- **system_package**: 2 tools (cscope, rename variants)
-
-## Total: 69 tools tracked
-
-- **54 tools** have catalog entries
-- **10 tools** use dedicated scripts (runtimes + special cases)
-- **5 tools** are system packages only
-
-All installable tools either have catalog entries or use appropriate dedicated scripts.
diff --git a/catalog/README.md b/catalog/README.md
deleted file mode 100644
index b8770d5..0000000
--- a/catalog/README.md
+++ /dev/null
@@ -1,105 +0,0 @@
-# Tool Installation Catalog
-
-This directory contains installation metadata for all development tools managed by this project.
-
-## Structure
-
-Each tool has its own JSON file: `catalog/<tool>.json`
-
-## Installation Methods
-
-### `github_release_binary`
-Download and install binary from GitHub releases.
-
-**Required fields:**
-- `binary_name`: Name of the binary executable
-- `download_url_template`: URL template with `{version}`, `{os}`, `{arch}` placeholders
-
-**Optional fields:**
-- `github_repo`: GitHub repository (owner/name) for version lookup
-- `version_url`: Direct URL to fetch latest version string
-- `fallback_url_template`: Alternative download URL if primary fails
-- `arch_map`: Architecture name mappings (e.g., `{"x86_64": "amd64"}`)
-
-**Example:** `kubectl.json`, `fd.json`, `ripgrep.json`
-
-### `hashicorp_zip`
-Download and install HashiCorp products from releases.hashicorp.com.
-
-**Required fields:**
-- `product_name`: HashiCorp product name (terraform, vault, consul, etc.)
-- `binary_name`: Name of the binary executable
-- `github_repo`: GitHub repository for version lookup
-
-**Optional fields:**
-- `arch_map`: Architecture name mappings
-
-**Example:** `terraform.json`
-
-### `aws_installer`
-Install AWS CLI using official installer.
-
-**Required fields:**
-- `installer_url`: URL to AWS CLI installer zip
-- `binary_name`: Name of the binary (usually "aws")
-
-**Example:** `aws.json`
-
-### `uv_tool`
-Install Python tools via `uv tool install`.
-
-**Required fields:**
-- `package_name`: PyPI package name
-
-**Example:** `semgrep.json`, `ruff.json`, `black.json`
-
-### `package_manager`
-Install tools via system package managers (apt, brew, dnf, pacman).
-
-**Required fields:**
-- `binary_name`: Name of the binary executable
-- `packages`: Object with package names per manager
-
-**Optional fields:**
-- `notes`: Installation notes (e.g., "comes with Python")
-
-**Example:** `pipx.json`, `yarn.json`, `sponge.json`
-
-## Adding a New Tool
-
-1. Create `catalog/<tool>.json` with appropriate metadata
-2. The tool will automatically use the generic installer for its method
-3. No need to create a custom install script!
-
-## Environment Variables
-
-- `INSTALL_STRATEGY`: Where to install tools (USER, GLOBAL, CURRENT, PROJECT)
-  - `USER` (default): Install to ~/.local/bin
-  - `GLOBAL`: Install to /usr/local/bin (requires sudo)
-  - `CURRENT`: Keep tool where currently installed
-  - `PROJECT`: Install to ./.local/bin
-
-## Usage
-
-```bash
-# Install a tool
-scripts/install_tool.sh kubectl
-
-# With custom strategy
-INSTALL_STRATEGY=GLOBAL scripts/install_tool.sh terraform
-
-# Via guide.sh (interactive upgrade)
-make upgrade
-```
-
-## Migration Status
-
-Tools with catalog entries use the new system. Tools without catalog entries fall back to legacy `install_core.sh`.
- -**Migrated:** -- kubectl -- terraform -- aws -- semgrep - -**To migrate:** Add catalog entries for remaining tools from `install_core.sh` diff --git a/catalog/ansible.json b/catalog/ansible.json deleted file mode 100644 index 440998c..0000000 --- a/catalog/ansible.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "ansible", - "install_method": "uv_tool", - "description": "Radically simple IT automation", - "homepage": "https://github.com/ansible/ansible", - "package_name": "ansible", - "notes": "Installation handled by install_ansible.sh which uses uv tool or pipx" - ,"guide": { - "display_name": "Ansible", - "install_action": "update", - "order": 205 - } -} diff --git a/catalog/ast-grep.json b/catalog/ast-grep.json deleted file mode 100644 index b1df8e0..0000000 --- a/catalog/ast-grep.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "ast-grep", - "install_method": "github_release_binary", - "description": "A CLI tool for code structural search, lint and rewriting", - "homepage": "https://github.com/ast-grep/ast-grep", - "github_repo": "ast-grep/ast-grep", - "binary_name": "ast-grep", - "download_url_template": "https://github.com/ast-grep/ast-grep/releases/download/{version}/app-{arch}-unknown-linux-gnu.zip", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } -} diff --git a/catalog/aws.json b/catalog/aws.json deleted file mode 100644 index 6b4a417..0000000 --- a/catalog/aws.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "aws", - "install_method": "aws_installer", - "description": "AWS Command Line Interface", - "homepage": "https://aws.amazon.com/cli/", - "installer_url": "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip", - "binary_name": "aws" - ,"guide": { - "display_name": "AWS CLI", - "install_action": "install", - "order": 202 - } -} diff --git a/catalog/bandit.json b/catalog/bandit.json deleted file mode 100644 index 52aca8b..0000000 --- a/catalog/bandit.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "bandit", - "install_method": "uv_tool", - "description": "Security-oriented static analyzer for Python code to find common security issues", - "homepage": "https://github.com/PyCQA/bandit", - "package_name": "bandit" -} diff --git a/catalog/bat.json b/catalog/bat.json deleted file mode 100644 index 1c7f407..0000000 --- a/catalog/bat.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "bat", - "install_method": "auto", - "description": "A cat clone with syntax highlighting and Git integration", - "homepage": "https://github.com/sharkdp/bat", - "github_repo": "sharkdp/bat", - "binary_name": "bat", - "download_url_template": "https://github.com/sharkdp/bat/releases/download/{version}/bat-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - }, - "available_methods": [ - { - "method": "github_release_binary", - "priority": 1, - "config": { - "repo": "sharkdp/bat", - "asset_pattern": "bat-.*-x86_64-unknown-linux-musl.tar.gz" - } - }, - { - "method": "cargo", - "priority": 2, - "config": { - "crate": "bat" - } - }, - { - "method": "apt", - "priority": 3, - "config": { - "package": "bat" - } - } - ], - "requires": [], - "tags": ["core"] -} diff --git a/catalog/black.json b/catalog/black.json deleted file mode 100644 index 1a1781e..0000000 --- a/catalog/black.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "black", - "install_method": "uv_tool", - "description": "The uncompromising Python code formatter", - "homepage": "https://github.com/psf/black", - "package_name": 
"black" -} diff --git a/catalog/codex.json b/catalog/codex.json deleted file mode 100644 index 196b847..0000000 --- a/catalog/codex.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "codex", - "install_method": "uv_tool", - "description": "CLI tool for generating and managing code documentation", - "homepage": "https://github.com/codex-cli/codex", - "package_name": "codex", - "python_version": "3.13", - "notes": "Requires Python 3.13 or earlier due to pkgutil.get_loader() removal in Python 3.14" -} diff --git a/catalog/composer.json b/catalog/composer.json deleted file mode 100644 index 8bd4602..0000000 --- a/catalog/composer.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "composer", - "install_method": "package_manager", - "description": "PHP dependency manager", - "homepage": "https://getcomposer.org/", - "binary_name": "composer", - "packages": { - "apt": "composer", - "brew": "composer", - "dnf": "composer", - "pacman": "composer" - } -} diff --git a/catalog/ctags.json b/catalog/ctags.json deleted file mode 100644 index 05fbf5a..0000000 --- a/catalog/ctags.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "ctags", - "install_method": "package_manager", - "description": "Universal Ctags - multilanguage implementation of ctags", - "homepage": "https://github.com/universal-ctags/ctags", - "binary_name": "ctags", - "packages": { - "apt": "universal-ctags", - "brew": "universal-ctags" - }, - "pinned_version": "5.9.0" -} diff --git a/catalog/curlie.json b/catalog/curlie.json deleted file mode 100644 index 6f9bbad..0000000 --- a/catalog/curlie.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "curlie", - "install_method": "github_release_binary", - "description": "The power of curl, the ease of use of httpie", - "homepage": "https://github.com/rs/curlie", - "github_repo": "rs/curlie", - "binary_name": "curlie", - "download_url_template": "https://github.com/rs/curlie/releases/download/{version}/curlie_{version_nov}_linux_{arch}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv6" - } -} diff --git a/catalog/dasel.json b/catalog/dasel.json deleted file mode 100644 index 1866a8d..0000000 --- a/catalog/dasel.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "dasel", - "install_method": "github_release_binary", - "description": "Query and update data structures from the command line", - "homepage": "https://github.com/TomWright/dasel", - "github_repo": "TomWright/dasel", - "binary_name": "dasel", - "download_url_template": "https://github.com/TomWright/dasel/releases/download/{version}/dasel_linux_{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm7" - } -} diff --git a/catalog/delta.json b/catalog/delta.json deleted file mode 100644 index 51ff1fc..0000000 --- a/catalog/delta.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "delta", - "install_method": "github_release_binary", - "description": "A syntax-highlighting pager for git, diff, and grep output", - "homepage": "https://github.com/dandavison/delta", - "github_repo": "dandavison/delta", - "binary_name": "delta", - "download_url_template": "https://github.com/dandavison/delta/releases/download/{version}/delta-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } - ,"tags": ["core"] -} diff --git a/catalog/direnv.json b/catalog/direnv.json deleted file mode 100644 index de684a7..0000000 --- a/catalog/direnv.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "direnv", - "install_method": 
"github_release_binary", - "description": "Unclutter your .profile with environment switcher", - "homepage": "https://github.com/direnv/direnv", - "github_repo": "direnv/direnv", - "binary_name": "direnv", - "download_url_template": "https://github.com/direnv/direnv/releases/download/{version}/direnv.linux-{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm" - }, - "notes": "Direct binary download, not an archive" -} diff --git a/catalog/dive.json b/catalog/dive.json deleted file mode 100644 index d94283d..0000000 --- a/catalog/dive.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "dive", - "install_method": "github_release_binary", - "description": "A tool for exploring each layer in a docker image", - "homepage": "https://github.com/wagoodman/dive", - "github_repo": "wagoodman/dive", - "binary_name": "dive", - "download_url_template": "https://github.com/wagoodman/dive/releases/download/{version}/dive_{version_nov}_linux_{arch}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm" - } -} diff --git a/catalog/docker.json b/catalog/docker.json deleted file mode 100644 index bbd2690..0000000 --- a/catalog/docker.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "docker", - "install_method": "dedicated_script", - "description": "Container platform for building and running applications", - "homepage": "https://www.docker.com", - "binary_name": "docker", - "script": "install_docker.sh", - "guide": { - "display_name": "Docker CLI", - "install_action": "install", - "order": 200 - }, - "pinned_version": "29.0.0" -} diff --git a/catalog/entr.json b/catalog/entr.json deleted file mode 100644 index 2f4ffdd..0000000 --- a/catalog/entr.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "entr", - "install_method": "package_manager", - "description": "Run arbitrary commands when files change", - "homepage": "http://eradman.com/entrproject/", - "package_managers": { - "apt": "entr", - "dnf": "entr", - "pacman": "entr", - "brew": "entr" - }, - "notes": "entr does not publish GitHub releases; install via system package manager", - "pinned_version": "5.5" -} diff --git a/catalog/fd.json b/catalog/fd.json deleted file mode 100644 index 223ed41..0000000 --- a/catalog/fd.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "fd", - "install_method": "auto", - "description": "A simple, fast and user-friendly alternative to find", - "homepage": "https://github.com/sharkdp/fd", - "github_repo": "sharkdp/fd", - "binary_name": "fd", - "download_url_template": "https://github.com/sharkdp/fd/releases/download/{version}/fd-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - }, - "available_methods": [ - { - "method": "github_release_binary", - "priority": 1, - "config": { - "repo": "sharkdp/fd", - "asset_pattern": "fd-.*-x86_64-unknown-linux-musl.tar.gz" - } - }, - { - "method": "cargo", - "priority": 2, - "config": { - "crate": "fd-find" - } - }, - { - "method": "apt", - "priority": 3, - "config": { - "package": "fd-find" - } - } - ], - "requires": [], - "tags": ["core", "search", "file-utils"] -} diff --git a/catalog/flake8.json b/catalog/flake8.json deleted file mode 100644 index e375522..0000000 --- a/catalog/flake8.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "flake8", - "install_method": "uv_tool", - "description": "Python style guide enforcement tool combining PyFlakes, pycodestyle, and McCabe complexity checker", - "homepage": "https://github.com/PyCQA/flake8", - 
"package_name": "flake8" -} diff --git a/catalog/fx.json b/catalog/fx.json deleted file mode 100644 index ca64677..0000000 --- a/catalog/fx.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "fx", - "install_method": "github_release_binary", - "description": "Terminal JSON viewer and processor", - "homepage": "https://github.com/antonmedv/fx", - "github_repo": "antonmedv/fx", - "binary_name": "fx", - "download_url_template": "https://github.com/antonmedv/fx/releases/download/{version}/fx_linux_{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv6" - } -} diff --git a/catalog/fzf.json b/catalog/fzf.json deleted file mode 100644 index 5c4d1d6..0000000 --- a/catalog/fzf.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "fzf", - "install_method": "github_release_binary", - "description": "A command-line fuzzy finder", - "homepage": "https://github.com/junegunn/fzf", - "github_repo": "junegunn/fzf", - "binary_name": "fzf", - "download_url_template": "https://github.com/junegunn/fzf/releases/download/{version}/fzf-{version_nov}-linux_{arch}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv7" - } - ,"tags": ["core"] -} diff --git a/catalog/gam.json b/catalog/gam.json deleted file mode 100644 index 993fc5e..0000000 --- a/catalog/gam.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "gam", - "install_method": "uv_tool", - "description": "Google Workspace admin management tool", - "homepage": "https://github.com/GAM-team/GAM", - "github_repo": "GAM-team/GAM", - "binary_name": "gam", - "package_name": "gam7" -} diff --git a/catalog/gem.json b/catalog/gem.json deleted file mode 100644 index 39fabf7..0000000 --- a/catalog/gem.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "gem", - "install_method": "dedicated_script", - "description": "Ruby package manager (RubyGems)", - "homepage": "https://rubygems.org/", - "binary_name": "gem", - "script": "install_gem.sh", - "notes": "gem comes bundled with Ruby. Updates via 'gem update --system'. Requires Ruby to be installed via rbenv." 
- ,"guide": { - "display_name": "gem (RubyGems)", - "install_action": "update", - "order": 33 - } -} diff --git a/catalog/gh.json b/catalog/gh.json deleted file mode 100644 index d67af43..0000000 --- a/catalog/gh.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "gh", - "install_method": "github_release_binary", - "description": "GitHub's official command line tool", - "homepage": "https://github.com/cli/cli", - "github_repo": "cli/cli", - "binary_name": "gh", - "download_url_template": "https://github.com/cli/cli/releases/download/{version}/gh_{version_nov}_linux_{arch}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv6" - } -} diff --git a/catalog/git-absorb.json b/catalog/git-absorb.json deleted file mode 100644 index b8adf60..0000000 --- a/catalog/git-absorb.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "git-absorb", - "install_method": "github_release_binary", - "description": "git commit --fixup, but automatic", - "homepage": "https://github.com/tummychow/git-absorb", - "github_repo": "tummychow/git-absorb", - "binary_name": "git-absorb", - "download_url_template": "https://github.com/tummychow/git-absorb/releases/download/{version_nov}/git-absorb-{version_nov}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "armv7l": "arm" - }, - "notes": "Only x86_64 builds available; arm builds use different target triple" -} diff --git a/catalog/git-branchless.json b/catalog/git-branchless.json deleted file mode 100644 index 2b9cc83..0000000 --- a/catalog/git-branchless.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "git-branchless", - "install_method": "github_release_binary", - "description": "High-velocity, monorepo-scale workflow for Git", - "homepage": "https://github.com/arxanas/git-branchless", - "github_repo": "arxanas/git-branchless", - "binary_name": "git-branchless", - "download_url_template": "https://github.com/arxanas/git-branchless/releases/download/{version}/git-branchless-{version}-x86_64-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64-unknown-linux-musl" - }, - "notes": "Only x86_64 Linux builds available" -} diff --git a/catalog/git-lfs.json b/catalog/git-lfs.json deleted file mode 100644 index 8b57625..0000000 --- a/catalog/git-lfs.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "git-lfs", - "install_method": "github_release_binary", - "description": "Git extension for versioning large files", - "homepage": "https://github.com/git-lfs/git-lfs", - "github_repo": "git-lfs/git-lfs", - "binary_name": "git-lfs", - "download_url_template": "https://github.com/git-lfs/git-lfs/releases/download/{version}/git-lfs-linux-{arch}-{version}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm" - } -} diff --git a/catalog/git.json b/catalog/git.json deleted file mode 100644 index e6bc91d..0000000 --- a/catalog/git.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "git", - "install_method": "package_manager", - "description": "Distributed version control system", - "homepage": "https://git-scm.com", - "binary_name": "git", - "packages": { - "apt": "git", - "brew": "git", - "dnf": "git", - "pacman": "git" - }, - "notes": "For latest version on Ubuntu, use PPA: add-apt-repository ppa:git-core/ppa" -} diff --git a/catalog/gitleaks.json b/catalog/gitleaks.json deleted file mode 100644 index 05f1d75..0000000 --- a/catalog/gitleaks.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "gitleaks", - "install_method": "github_release_binary", - "description": "A SAST tool for detecting and 
preventing hardcoded secrets like passwords, api keys, and tokens in git repos", - "homepage": "https://github.com/gitleaks/gitleaks", - "github_repo": "gitleaks/gitleaks", - "binary_name": "gitleaks", - "download_url_template": "https://github.com/gitleaks/gitleaks/releases/download/{version}/gitleaks_{version_nov}_linux_{arch}.tar.gz", - "arch_map": { - "x86_64": "x64", - "aarch64": "arm64", - "armv7l": "armv7" - } -} diff --git a/catalog/glab.json b/catalog/glab.json deleted file mode 100644 index cb33b2c..0000000 --- a/catalog/glab.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "glab", - "install_method": "github_release_binary", - "description": "GitLab CLI tool bringing GitLab to your command line", - "homepage": "https://gitlab.com/gitlab-org/cli", - "gitlab_project": "gitlab-org/cli", - "binary_name": "glab", - "download_url_template": "https://gitlab.com/api/v4/projects/gitlab-org%2Fcli/packages/generic/glab/{version_nov}/glab_{version_nov}_linux_{arch}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv6" - } -} diff --git a/catalog/go.json b/catalog/go.json deleted file mode 100644 index 0b8ec5c..0000000 --- a/catalog/go.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "go", - "install_method": "dedicated_script", - "description": "Go programming language", - "homepage": "https://go.dev", - "binary_name": "go", - "script": "install_go.sh" - ,"guide": { - "display_name": "Go toolchain", - "install_action": "install", - "order": 50 - } -} diff --git a/catalog/golangci-lint.json b/catalog/golangci-lint.json deleted file mode 100644 index 8c59c06..0000000 --- a/catalog/golangci-lint.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "golangci-lint", - "install_method": "github_release_binary", - "description": "Fast linters runner for Go", - "homepage": "https://github.com/golangci/golangci-lint", - "github_repo": "golangci/golangci-lint", - "binary_name": "golangci-lint", - "download_url_template": "https://github.com/golangci/golangci-lint/releases/download/{version}/golangci-lint-{version_nov}-linux-{arch}.tar.gz", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv6" - } -} diff --git a/catalog/httpie.json b/catalog/httpie.json deleted file mode 100644 index 11460e5..0000000 --- a/catalog/httpie.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "httpie", - "install_method": "uv_tool", - "description": "A user-friendly command-line HTTP client with intuitive syntax and colorized output", - "homepage": "https://github.com/httpie/cli", - "package_name": "httpie" -} diff --git a/catalog/isort.json b/catalog/isort.json deleted file mode 100644 index dcae177..0000000 --- a/catalog/isort.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "isort", - "install_method": "uv_tool", - "description": "A Python utility to sort imports alphabetically and automatically separate them into sections", - "homepage": "https://github.com/PyCQA/isort", - "package_name": "isort" -} diff --git a/catalog/jq.json b/catalog/jq.json deleted file mode 100644 index d1a91d6..0000000 --- a/catalog/jq.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "jq", - "install_method": "github_release_binary", - "description": "Command-line JSON processor", - "homepage": "https://jqlang.github.io/jq/", - "github_repo": "jqlang/jq", - "binary_name": "jq", - "download_url_template": "https://github.com/jqlang/jq/releases/download/jq-{version}/jq-{os}-{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv7" - }, - "tags": ["core", "json", 
"text-utils"] -} diff --git a/catalog/just.json b/catalog/just.json deleted file mode 100644 index e19cb90..0000000 --- a/catalog/just.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "just", - "install_method": "github_release_binary", - "description": "A handy way to save and run project-specific commands", - "homepage": "https://github.com/casey/just", - "github_repo": "casey/just", - "binary_name": "just", - "download_url_template": "https://github.com/casey/just/releases/download/{version}/just-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } - ,"tags": ["core"] -} diff --git a/catalog/kubectl.json b/catalog/kubectl.json deleted file mode 100644 index 5e372f5..0000000 --- a/catalog/kubectl.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "kubectl", - "install_method": "github_release_binary", - "description": "Kubernetes command-line tool", - "homepage": "https://kubernetes.io/docs/reference/kubectl/", - "github_repo": "kubernetes/kubernetes", - "version_url": "https://dl.k8s.io/release/stable.txt", - "download_url_template": "https://dl.k8s.io/release/{version}/bin/{os}/{arch}/kubectl", - "fallback_url_template": "https://storage.googleapis.com/kubernetes-release/release/{version}/bin/{os}/{arch}/kubectl", - "binary_name": "kubectl", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "armv7", - "s390x": "s390x", - "ppc64le": "ppc64le" - } - ,"guide": { - "display_name": "kubectl", - "install_action": "install", - "order": 203 - } -} diff --git a/catalog/ninja.json b/catalog/ninja.json deleted file mode 100644 index 84dc466..0000000 --- a/catalog/ninja.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "ninja", - "install_method": "github_release_binary", - "description": "Small build system with a focus on speed", - "homepage": "https://github.com/ninja-build/ninja", - "github_repo": "ninja-build/ninja", - "binary_name": "ninja", - "download_url_template": "https://github.com/ninja-build/ninja/releases/download/{version}/ninja-linux{arch_suffix}.zip", - "arch_map": { - "x86_64": "", - "aarch64": "-aarch64" - }, - "notes": "x86_64 uses empty suffix (ninja-linux.zip), arm64 uses -aarch64 suffix" -} diff --git a/catalog/node.json b/catalog/node.json deleted file mode 100644 index 71d1e96..0000000 --- a/catalog/node.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "node", - "install_method": "dedicated_script", - "description": "Node.js JavaScript runtime", - "homepage": "https://nodejs.org", - "binary_name": "node", - "script": "install_node.sh", - "guide": { - "display_name": "Node.js stack", - "install_action": "reconcile", - "order": 40 - }, - "pinned_version": "25.0.0" -} diff --git a/catalog/npm.json b/catalog/npm.json deleted file mode 100644 index aa7700d..0000000 --- a/catalog/npm.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "npm", - "install_method": "npm_self_update", - "description": "Node.js package manager", - "homepage": "https://www.npmjs.com/", - "binary_name": "npm", - "notes": "npm comes bundled with Node.js but can be upgraded independently using 'npm install -g npm@latest'" -} diff --git a/catalog/parallel.json b/catalog/parallel.json deleted file mode 100644 index 1c7cc2f..0000000 --- a/catalog/parallel.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "parallel", - "install_method": "package_manager", - "description": "GNU Parallel - shell tool for executing jobs in parallel", - "homepage": "https://www.gnu.org/software/parallel/", - "binary_name": "parallel", - 
"packages": { - "apt": "parallel", - "brew": "parallel", - "dnf": "parallel", - "pacman": "parallel" - }, - "notes": "Installed via package manager. Note: System packages are typically 1-2 years behind upstream releases. For the latest version, install from https://www.gnu.org/software/parallel/", - "pinned_version": "20231122" -} diff --git a/catalog/pip.json b/catalog/pip.json deleted file mode 100644 index cc7afbc..0000000 --- a/catalog/pip.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "pip", - "install_method": "package_manager", - "description": "Python package installer", - "homepage": "https://pip.pypa.io/", - "binary_name": "pip", - "packages": { - "apt": "python3-pip", - "brew": "python3", - "dnf": "python3-pip", - "pacman": "python-pip" - }, - "notes": "pip typically comes with Python 3. Use python3 -m pip if pip command is not available.", - "pinned_version": "never" -} diff --git a/catalog/pipx.json b/catalog/pipx.json deleted file mode 100644 index c896848..0000000 --- a/catalog/pipx.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "pipx", - "install_method": "package_manager", - "description": "Install and run Python applications in isolated environments", - "homepage": "https://pipx.pypa.io/", - "binary_name": "pipx", - "packages": { - "apt": "pipx", - "brew": "pipx", - "dnf": "pipx", - "pacman": "python-pipx" - }, - "pinned_version": "never" -} diff --git a/catalog/pnpm.json b/catalog/pnpm.json deleted file mode 100644 index b2cd1e1..0000000 --- a/catalog/pnpm.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "pnpm", - "install_method": "package_manager", - "description": "Fast, disk space efficient Node.js package manager", - "homepage": "https://pnpm.io/", - "binary_name": "pnpm", - "packages": { - "apt": "pnpm", - "brew": "pnpm", - "dnf": "pnpm", - "pacman": "pnpm" - }, - "notes": "Can also be installed via npm: npm install -g pnpm" -} diff --git a/catalog/poetry.json b/catalog/poetry.json deleted file mode 100644 index 73aea00..0000000 --- a/catalog/poetry.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "poetry", - "install_method": "package_manager", - "description": "Python dependency management and packaging made easy", - "homepage": "https://python-poetry.org/", - "binary_name": "poetry", - "packages": { - "apt": "python3-poetry", - "brew": "poetry", - "dnf": "poetry", - "pacman": "python-poetry" - }, - "notes": "Some distributions may require installing via pipx or the official installer: curl -sSL https://install.python-poetry.org | python3 -", - "pinned_version": "never" -} diff --git a/catalog/pre-commit.json b/catalog/pre-commit.json deleted file mode 100644 index f671d92..0000000 --- a/catalog/pre-commit.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "pre-commit", - "install_method": "uv_tool", - "description": "A framework for managing and maintaining multi-language pre-commit hooks", - "homepage": "https://github.com/pre-commit/pre-commit", - "package_name": "pre-commit" -} diff --git a/catalog/prettier.json b/catalog/prettier.json deleted file mode 100644 index a797bb3..0000000 --- a/catalog/prettier.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "prettier", - "install_method": "npm_global", - "description": "Opinionated code formatter", - "homepage": "https://prettier.io", - "package_name": "prettier", - "notes": "Prettier is a JavaScript/Node.js tool installed via npm" -} diff --git a/catalog/python.json b/catalog/python.json deleted file mode 100644 index a1b6fb1..0000000 --- a/catalog/python.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": 
"python", - "install_method": "dedicated_script", - "description": "Python programming language", - "homepage": "https://www.python.org", - "binary_name": "python3", - "script": "install_python.sh" - ,"guide": { - "display_name": "Python stack", - "install_action": "update", - "order": 30 - } -} diff --git a/catalog/rbenv.json b/catalog/rbenv.json deleted file mode 100644 index 1b092a0..0000000 --- a/catalog/rbenv.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "rbenv", - "install_method": "github_clone", - "description": "Ruby version manager", - "homepage": "https://github.com/rbenv/rbenv", - "github_repo": "rbenv/rbenv", - "binary_name": "rbenv", - "clone_path": "~/.rbenv", - "install_script": "src/configure && make -C src" - ,"guide": { - "display_name": "rbenv", - "install_action": "install", - "order": 30 - } -} diff --git a/catalog/rga.json b/catalog/rga.json deleted file mode 100644 index a3a388e..0000000 --- a/catalog/rga.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "rga", - "install_method": "github_release_binary", - "description": "ripgrep, but also search in PDFs, E-Books, Office documents, zip, tar.gz, etc.", - "homepage": "https://github.com/phiresky/ripgrep-all", - "github_repo": "phiresky/ripgrep-all", - "binary_name": "rga", - "download_url_template": "https://github.com/phiresky/ripgrep-all/releases/download/{version}/ripgrep_all-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } -} diff --git a/catalog/ripgrep.json b/catalog/ripgrep.json deleted file mode 100644 index 14dce59..0000000 --- a/catalog/ripgrep.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "ripgrep", - "install_method": "auto", - "description": "A line-oriented search tool that recursively searches directories for a regex pattern", - "homepage": "https://github.com/BurntSushi/ripgrep", - "github_repo": "BurntSushi/ripgrep", - "binary_name": "rg", - "download_url_template": "https://github.com/BurntSushi/ripgrep/releases/download/{version}/ripgrep-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "arm-unknown-linux-gnueabihf" - }, - "available_methods": [ - { - "method": "github_release_binary", - "priority": 1, - "config": { - "repo": "BurntSushi/ripgrep", - "asset_pattern": "ripgrep-.*-x86_64-unknown-linux-musl.tar.gz" - } - }, - { - "method": "cargo", - "priority": 2, - "config": { - "crate": "ripgrep" - } - }, - { - "method": "apt", - "priority": 3, - "config": { - "package": "ripgrep" - } - } - ], - "requires": [], - "tags": ["core"] -} diff --git a/catalog/ruby-build.json b/catalog/ruby-build.json deleted file mode 100644 index b93e357..0000000 --- a/catalog/ruby-build.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "ruby-build", - "install_method": "github_clone", - "description": "Install Ruby versions (rbenv plugin)", - "homepage": "https://github.com/rbenv/ruby-build", - "github_repo": "rbenv/ruby-build", - "binary_name": "ruby-build", - "clone_path": "~/.rbenv/plugins/ruby-build", - "notes": "Installs as rbenv plugin. Requires rbenv to be installed first." 
- ,"guide": { - "display_name": "ruby-build", - "install_action": "install", - "order": 31 - } -} diff --git a/catalog/ruby.json b/catalog/ruby.json deleted file mode 100644 index 0cf4de4..0000000 --- a/catalog/ruby.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "ruby", - "install_method": "dedicated_script", - "description": "Ruby programming language", - "homepage": "https://www.ruby-lang.org/", - "github_repo": "ruby/ruby", - "binary_name": "ruby", - "script": "install_ruby.sh", - "notes": "Installed via rbenv for version management. Removes apt-managed Ruby in favor of rbenv.", - "guide": { - "display_name": "Ruby (rbenv)", - "install_action": "reconcile", - "order": 32 - } -} diff --git a/catalog/ruff.json b/catalog/ruff.json deleted file mode 100644 index a46560b..0000000 --- a/catalog/ruff.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "ruff", - "install_method": "uv_tool", - "description": "An extremely fast Python linter and code formatter, written in Rust", - "homepage": "https://github.com/astral-sh/ruff", - "package_name": "ruff" -} diff --git a/catalog/rust.json b/catalog/rust.json deleted file mode 100644 index 04c3e6f..0000000 --- a/catalog/rust.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "rust", - "install_method": "dedicated_script", - "description": "Rust programming language and toolchain", - "homepage": "https://www.rust-lang.org", - "binary_name": "rustc", - "script": "install_rust.sh", - "guide": { - "display_name": "Rust (cargo)", - "install_action": "reconcile", - "order": 10 - } -} diff --git a/catalog/sd.json b/catalog/sd.json deleted file mode 100644 index e77e286..0000000 --- a/catalog/sd.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "sd", - "install_method": "github_release_binary", - "description": "Intuitive find & replace CLI (sed alternative)", - "homepage": "https://github.com/chmln/sd", - "github_repo": "chmln/sd", - "binary_name": "sd", - "download_url_template": "https://github.com/chmln/sd/releases/download/{version}/sd-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } -} diff --git a/catalog/semgrep.json b/catalog/semgrep.json deleted file mode 100644 index e0e23fb..0000000 --- a/catalog/semgrep.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "semgrep", - "install_method": "uv_tool", - "description": "Lightweight static analysis tool", - "homepage": "https://semgrep.dev/", - "package_name": "semgrep" -} diff --git a/catalog/shellcheck.json b/catalog/shellcheck.json deleted file mode 100644 index 7c6dfaa..0000000 --- a/catalog/shellcheck.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "shellcheck", - "install_method": "github_release_binary", - "description": "A static analysis tool for shell scripts", - "homepage": "https://github.com/koalaman/shellcheck", - "github_repo": "koalaman/shellcheck", - "binary_name": "shellcheck", - "download_url_template": "https://github.com/koalaman/shellcheck/releases/download/{version}/shellcheck-{version}.linux.{arch}.tar.xz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv6hf" - } -} diff --git a/catalog/shfmt.json b/catalog/shfmt.json deleted file mode 100644 index 7ca4227..0000000 --- a/catalog/shfmt.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "shfmt", - "install_method": "github_release_binary", - "description": "A shell parser, formatter, and interpreter with bash support", - "homepage": "https://github.com/mvdan/sh", - "github_repo": "mvdan/sh", - "binary_name": "shfmt", - 
"download_url_template": "https://github.com/mvdan/sh/releases/download/{version}/shfmt_{version}_linux_{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm" - } -} diff --git a/catalog/sponge.json b/catalog/sponge.json deleted file mode 100644 index 3c0ef78..0000000 --- a/catalog/sponge.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "sponge", - "install_method": "package_manager", - "description": "Soak up standard input and write to a file - part of moreutils", - "homepage": "https://joeyh.name/code/moreutils/", - "binary_name": "sponge", - "version_command": "dpkg -s moreutils 2>/dev/null | grep '^Version:' | awk '{print $2}'", - "packages": { - "apt": "moreutils", - "brew": "moreutils", - "dnf": "moreutils", - "pacman": "moreutils" - }, - "notes": "sponge is part of the moreutils package, which includes other useful tools like chronic, combine, errno, ifdata, ifne, isutf8, lckdo, parallel, pee, ts, vidir, vipe, and zrun.", - "pinned_version": "0.69" -} diff --git a/catalog/terraform.json b/catalog/terraform.json deleted file mode 100644 index 88de88e..0000000 --- a/catalog/terraform.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "terraform", - "install_method": "hashicorp_zip", - "description": "Infrastructure as Code tool", - "homepage": "https://www.terraform.io/", - "github_repo": "hashicorp/terraform", - "product_name": "terraform", - "binary_name": "terraform", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64" - } - ,"guide": { - "display_name": "Terraform", - "install_action": "install", - "order": 204 - } -} diff --git a/catalog/tfsec.json b/catalog/tfsec.json deleted file mode 100644 index 7118ed4..0000000 --- a/catalog/tfsec.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "tfsec", - "install_method": "github_release_binary", - "description": "Security scanner for your Terraform code", - "homepage": "https://github.com/aquasecurity/tfsec", - "github_repo": "aquasecurity/tfsec", - "binary_name": "tfsec", - "download_url_template": "https://github.com/aquasecurity/tfsec/releases/download/{version}/tfsec-linux-{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm" - } -} diff --git a/catalog/trivy.json b/catalog/trivy.json deleted file mode 100644 index 5e710e6..0000000 --- a/catalog/trivy.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "trivy", - "install_method": "github_release_binary", - "description": "A comprehensive security scanner for vulnerabilities in container images, file systems, and Git repositories", - "homepage": "https://github.com/aquasecurity/trivy", - "github_repo": "aquasecurity/trivy", - "binary_name": "trivy", - "download_url_template": "https://github.com/aquasecurity/trivy/releases/download/{version}/trivy_{version_nov}_Linux-{arch}.tar.gz", - "arch_map": { - "x86_64": "64bit", - "aarch64": "ARM64", - "armv7l": "ARM" - } -} diff --git a/catalog/uv.json b/catalog/uv.json deleted file mode 100644 index 1b74862..0000000 --- a/catalog/uv.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "uv", - "install_method": "dedicated_script", - "description": "Fast Python package installer and resolver", - "homepage": "https://github.com/astral-sh/uv", - "binary_name": "uv", - "script": "install_uv.sh" - ,"guide": { - "display_name": "uv", - "install_action": "reconcile", - "order": 20 - } -} diff --git a/catalog/watchexec.json b/catalog/watchexec.json deleted file mode 100644 index 6d00308..0000000 --- a/catalog/watchexec.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "watchexec", - 
"install_method": "github_release_binary", - "description": "Execute commands in response to file modifications", - "homepage": "https://github.com/watchexec/watchexec", - "github_repo": "watchexec/watchexec", - "binary_name": "watchexec", - "download_url_template": "https://github.com/watchexec/watchexec/releases/download/{version}/watchexec-{version_nov}-{arch}-unknown-linux-musl.tar.xz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } -} diff --git a/catalog/xsv.json b/catalog/xsv.json deleted file mode 100644 index 24a633c..0000000 --- a/catalog/xsv.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "xsv", - "install_method": "github_release_binary", - "description": "A fast CSV command line toolkit written in Rust", - "homepage": "https://github.com/BurntSushi/xsv", - "github_repo": "BurntSushi/xsv", - "binary_name": "xsv", - "download_url_template": "https://github.com/BurntSushi/xsv/releases/download/{version}/xsv-{version}-{arch}-unknown-linux-musl.tar.gz", - "arch_map": { - "x86_64": "x86_64", - "aarch64": "aarch64", - "armv7l": "armv7" - } -} diff --git a/catalog/yarn.json b/catalog/yarn.json deleted file mode 100644 index 0ec8bd2..0000000 --- a/catalog/yarn.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "yarn", - "install_method": "dedicated_script", - "description": "Alternative Node.js package manager with offline mode and deterministic installs", - "homepage": "https://yarnpkg.com/", - "binary_name": "yarn", - "script": "install_yarn.sh", - "notes": "Installed via Node.js corepack or npm. Requires Node.js to be installed via nvm. Do NOT install via apt (conflicts with cmdtest package).", - "guide": { - "display_name": "Yarn", - "install_action": "update", - "order": 151 - }, - "pinned_version": "never" -} diff --git a/catalog/yq.json b/catalog/yq.json deleted file mode 100644 index 6d1004a..0000000 --- a/catalog/yq.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "yq", - "install_method": "github_release_binary", - "description": "A lightweight and portable command-line YAML, JSON and XML processor", - "homepage": "https://github.com/mikefarah/yq", - "github_repo": "mikefarah/yq", - "binary_name": "yq", - "download_url_template": "https://github.com/mikefarah/yq/releases/download/{version}/yq_linux_{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64", - "armv7l": "arm" - } - ,"tags": ["core"] -} diff --git a/claudedocs/project_context.md b/claudedocs/project_context.md index c9fc0f3..944980f 100644 --- a/claudedocs/project_context.md +++ b/claudedocs/project_context.md @@ -1,18 +1,16 @@ # AI CLI Preparation - Project Context (AI Agent Reference) -**Last Updated:** 2025-10-18 -**Version:** 2.0.0-alpha.6 +**Last Updated:** 2025-10-09 **For:** AI Coding Agents (Claude Code, etc.) 
## Quick Reference -**Purpose:** Environment audit and tool management system ensuring AI coding agents have all necessary CLI tools installed, current, and properly configured +**Purpose:** Environment audit tool ensuring AI coding agents have all necessary CLI tools installed and current **Repository:** github.com/netresearch/coding_agent_cli_toolset **Primary Language:** Python 3.9+ (dev: 3.14.0rc2) -**Architecture:** Offline-first, parallel, resilient tool version auditing with comprehensive upgrade orchestration -**Tool Coverage:** 64 tools across 10 categories -**Phase Status:** Phase 1 (Audit) Complete | Phase 2 (Install/Upgrade) Alpha +**Architecture:** Offline-first, parallel, resilient tool version auditing +**Tool Coverage:** 50+ developer tools across 10 categories ## Core Capabilities @@ -20,10 +18,8 @@ - Installation method detection (uv, pipx, npm, cargo, apt, homebrew, etc.) - Offline operation via committed cache (latest_versions.json) - Snapshot-based workflow (separate collection from rendering) -- Parallel execution (16 workers default, configurable via CLI_AUDIT_MAX_WORKERS) -- Enhanced tool detection (searches PATH + common directories + tool-specific locations) +- Parallel execution (16 workers, 3s timeout per tool) - Role-based presets (agent-core, python-core, security-core, etc.) -- Environment variable configuration via .env file (exported to subprocesses) ## File Structure @@ -42,31 +38,14 @@ ai_cli_preparation/ │ ├── install_go.sh │ └── ... (9 more) ├── docs/ # Human-focused technical docs -│ ├── Phase 1: Detection & Auditing -│ │ ├── INDEX.md -│ │ ├── ARCHITECTURE.md (updated with Phase 2) -│ │ ├── API_REFERENCE.md -│ │ ├── FUNCTION_REFERENCE.md -│ │ ├── QUICK_REFERENCE.md -│ │ ├── DEVELOPER_GUIDE.md -│ │ ├── TOOL_ECOSYSTEM.md -│ │ ├── DEPLOYMENT.md -│ │ └── TROUBLESHOOTING.md -│ ├── Phase 2: Installation & Upgrade -│ │ ├── PHASE2_API_REFERENCE.md -│ │ ├── CLI_REFERENCE.md -│ │ ├── TESTING.md -│ │ ├── ERROR_CATALOG.md -│ │ └── INTEGRATION_EXAMPLES.md -│ ├── Planning & Specifications -│ │ ├── PRD.md -│ │ ├── PHASE2_IMPLEMENTATION.md -│ │ ├── CONFIGURATION_SPEC.md -│ │ └── adr/ (6 ADRs) +│ ├── INDEX.md +│ ├── ARCHITECTURE.md +│ ├── API_REFERENCE.md +│ ├── DEVELOPER_GUIDE.md +│ ├── TOOL_ECOSYSTEM.md +│ ├── DEPLOYMENT.md +│ └── TROUBLESHOOTING.md └── claudedocs/ # AI agent context (this directory) - ├── project_context.md - ├── session_summary.md - └── session_initialization.md ``` ## Key Components @@ -82,20 +61,13 @@ class Tool: ``` ### TOOLS Registry (cli_audit.py:738) -Ordered tuple of 64 Tool definitions, categorized: +Ordered tuple of 50+ Tool definitions, categorized: 1. Runtimes (go, python, rust, node) + package managers 2. Core dev tools (ripgrep, ast-grep, fzf, fd, jq, etc.) 3. Security (semgrep, bandit, gitleaks, trivy) 4. Formatters (black, eslint, prettier, shellcheck) 5. Git tools (git, gh, glab, git-absorb) 6. Cloud/infra (aws, kubectl, terraform, docker) -7. 
AI agent tools (claude, codex, gam) - -**Enhanced Detection (2025-10-13):** -- Searches beyond PATH for tools in non-standard locations -- Tool-specific paths: gam in ~/bin/gam7/, claude in ~/.claude/local/ -- Common search paths: ~/bin, ~/.local/bin, /usr/local/bin -- Cargo bin fallback for Rust tools ### Architecture Pattern ``` @@ -123,39 +95,10 @@ make update # Interactive upgrade guide make upgrade -# Complete system upgrade (5 stages) -make upgrade-all - -# Preview system upgrade (dry-run) -make upgrade-all-dry-run - # Offline audit with hints make audit-offline ``` -### System-Wide Upgrade (New in alpha.6) -```bash -# 5-Stage orchestrated upgrade workflow -make upgrade-all -# Stage 1: Refresh version data -# Stage 2: Upgrade package managers (apt, brew, snap, flatpak) -# Stage 3: Upgrade runtimes (Python, Node.js, Go, Ruby, Rust) -# Stage 4: Upgrade user package managers (uv, pipx, npm, pnpm, yarn, cargo, etc.) -# Stage 5: Upgrade all CLI tools - -# Features: -# - UV migration (auto-migrates pip/pipx to uv tools) -# - System package detection (skips apt/brew managed) -# - Comprehensive logging (logs/upgrade-YYYYMMDD-HHMMSS.log) -# - Dry-run mode available - -# Preview without making changes -make upgrade-all-dry-run - -# Check PATH configuration -make check-path -``` - ### Role-Based Audits ```bash make audit-offline-agent-core # AI agent essentials @@ -177,11 +120,9 @@ CLI_AUDIT_TRACE=1 python3 cli_audit.py - `CLI_AUDIT_OFFLINE=1` - Force offline (manual cache only) **Performance:** -- `CLI_AUDIT_MAX_WORKERS=16` - Concurrency (default 16, loaded from .env if present) +- `CLI_AUDIT_MAX_WORKERS=16` - Concurrency - `CLI_AUDIT_TIMEOUT_SECONDS=3` - Per-tool timeout -**Note (2025-10-13):** Environment variables are now properly exported from Makefile to Python subprocesses. Set in .env file to configure globally. 
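The note above is the reason the worker and timeout knobs behave as documented: they only take effect if they reach the Python subprocess's environment. A minimal sketch of the resulting fan-out, assuming the documented defaults (16 workers, 3 s per-tool timeout); `audit_one` is a hypothetical stand-in for the real per-tool audit logic:

```python
import os
import subprocess
from concurrent.futures import ThreadPoolExecutor

# Only visible here if the Makefile exported them (see note above).
MAX_WORKERS = int(os.environ.get("CLI_AUDIT_MAX_WORKERS", "16"))
TIMEOUT_SECONDS = int(os.environ.get("CLI_AUDIT_TIMEOUT_SECONDS", "3"))

def audit_one(tool: str) -> tuple[str, str]:
    """Hypothetical per-tool probe: ask the binary for its version."""
    try:
        proc = subprocess.run(
            [tool, "--version"],
            capture_output=True, text=True, timeout=TIMEOUT_SECONDS,
        )
        lines = (proc.stdout or proc.stderr).strip().splitlines()
        return tool, (lines[0] if lines else "unknown version")
    except (FileNotFoundError, subprocess.TimeoutExpired):
        return tool, "NOT INSTALLED"

with ThreadPoolExecutor(max_workers=MAX_WORKERS) as pool:
    for name, version in pool.map(audit_one, ["git", "jq", "rg"]):
        print(f"{name:8} {version}")
```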
- **Output:** - `CLI_AUDIT_JSON=1` - JSON output - `CLI_AUDIT_LINKS=1` - OSC 8 hyperlinks @@ -245,27 +186,16 @@ CLI_AUDIT_TRACE=1 python3 cli_audit.py ## Current Git State **Branch:** main -**Working Tree:** Modified (documentation updates in progress) -**Modified:** latest_versions.json, package.json, package-lock.json, tools_snapshot.json, docs/ +**Modified:** cli_audit.py, latest_versions.json **Untracked:** node_modules/ **Remote:** git@github.com:netresearch/coding_agent_cli_toolset.git -**Recent commits (2025-10-18 - upgrade-all feature):** -- 9b784ed - chore: update version snapshots -- 22c1603 - feat(upgrade-all): add UV migration for pip/pipx packages -- 1b71b71 - feat(upgrade-all): skip pip/pipx when uv is managing Python packages -- cceed18 - fix(upgrade-all): detect pipx packages with missing metadata and provide fix -- 7d691cb - feat(upgrade-all): add comprehensive version and location info to all upgrade stages - -**Recent commits (2025-10-13 - environment fixes):** -- 34fa37f - chore(snapshot): update tool audit cache with improved detection -- 80abd30 - fix(guide): clarify Docker CLI vs Docker Engine terminology -- aa57210 - fix(cli_audit): resolve three critical issues in environment and detection - -**Recent commits (2025-10-09 - documentation):** +**Recent commits:** - 0c7ade3 - Snapshot-based collect/render modes - 3dd5082 - Lock ordering fixes (thread safety) - c160361 - Classification improvements (shebang detection) +- 634c035 - HTTP robustness (retries, backoff) +- 8c04e03 - Smoke testing ## Key Design Patterns @@ -308,35 +238,17 @@ CLI_AUDIT_TRACE=1 python3 cli_audit.py ## Documentation Map **For Humans (docs/):** - -**Phase 1 (Detection & Auditing):** -- INDEX.md - Documentation navigation hub -- ARCHITECTURE.md - System design, data flows (updated with Phase 2) -- API_REFERENCE.md - Phase 1 audit functions, environment variables -- FUNCTION_REFERENCE.md - Function quick lookup -- QUICK_REFERENCE.md - Command cheat sheet +- INDEX.md - Documentation navigation +- ARCHITECTURE.md - System design, data flows +- API_REFERENCE.md - Functions, environment variables - DEVELOPER_GUIDE.md - Contributing, adding tools - TOOL_ECOSYSTEM.md - Complete 50+ tool catalog - DEPLOYMENT.md - Makefile targets, CI/CD - TROUBLESHOOTING.md - Common issues, debugging -**Phase 2 (Installation & Upgrade):** -- PHASE2_API_REFERENCE.md - Complete Phase 2 API (78 symbols across 11 modules) -- CLI_REFERENCE.md - Command-line reference with 60+ environment variables -- TESTING.md - Comprehensive testing guide for contributors -- ERROR_CATALOG.md - Error categorization with troubleshooting (26 error codes) -- INTEGRATION_EXAMPLES.md - Real-world CI/CD and workflow patterns - -**Planning & Specifications:** -- PRD.md - Product requirements document -- PHASE2_IMPLEMENTATION.md - Implementation roadmap -- CONFIGURATION_SPEC.md - .cli-audit.yml schema -- adr/ - Architecture Decision Records (6 ADRs) - **For AI Agents (claudedocs/):** - project_context.md (this file) - Quick reference -- session_summary.md - Session history -- session_initialization.md - Session context +- session_summary.md - Current session state ## Quick Troubleshooting diff --git a/claudedocs/session_summary.md b/claudedocs/session_summary.md index 327e189..e10ef95 100644 --- a/claudedocs/session_summary.md +++ b/claudedocs/session_summary.md @@ -1,82 +1,10 @@ -# Session Summary - Recent Sessions - -## Session 2025-10-13: Environment & Tool Detection Fixes - -**Session Type:** Bug fixes and environment configuration -**Session 
ID:** Direct debugging and fixes following user reports - -### Session Overview - -Critical fixes to environment variable loading, tool detection, and user interface clarity based on user-reported issues with `.env` configuration and tool detection. - -### Issues Resolved - -1. **.env Variables Not Loaded** (Priority: 🔴 CRITICAL) - - **Problem:** `CLI_AUDIT_MAX_WORKERS=10` in `.env` ignored, only 4 workers used - - **Root Cause:** Makefile's `-include .env` only set Make variables, not subprocess environment - - **Fix:** Added `export` directive in Makefile after includes to propagate variables - - **Verification:** User increased to 40 workers, confirmed "40 workers" in output - -2. **Incorrect Default Value** (Priority: 🟡 IMPORTANT) - - **Problem:** Hardcoded default was "4" but documentation claimed "16" - - **Root Cause:** Inconsistency between code and documentation/configuration - - **Fix:** Changed `MAX_WORKERS` default from `"4"` to `"16"` in cli_audit.py:62 - - **Impact:** Better default alignment with documented behavior - -3. **Tools Not Detected** (Priority: 🔴 CRITICAL) - - **Problem:** `gam --version` and `claude --version` worked, but audit showed NOT INSTALLED - - **Root Cause:** Tools in non-PATH locations: - - gam: `/home/sme/bin/gam7/gam` - - claude: `/home/sme/.claude/local/claude` - - **Fix:** Enhanced `find_paths()` with: - - `TOOL_SPECIFIC_PATHS` dict for known tool locations - - `EXTRA_SEARCH_PATHS` list for common directories - - Layered search strategy: PATH → tool-specific → extra → cargo - - **Verification:** Both tools now detected correctly in snapshot - -4. **Docker Terminology Clarification** (Priority: 🟢 RECOMMENDED) - - **Problem:** Guide labeled Docker CLI client as "Docker Engine" - - **Root Cause:** Misleading terminology confusing client vs server - - **Fix:** Updated scripts/guide.sh to use "Docker CLI" with explanatory notes - - **Impact:** Improved user experience and technical accuracy - -### Files Modified - -**Makefile** (lines 1-8) -- Added `export` directive to propagate environment variables - -**cli_audit.py** (multiple sections) -- Added HOME constant and search path lists (lines 43-56) -- Fixed MAX_WORKERS default value (line 62) -- Enhanced find_paths() function (lines 1049-1094) - -**scripts/guide.sh** (lines 355-372) -- Changed "Docker Engine" → "Docker CLI" -- Added clarifying notes about client vs server distinction - -### Commits - -1. `aa57210` - fix(cli_audit): resolve three critical issues in environment and detection -2. `80abd30` - fix(guide): clarify Docker CLI vs Docker Engine terminology -3. `34fa37f` - chore(snapshot): update tool audit cache with improved detection - -### Current State - -- **Working Tree:** Clean, all changes committed -- **Branch:** main (12 commits ahead of origin/main) -- **Tools Audited:** 64 tools -- **Outdated:** 5 tools (fzf, yq, just, gam, docker) -- **Missing:** 2 tools (git-branchless, golangci-lint) -- **Environment:** CLI_AUDIT_MAX_WORKERS=40 (user configuration) - ---- - -## Session 2025-10-09: Documentation Generation +# Session Summary - Documentation Generation +**Session Date:** 2025-10-09 **Session Type:** Project indexing and comprehensive documentation generation **Session ID:** /sc:load + /sc:index with --ultrathink --comprehensive --validate -### Session Overview +## Session Overview Comprehensive documentation suite created for AI CLI Preparation project using /sc:load and /sc:index slash commands with deep analysis flags. 
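Issue 3 above describes a layered search strategy: PATH first, then tool-specific directories, then common extra directories. A minimal sketch of that lookup order, reusing the `TOOL_SPECIFIC_PATHS` and `EXTRA_SEARCH_PATHS` names that appear in the cli_audit.py diff below; this is an illustrative reduction of the real `find_paths()`, which additionally handles deep scans and the cargo bin fallback:

```python
import os
import shutil

HOME = os.path.expanduser("~")
EXTRA_SEARCH_PATHS = [
    os.path.join(HOME, "bin"),
    os.path.join(HOME, ".local", "bin"),
    "/usr/local/bin",
]
TOOL_SPECIFIC_PATHS = {
    "gam": [os.path.join(HOME, "bin", "gam7")],
    "claude": [os.path.join(HOME, ".claude", "local")],
}

def find_tool(name: str) -> str:
    """Layered lookup: PATH, then tool-specific dirs, then extras.
    Returns "" when the tool is not found anywhere."""
    hit = shutil.which(name)
    if hit:
        return hit
    for directory in TOOL_SPECIFIC_PATHS.get(name, []) + EXTRA_SEARCH_PATHS:
        candidate = os.path.join(directory, name)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return ""

for tool in ("gam", "claude"):
    print(tool, "->", find_tool(tool) or "NOT INSTALLED")
```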
diff --git a/cli_audit.py b/cli_audit.py index 7eac0aa..aa0c4d6 100644 --- a/cli_audit.py +++ b/cli_audit.py @@ -25,7 +25,6 @@ import os import re import shutil -import signal import subprocess import sys import time @@ -41,19 +40,7 @@ TIMEOUT_SECONDS: int = int(os.environ.get("CLI_AUDIT_TIMEOUT_SECONDS", "3")) -HOME: str = os.path.expanduser("~") -CARGO_BIN: str = os.path.join(HOME, ".cargo", "bin") -# Common tool installation directories to check beyond PATH -EXTRA_SEARCH_PATHS: list[str] = [ - os.path.join(HOME, "bin"), - os.path.join(HOME, ".local", "bin"), - "/usr/local/bin", -] -# Tool-specific installation directories (tool_name -> [search_dirs]) -TOOL_SPECIFIC_PATHS: dict[str, list[str]] = { - "gam": [os.path.join(HOME, "bin", "gam7"), os.path.join(HOME, "bin", "gam")], - "claude": [os.path.join(HOME, ".claude", "local"), os.path.join(HOME, ".local", "bin"), os.path.join(HOME, "bin")], -} +CARGO_BIN: str = os.path.expanduser("~/.cargo/bin") USER_AGENT_HEADERS = {"User-Agent": "cli-audit/1.0"} NPM_REGISTRY_URL = "https://registry.npmjs.org" GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", "") @@ -61,21 +48,17 @@ # Manual cache lock for updating the committed latest_versions.json MANUAL_LOCK = threading.Lock() -# GitHub rate limit tracking -GITHUB_RATE_LIMIT_HIT = False -GITHUB_RATE_LIMIT_LOCK = threading.Lock() - # Output rendering options STDOUT_IS_TTY: bool = sys.stdout.isatty() # Respect env flags regardless of TTY so links/icons can be used with pipes (e.g., column) ENABLE_LINKS: bool = os.environ.get("CLI_AUDIT_LINKS", "1") == "1" USE_EMOJI_ICONS: bool = os.environ.get("CLI_AUDIT_EMOJI", "1") == "1" OFFLINE_MODE: bool = os.environ.get("CLI_AUDIT_OFFLINE", "0") == "1" -MAX_WORKERS: int = int(os.environ.get("CLI_AUDIT_MAX_WORKERS", "16")) # Default matches .env.default and documentation +MAX_WORKERS: int = int(os.environ.get("CLI_AUDIT_MAX_WORKERS", "16")) DOCKER_INFO_ENABLED: bool = os.environ.get("CLI_AUDIT_DOCKER_INFO", "1") == "1" PROGRESS: bool = os.environ.get("CLI_AUDIT_PROGRESS", "0") == "1" OFFLINE_USE_CACHE: bool = os.environ.get("CLI_AUDIT_OFFLINE_USE_CACHE", "1") == "1" # kept for compatibility, no effect -SHOW_TIMINGS: bool = os.environ.get("CLI_AUDIT_TIMINGS", "0") == "1" # Only enable during 'make update' to identify slow operations +SHOW_TIMINGS: bool = os.environ.get("CLI_AUDIT_TIMINGS", "1") == "1" MANUAL_FIRST: bool = os.environ.get("CLI_AUDIT_MANUAL_FIRST", "0") == "1" DPKG_CACHE: dict[str, bool] = {} DPKG_OWNER_CACHE: dict[str, str] = {} @@ -94,7 +77,6 @@ # Snapshot / mode toggles (decouple collection from rendering) COLLECT_ONLY: bool = os.environ.get("CLI_AUDIT_COLLECT", "0") == "1" RENDER_ONLY: bool = os.environ.get("CLI_AUDIT_RENDER", "0") == "1" -MERGE_MODE: bool = os.environ.get("CLI_AUDIT_MERGE", "0") == "1" SNAPSHOT_FILE: str = os.environ.get( "CLI_AUDIT_SNAPSHOT_FILE", os.path.join(os.path.dirname(__file__), "tools_snapshot.json"), @@ -107,7 +89,7 @@ # Ultra-verbose tracing TRACE: bool = os.environ.get("CLI_AUDIT_TRACE", "0") == "1" -TRACE_NET: bool = os.environ.get("CLI_AUDIT_TRACE_NET", "0") == "1" or AUDIT_DEBUG # Auto-enable with DEBUG +TRACE_NET: bool = os.environ.get("CLI_AUDIT_TRACE_NET", "0") == "1" SLOW_MS: int = int(os.environ.get("CLI_AUDIT_SLOW_MS", "2000")) def _vlog(msg: str) -> None: @@ -126,7 +108,7 @@ def _tlog(msg: str) -> None: def _now_iso() -> str: try: - return datetime.datetime.now(datetime.UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z") + return datetime.datetime.utcnow().replace(microsecond=0).isoformat() + "Z" 
except Exception: return "" @@ -170,44 +152,6 @@ def load_snapshot(paths: Sequence[str] | None = None) -> dict[str, Any]: return {} def write_snapshot(tools_payload: list[dict[str, Any]], extra: dict[str, Any] | None = None) -> dict[str, Any]: - # In MERGE_MODE, load existing snapshot and update only the specified tools - if MERGE_MODE: - try: - with open(SNAPSHOT_FILE, "r", encoding="utf-8") as f: - existing = json.load(f) - existing_tools = existing.get("tools", []) - - # Create a dict of new tools keyed by tool name - new_tools_dict = {t["tool"]: t for t in tools_payload} - - # Update existing tools or keep them as-is - merged_tools = [] - updated_names = set() - for tool in existing_tools: - tool_name = tool.get("tool") - if tool_name in new_tools_dict: - # Replace with updated entry - merged_tools.append(new_tools_dict[tool_name]) - updated_names.add(tool_name) - else: - # Keep existing entry - merged_tools.append(tool) - - # Add any new tools that weren't in the existing snapshot - for tool_name, tool_data in new_tools_dict.items(): - if tool_name not in updated_names: - merged_tools.append(tool_data) - - tools_payload = merged_tools - except FileNotFoundError: - # No existing snapshot, proceed with new payload - pass - except Exception as e: - if AUDIT_DEBUG: - print(f"# DEBUG: Merge mode failed to load existing snapshot: {e}", file=sys.stderr) - # Proceed with new payload on error - pass - meta = { "schema_version": 1, "created_at": _now_iso(), @@ -257,20 +201,13 @@ def render_from_snapshot(doc: dict[str, Any], selected: set[str] | None = None) os.path.join(os.path.dirname(__file__), "latest_versions.json"), ) MANUAL_VERSIONS: dict[str, Any] = {} -# Disable incremental writes during parallel execution to avoid file locking deadlocks -# With MAX_WORKERS > 1, multiple threads compete for MANUAL_LOCK causing severe contention -# Incremental writes are unnecessary since snapshot/results are finalized at the end -WRITE_MANUAL: bool = os.environ.get("CLI_AUDIT_WRITE_MANUAL", "1") == "1" and MAX_WORKERS == 1 +WRITE_MANUAL: bool = os.environ.get("CLI_AUDIT_WRITE_MANUAL", "1") == "1" MANUAL_USED: dict[str, bool] = {} # Selected-path tracking for JSON output (populated by audit_tool) SELECTED_PATHS: dict[str, str] = {} SELECTED_REASON: dict[str, str] = {} -# Track ALL installations found during deep scans (for duplicate detection) -# Format: {tool_name: [(version, method, path), ...]} -ALL_INSTALLATIONS: dict[str, list[tuple[str, str, str]]] = {} - # Per-origin concurrency caps for network requests SEMAPHORES: dict[str, threading.BoundedSemaphore] = { "github.com": threading.BoundedSemaphore(value=int(os.environ.get("CLI_AUDIT_HOST_CAP_GITHUB", "4"))), @@ -322,7 +259,7 @@ def http_fetch( pass # GitHub token only for API host if GITHUB_TOKEN and host == "api.github.com": - req_headers["Authorization"] = f"token {GITHUB_TOKEN}" + req_headers["Authorization"] = f"Bearer {GITHUB_TOKEN}" if retries is None: retries = HTTP_RETRIES @@ -333,62 +270,30 @@ def http_fetch( last_exc: Exception | None = None for attempt in range(max(1, retries)): try: - # Debug: show HTTP request details - if AUDIT_DEBUG: - method_str = method or "GET" - print(f"# DEBUG: HTTP {method_str} {url} (timeout={timeout}s, attempt={attempt+1}/{retries})", file=sys.stderr, flush=True) - if sem is None: req = urllib.request.Request(url, headers=req_headers, method=method) - req_start = time.time() with urllib.request.urlopen(req, timeout=timeout) as resp: - data = resp.read() - req_dur = int((time.time() - req_start) * 1000) if 
TRACE_NET: _tlog(f"# http_open host={host} code={getattr(resp, 'status', 0)} url={url}") - if AUDIT_DEBUG: - status = getattr(resp, 'status', 0) - content_len = len(data) - print(f"# DEBUG: HTTP {status} {url} ({req_dur}ms, {content_len} bytes)", file=sys.stderr, flush=True) - return data + return resp.read() with sem: req = urllib.request.Request(url, headers=req_headers, method=method) - req_start = time.time() with urllib.request.urlopen(req, timeout=timeout) as resp: - data = resp.read() - req_dur = int((time.time() - req_start) * 1000) if TRACE_NET: _tlog(f"# http_open host={host} code={getattr(resp, 'status', 0)} url={url}") - if AUDIT_DEBUG: - status = getattr(resp, 'status', 0) - content_len = len(data) - print(f"# DEBUG: HTTP {status} {url} ({req_dur}ms, {content_len} bytes)", file=sys.stderr, flush=True) - return data + return resp.read() except urllib.error.HTTPError as e: last_exc = e code = getattr(e, "code", 0) or 0 retryable = (code == 429) or (500 <= code <= 599) or (host == "api.github.com" and code == 403) - - # Detect GitHub rate limit errors - if host == "api.github.com" and code == 403: - global GITHUB_RATE_LIMIT_HIT - with GITHUB_RATE_LIMIT_LOCK: - GITHUB_RATE_LIMIT_HIT = True - if TRACE_NET: _tlog(f"# http_error host={host} code={code} retryable={retryable} url={url}") - if AUDIT_DEBUG: - print(f"# DEBUG: HTTP ERROR {code} {url} (retryable={retryable}, attempt={attempt+1}/{retries})", file=sys.stderr, flush=True) if attempt >= retries - 1 or not retryable: raise except Exception as e: last_exc = e - exc_type = type(e).__name__ - exc_msg = str(e)[:100] # Truncate long error messages if TRACE_NET: - _tlog(f"# http_exc host={host} type={exc_type} attempt={attempt+1}/{retries} url={url}") - if AUDIT_DEBUG: - print(f"# DEBUG: HTTP EXCEPTION {exc_type}: {exc_msg} on {url} (attempt={attempt+1}/{retries})", file=sys.stderr, flush=True) + _tlog(f"# http_exc host={host} type={type(e).__name__} attempt={attempt+1}/{retries} url={url}") if attempt >= retries - 1: raise # backoff with jitter @@ -767,8 +672,6 @@ def _python_dist_version_from_venv(tool_name: str, exe_path: str, dist_name: str return "" def set_manual_latest(tool_name: str, tag_or_version: str) -> None: - if AUDIT_DEBUG: - print(f"# DEBUG: set_manual_latest({tool_name}, {tag_or_version[:50] if tag_or_version else ''}) WRITE_MANUAL={WRITE_MANUAL}", file=sys.stderr, flush=True) if not WRITE_MANUAL: return s = (tag_or_version or "").strip() @@ -827,7 +730,7 @@ def _wcswidth(s: str) -> int: class Tool: name: str candidates: tuple[str, ...] - source_kind: str # "gh" | "gitlab" | "pypi" | "crates" | "npm" | "gnu" | "skip" + source_kind: str # "gh" | "pypi" | "crates" | "npm" | "gnu" | "skip" source_args: tuple[str, ...] 
# e.g., (owner, repo) or (package,) or (crate,) or (npm_pkg,) or (gnu_project,) @@ -845,9 +748,6 @@ class Tool: Tool("npm", ("npm",), "npm", ("npm",)), Tool("pnpm", ("pnpm",), "npm", ("pnpm",)), Tool("yarn", ("yarn",), "npm", ("yarn",)), - Tool("composer", ("composer",), "gh", ("composer", "composer")), - Tool("ruby", ("ruby",), "gh", ("ruby", "ruby")), - Tool("gem", ("gem",), "gh", ("rubygems", "rubygems")), # 2) Core developer tools and utilities Tool("fd", ("fd", "fdfind"), "gh", ("sharkdp", "fd")), Tool("fzf", ("fzf",), "gh", ("junegunn", "fzf")), @@ -883,8 +783,6 @@ class Tool: Tool("ansible-core", ("ansible", "ansible-core"), "pypi", ("ansible-core",)), Tool("git-absorb", ("git-absorb",), "gh", ("tummychow", "git-absorb")), Tool("git-branchless", ("git-branchless",), "gh", ("arxanas", "git-branchless")), - Tool("git-lfs", ("git-lfs",), "gh", ("git-lfs", "git-lfs")), - Tool("tfsec", ("tfsec",), "gh", ("aquasecurity", "tfsec")), # 3) Formatters & linters Tool("black", ("black",), "pypi", ("black",)), Tool("isort", ("isort",), "pypi", ("isort",)), @@ -893,22 +791,14 @@ class Tool: Tool("prettier", ("prettier",), "gh", ("prettier", "prettier")), Tool("shfmt", ("shfmt",), "gh", ("mvdan", "sh")), Tool("shellcheck", ("shellcheck",), "gh", ("koalaman", "shellcheck")), - Tool("golangci-lint", ("golangci-lint",), "gh", ("golangci", "golangci-lint")), # 4) JSON/YAML viewers Tool("fx", ("fx",), "gh", ("antonmedv", "fx")), - # 4.5) AI assistants - Tool("codex", ("codex",), "npm", ("@openai/codex",)), - Tool("claude", ("claude",), "npm", ("@anthropic-ai/claude-code",)), # 5) VCS & platforms Tool("git", ("git",), "gh", ("git", "git")), Tool("gh", ("gh",), "gh", ("cli", "cli")), - Tool("glab", ("glab",), "gitlab", ("gitlab-org", "cli")), - Tool("gam", ("gam",), "gh", ("GAM-team", "GAM")), - # 6) Task runners & build systems + Tool("glab", ("glab",), "gh", ("profclems", "glab")), + # 6) Task runners Tool("just", ("just",), "gh", ("casey", "just")), - Tool("ninja", ("ninja",), "gh", ("ninja-build", "ninja")), - # 6.5) Code navigation - # Tool("cscope", ("cscope",), "skip", ()), # C-only tool, not relevant for this project # 7) Cloud / infra Tool("aws", ("aws",), "gh", ("aws", "aws-cli")), Tool("kubectl", ("kubectl",), "gh", ("kubernetes", "kubernetes")), @@ -932,8 +822,6 @@ class Tool: "npm": "runtimes", "pnpm": "runtimes", "yarn": "runtimes", - "composer": "runtimes", - "gem": "runtimes", # Search & code-aware tools "ripgrep": "search", "ast-grep": "search", @@ -950,9 +838,6 @@ class Tool: "yq": "json-yaml", "dasel": "json-yaml", "fx": "json-yaml", - # AI assistants - "codex": "ai-assistants", - "claude": "ai-assistants", # HTTP/CLI clients "httpie": "http", "curlie": "http", @@ -976,12 +861,10 @@ class Tool: "prettier": "formatters", "shfmt": "formatters", "shellcheck": "formatters", - "golangci-lint": "formatters", # VCS & platforms "git": "vcs", "gh": "vcs", "glab": "vcs", - "gam": "vcs", # Cloud & infra "aws": "cloud-infra", "kubectl": "cloud-infra", @@ -1013,7 +896,6 @@ class Tool: "cloud-infra", "task-runners", "data", - "ai-assistants", "other", ) @@ -1122,28 +1004,10 @@ def find_paths(command_name: str, deep: bool) -> list[str]: paths.append(line) except Exception: pass - - # Check tool-specific installation directories - if command_name in TOOL_SPECIFIC_PATHS: - for search_dir in TOOL_SPECIFIC_PATHS[command_name]: - candidate = os.path.join(search_dir, command_name) - if os.path.isfile(candidate) and os.access(candidate, os.X_OK): - if candidate not in paths: - paths.append(candidate) - - # 
Check common extra search paths - for search_dir in EXTRA_SEARCH_PATHS: - candidate = os.path.join(search_dir, command_name) - if os.path.isfile(candidate) and os.access(candidate, os.X_OK): - if candidate not in paths: - paths.append(candidate) - - # Check cargo bin (legacy location check) cargo_path = os.path.join(CARGO_BIN, command_name) if os.path.isfile(cargo_path) and os.access(cargo_path, os.X_OK): if cargo_path not in paths: paths.append(cargo_path) - return paths @@ -1239,44 +1103,6 @@ def get_version_line(path: str, tool_name: str) -> str: return line # Special cases next - if tool_name == "composer": - # Composer may print PHP deprecation warnings before version - # Example: "Deprecation Notice: ..." then "Composer version 2.5.8 2023-06-09 17:13:21" - for flags in (("--version",), ("-V",)): - try: - proc = subprocess.run( - [path, *flags], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, timeout=TIMEOUT_SECONDS, check=False - ) - except Exception: - continue - out = (proc.stdout or "").splitlines() - # Skip deprecation warnings and find line starting with "Composer version" - for line in out: - if line.strip().startswith("Composer version"): - return line.strip() - # Fallback: find any line with version that doesn't look like a warning - for line in out: - if not line.lower().startswith("deprecation") and extract_version_number(line): - return line.strip() - return "" - if tool_name == "tfsec": - # tfsec prints multi-line migration notice before version - # Example: "====...\ntfsec is joining the Trivy family\n...\n====\nv1.28.14" - for flags in (("--version",), ("-V",), ("version",)): - try: - proc = subprocess.run( - [path, *flags], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, timeout=TIMEOUT_SECONDS, check=False - ) - except Exception: - continue - out = (proc.stdout or "").splitlines() - # Find line that starts with v and has version pattern, skipping banner lines - for line in out: - stripped = line.strip() - if stripped and not any(kw in stripped.lower() for kw in ["trivy", "tfsec is", "continue", "read more", "==", "attention", "directed"]): - if extract_version_number(stripped): - return stripped - return "" if tool_name == "shellcheck": # ShellCheck prints multi-line banner with a dedicated 'version: X.Y.Z' line when using -V # Prefer -V, then --version; return the whole first matching line for consistent display. @@ -1465,19 +1291,6 @@ def get_version_line(path: str, tool_name: str) -> str: if "client version" in lcline or VERSION_RE.search(line): set_local_flag_hint("kubectl", " ".join(args[1:])) return line - if tool_name == "docker": - # Docker CLI version (client); prefer explicit client version to avoid confusion with server - # Try docker version --format first for clean output - line = run_with_timeout([path, "version", "--format", "{{.Client.Version}}"]) - if line and extract_version_number(line): - set_local_flag_hint("docker", "version --format {{.Client.Version}}") - return f"docker {line.strip()}" - # Fallback to --version which shows "Docker version X.Y.Z, build ..." - line = run_with_timeout([path, "--version"]) - if line: - set_local_flag_hint("docker", "--version") - return line - return "" if tool_name == "docker-compose": # Fallbacks already attempted above; return empty to continue generic flags if needed return "" @@ -1535,23 +1348,6 @@ def extract_version_number(s: str) -> str: return m2.group(1) if m2 else "" -def normalize_version_tag(tag: str) -> str: - """Normalize version tags to consistent format. 
- - Converts: - - v3_4_7 -> v3.4.7 (Ruby convention: underscores to dots) - - v1.2.3 -> v1.2.3 (already normalized) - - 1.2.3 -> 1.2.3 (no v prefix is fine) - - This ensures all stored versions use dots, not underscores. - """ - if not tag: - return tag - # Replace underscores with dots in version numbers - # Pattern: match sequences like "3_4_7" and convert to "3.4.7" - return tag.replace("_", ".") - - def _format_duration(seconds: float) -> str: try: if seconds < 1: @@ -1589,146 +1385,6 @@ def status_icon(status: str, installed_line: str) -> str: return "❓" -def detect_path_shadowing(tool_name: str) -> dict[str, str]: - """Detect if a tool is shadowed by another binary earlier in PATH, - or if there's a known conflicting package installed. - - Returns dict with: - - shadowed: "yes" if tool is shadowed, "" otherwise - - shadowed_by: path to the shadowing binary - - shadowed_package: package name if available (dpkg) - - expected_path: path to the expected binary - - warning: human-readable warning message - """ - result = { - "shadowed": "", - "shadowed_by": "", - "shadowed_package": "", - "expected_path": "", - "warning": "" - } - - try: - # Known package conflicts (tool_name -> (conflicting_package, system_path)) - KNOWN_CONFLICTS = { - "yarn": ("cmdtest", "/usr/bin/yarn"), - } - - # Get the first binary in PATH - first_path = shutil.which(tool_name) - if not first_path: - return result - - # Check for known package conflicts FIRST - if tool_name in KNOWN_CONFLICTS: - conflict_pkg, conflict_path = KNOWN_CONFLICTS[tool_name] - if os.path.isfile(conflict_path): - # Check if this conflicting binary exists - try: - pkg_out = subprocess.run( - ["dpkg", "-l", conflict_pkg], - capture_output=True, - text=True, - timeout=2 - ) - if pkg_out.returncode == 0 and (f"ii {conflict_pkg}" in pkg_out.stdout or f"\nii {conflict_pkg}" in pkg_out.stdout): - # Package is installed - first_real = os.path.realpath(first_path) - conflict_real = os.path.realpath(conflict_path) - - # If the conflicting binary is being used OR exists in PATH - if first_real == conflict_real: - result["shadowed"] = "yes" - result["shadowed_by"] = conflict_real - result["shadowed_package"] = conflict_pkg - result["expected_path"] = f"(managed {tool_name} not in PATH)" - result["warning"] = f"⚠️ Conflicting package '{conflict_pkg}' installed (remove with: sudo apt remove {conflict_pkg})" - else: - # Managed version is first, but conflict exists in system - result["warning"] = f"⚠️ Conflicting package '{conflict_pkg}' installed but not active (recommended: sudo apt remove {conflict_pkg})" - except Exception: - pass - - # If we already found a conflict, return early - if result["shadowed"] or result["warning"]: - return result - - # Get ALL binaries with this name in PATH - path_dirs = os.environ.get("PATH", "").split(os.pathsep) - all_paths = [] - for directory in path_dirs: - candidate = os.path.join(directory, tool_name) - if os.path.isfile(candidate) and os.access(candidate, os.X_OK): - try: - all_paths.append(os.path.realpath(candidate)) - except Exception: - all_paths.append(candidate) - - # Remove duplicates while preserving order - seen = set() - unique_paths = [] - for p in all_paths: - if p not in seen: - seen.add(p) - unique_paths.append(p) - - # If there's only one binary, no shadowing - if len(unique_paths) <= 1: - return result - - # Check if the first binary is from a known managed location - # (nvm, cargo, uv, ~/.local/bin, etc.) 
- first_real = os.path.realpath(first_path) - home = os.path.expanduser("~") - - managed_patterns = [ - (os.path.join(home, ".nvm"), "nvm"), - (os.path.join(home, ".cargo", "bin"), "cargo"), - (os.path.join(home, ".local", "bin"), "local"), - (os.path.join(home, ".rbenv"), "rbenv"), - (os.path.join(home, ".pyenv"), "pyenv"), - ("/usr/local/bin", "usr-local"), - ] - - first_is_managed = any(first_real.startswith(pattern) for pattern, _ in managed_patterns) - - # If first binary is from /usr/bin or /bin, and there's a managed one later - if first_real.startswith(("/usr/bin/", "/bin/")): - for later_path in unique_paths[1:]: - if any(later_path.startswith(pattern) for pattern, _ in managed_patterns): - # Found shadowing: system binary is hiding a managed one - result["shadowed"] = "yes" - result["shadowed_by"] = first_real - result["expected_path"] = later_path - - # Try to identify the package - try: - pkg_out = subprocess.run( - ["dpkg", "-S", first_real], - capture_output=True, - text=True, - timeout=2 - ) - if pkg_out.returncode == 0 and pkg_out.stdout: - pkg_name = pkg_out.stdout.split(":")[0].strip() - result["shadowed_package"] = pkg_name - except Exception: - pass - - # Build warning message - if result["shadowed_package"]: - result["warning"] = f"⚠️ System binary ({result['shadowed_package']}) shadowing managed installation (remove with: sudo apt remove {result['shadowed_package']})" - else: - result["warning"] = "⚠️ System binary shadowing managed installation" - - break - - except Exception: - pass - - return result - - def _classify_install_method(path: str, tool_name: str) -> tuple[str, str]: """Return (method, reason) for install classification.""" try: @@ -1740,8 +1396,6 @@ def _classify_install_method(path: str, tool_name: str) -> tuple[str, str]: except Exception: real = path p = real or path - # Keep original path for directory prefix checks (handles symlinks like /usr/local/bin/aws → /usr/local/aws-cli/...) 
- orig = path if tool_name == "python": if "/.venvs/" in p: return "uv venv", "shebang-in-~/.venvs" @@ -1751,15 +1405,11 @@ def _classify_install_method(path: str, tool_name: str) -> tuple[str, str]: return "uv tool", "uv-list-match" if any(t in p for t in ("/.local/share/uv/", "/.cache/uv/", "/.uv/")): return "uv tool", "path-contains-uv" - if tool_name == "docker": - # Classify docker CLI client by path, not by server source - # Note: docker server may be from Docker Desktop, but CLI is what we audit - if p.startswith("/usr/bin/") or p.startswith("/bin/"): - # System-installed docker CLI (may connect to Docker Desktop server) - return "system", "path-under-/usr/bin" - if p.startswith("/usr/local/bin/"): - return "/usr/local/bin", "path-under-/usr/local/bin" - # Fall through to generic classification + if tool_name == "docker" and DOCKER_INFO_ENABLED: + wsl = bool(os.environ.get("WSL_DISTRO_NAME", "")) + info = run_with_timeout(["docker", "info", "--format", "{{.OperatingSystem}}"]) + if info and "Docker Desktop" in info: + return ("docker-desktop (WSL)" if wsl else "docker-desktop"), "docker-info-os" if p.startswith(os.path.join(home, ".nvm")): return "nvm/npm", "path-under-~/.nvm" if "/.cache/corepack" in p or "/.corepack" in p: @@ -1819,8 +1469,7 @@ def _classify_install_method(path: str, tool_name: str) -> tuple[str, str]: return "rustup/cargo", "path-under-~/.cargo/bin" if any(t in p for t in ("/.local/share/pipx/venvs/", "/.local/pipx/venvs/")): return "pipx/user", "path-under-pipx-venvs" - # Check both realpath and original path for ~/.local/bin (handles symlinks) - if p.startswith(os.path.join(home, ".local", "bin")) or orig.startswith(os.path.join(home, ".local", "bin")): + if p.startswith(os.path.join(home, ".local", "bin")): # Refine ~/.local/bin classification via shebang to detect pipx/uv venv wrappers try: with open(p, "rb") as f: @@ -1839,9 +1488,6 @@ def _classify_install_method(path: str, tool_name: str) -> tuple[str, str]: if any(t in py for t in ("/.local/share/uv/", "/.cache/uv/", "/.uv/")): return "uv tool", "shebang-uv" return os.path.join(home, ".local", "bin"), "path-under-~/.local/bin" - # Check both realpath and original path for ~/bin (common user install location) - if p.startswith(os.path.join(home, "bin")) or orig.startswith(os.path.join(home, "bin")): - return os.path.join(home, "bin"), "path-under-~/bin" if "/snap/" in p: return "snap", "path-contains-snap" # Homebrew (macOS and Linuxbrew). Prefer env hints when available. 
@@ -1850,8 +1496,7 @@ def _classify_install_method(path: str, tool_name: str) -> tuple[str, str]: if (hb_prefix and p.startswith(hb_prefix)) or (hb_cellar and hb_cellar in p) or \ ("/home/linuxbrew/.linuxbrew" in p) or ("/opt/homebrew" in p) or ("/usr/local/Cellar" in p): return "homebrew", "brew-prefix-or-cellar" - # Check both realpath and original path for /usr/local/bin (handles symlinks like aws) - if p.startswith("/usr/local/bin") or orig.startswith("/usr/local/bin"): + if p.startswith("/usr/local/bin"): return "/usr/local/bin", "path-under-/usr/local/bin" if p.startswith("/usr/bin") or p.startswith("/bin"): global DPKG_CACHE @@ -1892,8 +1537,6 @@ def upstream_method_for(tool: Tool) -> str: return "npm (nvm)" if kind == "gh": return "github" - if kind == "gitlab": - return "gitlab" if kind == "gnu": return "gnu-ftp" return "" @@ -1906,9 +1549,6 @@ def tool_homepage_url(tool: Tool) -> str: if kind == "gh": owner, repo = args # type: ignore[misc] return f"https://github.com/{owner}/{repo}" - if kind == "gitlab": - group, project = args # type: ignore[misc] - return f"https://gitlab.com/{group}/{project}" if kind == "pypi": (pkg,) = args # type: ignore[misc] return f"https://pypi.org/project/{pkg}/" @@ -1935,11 +1575,6 @@ def latest_target_url(tool: Tool, latest_tag: str, latest_num: str) -> str: if latest_tag: return f"https://github.com/{owner}/{repo}/releases/tag/{latest_tag}" return f"https://github.com/{owner}/{repo}/releases/latest" - if kind == "gitlab": - group, project = args # type: ignore[misc] - if latest_tag: - return f"https://gitlab.com/{group}/{project}/-/releases/{latest_tag}" - return f"https://gitlab.com/{group}/{project}/-/releases" if kind == "pypi": (pkg,) = args # type: ignore[misc] return f"https://pypi.org/project/{pkg}/" @@ -2016,20 +1651,17 @@ def latest_github(owner: str, repo: str) -> tuple[str, str]: return "", "" # Always prefer the releases/latest redirect (skips pre-releases) try: - url = f"https://github.com/{owner}/{repo}/releases/latest" - if AUDIT_DEBUG: - print(f"# DEBUG: GitHub HEAD {url} (timeout={TIMEOUT_SECONDS}s)", file=sys.stderr, flush=True) - req = urllib.request.Request(url, headers=USER_AGENT_HEADERS, method="HEAD") + req = urllib.request.Request( + f"https://github.com/{owner}/{repo}/releases/latest", + headers=USER_AGENT_HEADERS, + method="HEAD", + ) opener = urllib.request.build_opener(urllib.request.HTTPRedirectHandler) - req_start = time.time() resp = opener.open(req, timeout=TIMEOUT_SECONDS) - req_dur = int((time.time() - req_start) * 1000) - if AUDIT_DEBUG: - print(f"# DEBUG: GitHub HEAD response ({req_dur}ms, redirect to {resp.geturl()})", file=sys.stderr, flush=True) final = resp.geturl() last = final.rsplit("/", 1)[-1] if last and last.lower() not in ("releases", "latest"): - tag = normalize_version_tag(last.strip()) + tag = last.strip() result = (tag, extract_version_number(tag)) set_manual_latest(repo, tag) set_hint(f"gh:{owner}/{repo}", "latest_redirect") @@ -2039,7 +1671,7 @@ def latest_github(owner: str, repo: str) -> tuple[str, str]: # Fallback to GitHub API releases/latest (also non-prerelease) try: data = json.loads(http_get(f"https://api.github.com/repos/{owner}/{repo}/releases/latest")) - tag = normalize_version_tag((data.get("tag_name") or "").strip()) + tag = (data.get("tag_name") or "").strip() if tag and tag.lower() not in ("releases", "latest"): result = (tag, extract_version_number(tag)) set_manual_latest(repo, tag) @@ -2050,16 +1682,12 @@ def latest_github(owner: str, repo: str) -> tuple[str, str]: # 
Special-case: golang/go has no GitHub releases; filter stable tags only try: if owner == "golang" and repo == "go": - if AUDIT_DEBUG: - print(f"# DEBUG: Processing golang/go tags (special case)", file=sys.stderr, flush=True) # Fetch up to 200 tags (2 pages) and choose the highest stable goX[.Y][.Z] tag best: tuple[tuple[int, ...], str, str] | None = None for page in (1, 2): data = json.loads(http_get(f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100&page={page}")) if not isinstance(data, list): break - if AUDIT_DEBUG: - print(f"# DEBUG: Processing page {page} with {len(data)} tags", file=sys.stderr, flush=True) for item in data: name = (item.get("name") or "").strip() # Accept only stable tags like go1.25 or go1.25.1; exclude weekly/beta/rc @@ -2078,50 +1706,6 @@ def latest_github(owner: str, repo: str) -> tuple[str, str]: # If we already found a good candidate on the first page, stop early if best is not None: break - if AUDIT_DEBUG: - print(f"# DEBUG: Best go tag found: {best}", file=sys.stderr, flush=True) - if best is not None: - _, tag_name, ver_num = best - result = (tag_name, ver_num) - if AUDIT_DEBUG: - print(f"# DEBUG: Calling set_manual_latest({repo}, {tag_name})", file=sys.stderr, flush=True) - set_manual_latest(repo, tag_name) - if AUDIT_DEBUG: - print(f"# DEBUG: Calling set_hint(gh:{owner}/{repo}, tags_api)", file=sys.stderr, flush=True) - set_hint(f"gh:{owner}/{repo}", "tags_api") - if AUDIT_DEBUG: - print(f"# DEBUG: Returning result for go: {result}", file=sys.stderr, flush=True) - return result - except Exception: - pass - # Special-case: python/cpython - filter stable release tags only (vX.Y.Z, exclude rc/alpha/beta) - try: - if owner == "python" and repo == "cpython": - # Fetch tags and choose the highest stable vX.Y.Z tag (exclude rc/alpha/beta/a/b) - best: tuple[tuple[int, ...], str, str] | None = None - for page in (1, 2): - data = json.loads(http_get(f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100&page={page}")) - if not isinstance(data, list): - break - for item in data: - name = (item.get("name") or "").strip() - # Accept only stable final release tags like v3.14.0 or v3.12.7 - # Exclude rc, alpha, beta, a, b suffixes - if not re.match(r"^v\d+\.\d+\.\d+$", name): - continue - ver = extract_version_number(name) - if not ver: - continue - try: - nums = tuple(int(x) for x in ver.split(".")) - except Exception: - continue - tup = (nums, name, ver) - if best is None or tup[0] > best[0]: - best = tup - # If we found a good candidate on the first page, stop early - if best is not None: - break if best is not None: _, tag_name, ver_num = best result = (tag_name, ver_num) @@ -2134,7 +1718,7 @@ def latest_github(owner: str, repo: str) -> tuple[str, str]: try: data = json.loads(http_get(f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=1")) if isinstance(data, list) and data: - tag = normalize_version_tag((data[0].get("name") or "").strip()) + tag = (data[0].get("name") or "").strip() if tag: result = (tag, extract_version_number(tag)) set_manual_latest(repo, tag) @@ -2144,117 +1728,16 @@ def latest_github(owner: str, repo: str) -> tuple[str, str]: pass try: atom = http_get(f"https://github.com/{owner}/{repo}/releases.atom").decode("utf-8", "ignore") - # Filter out pre-release tags (alpha, beta, rc) from atom feed for all repos - # Find all stable tags and pick the highest version - best: tuple[tuple[int, ...], str, str] | None = None - for match in re.finditer(r"/releases/tag/([^<\"]+)", atom): - tag = 
normalize_version_tag(match.group(1).strip()) - # Accept only stable final release tags like v3.14.0, v28.5.1 - # Exclude rc, alpha, beta, a, b suffixes (e.g., v29.0.0-rc.1, v3.15.0a1) - if tag and re.match(r"^v?\d+\.\d+(\.\d+)?$", tag): - ver = extract_version_number(tag) - if ver: - try: - nums = tuple(int(x) for x in ver.split(".")) - tup = (nums, tag, ver) - if best is None or tup[0] > best[0]: - best = tup - except Exception: - continue - if best is not None: - _, tag, ver = best - result = (tag, ver) - set_manual_latest(repo, tag) - set_hint(f"gh:{owner}/{repo}", "atom_filtered") - return result - except Exception: - pass - return "", "" - - -def latest_gitlab(group: str, project: str) -> tuple[str, str]: - """ - Fetch the latest release from GitLab using the GitLab API. - Args: - group: GitLab group/namespace (e.g., "gitlab-org") - project: Project name (e.g., "cli") - Returns: - (tag_name, version_number) tuple or ("", "") if not found - """ - if OFFLINE_MODE: - return "", "" - - # GitLab API requires URL-encoded project path - project_path = f"{group}%2F{project}" - - # Try releases API first (excludes pre-releases by default) - try: - url = f"https://gitlab.com/api/v4/projects/{project_path}/releases" - if AUDIT_DEBUG: - print(f"# DEBUG: GitLab API {url} (timeout={TIMEOUT_SECONDS}s)", file=sys.stderr, flush=True) - - data = json.loads(http_get(url)) - - if isinstance(data, list) and data: - # GitLab releases API returns releases in descending order by default - # First release is the latest - release = data[0] - tag = normalize_version_tag((release.get("tag_name") or "").strip()) - - if tag: + m = re.search(r"/releases/tag/([^<\"]+)", atom) + if m: + tag = m.group(1).strip() + if tag and tag.lower() not in ("releases", "latest"): result = (tag, extract_version_number(tag)) - set_manual_latest(project, tag) - set_hint(f"gitlab:{group}/{project}", "releases_api") - if AUDIT_DEBUG: - print(f"# DEBUG: GitLab found release: {tag}", file=sys.stderr, flush=True) - return result - except Exception as e: - if AUDIT_DEBUG: - print(f"# DEBUG: GitLab releases API failed: {e}", file=sys.stderr, flush=True) - pass - - # Fallback to tags API - try: - url = f"https://gitlab.com/api/v4/projects/{project_path}/repository/tags?per_page=20" - if AUDIT_DEBUG: - print(f"# DEBUG: GitLab tags API {url}", file=sys.stderr, flush=True) - - data = json.loads(http_get(url)) - - if isinstance(data, list): - # Filter stable releases and find highest version - best: tuple[tuple[int, ...], str, str] | None = None - - for item in data: - tag_name = (item.get("name") or "").strip() - tag = normalize_version_tag(tag_name) - - # Accept only stable final release tags (v1.2.3, 1.2.3) - # Exclude rc, alpha, beta, pre, dev suffixes - if tag and re.match(r"^v?\d+\.\d+(\.\d+)?$", tag): - ver = extract_version_number(tag) - if ver: - try: - nums = tuple(int(x) for x in ver.split(".")) - tup = (nums, tag, ver) - if best is None or tup[0] > best[0]: - best = tup - except Exception: - continue - - if best is not None: - _, tag, ver = best - result = (tag, ver) - set_manual_latest(project, tag) - set_hint(f"gitlab:{group}/{project}", "tags_api") - if AUDIT_DEBUG: - print(f"# DEBUG: GitLab found tag: {tag}", file=sys.stderr, flush=True) + set_manual_latest(repo, tag) + set_hint(f"gh:{owner}/{repo}", "atom") return result - except Exception as e: - if AUDIT_DEBUG: - print(f"# DEBUG: GitLab tags API failed: {e}", file=sys.stderr, flush=True) + except Exception: pass - return "", "" @@ -2378,17 +1861,21 @@ def keyify(v: str): 
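The replacement atom-feed fallback takes the first `/releases/tag/...` entry rather than scanning the whole feed for the highest stable tag, so it is simpler but can surface a pre-release if one tops the feed, which is exactly the trade-off the deleted filtering code addressed. The new behaviour in isolation:

```python
import re
import urllib.request

def latest_tag_from_atom(owner: str, repo: str, timeout: float = 3.0) -> str:
    """First tag in the releases.atom feed (entries are newest-first)."""
    url = f"https://github.com/{owner}/{repo}/releases.atom"
    with urllib.request.urlopen(url, timeout=timeout) as resp:
        atom = resp.read().decode("utf-8", "ignore")
    m = re.search(r"/releases/tag/([^<\"]+)", atom)
    if not m:
        return ""
    tag = m.group(1).strip()
    return "" if tag.lower() in ("releases", "latest") else tag
```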
latest_v = versions[-1] return latest_v, (extract_version_number(latest_v) or latest_v) - # Try mirrors first (faster, more up-to-date than canonical ftp.gnu.org) - # 1) Try kernel mirror (fast, reliable, ~1s) - html = http_get(f"https://mirrors.kernel.org/gnu/{project}/").decode("utf-8", "ignore") + # Try canonical directory + html = http_get(f"https://ftp.gnu.org/gnu/{project}/").decode("utf-8", "ignore") + # 1) Prefer LATEST-IS-* hint if present tag, num = parse_dir(html) if not tag: - # 2) Try ftpmirror (fast alternative, ~0.9s) + # Try sorted listing query + html = http_get(f"https://ftp.gnu.org/gnu/{project}/?C=M;O=D").decode("utf-8", "ignore") + tag, num = parse_dir(html) + if not tag: + # Try mirror html = http_get(f"https://ftpmirror.gnu.org/gnu/{project}/").decode("utf-8", "ignore") tag, num = parse_dir(html) if not tag: - # 3) Try canonical directory (last resort, slower ~5s) - html = http_get(f"https://ftp.gnu.org/gnu/{project}/").decode("utf-8", "ignore") + # Try kernel mirror + html = http_get(f"https://mirrors.kernel.org/gnu/{project}/").decode("utf-8", "ignore") tag, num = parse_dir(html) if not tag: return "", "" @@ -2461,18 +1948,6 @@ def get_latest(tool: Tool) -> tuple[str, str]: return man_tag, man_num MANUAL_USED[tool.name] = False return tag, num - if kind == "gitlab": - group, project = args # type: ignore[misc] - tag, num = latest_gitlab(group, project) - if tag or num: - MANUAL_USED[tool.name] = False - set_manual_method(tool.name, "gitlab") - return tag, num - if manual_available: - MANUAL_USED[tool.name] = True - return man_tag, man_num - MANUAL_USED[tool.name] = False - return tag, num if kind == "pypi": (pkg,) = args # type: ignore[misc] tag, num = latest_pypi(pkg) @@ -2527,19 +2002,13 @@ def get_latest(tool: Tool) -> tuple[str, str]: def audit_tool(tool: Tool) -> tuple[str, str, str, str, str, str, str, str]: - # Debug: show when tool audit starts - if AUDIT_DEBUG: - print(f"# DEBUG: START audit_tool({tool.name}) source={tool.source_kind} offline={OFFLINE_MODE}", file=sys.stderr, flush=True) - # Detect installed candidates t0_inst = time.time() candidates = tool.candidates tuples: list[tuple[str, str, str]] = [] # (num, line, path) any_found = False # Use shallow discovery for most tools (first match); deep only for special cases - # Enable deep scan for tools likely to have multiple installations - python_cli_tools = {"semgrep", "pre-commit", "bandit", "black", "flake8", "isort", "ansible", "poetry", "pipx"} - deep_scan = (tool.name == "node") or (tool.name in python_cli_tools) # prefer to find all installation variants + deep_scan = (tool.name == "node") # prefer to find both system and nvm variants chosen: tuple[str, str, str] | tuple[()] = () # Prefer uv-managed Python as the authoritative interpreter when available if tool.name == "python": @@ -2548,18 +2017,10 @@ def audit_tool(tool: Tool) -> tuple[str, str, str, str, str, str, str, str]: chosen = uv_choice # Only fall back to PATH scanning if we didn't select a uv choice if not chosen: - if AUDIT_DEBUG: - print(f"# DEBUG: Scanning PATH for {tool.name} candidates: {candidates}", file=sys.stderr, flush=True) for cand in candidates: - if AUDIT_DEBUG: - print(f"# DEBUG: Searching for candidate: {cand}", file=sys.stderr, flush=True) for path in find_paths(cand, deep=deep_scan): any_found = True - if AUDIT_DEBUG: - print(f"# DEBUG: Found path: {path}, getting version...", file=sys.stderr, flush=True) line = get_version_line(path, tool.name) - if AUDIT_DEBUG: - print(f"# DEBUG: Version line: {line}", 
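The reordered GNU lookup is a plain fallback chain: try each directory listing in turn and stop at the first one `parse_dir` can extract a version from. Factored out, with `http_get` and `parse_dir` standing in for the helpers already defined in cli_audit.py (their exact signatures are assumed here):

```python
def latest_gnu_listing(project: str, http_get, parse_dir) -> tuple[str, str]:
    # Order matches the hunk: canonical listing first, then the
    # mtime-sorted query, then the two mirrors as a last resort.
    urls = (
        f"https://ftp.gnu.org/gnu/{project}/",
        f"https://ftp.gnu.org/gnu/{project}/?C=M;O=D",
        f"https://ftpmirror.gnu.org/gnu/{project}/",
        f"https://mirrors.kernel.org/gnu/{project}/",
    )
    for url in urls:
        try:
            tag, num = parse_dir(http_get(url).decode("utf-8", "ignore"))
        except Exception:
            continue  # listing unreachable or unparsable; try the next one
        if tag:
            return tag, num
    return "", ""
```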
file=sys.stderr, flush=True) num = extract_version_number(line) tuples.append((num, line, path)) # Fast exit for fx once we have a version line @@ -2574,18 +2035,6 @@ def audit_tool(tool: Tool) -> tuple[str, str, str, str, str, str, str, str]: chosen = choose_highest(tuples) else: chosen = () - - # Store ALL installations found during deep scan for duplicate detection - if deep_scan and tuples: - installations = [] - for num, line, path in tuples: - try: - method, _ = _classify_install_method(path, tool.name) - except Exception: - method = detect_install_method(path, tool.name) - installations.append((num, method, path)) - ALL_INSTALLATIONS[tool.name] = installations - if not chosen: installed_line = "X" installed_num = "" @@ -2595,18 +2044,8 @@ def audit_tool(tool: Tool) -> tuple[str, str, str, str, str, str, str, str]: t1_inst = time.time() latest_start = time.time() - - # Debug: show network call about to happen - if AUDIT_DEBUG: - print(f"# DEBUG: NETWORK get_latest({tool.name}) source={tool.source_kind} offline={OFFLINE_MODE}", file=sys.stderr, flush=True) - latest_tag, latest_num = get_latest(tool) latest_end = time.time() - - # Debug: show network call completed - if AUDIT_DEBUG: - dur_ms = int((latest_end - latest_start) * 1000) - print(f"# DEBUG: DONE get_latest({tool.name}) dur={dur_ms}ms tag='{latest_tag}' num='{latest_num}'", file=sys.stderr, flush=True) # Slow operation trace dur_ms = int((latest_end - latest_start) * 1000) if dur_ms >= SLOW_MS: @@ -2623,25 +2062,6 @@ def audit_tool(tool: Tool) -> tuple[str, str, str, str, str, str, str, str]: else: status = "UNKNOWN" - # Check if tool is pinned to current version - if status == "OUTDATED" and inst_num: - script_dir = os.path.dirname(os.path.abspath(__file__)) - catalog_file = os.path.join(script_dir, "catalog", f"{tool.name}.json") - if os.path.exists(catalog_file): - try: - with open(catalog_file, "r", encoding="utf-8") as f: - catalog_data = json.load(f) - pinned_version = catalog_data.get("pinned_version", "") - if pinned_version and extract_version_number(pinned_version) == inst_num: - # Tool is pinned to installed version - treat as up-to-date - status = "UP-TO-DATE" - except Exception: - pass # Catalog read failed, continue with original status - - # Note: Tools with pinned_version="never" are filtered out in guide.sh, - # so we don't need to change their status here. Keep them as NOT INSTALLED - # to avoid confusion (showing ✅ icon when tool isn't actually installed). 
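With the deep-scan bookkeeping gone, `audit_tool` still funnels every PATH candidate into `(version, line, path)` tuples and lets `choose_highest` decide. The patch does not show that helper; one plausible shape, assuming dotted numeric version strings:

```python
def choose_highest(tuples: list[tuple[str, str, str]]) -> tuple[str, str, str]:
    """Pick the candidate with the highest version number.
    Entries are (version_number, raw_version_line, path)."""
    def key(entry: tuple[str, str, str]) -> tuple[int, ...]:
        try:
            return tuple(int(x) for x in entry[0].split("."))
        except Exception:
            return (-1,)  # unparsable versions sort below everything
    return max(tuples, key=key)
```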
- # Sanitize latest display to numeric (like installed) if latest_num: latest_display = latest_num @@ -2672,8 +2092,7 @@ def audit_tool(tool: Tool) -> tuple[str, str, str, str, str, str, str, str]: installed_display = installed_num else: installed_display = installed_line - # Only add timings for display output, NOT for snapshot persistence - if SHOW_TIMINGS and not COLLECT_ONLY: + if SHOW_TIMINGS: # Show timing even when not installed if installed_display: installed_display = installed_display + f" ({_format_duration(t1_inst - t0_inst)})" @@ -2733,96 +2152,7 @@ def _parse_tool_filter(argv: Sequence[str]) -> list[str]: return [] -def _render_only_mode() -> int: - """Fast path: render audit results from snapshot without live checks.""" - # Friendly startup message for UX - snap_file = os.environ.get("CLI_AUDIT_SNAPSHOT_FILE", "tools_snapshot.json") - if os.path.exists(snap_file): - meta = {} - try: - with open(snap_file, "r", encoding="utf-8") as f: - data = json.load(f) - meta = data.get("__meta__", {}) - except Exception: - pass - tool_count = meta.get("count", "~50") - age = meta.get("collected_at", "") - if age: - try: - from datetime import datetime - collected_dt = datetime.fromisoformat(age) - now = datetime.now(collected_dt.tzinfo) - age_seconds = (now - collected_dt).total_seconds() - if age_seconds < 60: - age_str = "just now" - elif age_seconds < 3600: - age_str = f"{int(age_seconds / 60)}m ago" - elif age_seconds < 86400: - age_str = f"{int(age_seconds / 3600)}h ago" - else: - age_str = f"{int(age_seconds / 86400)}d ago" - except Exception: - age_str = "cached" - else: - age_str = "cached" - print(f"# Auditing {tool_count} development tools from snapshot ({age_str})...", file=sys.stderr) - else: - print(f"# No snapshot found - run 'make update' to collect fresh data", file=sys.stderr) - print("", file=sys.stderr) # Blank line to separate informational message from table output - - snap = load_snapshot() - selected_names = _parse_tool_filter(sys.argv[1:]) - selected_set = set(selected_names) if selected_names else None - rows = render_from_snapshot(snap, selected_set) - - # JSON output from snapshot - if os.environ.get("CLI_AUDIT_JSON", "0") == "1": - payload = [] - for name, installed, installed_method, latest, upstream_method, status, tool_url, latest_url in rows: - payload.append({ - "tool": name, - "category": category_for(name), - "installed": installed, - "installed_method": installed_method, - "installed_version": extract_version_number(installed), - "latest_version": extract_version_number(latest), - "latest_upstream": latest, - "upstream_method": upstream_method, - "status": status, - "tool_url": tool_url, - "latest_url": latest_url, - "state_icon": status_icon(status, installed), - "is_up_to_date": (status == "UP-TO-DATE"), - }) - print(json.dumps(payload, ensure_ascii=False)) - return 0 - - # Table output from snapshot - headers = (" ", "tool", "installed", "installed_method", "latest_upstream", "upstream_method") - print("|".join(headers)) - for name, installed, installed_method, latest, upstream_method, status, tool_url, latest_url in rows: - icon = status_icon(status, installed) - print("|".join((icon, name, installed, installed_method, latest, upstream_method))) - - # Summary line from snapshot meta if present - try: - meta = snap.get("__meta__", {}) - total = meta.get("count", len(rows)) - missing = sum(1 for r in rows if r[5] == "NOT INSTALLED") - outdated = sum(1 for r in rows if r[5] == "OUTDATED") - unknown = sum(1 for r in rows if r[5] == "UNKNOWN") - 
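The `_render_only_mode` fast path being deleted here also formatted snapshot age for the startup banner. The same bucketing, isolated (behaviour reconstructed from the removed lines; naive timestamps are assumed UTC):

```python
from datetime import datetime, timezone

def age_label(collected_at: str) -> str:
    """'just now', '5m ago', '3h ago', '2d ago', or 'cached' on parse failure."""
    try:
        dt = datetime.fromisoformat(collected_at)
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        seconds = (datetime.now(timezone.utc) - dt).total_seconds()
    except Exception:
        return "cached"
    if seconds < 60:
        return "just now"
    if seconds < 3600:
        return f"{int(seconds / 60)}m ago"
    if seconds < 86400:
        return f"{int(seconds / 3600)}h ago"
    return f"{int(seconds / 86400)}d ago"
```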
offline_tag = " (offline)" if meta.get("offline") else "" - print(f"\nReadiness{offline_tag}: {total} tools, {outdated} outdated, {missing} missing, {unknown} unknown", file=sys.stderr) - except Exception: - pass - return 0 - - def main() -> int: - # RENDER-ONLY mode: bypass live audit entirely, render from snapshot (FAST PATH) - if RENDER_ONLY: - return _render_only_mode() - # Determine selected tools (optional filtering) selected_names = _parse_tool_filter(sys.argv[1:]) # Optional alphabetical sort for output stability when desired @@ -2850,182 +2180,25 @@ def main() -> int: total_tools = len(tools_seq) completed_tools = 0 - - # Always show GITHUB_TOKEN status and actual rate limit info (for both COLLECT_ONLY and live audit) - if not OFFLINE_MODE: - try: - # Query GitHub rate limit API to show actual current status - rate_limit_data = http_get("https://api.github.com/rate_limit") - rate_info = json.loads(rate_limit_data) - core_limit = rate_info.get("resources", {}).get("core", {}) - limit = core_limit.get("limit", 0) - remaining = core_limit.get("remaining", 0) - reset_time = core_limit.get("reset", 0) - - if limit > 0: - # Calculate time until reset - import time as time_module - reset_in_min = max(0, int((reset_time - time_module.time()) / 60)) - if GITHUB_TOKEN: - print(f"# GitHub rate limit: {remaining}/{limit} requests remaining (resets in {reset_in_min}m)", file=sys.stderr) - else: - print(f"# GitHub rate limit: {remaining}/{limit} requests remaining (resets in {reset_in_min}m) - no token", file=sys.stderr) - else: - # Fallback if API call failed - if GITHUB_TOKEN: - print(f"# GITHUB_TOKEN: configured (5,000 requests/hour)", file=sys.stderr) - else: - print(f"# GITHUB_TOKEN: not set (60 requests/hour limit)", file=sys.stderr) - except Exception as e: - # Fallback if rate limit check fails - if AUDIT_DEBUG: - print(f"# DEBUG: Rate limit API call failed: {e}", file=sys.stderr) - if GITHUB_TOKEN: - print(f"# GITHUB_TOKEN: configured (5,000 requests/hour)", file=sys.stderr) - else: - print(f"# GITHUB_TOKEN: not set (60 requests/hour limit)", file=sys.stderr) - else: - # In offline mode, just show token status - if GITHUB_TOKEN: - print(f"# GITHUB_TOKEN: configured", file=sys.stderr) - else: - print(f"# GITHUB_TOKEN: not set", file=sys.stderr) - - # Always show friendly startup message (not just when PROGRESS=1) - if COLLECT_ONLY: - offline_note = " (offline mode)" if OFFLINE_MODE else "" - print(f"# Collecting fresh data for {total_tools} tools{offline_note}...", file=sys.stderr) - estimated_time = int((total_tools / MAX_WORKERS) * TIMEOUT_SECONDS * 1.5) - print(f"# Estimated time: ~{estimated_time}s (timeout={TIMEOUT_SECONDS}s per tool, {MAX_WORKERS} workers)", file=sys.stderr) - if not OFFLINE_MODE: - print(f"# Note: Network issues may cause hangs. 
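The removed banner queried GitHub's rate-limit endpoint rather than merely reporting whether a token was set. That check, reduced to its essentials (header and response shape per the public `GET /rate_limit` API; the helper name is illustrative):

```python
import json
import os
import urllib.request

def github_rate_status(timeout: float = 3.0) -> str:
    """Return 'remaining/limit requests remaining', flagging tokenless runs."""
    req = urllib.request.Request("https://api.github.com/rate_limit")
    token = os.environ.get("GITHUB_TOKEN")
    if token:
        req.add_header("Authorization", f"Bearer {token}")
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        core = json.load(resp)["resources"]["core"]
    suffix = "" if token else " - no token"
    return f"{core['remaining']}/{core['limit']} requests remaining{suffix}"
```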
Press Ctrl-C to cancel, or use 'make audit-offline' for faster results.", file=sys.stderr) - - # Detailed progress for debugging (only when PROGRESS=1) print(f"# start collect: tools={total_tools} timeout={TIMEOUT_SECONDS}s retries={HTTP_RETRIES} offline={OFFLINE_MODE}", file=sys.stderr) if PROGRESS else None - - # Debug: show thread pool configuration - if AUDIT_DEBUG: - actual_workers = min(MAX_WORKERS, total_tools) - print(f"# DEBUG: ThreadPoolExecutor starting with max_workers={actual_workers}", file=sys.stderr, flush=True) - with ThreadPoolExecutor(max_workers=min(MAX_WORKERS, total_tools)) as executor: future_to_idx = {} for idx, tool in enumerate(tools_seq): if PROGRESS: print(f"# auditing {tool.name}...", file=sys.stderr) - if AUDIT_DEBUG: - print(f"# DEBUG: SUBMIT future for tool={tool.name} idx={idx}", file=sys.stderr, flush=True) future_to_idx[executor.submit(audit_tool, tool)] = idx for future in as_completed(future_to_idx): idx = future_to_idx[future] - - # Debug: show future completion - if AUDIT_DEBUG: - tool_name = tools_seq[idx].name - print(f"# DEBUG: COMPLETE future for tool={tool_name} idx={idx}", file=sys.stderr, flush=True) - try: row = future.result() - except Exception as e: + except Exception: t = tools_seq[idx] - if AUDIT_DEBUG: - print(f"# DEBUG: EXCEPTION in future for tool={t.name}: {e}", file=sys.stderr, flush=True) row = (t.name, "X", "", "", upstream_method_for(t), "UNKNOWN", tool_homepage_url(t), latest_target_url(t, "", "")) results[idx] = row - completed_tools += 1 - - # Always show basic progress counter during parallel execution (not just PROGRESS=1) - # Show progress for both COLLECT_ONLY (make update) and live audit (make upgrade) - if COLLECT_ONLY or MAX_WORKERS > 1: - try: - name, installed, _im, latest, _um, status, _tu, _lu = row - - # ANSI color codes - GREEN = "\033[32m" - YELLOW = "\033[33m" - RED = "\033[31m" - BOLD_GREEN = "\033[1;32m" - RESET = "\033[0m" - - # Determine comparison operator and colors - # Extract version without timing info (e.g., "0.9.2 (8ms)" -> "0.9.2") - inst_val = installed.split(" (")[0] if installed and installed != "X" else "" - latest_val = latest.split(" (")[0] if latest else "" - - # Treat timing-only values like "(37ms)" as not installed - if inst_val.startswith("(") or not inst_val: - inst_val = "" - if latest_val.startswith("(") or not latest_val: - latest_val = "" - - # Display values - inst_display = inst_val if inst_val else "n/a" - latest_display = latest_val if latest_val else "n/a" - - if inst_val and latest_val: - if status == "UP-TO-DATE": - operator = "===" - inst_color = GREEN - latest_color = GREEN - else: # OUTDATED or version mismatch - # Check if cache has stale data (installed is newer than "latest") - try: - from packaging import version as pkg_version - inst_ver = pkg_version.parse(inst_val.lstrip("v")) - latest_ver = pkg_version.parse(latest_val.lstrip("v")) - if inst_ver > latest_ver: - # Installed is NEWER - cache is stale - operator = ">>>" # Installed ahead of cache - inst_color = GREEN - latest_color = RED - else: - # Normal outdated case - operator = "!==" - inst_color = YELLOW - latest_color = BOLD_GREEN - except Exception: - # Can't parse versions, use default outdated styling - operator = "!==" - inst_color = YELLOW - latest_color = BOLD_GREEN - elif not inst_val and latest_val: - # Not installed but latest available - operator = "?" - inst_color = RED - latest_color = BOLD_GREEN - elif inst_val and not latest_val: - # Installed but latest unknown - operator = "?" 
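The deleted progress line leaned on `packaging.version` to notice when the cached "latest" is actually older than what is installed, i.e. a stale snapshot. The comparison, minus the ANSI colouring (`packaging` is a third-party dependency, as in the removed code):

```python
from packaging import version

def compare_operator(installed: str, latest: str) -> str:
    """'===' up to date, '!==' outdated, '>>>' installed ahead of a stale
    cache, '?' when either side is unknown."""
    if not installed or not latest:
        return "?"
    try:
        inst = version.parse(installed.lstrip("v"))
        up = version.parse(latest.lstrip("v"))
    except Exception:
        return "!=="  # unparsable; fall back to outdated styling, as the original did
    if inst == up:
        return "==="
    return ">>>" if inst > up else "!=="
```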
- inst_color = YELLOW - latest_color = RED - else: - # Both unknown - operator = "?" - inst_color = RED - latest_color = RED - - # Format: "# [1/64] uv (installed: 0.9.2 === latest: 0.9.2)" - version_info = f"installed: {inst_color}{inst_display}{RESET} {operator} latest: {latest_color}{latest_display}{RESET}" - print(f"# [{completed_tools}/{total_tools}] {name} ({version_info})", file=sys.stderr, flush=True) - - # Check for PATH shadowing and emit warning - shadowing = detect_path_shadowing(name) if installed else {} - warning = shadowing.get("warning", "") - if warning: - print(f"# {warning}", file=sys.stderr, flush=True) - if shadowing.get("expected_path"): - print(f"# Expected: {shadowing['expected_path']}", file=sys.stderr, flush=True) - if shadowing.get("shadowed_by"): - print(f"# Found: {shadowing['shadowed_by']}", file=sys.stderr, flush=True) - except Exception: - # Fallback to simple message if row parsing fails - name = row[0] if row and len(row) > 0 else "?" - print(f"# [{completed_tools}/{total_tools}] {name}", file=sys.stderr, flush=True) - - # Detailed progress for debugging (only when PROGRESS=1) if PROGRESS: try: name, installed, _installed_method, latest, upstream_method, status, _tool_url, _latest_url = row + completed_tools += 1 print(f"# done {name} ({completed_tools}/{total_tools}) status={status} installed='{installed}' latest='{latest}' upstream={upstream_method}", file=sys.stderr) except Exception: pass @@ -3039,22 +2212,55 @@ def main() -> int: latest_with_hint = latest_render if not hint else (latest_render + f" [{hint}]") print("|".join((icon, name_render, installed, installed_method, latest_with_hint, upstream_method))) + # RENDER-ONLY mode: bypass live audit, render from snapshot + if RENDER_ONLY: + snap = load_snapshot() + # Render fast path using snapshot doc + selected_names = set(_parse_tool_filter(sys.argv[1:])) + rows = render_from_snapshot(snap, selected_names or None) + # JSON output from snapshot + if os.environ.get("CLI_AUDIT_JSON", "0") == "1": + payload = [] + for name, installed, installed_method, latest, upstream_method, status, tool_url, latest_url in rows: + payload.append({ + "tool": name, + "category": category_for(name), + "installed": installed, + "installed_method": installed_method, + "installed_version": extract_version_number(installed), + "latest_version": extract_version_number(latest), + "latest_upstream": latest, + "upstream_method": upstream_method, + "status": status, + "tool_url": tool_url, + "latest_url": latest_url, + "state_icon": status_icon(status, installed), + "is_up_to_date": (status == "UP-TO-DATE"), + }) + print(json.dumps(payload, ensure_ascii=False)) + return 0 + # Table output from snapshot + headers = (" ", "tool", "installed", "installed_method", "latest_upstream", "upstream_method") + print("|".join(headers)) + for name, installed, installed_method, latest, upstream_method, status, tool_url, latest_url in rows: + icon = status_icon(status, installed) + print("|".join((icon, name, installed, installed_method, latest, upstream_method))) + # Summary line from snapshot meta if present + try: + meta = snap.get("__meta__", {}) + total = meta.get("count", len(rows)) + missing = sum(1 for r in rows if r[5] == "NOT INSTALLED") + outdated = sum(1 for r in rows if r[5] == "OUTDATED") + unknown = sum(1 for r in rows if r[5] == "UNKNOWN") + offline_tag = " (offline)" if meta.get("offline") else "" + print(f"\nReadiness{offline_tag}: {total} tools, {outdated} outdated, {missing} missing, {unknown} unknown") + except Exception: + 
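The surviving executor loop is the standard submit/as_completed pattern with an UNKNOWN placeholder row when a worker raises; the eight-field row shape matches the fallback in the hunk. A self-contained sketch of that pattern:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def audit_parallel(tools, audit_one, max_workers: int = 16):
    """Audit tools concurrently while preserving input order in the results."""
    if not tools:
        return []
    results = [None] * len(tools)
    with ThreadPoolExecutor(max_workers=min(max_workers, len(tools))) as ex:
        futures = {ex.submit(audit_one, t): i for i, t in enumerate(tools)}
        for fut in as_completed(futures):
            i = futures[fut]
            try:
                results[i] = fut.result()
            except Exception:
                # Degrade to an UNKNOWN row rather than failing the whole run
                name = getattr(tools[i], "name", str(tools[i]))
                results[i] = (name, "X", "", "", "", "UNKNOWN", "", "")
    return results
```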
pass + return 0 + if os.environ.get("CLI_AUDIT_JSON", "0") == "1": payload = [] for name, installed, installed_method, latest, upstream_method, status, tool_url, latest_url in results: - # Build list of all installations for this tool (for duplicate detection) - all_installs = [] - if name in ALL_INSTALLATIONS: - for version, method, path in ALL_INSTALLATIONS[name]: - all_installs.append({ - "version": version, - "method": method, - "path": path, - }) - - # Detect PATH shadowing - shadowing = detect_path_shadowing(name) if installed else {} - payload.append({ "tool": name, "category": category_for(name), @@ -3074,23 +2280,13 @@ def main() -> int: "latest_url": latest_url, "state_icon": status_icon(status, installed), "is_up_to_date": (status == "UP-TO-DATE"), - # New field: all installations found (for duplicate detection) - "all_installations": all_installs, - "has_duplicates": len(all_installs) > 1, - # PATH shadowing detection - "shadowed": shadowing.get("shadowed", ""), - "shadowed_by": shadowing.get("shadowed_by", ""), - "shadowed_package": shadowing.get("shadowed_package", ""), - "expected_path": shadowing.get("expected_path", ""), - "shadowing_warning": shadowing.get("warning", ""), }) print(json.dumps(payload, ensure_ascii=False)) return 0 # Always print raw (with OSC8 + emoji if enabled). When piped to column, OSC8 should be transparent. # In streaming mode, we've already printed lines; skip re-printing body - # In COLLECT_ONLY mode, skip table output (only needed for audit/render) - if not STREAM_OUTPUT and not COLLECT_ONLY: + if not STREAM_OUTPUT: headers = (" ", "tool", "installed", "installed_method", "latest_upstream", "upstream_method") print("|".join(headers)) @@ -3104,7 +2300,7 @@ def _category_key(row: tuple[str, ...]) -> tuple[int, str]: order = len(CATEGORY_ORDER) return (order, nm) - if not STREAM_OUTPUT and not COLLECT_ONLY: + if not STREAM_OUTPUT: rows = results if GROUP_BY_CATEGORY: rows = sorted(results, key=_category_key) @@ -3125,24 +2321,7 @@ def _category_key(row: tuple[str, ...]) -> tuple[int, str]: outdated = sum(1 for r in results if r[5] == "OUTDATED") unknown = sum(1 for r in results if r[5] == "UNKNOWN") offline_tag = " (offline)" if OFFLINE_MODE else "" - print(f"\nReadiness{offline_tag}: {total} tools, {outdated} outdated, {missing} missing, {unknown} unknown", file=sys.stderr) - - # Check PATH configuration for package managers - scripts_dir = os.path.join(os.path.dirname(__file__), "scripts", "lib") - path_check_script = os.path.join(scripts_dir, "path_check.sh") - if os.path.isfile(path_check_script): - try: - result = subprocess.run( - ["bash", "-c", f"source {path_check_script} && check_all_paths"], - capture_output=True, - text=True, - timeout=5 - ) - # If check_all_paths returns non-zero, it found issues - if result.returncode != 0 and result.stderr: - print(result.stderr, file=sys.stderr, end="") - except Exception: - pass + print(f"\nReadiness{offline_tag}: {total} tools, {outdated} outdated, {missing} missing, {unknown} unknown") except Exception: pass @@ -3167,22 +2346,9 @@ def _category_key(row: tuple[str, ...]) -> tuple[int, str]: "tool_url": tool_url, "latest_url": latest_url, }) - # Always show completion message (not just when PROGRESS=1) - action = "Merging" if MERGE_MODE else "Writing" - print(f"# {action} snapshot to {SNAPSHOT_FILE}...", file=sys.stderr) + if PROGRESS: + print(f"# writing snapshot to {SNAPSHOT_FILE}...", file=sys.stderr) meta = write_snapshot(payload) - try: - count = meta.get('count', len(payload)) - audited_count = 
len(payload) - if MERGE_MODE: - print(f"# ✓ Snapshot merged: {audited_count} tools updated, {count} total", file=sys.stderr) - else: - print(f"# ✓ Snapshot saved: {count} tools audited", file=sys.stderr) - print(f"# Run 'make audit' to view results", file=sys.stderr) - except Exception: - print(f"# ✓ Snapshot saved to {SNAPSHOT_FILE}", file=sys.stderr) - - # Detailed debug info (only when PROGRESS=1) if PROGRESS: try: print( @@ -3194,76 +2360,16 @@ def _category_key(row: tuple[str, ...]) -> tuple[int, str]: except Exception as e: if AUDIT_DEBUG: print(f"# DEBUG: failed to write snapshot: {e}", file=sys.stderr) - - # GitHub rate limit warning (always show if encountered) - if GITHUB_RATE_LIMIT_HIT: - print("", file=sys.stderr) - print("⚠️ GitHub API Rate Limit Detected", file=sys.stderr) - print("", file=sys.stderr) - print("Some version checks failed due to GitHub API rate limits.", file=sys.stderr) - print("Without authentication, GitHub allows only 60 requests per hour.", file=sys.stderr) - print("", file=sys.stderr) - print("To fix this issue:", file=sys.stderr) - print(" 1. Create a GitHub Personal Access Token:", file=sys.stderr) - print(" https://github.com/settings/tokens/new", file=sys.stderr) - print(" (No special permissions required - just create a token)", file=sys.stderr) - print("", file=sys.stderr) - print(" 2. Set the GITHUB_TOKEN environment variable:", file=sys.stderr) - print(" export GITHUB_TOKEN='your_token_here'", file=sys.stderr) - print("", file=sys.stderr) - print(" 3. Add to your shell profile (~/.bashrc or ~/.zshrc):", file=sys.stderr) - print(" echo 'export GITHUB_TOKEN=\"your_token_here\"' >> ~/.bashrc", file=sys.stderr) - print("", file=sys.stderr) - print("With a token, you get 5,000 requests per hour.", file=sys.stderr) - print("", file=sys.stderr) - return 0 # Optional footer (disabled by default to avoid breaking table layout) if os.environ.get("CLI_AUDIT_FOOTER", "0") == "1": path_has_cargo = CARGO_BIN in os.environ.get("PATH", "").split(":") print(f"# cargo_bin: {'yes' if path_has_cargo else 'no'}") - - # GitHub rate limit warning (always show if encountered) - if GITHUB_RATE_LIMIT_HIT: - print("", file=sys.stderr) - print("⚠️ GitHub API Rate Limit Detected", file=sys.stderr) - print("", file=sys.stderr) - print("Some version checks failed due to GitHub API rate limits.", file=sys.stderr) - print("Without authentication, GitHub allows only 60 requests per hour.", file=sys.stderr) - print("", file=sys.stderr) - print("To fix this issue:", file=sys.stderr) - print(" 1. Create a GitHub Personal Access Token:", file=sys.stderr) - print(" https://github.com/settings/tokens/new", file=sys.stderr) - print(" (No special permissions required - just create a token)", file=sys.stderr) - print("", file=sys.stderr) - print(" 2. Set the GITHUB_TOKEN environment variable:", file=sys.stderr) - print(" export GITHUB_TOKEN='your_token_here'", file=sys.stderr) - print("", file=sys.stderr) - print(" 3. 
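`write_snapshot` itself is not shown in the patch, but the render path and the meta lookups (`count`, `collected_at`, `offline`) pin down roughly what it must persist. A sketch under those assumptions, using an atomic rename so an interrupted collect never truncates the snapshot:

```python
import json
import os
import tempfile
from datetime import datetime, timezone

def write_snapshot(rows: list[dict], path: str = "tools_snapshot.json") -> dict:
    """Persist rows plus a __meta__ block; returns the meta for logging."""
    meta = {
        "schema_version": 1,
        "collected_at": datetime.now(timezone.utc).isoformat(),
        "count": len(rows),
    }
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".", suffix=".tmp")
    with os.fdopen(fd, "w", encoding="utf-8") as f:
        json.dump({"__meta__": meta, "tools": rows}, f, ensure_ascii=False)
    os.replace(tmp, path)  # atomic on POSIX: readers see old or new, never partial
    return meta
```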
Add to your shell profile (~/.bashrc or ~/.zshrc):", file=sys.stderr) - print(" echo 'export GITHUB_TOKEN=\"your_token_here\"' >> ~/.bashrc", file=sys.stderr) - print("", file=sys.stderr) - print("With a token, you get 5,000 requests per hour.", file=sys.stderr) - print("", file=sys.stderr) - return 0 -def _sigint_handler(signum, frame): - """Handle SIGINT (Ctrl-C) with immediate clean exit.""" - # Suppress threading shutdown errors by forcing immediate exit - print("", file=sys.stderr) - os._exit(130) # Standard Unix exit code for SIGINT, immediate exit - - if __name__ == "__main__": - # Install signal handler for clean Ctrl-C behavior - signal.signal(signal.SIGINT, _sigint_handler) - try: - raise SystemExit(main()) - except KeyboardInterrupt: - # Fallback: clean exit on Ctrl-C without stack trace - print("", file=sys.stderr) - os._exit(130) # Standard Unix exit code for SIGINT, immediate exit + raise SystemExit(main()) diff --git a/config.json.example b/config.json.example deleted file mode 100644 index e3e469e..0000000 --- a/config.json.example +++ /dev/null @@ -1,22 +0,0 @@ -{ - "_comment": "User configuration for ai_cli_preparation tool installation preferences", - "_location": "Copy this file to ~/.ai_cli_prep/config.json to customize", - - "preferred_strategy": "auto", - "_preferred_strategy_options": [ - "auto - Use catalog default priorities", - "github_first - Prefer GitHub releases > cargo > npm > apt", - "cargo_first - Prefer cargo > GitHub > npm > apt", - "npm_first - Prefer npm > GitHub > cargo > apt", - "apt_first - Prefer apt > brew > GitHub > cargo" - ], - - "allow_sudo": true, - "_allow_sudo_comment": "Set to false if you don't have sudo access or don't want tools to use apt", - - "overrides": { - "_comment": "Override installation method for specific tools", - "_example": "ripgrep: 'cargo' to force cargo installation for ripgrep", - "_available_methods": "github_release_binary, cargo, npm, gem, pip, pipx, apt, brew" - } -} diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md index 05a7c57..1fdf676 100644 --- a/docs/ARCHITECTURE.md +++ b/docs/ARCHITECTURE.md @@ -461,564 +461,8 @@ To add YAML output: --- -## Phase 2: Installation & Upgrade Management Architecture - -**Status:** ✅ Implementation Complete | 📝 Documentation Complete - -### Phase 2 Overview - -Phase 2 extends Phase 1's audit capabilities with automated installation, upgrade management, and reconciliation. The architecture emphasizes context-aware decisions, safe operations with rollback, and parallel execution. - -**Design Principles:** -1. **Context-Aware:** Environment detection (CI/server/workstation) influences installation strategies -2. **Safe Operations:** Backup and automatic rollback on failures -3. **Parallel Execution:** Bulk operations use ThreadPoolExecutor for speed -4. **Retryable Errors:** Network and lock contention errors retry with exponential backoff -5. **Flexible Configuration:** Multi-source configuration with precedence rules - -### Phase 2 System Architecture - -``` -┌─────────────────────────────────────────────────────────┐ -│ Configuration Layer │ -│ ├─ Project (.cli-audit.yml) │ -│ ├─ User (~/.config/cli-audit/config.yml) │ -│ └─ System (/etc/cli-audit/config.yml) │ -└───────────────────────┬─────────────────────────────────┘ - │ - v -┌─────────────────────────────────────────────────────────┐ -│ Environment Detection │ -│ - CI detection (CI=true, GITHUB_ACTIONS, etc.) 
│ -│ - Server detection (multiple users, high uptime) │ -│ - Workstation detection (DISPLAY, single user) │ -│ - Confidence scoring (0.0-1.0) │ -└───────────────────────┬─────────────────────────────────┘ - │ - ┌───────────────┴───────────────┐ - │ │ - v v -┌───────────────────┐ ┌────────────────────────┐ -│ Install Pipeline │ │ Upgrade Pipeline │ -│ - Tool selection │ │ - Version comparison │ -│ - PM selection │ │ - Breaking detection │ -│ - Dependency res │ │ - Backup creation │ -│ - Parallel exec │ │ - Auto rollback │ -│ - Validation │ │ - Cache management │ -└─────────┬─────────┘ └────────┬───────────────┘ - │ │ - v v -┌─────────────────────────────────────────────────────────┐ -│ Package Manager Selection (Hierarchical) │ -│ Python: [uv, pipx, pip] │ -│ Rust: [cargo] │ -│ Node: [npm, pnpm, yarn] │ -│ System: [apt, dnf, pacman, brew] │ -└───────────────────────┬─────────────────────────────────┘ - │ - v -┌─────────────────────────────────────────────────────────┐ -│ Installation Execution │ -│ - Command generation │ -│ - Subprocess execution │ -│ - Retry logic (network, lock) │ -│ - Timeout enforcement (5-60s) │ -│ - Progress tracking │ -└───────────────────────┬─────────────────────────────────┘ - │ - v -┌─────────────────────────────────────────────────────────┐ -│ Post-Install Validation │ -│ - Binary availability in PATH │ -│ - Version verification │ -│ - Checksum validation (optional) │ -└─────────────────────────────────────────────────────────┘ -``` - -### Component Details: Phase 2 - -#### 1. Environment Detection - -**Location:** `cli_audit/environment.py` - -Detects runtime environment to influence installation decisions: - -```python -@dataclass(frozen=True) -class Environment: - mode: str # "ci", "server", or "workstation" - confidence: float # 0.0-1.0 - indicators: tuple[str, ...] - override: bool = False - -def detect_environment(override: str | None = None) -> Environment: - # CI detection - if os.environ.get("CI") == "true": return Environment("ci", 0.95, ("CI=true",)) - if "GITHUB_ACTIONS" in os.environ: return Environment("ci", 0.99, ("GITHUB_ACTIONS",)) - - # Server detection (multiple active users, high uptime) - if users > 3: return Environment("server", 0.85, ("multiple_users",)) - - # Workstation detection - if "DISPLAY" in os.environ: return Environment("workstation", 0.90, ("DISPLAY",)) -``` - -**Environment Impact:** -- **CI:** Non-interactive, fail-fast, no confirmation prompts -- **Server:** Conservative updates, system package managers preferred -- **Workstation:** User package managers, interactive confirmations - -#### 2. Configuration Management - -**Location:** `cli_audit/config.py` - -Multi-source configuration with precedence rules: - -```python -@dataclass(frozen=True) -class Config: - version: int = 1 - environment_mode: str = "auto" - tools: dict[str, ToolConfig] = field(default_factory=dict) - preferences: Preferences = field(default_factory=Preferences) - presets: dict[str, list[str]] = field(default_factory=dict) - source: str = "" - -@dataclass(frozen=True) -class Preferences: - reconciliation: str = "parallel" # "parallel" or "aggressive" - breaking_changes: str = "warn" # "accept", "warn", or "reject" - auto_upgrade: bool = True - timeout_seconds: int = 5 # 1-60 - max_workers: int = 16 # 1-32 - cache_ttl_seconds: int = 3600 # 60-86400 - package_managers: dict[str, list[str]] = field(default_factory=dict) -``` - -**Configuration Precedence:** -1. Project (`.cli-audit.yml`) -2. User (`~/.config/cli-audit/config.yml`) -3. 
System (`/etc/cli-audit/config.yml`) - -#### 3. Installation Pipeline - -**Location:** `cli_audit/installer.py` - -Single tool installation with retry logic: - -```python -def install_tool( - tool_name: str, - package_name: str, - target_version: str = "latest", - config: Config | None = None, - env: Environment | None = None, - dry_run: bool = False, - verbose: bool = False, -) -> InstallResult: - # 1. Select package manager - pm_name, reason = select_package_manager(tool_name, language, config, env) - - # 2. Generate install command - install_cmd = generate_install_command(pm_name, package_name, target_version) - - # 3. Execute with retry - try: - result = subprocess.run(install_cmd, timeout=timeout, capture_output=True) - if result.returncode != 0: - if is_retryable_error(result.returncode, result.stderr): - # Auto-retry with exponential backoff - ... - except subprocess.TimeoutExpired: - raise InstallError("Command timed out", retryable=False) - - # 4. Validate installation - success, binary_path, version = validate_installation(tool_name) - - return InstallResult(...) -``` - -**Retryable Error Detection:** - -```python -def is_retryable_error(exit_code: int, stderr: str) -> bool: - # Network errors - network_indicators = [ - "connection refused", "connection timed out", - "connection reset", "network unreachable", - "could not resolve host", "temporary failure" - ] - - # Lock contention - lock_indicators = [ - "could not get lock", "lock file exists", - "waiting for cache lock", "dpkg frontend lock" - ] - - # Exit codes - if exit_code in (75, 111, 128): # EAGAIN, conn refused, git error - return True - - return any(ind in stderr.lower() for ind in network_indicators + lock_indicators) -``` - -#### 4. Bulk Operations - -**Location:** `cli_audit/bulk.py` - -Parallel installation with dependency resolution: - -```python -def bulk_install( - mode: str = "explicit", - tool_names: Sequence[str] | None = None, - max_workers: int | None = None, - fail_fast: bool = False, - atomic: bool = False, - progress_tracker: ProgressTracker | None = None, -) -> BulkInstallResult: - # 1. Determine tools to install - specs = get_tools_to_install(mode, tool_names, preset_name, config) - - # 2. Resolve dependencies (topological sort) - levels = resolve_dependencies(specs) - - # 3. Execute level by level - for level_specs in levels: - with ThreadPoolExecutor(max_workers=max_workers) as executor: - futures = { - executor.submit(_install_with_progress, spec, config, env, tracker): spec - for spec in level_specs - } - - for future in as_completed(futures): - result = future.result() - if result.success: - successes.append(result) - else: - failures.append(result) - if fail_fast: - break - - # 4. Generate rollback script - if successes: - rollback_script = generate_rollback_script(successes) - - # 5. Handle atomic rollback - if atomic and failures: - execute_rollback(rollback_script) - - return BulkInstallResult(...) -``` - -**Dependency Resolution:** - -```python -def resolve_dependencies(specs: Sequence[ToolSpec]) -> list[list[ToolSpec]]: - # Topological sort by levels - levels = [] - in_degree = {spec.tool_name: 0 for spec in specs} - - for spec in specs: - for dep in spec.dependencies: - if dep in spec_map: - in_degree[spec.tool_name] += 1 - - # Find tools with no dependencies - while remaining: - ready = [tool for tool in remaining if in_degree[tool] == 0] - levels.append([spec_map[tool] for tool in ready]) - # Update in-degrees... - - return levels -``` - -#### 5. 
Upgrade Management - -**Location:** `cli_audit/upgrade.py` - -Safe upgrades with backup and rollback: - -```python -def upgrade_tool( - tool_name: str, - target_version: str = "latest", - force: bool = False, - skip_backup: bool = False, -) -> UpgradeResult: - # 1. Validate tool is installed - success, binary_path, current_version = validate_installation(tool_name) - - # 2. Determine target version - if target_version == "latest": - target_version = get_available_version(tool_name, pm_name, cache_ttl) - - # 3. Check breaking change policy - is_breaking = is_major_upgrade(current_version, target_version) - if is_breaking: - allowed, reason = check_breaking_change_policy(config, current, target) - if not allowed and not force: - return UpgradeResult(..., error_message="Breaking change blocked") - - if reason == "breaking_warning" and not force: - if not confirm_breaking_change(warning): - return UpgradeResult(..., error_message="User declined") - - # 4. Create backup - if not skip_backup: - backup = create_upgrade_backup(tool_name, binary_path, current_version, pm_name) - - # 5. Execute upgrade - try: - install_result = install_tool(tool_name, tool_name, target_version, config, env) - if install_result.success: - return UpgradeResult(..., success=True) - else: - # Auto-rollback on failure - if backup: - rollback_success = restore_from_backup(backup) - return UpgradeResult(..., rollback_executed=True, rollback_success=...) - except Exception as e: - if backup: - restore_from_backup(backup) - raise -``` - -**Breaking Change Detection:** - -```python -def is_major_upgrade(current: str, target: str) -> bool: - from packaging import version - current_ver = version.parse(current) - target_ver = version.parse(target) - return target_ver.major > current_ver.major -``` - -#### 6. Reconciliation Strategies - -**Location:** `cli_audit/reconcile.py` - -Manages multiple installations of the same tool: - -```python -def reconcile_tool( - tool_name: str, - mode: str = "parallel", # "parallel" or "aggressive" - config: Config | None = None, -) -> ReconciliationResult: - # 1. Detect all installations - installations = detect_installations(tool_name) - - # 2. Determine preferred installation - preferred = select_preferred_installation(installations, config) - - # 3. Apply reconciliation strategy - if mode == "aggressive": - # Remove non-preferred installations - removed = [] - for install in installations: - if install != preferred and tool_name not in SYSTEM_TOOL_SAFELIST: - if remove_installation(install): - removed.append(install) - return ReconciliationResult(..., installations_removed=removed) - else: - # Keep all installations - return ReconciliationResult(..., installations_removed=[]) -``` - -**System Tool Safelist:** -- Protected tools: python, python3, pip, node, npm, cargo, git, etc. -- Never removed during aggressive reconciliation - -#### 7. Package Manager Selection - -**Location:** `cli_audit/package_managers.py` - -Hierarchical selection with environment awareness: - -```python -def select_package_manager( - tool_name: str, - language: str | None, - config: Config, - env: Environment, -) -> tuple[str, str]: - # 1. Get hierarchy from config - if language: - hierarchy = config.preferences.package_managers.get(language, []) - else: - hierarchy = infer_hierarchy_from_tool(tool_name) - - # 2. Filter by availability - available = [] - for pm in hierarchy: - if is_package_manager_available(pm): - available.append(pm) - - # 3. 
Environment-aware selection - if env.mode == "ci": - # Prefer fast, user-level PMs (uv, pipx, cargo) - ... - elif env.mode == "server": - # Prefer system PMs (apt, dnf, brew) - ... - - # 4. Return first available - if available: - return (available[0], f"First available from {hierarchy}") - else: - raise ValueError(f"No suitable package manager for {tool_name}") -``` - -### Phase 2 Data Flow Diagrams - -#### Installation Flow - -``` -User: bulk_install(mode="missing") - ↓ -Load Config + Detect Environment - ↓ -Determine tools to install - ↓ -Resolve dependencies (topological sort) - ↓ -For each level (parallel within level): - ├─ Select package manager - ├─ Generate install command - ├─ Execute with retry (network, lock) - ├─ Validate installation (PATH, version) - └─ Update progress tracker - ↓ -Generate rollback script - ↓ -Return BulkInstallResult -``` - -#### Upgrade Flow - -``` -User: upgrade_tool("ruff", "latest") - ↓ -Validate tool is installed - ↓ -Query available version (with cache, TTL=1h) - ↓ -Compare versions (current vs target) - ↓ -Check breaking change policy - ├─ If major: warn/block based on config - └─ If minor/patch: proceed - ↓ -Create backup (binary + configs) - ↓ -Execute upgrade via install_tool() - ↓ -Success? → Return UpgradeResult -Failure? → Auto-rollback from backup -``` - -### Phase 2 Resilience Patterns - -#### 1. Retry with Exponential Backoff - -**Problem:** Network failures and lock contention are transient - -**Solution:** -```python -max_retries = 3 -base_delay = 0.5 - -for attempt in range(max_retries + 1): - try: - result = subprocess.run(cmd, timeout=timeout, check=True) - return result - except subprocess.CalledProcessError as e: - if is_retryable_error(e.returncode, e.stderr) and attempt < max_retries: - delay = base_delay * (2 ** attempt) + random.uniform(0, 0.5) - time.sleep(delay) - else: - raise InstallError(str(e), retryable=False) -``` - -#### 2. Backup and Rollback - -**Problem:** Upgrades may fail or introduce issues - -**Solution:** -- Create backup before upgrade (binary + configs) -- Verify backup integrity (SHA256 checksum) -- Automatic rollback on failure -- Manual rollback script for complex scenarios - -#### 3. Atomic Operations - -**Problem:** Partial installations leave system in inconsistent state - -**Solution:** -```python -# Atomic mode: rollback ALL on ANY failure -result = bulk_install(mode="preset", preset_name="python-dev", atomic=True) - -if atomic and result.failures: - execute_rollback(result.rollback_script) -``` - -### Phase 2 Performance Characteristics - -| Operation | Time (typical) | Notes | -|-----------|---------------|-------| -| Single install | 5-30s | Depends on package size, network | -| Bulk install (10 tools) | 30-60s | Parallel execution (16 workers) | -| Upgrade check | 1-3s | With version cache (TTL=1h) | -| Reconciliation | 2-5s | Detection + selective removal | -| Environment detection | <50ms | Quick heuristics | -| Config loading | <20ms | YAML parsing + validation | - -### Phase 2 Extension Points - -#### Adding New Package Manager - -```python -# 1. Register in package_managers.py -PACKAGE_MANAGERS = { - "my-pm": { - "check_cmd": ["my-pm", "--version"], - "install_cmd": ["my-pm", "install"], - "languages": ["python", "rust"], - } -} - -# 2. 
Add to hierarchy -preferences: - package_managers: - python: [uv, pipx, my-pm, pip] -``` - -#### Custom Breaking Change Rules - -```python -# Override breaking change policy per tool -tools: - ruff: - breaking_changes: reject # Never allow major upgrades - pytest: - breaking_changes: accept # Always allow -``` - ---- - ## See Also -### Phase 1 Documentation -- **[API_REFERENCE.md](API_REFERENCE.md)** - Phase 1 audit functions -- **[FUNCTION_REFERENCE.md](FUNCTION_REFERENCE.md)** - Function reference card -- **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - Contributing guide +- **[API_REFERENCE.md](API_REFERENCE.md)** - Function signatures and parameters +- **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - How to contribute - **[TROUBLESHOOTING.md](TROUBLESHOOTING.md)** - Common issues and debugging - -### Phase 2 Documentation -- **[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md)** - Complete Phase 2 API documentation -- **[CLI_REFERENCE.md](CLI_REFERENCE.md)** - Command-line reference -- **[TESTING.md](TESTING.md)** - Testing guide -- **[ERROR_CATALOG.md](ERROR_CATALOG.md)** - Error reference and troubleshooting -- **[INTEGRATION_EXAMPLES.md](INTEGRATION_EXAMPLES.md)** - Real-world integration patterns diff --git a/docs/CLI_REFERENCE.md b/docs/CLI_REFERENCE.md deleted file mode 100644 index c384408..0000000 --- a/docs/CLI_REFERENCE.md +++ /dev/null @@ -1,753 +0,0 @@ -# CLI Reference - -**Version:** 2.0.0-alpha.6 -**Last Updated:** 2025-10-13 - -Complete command-line reference for CLI Audit tool, covering Phase 1 audit commands, Phase 2 installation workflows, and Makefile automation. - ---- - -## Table of Contents - -- [Quick Reference](#quick-reference) -- [Phase 1: Audit Commands](#phase-1-audit-commands) -- [Environment Variables](#environment-variables) -- [Makefile Targets](#makefile-targets) -- [Phase 2: Python API](#phase-2-python-api) -- [Configuration Files](#configuration-files) -- [Output Formats](#output-formats) -- [Common Workflows](#common-workflows) -- [Troubleshooting](#troubleshooting) - ---- - -## Quick Reference - -```bash -# Basic audit -python3 cli_audit.py | column -s '|' -t - -# Specific tools only -python3 cli_audit.py ripgrep fd bat - -# JSON output -CLI_AUDIT_JSON=1 python3 cli_audit.py - -# Offline mode -CLI_AUDIT_OFFLINE=1 python3 cli_audit.py - -# Snapshot-based workflow -make update # Collect data (network required) -make audit # Render table (offline) -make audit-auto # Update if missing, then render - -# System-wide upgrade -make upgrade-all # Complete 5-stage system upgrade -make upgrade-all-dry-run # Preview without making changes -make check-path # Validate PATH configuration - -# Installation (Makefile) -make install-core -make install-python -make install-node - -# Phase 2 API (Python) -from cli_audit import install_tool, bulk_install, upgrade_tool -``` - ---- - -## Phase 1: Audit Commands - -### Basic Usage - -```bash -python3 cli_audit.py [OPTIONS] [TOOLS...] 
-``` - -**Positional Arguments:** -- `TOOLS`: Optional tool names to audit (default: all tools) - -**Options:** -- `--only TOOL [TOOL ...]`: Select specific tools (alias: `--tool`) -- No flags for most options - controlled via environment variables - -### Tool Selection - -```bash -# Audit all tools (default) -python3 cli_audit.py - -# Audit specific tools -python3 cli_audit.py ripgrep fd bat - -# Using --only flag -python3 cli_audit.py --only ripgrep fd bat - -# Preset categories -python3 cli_audit.py --only agent-core -python3 cli_audit.py --only python-core -python3 cli_audit.py --only node-core -python3 cli_audit.py --only go-core -python3 cli_audit.py --only infra-core -python3 cli_audit.py --only security-core -python3 cli_audit.py --only data-core -``` - -### Output Formatting - -```bash -# Pipe-delimited table (default) -python3 cli_audit.py - -# Formatted table with column -python3 cli_audit.py | column -s '|' -t - -# Advanced formatting with smart_column -python3 cli_audit.py | python3 smart_column.py -s '|' -t --right 3,5 --header - -# JSON array output -CLI_AUDIT_JSON=1 python3 cli_audit.py - -# JSON with jq filtering -CLI_AUDIT_JSON=1 python3 cli_audit.py | jq '.[] | select(.status != "UP-TO-DATE")' - -# Filter by category -CLI_AUDIT_JSON=1 python3 cli_audit.py | jq '.[] | select(.category == "security")' -``` - -### Snapshot Workflow - -The tool separates data collection (network) from rendering (offline): - -```bash -# 1. Collect data (writes tools_snapshot.json) -CLI_AUDIT_COLLECT=1 python3 cli_audit.py - -# 2. Render from snapshot (no network) -CLI_AUDIT_RENDER=1 python3 cli_audit.py - -# Combined: update if missing, then render -make audit-auto - -# Manual workflow -make update # Collect only -make audit # Render only -``` - -**Snapshot File:** -- Default: `tools_snapshot.json` in project root -- Override: `CLI_AUDIT_SNAPSHOT_FILE=/path/to/snapshot.json` - -### Offline Mode - -```bash -# Use only manual cache (latest_versions.json) -CLI_AUDIT_OFFLINE=1 python3 cli_audit.py - -# Offline + render from snapshot -CLI_AUDIT_OFFLINE=1 CLI_AUDIT_RENDER=1 python3 cli_audit.py -``` - ---- - -## Environment Variables - -### Core Behavior - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_TIMEOUT_SECONDS` | int | `3` | Network timeout for version checks | -| `CLI_AUDIT_MAX_WORKERS` | int | `16` | Parallel worker threads | -| `CLI_AUDIT_OFFLINE` | bool | `0` | Use only manual cache (no network) | -| `CLI_AUDIT_DEBUG` | bool | `0` | Print debug messages to stderr | -| `CLI_AUDIT_TRACE` | bool | `0` | Ultra-verbose tracing | -| `CLI_AUDIT_TRACE_NET` | bool | `0` | Trace network operations | - -### Workflow Modes - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_COLLECT` | bool | `0` | Collect-only mode (write snapshot) | -| `CLI_AUDIT_RENDER` | bool | `0` | Render-only mode (read snapshot) | -| `CLI_AUDIT_FAST` | bool | `0` | Fast mode (skip slow checks) | -| `CLI_AUDIT_STREAM` | bool | `0` | Stream output as tools complete | - -### Output Formatting - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_JSON` | bool | `0` | Output JSON array instead of table | -| `CLI_AUDIT_LINKS` | bool | `1` | Enable OSC 8 hyperlinks | -| `CLI_AUDIT_EMOJI` | bool | `1` | Use emoji status indicators | -| `CLI_AUDIT_TIMINGS` | bool | `1` | Show timing information | -| `CLI_AUDIT_SORT` | string | `order` | Sort mode: `order` or `alpha` | -| 
`CLI_AUDIT_GROUP` | bool | `1` | Group output by category | -| `CLI_AUDIT_HINTS` | bool | `1` | Show remediation hints | - -### Snapshot Configuration - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_SNAPSHOT_FILE` | path | `tools_snapshot.json` | Snapshot file path | -| `CLI_AUDIT_MANUAL_FILE` | path | `latest_versions.json` | Manual cache path | -| `CLI_AUDIT_WRITE_MANUAL` | bool | `1` | Auto-update manual cache | -| `CLI_AUDIT_MANUAL_FIRST` | bool | `0` | Try manual cache before network | - -### Progress and Logging - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_PROGRESS` | bool | `0` | Show progress messages | -| `CLI_AUDIT_SLOW_MS` | int | `2000` | Threshold for slow operation warnings | - -### Rate Limiting - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_HOST_CAP_GITHUB` | int | `4` | Max concurrent GitHub requests | -| `CLI_AUDIT_HOST_CAP_GITHUB_API` | int | `4` | Max concurrent GitHub API requests | -| `CLI_AUDIT_HOST_CAP_NPM` | int | `4` | Max concurrent npm registry requests | -| `CLI_AUDIT_HOST_CAP_CRATES` | int | `4` | Max concurrent crates.io requests | -| `CLI_AUDIT_HOST_CAP_GNU` | int | `2` | Max concurrent GNU FTP requests | - -### HTTP Behavior - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_HTTP_RETRIES` | int | `2` | Number of HTTP retry attempts | -| `CLI_AUDIT_BACKOFF_BASE` | float | `0.2` | Base delay for exponential backoff (seconds) | -| `CLI_AUDIT_BACKOFF_JITTER` | float | `0.1` | Jitter for backoff randomization (seconds) | - -### Feature Toggles - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_DOCKER_INFO` | bool | `1` | Check Docker version (may hang in some environments) | -| `CLI_AUDIT_VALIDATE_MANUAL` | bool | `1` | Validate manual cache entries | -| `CLI_AUDIT_DPKG_CACHE_LIMIT` | int | `1024` | Max entries in dpkg cache | - -### Tool Selection - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `CLI_AUDIT_ONLY` | string | `` | Comma-separated tool names to audit | - -### Authentication - -| Variable | Type | Default | Description | -|----------|------|---------|-------------| -| `GITHUB_TOKEN` | string | `` | GitHub personal access token (increases rate limits) | - ---- - -## Makefile Targets - -### Audit Workflows - -```bash -# Update snapshot (collect-only, network required) -make update - -# Render from snapshot (offline) -make audit - -# Update if snapshot missing, then render -make audit-auto - -# Interactive upgrade guide -make upgrade -``` - -### System-Wide Upgrade - -```bash -# Complete system upgrade (5 stages: data → managers → runtimes → user managers → tools) -make upgrade-all - -# Preview upgrade without making changes (dry-run) -make upgrade-all-dry-run - -# Check PATH configuration before upgrading -make check-path -``` - -**5-Stage Workflow:** -1. Refresh version data from upstream -2. Upgrade system package managers (apt, brew, snap, flatpak) -3. Upgrade language runtimes (Python, Node.js, Go, Ruby, Rust) -4. Upgrade user package managers (uv, pipx, npm, pnpm, yarn, cargo, composer, poetry) -5. 
Upgrade all CLI tools managed by each package manager - -**Features:** -- UV migration (auto-migrates pip/pipx packages to uv tools) -- System package detection (skips system-managed tools) -- Comprehensive logging to `logs/upgrade-YYYYMMDD-HHMMSS.log` -- Colored output with statistics summary - -**Environment Variables:** -- `DRY_RUN=1` - Preview mode -- `SKIP_SYSTEM=1` - Skip system package managers -- `VERBOSE=1` - Detailed output - -### Installation Scripts - -**Core Tools:** -```bash -make install-core # fd, fzf, ripgrep, jq, yq, bat, delta, just -``` - -**Language Stacks:** -```bash -make install-python # Python toolchain (uv, pipx, poetry) -make install-node # Node toolchain (nvm, node, npm) -make install-go # Go toolchain -make install-rust # Rust toolchain (rustup, cargo) -``` - -**Infrastructure:** -```bash -make install-aws # AWS CLI -make install-kubectl # Kubernetes CLI -make install-terraform # Terraform -make install-ansible # Ansible -make install-docker # Docker -make install-brew # Homebrew (macOS/Linux) -``` - -### Update Scripts - -```bash -make update-core -make update-python -make update-node -make update-go -make update-aws -``` - -### Uninstall Scripts - -```bash -make uninstall-node -make uninstall-rust -``` - -### Reconciliation - -```bash -# Remove duplicate installations, keep preferred -make reconcile-node # Remove distro Node, keep nvm-managed -make reconcile-rust # Remove distro Rust, keep rustup-managed -``` - -### Permissions - -```bash -# Make scripts executable -make scripts-perms -``` - ---- - -## Phase 2: Python API - -Phase 2 provides programmatic installation, upgrade, and reconciliation APIs. - -**Quick Start:** - -```python -from cli_audit import install_tool, Config, Environment - -config = Config() -env = Environment.detect() - -result = install_tool( - tool_name="ripgrep", - package_name="ripgrep", - target_version="latest", - config=config, - env=env, - language="rust", -) -``` - -**See Also:** -- [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) - Complete Phase 2 API documentation -- [README.md](../README.md) - Code examples for installation, upgrades, and reconciliation - ---- - -## Configuration Files - -### YAML Configuration - -Create `.cli-audit.yml` in your project root, `~/.config/cli-audit/config.yml`, or `/etc/cli-audit/config.yml`. 
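Per the precedence rule this reference states (project, then user, then system), path resolution is just a first-match scan. A minimal sketch (the helper name is illustrative, not part of the documented API):

```python
import os

def resolve_config_path() -> str | None:
    """First existing file wins: project, then user, then system."""
    for path in (
        ".cli-audit.yml",
        os.path.expanduser("~/.config/cli-audit/config.yml"),
        "/etc/cli-audit/config.yml",
    ):
        if os.path.isfile(path):
            return path
    return None  # fall back to built-in defaults
```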
- -**Precedence:** Project → User → System → Defaults - -**Example:** - -```yaml -version: 1 - -environment: - mode: workstation # auto, ci, server, or workstation - -tools: - black: - version: "24.*" # Pin to major version - method: pipx # Preferred package manager - fallback: pip # Fallback if primary fails - - ripgrep: - version: latest - method: cargo - -preferences: - reconciliation: aggressive # parallel or aggressive - breaking_changes: warn # accept, warn, or reject - auto_upgrade: true - timeout_seconds: 10 - max_workers: 8 - cache_ttl_seconds: 3600 # 1 hour version cache - - bulk: - fail_fast: false - auto_rollback: true - generate_rollback_script: true - - package_managers: - python: - - uv - - pipx - - pip - rust: - - cargo - -presets: - dev-essentials: - - black - - ripgrep - - fd - - bat -``` - -**Configuration Validation:** - -```python -from cli_audit import load_config, validate_config - -config = load_config(custom_path=".my-config.yml") -warnings = validate_config(config) - -for warning in warnings: - print(f"⚠️ {warning}") -``` - ---- - -## Output Formats - -### Table Format (Default) - -``` -state|tool|installed|installed_method|latest_upstream|upstream_method -+|fd|9.0.0 (140ms)|apt/dpkg|9.0.0 (220ms)|github -⚠|ripgrep|13.0.0 (120ms)|cargo|14.1.1 (180ms)|github -✗|bat|X|N/A|0.24.0 (200ms)|github -``` - -**Fields:** -1. **state**: Status indicator - - `+` or `✓`: Up-to-date - - `⚠`: Outdated - - `✗` or `-`: Not installed - - `?`: Unknown (check failed) - -2. **tool**: Tool name -3. **installed**: Local version (with timing if `CLI_AUDIT_TIMINGS=1`) -4. **installed_method**: Installation source - - `uv tool`, `pipx/user`, `cargo`, `npm (user)`, `apt/dpkg`, etc. -5. **latest_upstream**: Latest version upstream -6. **upstream_method**: Source of latest version - - `github`, `pypi`, `crates`, `npm`, `gnu-ftp`, `manual` - -### JSON Format - -```bash -CLI_AUDIT_JSON=1 python3 cli_audit.py -``` - -**Schema:** - -```json -{ - "tool": "ripgrep", - "installed": "13.0.0 (120ms)", - "installed_version": "13.0.0", - "installed_method": "cargo", - "installed_path_resolved": "/home/user/.cargo/bin/rg", - "classification_reason": "path-under-~/.cargo/bin", - "installed_path_selected": "/home/user/.cargo/bin/rg", - "classification_reason_selected": "path-under-~/.cargo/bin", - "latest_upstream": "14.1.1 (180ms)", - "latest_version": "14.1.1", - "upstream_method": "github", - "status": "OUTDATED", - "category": "core", - "description": "Fast search tool" -} -``` - -**Status Values:** -- `UP-TO-DATE`: Installed version matches latest -- `OUTDATED`: Newer version available -- `NOT INSTALLED`: Tool not found -- `UNKNOWN`: Version check failed - -### Snapshot Format - -**File:** `tools_snapshot.json` - -```json -{ - "__meta__": { - "schema_version": 1, - "created_at": "2025-10-13T10:30:00Z", - "offline": false, - "count": 50, - "partial_failures": 2 - }, - "tools": [ - { - "tool": "ripgrep", - "installed": "13.0.0", - "latest_upstream": "14.1.1", - "status": "OUTDATED", - ... 
-    }
-  ]
-}
-```
-
----
-
-## Common Workflows
-
-### Quick Agent Readiness Check
-
-```bash
-# Table scan
-python3 cli_audit.py | column -s '|' -t
-
-# Filter outdated tools
-CLI_AUDIT_JSON=1 python3 cli_audit.py \
-  | jq -r '.[] | select(.status != "UP-TO-DATE") | [.tool, .status] | @tsv'
-
-# Security tools only
-CLI_AUDIT_JSON=1 python3 cli_audit.py \
-  | jq '.[] | select(.category == "security")'
-```
-
-### Offline Development
-
-```bash
-# Before going offline: collect snapshot
-make update
-
-# Offline: render from snapshot
-CLI_AUDIT_OFFLINE=1 make audit
-
-# Or combined
-CLI_AUDIT_OFFLINE=1 make audit-auto
-```
-
-### CI/CD Integration
-
-```bash
-# Fast audit in CI (no timing info)
-CLI_AUDIT_TIMINGS=0 CLI_AUDIT_FAST=1 python3 cli_audit.py > audit.txt
-
-# JSON for automation
-CLI_AUDIT_JSON=1 python3 cli_audit.py > audit.json
-
-# Parse results and fail the build on outdated tools
-OUTDATED=$(jq -r '.[] | select(.status == "OUTDATED") | .tool' audit.json)
-if [ -n "$OUTDATED" ]; then
-  echo "Outdated tools: $OUTDATED"
-  exit 1
-fi
-```
-
-### Custom Tool Selection
-
-```bash
-# Python ecosystem only
-python3 cli_audit.py --only python-core
-
-# Multiple categories
-python3 cli_audit.py --only agent-core security-core
-
-# Specific tools
-python3 cli_audit.py ripgrep fd bat delta
-
-# Using environment variable
-CLI_AUDIT_ONLY="ripgrep,fd,bat" python3 cli_audit.py
-```
-
-### Performance Optimization
-
-```bash
-# Fast mode (skip slow checks)
-CLI_AUDIT_FAST=1 python3 cli_audit.py
-
-# Reduce workers for slow network
-CLI_AUDIT_MAX_WORKERS=4 python3 cli_audit.py
-
-# Increase timeout for slow hosts
-CLI_AUDIT_TIMEOUT_SECONDS=10 python3 cli_audit.py
-
-# Use manual cache first (faster)
-CLI_AUDIT_MANUAL_FIRST=1 python3 cli_audit.py
-```
-
-### Debugging
-
-```bash
-# Basic debug output
-CLI_AUDIT_DEBUG=1 python3 cli_audit.py 2> debug.log
-
-# Verbose tracing
-CLI_AUDIT_TRACE=1 python3 cli_audit.py 2> trace.log
-
-# Network tracing
-CLI_AUDIT_TRACE_NET=1 python3 cli_audit.py 2> network.log
-
-# Progress messages
-CLI_AUDIT_PROGRESS=1 python3 cli_audit.py
-```
-
----
-
-## Troubleshooting
-
-### Network Issues
-
-**Problem:** Timeouts or slow responses
-
-```bash
-# Increase timeout
-CLI_AUDIT_TIMEOUT_SECONDS=10 python3 cli_audit.py
-
-# Reduce concurrency
-CLI_AUDIT_MAX_WORKERS=4 python3 cli_audit.py
-
-# Cap per-host requests
-CLI_AUDIT_HOST_CAP_GITHUB=2 CLI_AUDIT_HOST_CAP_NPM=2 python3 cli_audit.py
-```
-
-**Problem:** GitHub rate limits
-
-```bash
-# Use personal access token
-export GITHUB_TOKEN="ghp_xxxxxxxxxxxx"
-python3 cli_audit.py
-
-# Use offline mode
-CLI_AUDIT_OFFLINE=1 python3 cli_audit.py
-```
-
-### Missing Tools
-
-**Problem:** Tool shows as "NOT INSTALLED" but is actually installed
-
-```bash
-# Check PATH
-echo $PATH
-
-# Check extra search paths
-CLI_AUDIT_DEBUG=1 python3 cli_audit.py 2>&1 | grep -i "search"
-```
-
-If the tool lives outside the searched paths, add it to the tool-specific
-paths mapping in `cli_audit.py`:
-
-```python
-TOOL_SPECIFIC_PATHS = {
-    "mytool": ["/custom/path/to/tool"],
-}
-```
-
-### Version Detection
-
-**Problem:** Installed version shows as "X" or "unknown"
-
-```bash
-# Debug version detection
-CLI_AUDIT_DEBUG=1 python3 cli_audit.py mytool 2> debug.log
-
-# Check tool's version flag manually
-mytool --version
-mytool -v
-mytool version
-```
-
-### Snapshot Issues
-
-**Problem:** Stale snapshot data
-
-```bash
-# Force snapshot update
-rm tools_snapshot.json
-make update
-
-# Or force collection via environment variable
-CLI_AUDIT_COLLECT=1 python3 cli_audit.py
-```
-
-**Problem:** Corrupted snapshot
-
-```bash
-# Validate JSON
-jq '.' 
tools_snapshot.json - -# Rebuild from scratch -rm tools_snapshot.json latest_versions.json -make update -``` - -### Performance - -**Problem:** Slow audit execution - -```bash -# Check slow operations -CLI_AUDIT_TRACE=1 python3 cli_audit.py 2>&1 | grep "slow" - -# Use fast mode -CLI_AUDIT_FAST=1 python3 cli_audit.py - -# Use snapshot workflow -make update # Once, when needed -make audit # Fast, offline rendering -``` - -### Docker Hangs - -**Problem:** Docker version check hangs - -```bash -# Disable Docker info -CLI_AUDIT_DOCKER_INFO=0 python3 cli_audit.py -``` - ---- - -## Related Documentation - -- **[README.md](../README.md)** - Project overview and quick start -- **[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md)** - Complete Phase 2 API documentation -- **[ARCHITECTURE.md](ARCHITECTURE.md)** - System architecture and design -- **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - Contributing and development guide -- **[INDEX.md](INDEX.md)** - Complete documentation index - ---- - -**Last Updated:** 2025-10-13 -**Maintainers:** See [CONTRIBUTING.md](../CONTRIBUTING.md) diff --git a/docs/DEPLOYMENT.md b/docs/DEPLOYMENT.md index 5c9a158..ae1c09c 100644 --- a/docs/DEPLOYMENT.md +++ b/docs/DEPLOYMENT.md @@ -100,103 +100,6 @@ Interactive remediation guide (renamed from `guide`). make upgrade ``` -### System-Wide Upgrade Targets - -#### `make upgrade-all` -Complete system upgrade in 5 orchestrated stages. - -**Workflow:** -1. **Stage 1: Refresh Data** - Update version snapshot from upstream sources -2. **Stage 2: Upgrade Package Managers** - Self-update system package managers (apt, brew, snap, flatpak) -3. **Stage 3: Upgrade Language Runtimes** - Update core runtimes (Python, Node.js, Go, Ruby, Rust) -4. **Stage 4: Upgrade User Package Managers** - Update language-specific managers (uv, pipx, npm, pnpm, yarn, cargo, composer, poetry) -5. **Stage 5: Upgrade Tools** - Upgrade all CLI tools managed by each package manager - -**Features:** -- Comprehensive logging to `logs/upgrade-YYYYMMDD-HHMMSS.log` -- Colored terminal output with progress tracking -- Version and location info for successful upgrades -- UV migration support (auto-migrates pip/pipx packages to uv tools) -- System package detection (skips system-managed tools, suggests reconcile) -- Statistics summary (upgraded/failed/skipped counts) -- Dry-run mode available - -**Use Case:** Complete development environment upgrade, ensuring all tools across all package managers are current. - -**Duration:** 5-15 minutes depending on number of outdated packages. - -```bash -# Full system upgrade -make upgrade-all - -# Preview what would be upgraded (dry-run) -make upgrade-all-dry-run -``` - -**Output Example:** -``` -[1/5] Stage 1: Refresh version data - ✓ Updated snapshot (64 tools checked) - -[2/5] Stage 2: Upgrade package managers - ✓ apt (upgraded 12 packages) - ⏭ homebrew (not installed) - ✓ snap (2.63 at /usr/bin/snap) - -[3/5] Stage 3: Upgrade language runtimes - ✓ python (3.12.7 → 3.12.8) - ✓ node (20.10.0 → 20.11.0) - ⏭ go (already latest: 1.22.0) - -[4/5] Stage 4: Upgrade user package managers - ✓ uv (0.4.30 → 0.5.0) - ✓ pipx (1.7.1 → 1.8.0) - → Migrating pip packages to uv... 
- ✓ black migrated to uv tool - ✓ ruff migrated to uv tool - -[5/5] Stage 5: Upgrade CLI tools - ✓ Upgraded 8 uv tools - ✓ Upgraded 15 npm packages - ✓ Upgraded 3 cargo binaries - -Summary: 45 upgraded, 12 skipped, 2 failed -Duration: 8m 32s -Log: logs/upgrade-20251018-073045.log -``` - -**Environment Variables:** -- `DRY_RUN=1` - Preview mode without making changes -- `SKIP_SYSTEM=1` - Skip system package managers (apt, brew, snap, flatpak) -- `VERBOSE=1` - Detailed output for debugging - -**Advanced:** -```bash -# Direct script execution with options -DRY_RUN=1 VERBOSE=1 bash scripts/upgrade_all.sh - -# Skip system package managers -SKIP_SYSTEM=1 make upgrade-all - -# Check PATH configuration before upgrading -make check-path -``` - -#### `make check-path` -Validate PATH configuration for all package managers. - -**Behavior:** -- Checks if package manager binaries are in PATH -- Validates PATH ordering (user bins before system bins) -- Identifies potential shadowing issues -- Provides remediation suggestions - -**Use Case:** Diagnose PATH issues before or after system upgrade. - -```bash -make check-path -``` - ### Tool-Specific Audit Targets #### `make audit-TOOLNAME` diff --git a/docs/ERROR_CATALOG.md b/docs/ERROR_CATALOG.md deleted file mode 100644 index 5c19f5d..0000000 --- a/docs/ERROR_CATALOG.md +++ /dev/null @@ -1,767 +0,0 @@ -# Error Catalog - -**Version:** 2.0.0-alpha.6 -**Last Updated:** 2025-10-13 - -Complete reference of errors, exceptions, and failure modes in CLI Audit tool, with causes, resolutions, and troubleshooting guidance. - ---- - -## Table of Contents - -- [Error Categories](#error-categories) -- [Configuration Errors](#configuration-errors) -- [Installation Errors](#installation-errors) -- [Environment Detection Errors](#environment-detection-errors) -- [Package Manager Errors](#package-manager-errors) -- [Validation Errors](#validation-errors) -- [Network Errors](#network-errors) -- [System Errors](#system-errors) -- [Exit Codes](#exit-codes) -- [Troubleshooting](#troubleshooting) - ---- - -## Error Categories - -| Category | Severity | Retryable | Examples | -|----------|----------|-----------|----------| -| **Configuration** | High | No | Invalid config values, unsupported versions | -| **Installation** | Medium | Sometimes | Network failures, lock contention, command failures | -| **Environment** | Low | No | Invalid override values | -| **Package Manager** | High | No | PM not available, no suitable PM found | -| **Validation** | Medium | No | Binary not found, version mismatch | -| **Network** | Medium | Yes | Timeouts, connection refused, DNS failures | -| **System** | High | Sometimes | Permission denied, disk full, command not found | - ---- - -## Configuration Errors - -### CONFIG-001: Unsupported Config Version - -**Error Message:** -``` -ValueError: Unsupported config version: X. Expected version 1 -``` - -**Cause:** -- Configuration file specifies unsupported schema version -- Current supported version: 1 - -**Resolution:** -1. Update config file to use `version: 1` -2. Review [Configuration Files](CLI_REFERENCE.md#configuration-files) for current schema - -**Example:** -```yaml -# ❌ Wrong -version: 2 - -# ✅ Correct -version: 1 -``` - -### CONFIG-002: Invalid Environment Mode - -**Error Message:** -``` -ValueError: Invalid environment_mode: X. 
Must be one of: auto, ci, server, workstation
-```
-
-**Cause:**
-- Configuration specifies invalid environment mode
-- Valid modes: `auto`, `ci`, `server`, `workstation`
-
-**Resolution:**
-```yaml
-# ❌ Wrong
-environment:
-  mode: production
-
-# ✅ Correct
-environment:
-  mode: server
-```
-
-### CONFIG-003: Invalid Reconciliation Strategy
-
-**Error Message:**
-```
-ValueError: Invalid reconciliation strategy: X. Must be 'parallel' or 'aggressive'
-```
-
-**Cause:**
-- Invalid `reconciliation` preference value
-- Valid values: `parallel` (keep all), `aggressive` (remove non-preferred)
-
-**Resolution:**
-```yaml
-preferences:
-  reconciliation: parallel  # or 'aggressive'
-```
-
-### CONFIG-004: Invalid Breaking Changes Policy
-
-**Error Message:**
-```
-ValueError: Invalid breaking_changes setting: X. Must be 'accept', 'warn', or 'reject'
-```
-
-**Cause:**
-- Invalid `breaking_changes` preference value
-- Valid values: `accept`, `warn`, `reject`
-
-**Resolution:**
-```yaml
-preferences:
-  breaking_changes: warn  # accept, warn, or reject
-```
-
-### CONFIG-005: Invalid Timeout Value
-
-**Error Message:**
-```
-ValueError: Invalid timeout_seconds: X. Must be between 1 and 60
-```
-
-**Cause:**
-- Timeout value outside valid range (1-60 seconds)
-
-**Resolution:**
-```yaml
-preferences:
-  timeout_seconds: 10  # 1-60 seconds
-```
-
-### CONFIG-006: Invalid Max Workers
-
-**Error Message:**
-```
-ValueError: Invalid max_workers: X. Must be between 1 and 32
-```
-
-**Cause:**
-- Worker count outside valid range (1-32)
-
-**Resolution:**
-```yaml
-preferences:
-  max_workers: 16  # 1-32 workers
-```
-
-### CONFIG-007: Invalid Cache TTL
-
-**Error Message:**
-```
-ValueError: Invalid cache_ttl_seconds: X. Must be between 60 and 86400 (1 minute to 1 day)
-```
-
-**Cause:**
-- Cache TTL outside valid range (60-86400 seconds)
-
-**Resolution:**
-```yaml
-preferences:
-  cache_ttl_seconds: 3600  # 1 hour (60-86400 seconds)
-```
-
-### CONFIG-008: Config File Not Found
-
-**Error Message:**
-```
-ValueError: Could not load config from specified path: /path/to/config.yml
-```
-
-**Cause:**
-- Custom config path specified but file doesn't exist or can't be read
-- YAML parser not available (PyYAML not installed)
-
-**Resolution:**
-1. Verify file exists: `ls -l /path/to/config.yml`
-2. Check file permissions: `chmod 644 config.yml`
-3. Install PyYAML: `pip install pyyaml`
-4. Validate YAML syntax: `yamllint config.yml`
-
----
-
-## Installation Errors
-
-### INSTALL-001: Installation Failed at Step
-
-**Error Message:**
-```
-Installation failed at step: <step-name>
-<error details>
-```
-
-**Cause:**
-- Command execution failed during installation
-- Network issues, permission problems, or package manager errors
-
-**Resolution:**
-1. Check error details for specific cause
-2. For network errors: retry or check connectivity
-3. For permission errors: ensure sudo access or use user-level PM
-4. For lock errors: wait and retry
-
-**Retry Logic:**
-- Network errors: Auto-retry with exponential backoff
-- Lock contention: Auto-retry with exponential backoff
-- Other errors: No auto-retry (manual intervention required)
-
-### INSTALL-002: Post-Install Validation Failed
-
-**Error Message:**
-```
-Post-install validation failed for <tool>
-```
-
-**Cause:**
-- Tool installed but binary not found in PATH
-- Binary exists but version check failed
-- Tool name differs from package name
-
-**Resolution:**
-1. Check PATH: `echo $PATH`
-2. Find binary manually: `find ~/.local ~/.cargo /usr/local -name '<binary>'`
-3. Verify installation: `<tool> --version`
-4. Update PATH if needed:
-   ```bash
-   export PATH="$HOME/.local/bin:$PATH"
-   ```
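-
-The manual check in steps 1-4 can also be scripted. A minimal standalone
-sketch (not part of the cli_audit API); the directory list and the `rg`
-example binary are illustrative:
-
-```python
-import os
-import shutil
-from pathlib import Path
-
-# Common user-level install locations checked by hand in step 2.
-COMMON_BIN_DIRS = [
-    Path.home() / ".local" / "bin",
-    Path.home() / ".cargo" / "bin",
-    Path("/usr/local/bin"),
-]
-
-def diagnose(binary: str) -> None:
-    found = shutil.which(binary)
-    if found:
-        print(f"OK: {binary} resolves to {found}")
-        return
-    path_dirs = os.environ.get("PATH", "").split(os.pathsep)
-    for directory in COMMON_BIN_DIRS:
-        candidate = directory / binary
-        if candidate.is_file() and os.access(candidate, os.X_OK):
-            hint = "on PATH" if str(directory) in path_dirs else "NOT on PATH"
-            print(f"Found {candidate} ({hint})")
-            return
-    print(f"{binary} not found in common install directories")
-
-if __name__ == "__main__":
-    diagnose("rg")  # example: check ripgrep's binary
-```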
-
-### INSTALL-003: Command Timeout
-
-**Error Message:**
-```
-Command timed out after Xs
-```
-
-**Cause:**
-- Installation command exceeded configured timeout
-- Slow network, large download, or unresponsive package manager
-
-**Resolution:**
-1. Increase timeout:
-   ```yaml
-   preferences:
-     timeout_seconds: 30  # Increase from default 5s
-   ```
-2. Check network connectivity
-3. Try different package manager
-
-### INSTALL-004: Command Not Found
-
-**Error Message:**
-```
-Command not found: <command>
-```
-
-**Cause:**
-- Package manager binary not in PATH
-- Package manager not installed
-
-**Resolution:**
-1. Install package manager:
-   ```bash
-   # For cargo
-   curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-
-   # For pipx
-   python3 -m pip install --user pipx
-   python3 -m pipx ensurepath
-   ```
-2. Verify installation: `which <command>`
-3. Update PATH if needed
-
-### INSTALL-005: Installation Error (Generic)
-
-**Error Message:**
-```
-Installation error: <details>
-```
-
-**Cause:**
-- Unexpected exception during installation
-- Python errors, file system issues, or system problems
-
-**Resolution:**
-1. Check error details for specific cause
-2. Enable debug mode: `CLI_AUDIT_DEBUG=1`
-3. Check system logs: `journalctl -xe`
-4. Report issue if persistent
-
----
-
-## Installation Error (InstallError)
-
-### Custom Exception: InstallError
-
-**Properties:**
-- `message`: Human-readable error message
-- `retryable`: Whether error can be retried
-- `remediation`: Suggested fix
-
-**Usage:**
-```python
-try:
-    result = install_tool(...)
-except InstallError as e:
-    print(f"Error: {e.message}")
-    if e.retryable:
-        print("This error can be retried")
-    if e.remediation:
-        print(f"Suggested fix: {e.remediation}")
-```
-
-**Retryable Errors:**
-
-Network-related:
-- `connection refused`
-- `connection timed out`
-- `connection reset`
-- `temporary failure`
-- `network unreachable`
-- `could not resolve host`
-
-Lock contention:
-- `could not get lock`
-- `lock file exists`
-- `waiting for cache lock`
-- `dpkg frontend lock`
-
-Exit codes:
-- `75` (EAGAIN - temporary failure)
-- `111` (connection refused)
-- `128` (git error)
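-
-The auto-retry discipline referenced above has this general shape. A minimal
-sketch, reusing the exponential-backoff defaults documented for
-`CLI_AUDIT_BACKOFF_BASE` and `CLI_AUDIT_BACKOFF_JITTER`; the `InstallError`
-stub and the `attempt_install` callable are illustrative stand-ins, not the
-library's actual internals:
-
-```python
-import random
-import time
-
-BACKOFF_BASE = 0.2    # seconds, mirrors the CLI_AUDIT_BACKOFF_BASE default
-BACKOFF_JITTER = 0.1  # seconds, mirrors the CLI_AUDIT_BACKOFF_JITTER default
-
-class InstallError(Exception):
-    """Illustrative stub carrying the retryable flag described above."""
-    def __init__(self, message: str, retryable: bool = False):
-        super().__init__(message)
-        self.retryable = retryable
-
-def with_retries(attempt_install, max_retries: int = 2):
-    for attempt in range(max_retries + 1):
-        try:
-            return attempt_install()
-        except InstallError as exc:
-            if not exc.retryable or attempt == max_retries:
-                raise
-            # Exponential backoff with jitter: ~0.2s, ~0.4s, ~0.8s, ...
-            delay = BACKOFF_BASE * (2 ** attempt) + random.uniform(0, BACKOFF_JITTER)
-            time.sleep(delay)
-```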
-
----
-
-## Environment Detection Errors
-
-### ENV-001: Invalid Environment Override
-
-**Error Message:**
-```
-ValueError: Invalid environment override: X. Must be one of: auto, ci, server, workstation
-```
-
-**Cause:**
-- Invalid override value passed to `detect_environment()`
-- Valid values: `auto`, `ci`, `server`, `workstation`
-
-**Resolution:**
-```python
-# ❌ Wrong
-env = detect_environment(override="production")
-
-# ✅ Correct
-env = detect_environment(override="server")
-```
-
-### ENV-002: Low Confidence Detection
-
-**Warning Message:**
-```
-Environment detected with low confidence: <confidence>%
-```
-
-**Cause:**
-- Ambiguous environment indicators
-- Missing expected environment variables or system signals
-
-**Resolution:**
-1. Use explicit override:
-   ```python
-   env = detect_environment(override="ci")
-   ```
-2. Or in config:
-   ```yaml
-   environment:
-     mode: ci  # Explicit mode
-   ```
-3. Set environment-specific variables (CI, DISPLAY, etc.)
-
----
-
-## Package Manager Errors
-
-### PM-001: No Suitable Package Manager Found
-
-**Error Message:**
-```
-ValueError: No suitable package manager found for <tool>. Please install a package manager for <language>.
-```
-
-**Cause:**
-- No package manager available for tool's language/ecosystem
-- Package managers not in PATH
-
-**Resolution:**
-1. Install recommended package manager:
-
-   **Python:**
-   ```bash
-   pip install --user pipx
-   python -m pipx ensurepath
-   ```
-
-   **Rust:**
-   ```bash
-   curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-   ```
-
-   **Node:**
-   ```bash
-   curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
-   ```
-
-2. Configure package manager hierarchy:
-   ```yaml
-   preferences:
-     package_managers:
-       python: [uv, pipx, pip]
-       rust: [cargo]
-       node: [npm, pnpm, yarn]
-   ```
-
-### PM-002: Package Manager Not Found
-
-**Error Message:**
-```
-ValueError: Package manager not found: <name>
-```
-
-**Cause:**
-- Specified package manager not registered in system
-- Typo in package manager name
-
-**Resolution:**
-1. Check available PMs: See [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md#package_managerspy)
-2. Verify spelling: `cargo`, `pipx`, `npm`, etc.
-3. Install missing PM (see PM-001)
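-
-The hierarchy walk described in PM-001 amounts to "first available manager
-wins". A minimal standalone sketch, assuming the example hierarchy above
-(the function name `first_available_pm` is illustrative, not a cli_audit
-API):
-
-```python
-import shutil
-
-# Mirrors the package_managers preference shown in the YAML example above.
-PM_HIERARCHY = {
-    "python": ["uv", "pipx", "pip"],
-    "rust": ["cargo"],
-    "node": ["npm", "pnpm", "yarn"],
-}
-
-def first_available_pm(language: str) -> str:
-    # Walk the configured order and return the first manager found on PATH.
-    for pm in PM_HIERARCHY.get(language, []):
-        if shutil.which(pm):
-            return pm
-    raise ValueError(f"No suitable package manager found for {language}.")
-
-print(first_available_pm("python"))  # e.g. 'uv' when it is installed
-```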
-
----
-
-## Validation Errors
-
-### VALID-001: Binary Not Found in PATH
-
-**Error Message:**
-```
-Binary not found in PATH: <binary>
-```
-
-**Cause:**
-- Installation succeeded but binary not in PATH
-- Tool installed to unexpected location
-- PATH not updated after installation
-
-**Resolution:**
-1. Find binary:
-   ```bash
-   find ~ -name '<binary>' 2>/dev/null
-   ```
-2. Update PATH:
-   ```bash
-   # Add to ~/.bashrc or ~/.zshrc
-   export PATH="$HOME/.local/bin:$PATH"
-   export PATH="$HOME/.cargo/bin:$PATH"
-   ```
-3. Reload shell: `source ~/.bashrc`
-
-### VALID-002: Version Detection Failed
-
-**Warning Message:**
-```
-Could not determine version for <tool>
-```
-
-**Cause:**
-- Tool doesn't support standard version flags
-- Version output format not recognized
-
-**Resolution:**
-1. Check manually:
-   ```bash
-   <tool> --version
-   <tool> -V
-   <tool> version
-   ```
-2. Tool still functional despite version detection failure
-3. Report issue if version should be detectable
-
-### VALID-003: Version Mismatch
-
-**Warning Message:**
-```
-Installed version <installed> differs from target <target>
-```
-
-**Cause:**
-- Package manager installed different version than requested
-- Latest version changed between plan generation and execution
-
-**Resolution:**
-1. Check if acceptable: minor version differences usually OK
-2. For exact version, specify in config:
-   ```yaml
-   tools:
-     tool_name:
-       version: "1.2.3"  # Exact version
-   ```
-
----
-
-## Network Errors
-
-### NET-001: Connection Timeout
-
-**Error Message:**
-```
-connection timed out
-```
-
-**Cause:**
-- Network latency or package repository unresponsive
-- Firewall blocking connections
-
-**Resolution:**
-1. Check network: `ping pypi.org`
-2. Increase timeout:
-   ```yaml
-   preferences:
-     timeout_seconds: 30
-   ```
-3. Configure proxy if needed:
-   ```bash
-   export HTTP_PROXY=http://proxy:8080
-   export HTTPS_PROXY=http://proxy:8080
-   ```
-
-### NET-002: Connection Refused
-
-**Error Message:**
-```
-connection refused
-```
-
-**Cause:**
-- Package repository temporarily unavailable
-- Port blocked by firewall
-- Wrong repository URL
-
-**Resolution:**
-1. Wait and retry (auto-retry enabled)
-2. Check repository status
-3. Try alternative package manager
-
-### NET-003: DNS Resolution Failed
-
-**Error Message:**
-```
-could not resolve host
-```
-
-**Cause:**
-- DNS server unreachable
-- Domain name doesn't exist
-- Network connectivity issue
-
-**Resolution:**
-1. Check DNS: `nslookup pypi.org`
-2. Test connectivity: `ping 8.8.8.8`
-3. Configure DNS:
-   ```bash
-   # Add to /etc/resolv.conf
-   nameserver 8.8.8.8
-   nameserver 1.1.1.1
-   ```
-
----
-
-## System Errors
-
-### SYS-001: Permission Denied
-
-**Error Message:**
-```
-Permission denied
-```
-
-**Cause:**
-- Insufficient permissions for operation
-- System-level package manager requires sudo
-- Directory not writable
-
-**Resolution:**
-1. Use user-level package manager:
-   ```yaml
-   preferences:
-     package_managers:
-       python: [uv, pipx, pip]  # uv/pipx don't need sudo
-   ```
-2. Or use sudo:
-   ```bash
-   sudo python3 cli_audit.py
-   ```
-3. Fix directory permissions:
-   ```bash
-   chmod 755 ~/.local/bin
-   ```
-
-### SYS-002: Disk Space Exhausted
-
-**Error Message:**
-```
-No space left on device
-```
-
-**Cause:**
-- Insufficient disk space for installation
-- Download cache full
-
-**Resolution:**
-1. Check space: `df -h`
-2. Clean cache:
-   ```bash
-   # pip
-   pip cache purge
-
-   # cargo
-   cargo clean
-
-   # apt
-   sudo apt clean
-   ```
-3. Free up space or expand disk
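-
-To catch this before an install starts rather than partway through, free
-space can be checked up front. A standalone sketch using only the standard
-library; the 500 MB threshold is an arbitrary example value:
-
-```python
-import shutil
-
-def check_free_space(path: str = "/", min_free_mb: int = 500) -> None:
-    # shutil.disk_usage returns (total, used, free) in bytes.
-    free_mb = shutil.disk_usage(path).free // (1024 * 1024)
-    if free_mb < min_free_mb:
-        raise OSError(
-            f"Only {free_mb} MB free on {path}; "
-            f"need at least {min_free_mb} MB before installing"
-        )
-
-check_free_space()  # raises if the root filesystem is nearly full
-```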
-
-### SYS-003: File Not Found
-
-**Error Message:**
-```
-FileNotFoundError: [Errno 2] No such file or directory
-```
-
-**Cause:**
-- Required file or directory missing
-- Incorrect path specified
-
-**Resolution:**
-1. Verify path exists
-2. Create missing directories:
-   ```bash
-   mkdir -p ~/.config/cli-audit
-   ```
-3. Check file permissions
-
----
-
-## Exit Codes
-
-| Code | Meaning | Retryable | Action |
-|------|---------|-----------|--------|
-| `0` | Success | N/A | Continue |
-| `1` | General error | No | Check error message |
-| `2` | Misuse of command | No | Fix command syntax |
-| `75` | Temporary failure | Yes | Auto-retry |
-| `111` | Connection refused | Yes | Auto-retry |
-| `126` | Command not executable | No | Check permissions |
-| `127` | Command not found | No | Install package manager |
-| `128` | Invalid exit argument | Yes | Auto-retry (git errors) |
-| `130` | Terminated by Ctrl+C | No | User interrupted |
-| `-1` | Timeout or system error | Sometimes | Check timeout settings |
-
----
-
-## Troubleshooting
-
-### General Debugging Steps
-
-1. **Enable Debug Mode:**
-   ```bash
-   CLI_AUDIT_DEBUG=1 python3 cli_audit.py
-   ```
-
-2. **Enable Verbose Logging:**
-   ```python
-   result = install_tool(..., verbose=True)
-   ```
-
-3. **Check System Requirements:**
-   ```bash
-   python3 --version  # 3.9+
-   which pip pipx cargo npm
-   echo $PATH
-   ```
-
-4. **Validate Configuration:**
-   ```python
-   from cli_audit import load_config, validate_config
-
-   config = load_config()
-   warnings = validate_config(config)
-   for warning in warnings:
-       print(f"⚠️ {warning}")
-   ```
-
-### Common Resolution Patterns
-
-**Network Issues:**
-1. Increase timeout
-2. Check connectivity
-3. Configure proxy
-4. Try offline mode: `CLI_AUDIT_OFFLINE=1`
-
-**Permission Issues:**
-1. Use user-level package managers (uv, pipx, cargo)
-2. Fix directory permissions: `chmod 755`
-3. Or use sudo (system PMs only)
-
-**Package Manager Issues:**
-1. Verify PM installed: `which <pm>`
-2. Update PATH
-3. Configure PM hierarchy in config
-4. Try alternative PM
-
-**Validation Issues:**
-1. Check PATH
-2. Verify binary exists: `which <tool>`
-3. Test manually: `<tool> --version`
-4. Update PATH and reload shell
-
-### Getting Help
-
-1. **Check Documentation:**
-   - [CLI_REFERENCE.md](CLI_REFERENCE.md) - Command reference
-   - [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) - API documentation
-   - [TESTING.md](TESTING.md) - Testing and debugging
-
-2. **Enable Tracing:**
-   ```bash
-   CLI_AUDIT_TRACE=1 CLI_AUDIT_TRACE_NET=1 python3 cli_audit.py 2> trace.log
-   ```
-
-3. 
**Report Issue:** - Include: - - Error message and stack trace - - Operating system and version - - Python version - - Tool and package manager versions - - Debug output - ---- - -## Related Documentation - -- **[CLI_REFERENCE.md](CLI_REFERENCE.md)** - Command-line reference -- **[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md)** - API documentation -- **[TESTING.md](TESTING.md)** - Testing guide -- **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - Development guide -- **[CONTRIBUTING.md](../CONTRIBUTING.md)** - Contributing guidelines - ---- - -**Last Updated:** 2025-10-13 -**Maintainers:** See [CONTRIBUTING.md](../CONTRIBUTING.md) diff --git a/docs/INDEX.md b/docs/INDEX.md index 808a7f7..1aa1eef 100644 --- a/docs/INDEX.md +++ b/docs/INDEX.md @@ -1,22 +1,16 @@ # AI CLI Preparation - Documentation Index -**Version:** 2.0.0-alpha.6 -**Last Updated:** 2025-10-13 +**Version:** 1.0 +**Last Updated:** 2025-10-09 ## Overview AI CLI Preparation is a specialized environment audit tool designed to ensure AI coding agents (like Claude Code) have access to all necessary developer tools. This documentation provides comprehensive technical details for developers, contributors, and integrators. -**Project Status:** -- **Phase 1 (Detection & Auditing):** ✅ Complete -- **Phase 2 (Installation & Upgrade):** ✅ Implementation Complete | 📝 Documentation Complete - ## Documentation Structure ### For Developers & Contributors -#### Phase 1: Detection & Auditing - 1. **[QUICK_REFERENCE.md](QUICK_REFERENCE.md)** - Quick Lookup & Cheat Sheet ⭐ - One-liners for common operations - Environment variable reference @@ -29,9 +23,9 @@ AI CLI Preparation is a specialized environment audit tool designed to ensure AI - HTTP layer with retries and rate limiting - Cache hierarchy and resilience patterns -3. **[API_REFERENCE.md](API_REFERENCE.md)** - Phase 1 API Documentation +3. **[API_REFERENCE.md](API_REFERENCE.md)** - API Documentation - Tool dataclass specification - - Core audit functions by category + - Core functions by category - Configuration via environment variables - Cache file formats and schemas @@ -41,95 +35,57 @@ AI CLI Preparation is a specialized environment audit tool designed to ensure AI - Usage examples and patterns - Cross-references to detailed docs -5. **[TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md)** - Tool Catalog +5. **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - Contributing Guide + - How to add new tools + - Testing strategies and validation + - Code organization and style + - Common contribution patterns + +6. **[TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md)** - Tool Catalog - Complete 50+ tool reference - Categories and use cases - Upgrade strategies per tool - Role-based presets -6. **[DEPLOYMENT.md](DEPLOYMENT.md)** - Operations Guide +7. **[DEPLOYMENT.md](DEPLOYMENT.md)** - Operations Guide - Makefile target reference - Installation script usage - Snapshot workflow patterns - Offline mode configuration -7. **[TROUBLESHOOTING.md](TROUBLESHOOTING.md)** - Problem Solving +8. **[TROUBLESHOOTING.md](TROUBLESHOOTING.md)** - Problem Solving - Common issues and solutions - Debugging techniques - Performance optimization - Network timeout handling -#### Phase 2: Installation & Upgrade Management - -8. **[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md)** - Phase 2 API Documentation ⭐ - - Installation, upgrade, and reconciliation APIs - - Environment detection and configuration - - Bulk operations and dependency resolution - - Breaking change management - - Package manager selection - -9. 
**[CLI_REFERENCE.md](CLI_REFERENCE.md)** - Command-Line Reference - - All CLI commands and options - - Environment variable reference (60+ variables) - - Output formats and usage patterns - - Common workflows and examples - -10. **[TESTING.md](TESTING.md)** - Testing Guide - - Test organization and structure - - Running tests (unit, integration, E2E) - - Writing tests and fixtures - - Mocking patterns and best practices - - Coverage requirements and CI integration - -11. **[ERROR_CATALOG.md](ERROR_CATALOG.md)** - Error Reference - - Complete error categorization - - Causes, resolutions, and troubleshooting - - InstallError exception patterns - - Retryable error detection - - Exit codes and debugging - -12. **[INTEGRATION_EXAMPLES.md](INTEGRATION_EXAMPLES.md)** - Integration Patterns - - CI/CD integration (GitHub Actions, GitLab CI) - - Development workflow automation - - Custom toolchain management - - Python API integration examples - - Configuration patterns - -#### Contributing - -13. **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - Contributing Guide - - How to add new tools - - Testing strategies and validation - - Code organization and style - - Common contribution patterns - -14. **[../scripts/README.md](../scripts/README.md)** - Installation Scripts - - All 13+ installation scripts documented - - Actions: install, update, uninstall, reconcile - - Per-script usage and best practices - - Troubleshooting script issues +9. **[../scripts/README.md](../scripts/README.md)** - Installation Scripts + - All 13+ installation scripts documented + - Actions: install, update, uninstall, reconcile + - Per-script usage and best practices + - Troubleshooting script issues ### Planning & Specifications -15. **[PRD.md](PRD.md)** - Product Requirements Document +10. **[PRD.md](PRD.md)** - Product Requirements Document - Phase 1 summary (detection and auditing) - Phase 2 specification (installation and upgrade management) - User stories and success criteria - Risk assessment and mitigation strategies -16. **[PHASE2_IMPLEMENTATION.md](PHASE2_IMPLEMENTATION.md)** - Implementation Roadmap +11. **[PHASE2_IMPLEMENTATION.md](PHASE2_IMPLEMENTATION.md)** - Implementation Roadmap - 5 implementation phases with timelines - Deliverables and success criteria per phase - Testing strategies and rollout plan - Risk mitigation and validation -17. **[CONFIGURATION_SPEC.md](CONFIGURATION_SPEC.md)** - Configuration Reference +12. **[CONFIGURATION_SPEC.md](CONFIGURATION_SPEC.md)** - Configuration Reference - .cli-audit.yml schema and syntax - File locations and precedence rules - Version specification syntax - Examples for all environments -18. **[adr/README.md](adr/README.md)** - Architecture Decision Records +13. 
**[adr/README.md](adr/README.md)** - Architecture Decision Records - ADR process and templates - Index of all architectural decisions - Phase 2 decision rationale @@ -146,30 +102,30 @@ AI CLI Preparation is a specialized environment audit tool designed to ensure AI ### By Role **First-Time Users:** -Start with [QUICK_REFERENCE.md](QUICK_REFERENCE.md) ⭐ → [CLI_REFERENCE.md](CLI_REFERENCE.md) → [TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md) → [DEPLOYMENT.md](DEPLOYMENT.md) +Start with [QUICK_REFERENCE.md](QUICK_REFERENCE.md) ⭐ → [TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md) → [DEPLOYMENT.md](DEPLOYMENT.md) **Contributors:** -Start with [DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md) → [ARCHITECTURE.md](ARCHITECTURE.md) → [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) → [TESTING.md](TESTING.md) +Start with [DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md) → [ARCHITECTURE.md](ARCHITECTURE.md) → [API_REFERENCE.md](API_REFERENCE.md) → [FUNCTION_REFERENCE.md](FUNCTION_REFERENCE.md) **Maintainers:** -Start with [ARCHITECTURE.md](ARCHITECTURE.md) → [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) → [ERROR_CATALOG.md](ERROR_CATALOG.md) → [TROUBLESHOOTING.md](TROUBLESHOOTING.md) +Start with [ARCHITECTURE.md](ARCHITECTURE.md) → [TROUBLESHOOTING.md](TROUBLESHOOTING.md) → [DEPLOYMENT.md](DEPLOYMENT.md) → [adr/README.md](adr/README.md) **Integrators:** -Start with [INTEGRATION_EXAMPLES.md](INTEGRATION_EXAMPLES.md) ⭐ → [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) → [CLI_REFERENCE.md](CLI_REFERENCE.md) → [CONFIGURATION_SPEC.md](CONFIGURATION_SPEC.md) +Start with [TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md) → [API_REFERENCE.md](API_REFERENCE.md) → [DEPLOYMENT.md](DEPLOYMENT.md) **Product/Planning:** Start with [PRD.md](PRD.md) → [adr/README.md](adr/README.md) → [PHASE2_IMPLEMENTATION.md](PHASE2_IMPLEMENTATION.md) **AI Agent Developers:** -Start with [../claudedocs/](../claudedocs/) → [INTEGRATION_EXAMPLES.md](INTEGRATION_EXAMPLES.md) → [PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) → [TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md) +Start with [../claudedocs/](../claudedocs/) → [TOOL_ECOSYSTEM.md](TOOL_ECOSYSTEM.md) → [API_REFERENCE.md](API_REFERENCE.md) **Operators/DevOps:** -Start with [QUICK_REFERENCE.md](QUICK_REFERENCE.md) → [CLI_REFERENCE.md](CLI_REFERENCE.md) → [DEPLOYMENT.md](DEPLOYMENT.md) → [../scripts/README.md](../scripts/README.md) +Start with [QUICK_REFERENCE.md](QUICK_REFERENCE.md) → [DEPLOYMENT.md](DEPLOYMENT.md) → [../scripts/README.md](../scripts/README.md) ### By Task **Quick Command Lookup:** -[QUICK_REFERENCE.md](QUICK_REFERENCE.md) ⭐ → [CLI_REFERENCE.md](CLI_REFERENCE.md) - Start here for common operations +[QUICK_REFERENCE.md](QUICK_REFERENCE.md) ⭐ - Start here for common operations **Adding a New Tool:** [DEVELOPER_GUIDE.md#adding-tools](DEVELOPER_GUIDE.md#adding-tools) → [API_REFERENCE.md#tool-dataclass](API_REFERENCE.md#tool-dataclass) @@ -177,23 +133,14 @@ Start with [QUICK_REFERENCE.md](QUICK_REFERENCE.md) → [CLI_REFERENCE.md](CLI_R **Understanding Architecture:** [ARCHITECTURE.md#overview](ARCHITECTURE.md#overview) → [ARCHITECTURE.md#data-flow](ARCHITECTURE.md#data-flow) -**Using Phase 1 API (Audit):** +**Using Functions:** [FUNCTION_REFERENCE.md](FUNCTION_REFERENCE.md) → [API_REFERENCE.md](API_REFERENCE.md) -**Using Phase 2 API (Install/Upgrade):** -[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md) ⭐ → [INTEGRATION_EXAMPLES.md](INTEGRATION_EXAMPLES.md) - **Installing Tools:** -[INTEGRATION_EXAMPLES.md](INTEGRATION_EXAMPLES.md) → [CLI_REFERENCE.md](CLI_REFERENCE.md) → 
[../scripts/README.md](../scripts/README.md) +[../scripts/README.md](../scripts/README.md) → [DEPLOYMENT.md](DEPLOYMENT.md) **Debugging Issues:** -[ERROR_CATALOG.md](ERROR_CATALOG.md) ⭐ → [TROUBLESHOOTING.md](TROUBLESHOOTING.md) → [CLI_REFERENCE.md](CLI_REFERENCE.md) - -**Writing Tests:** -[TESTING.md](TESTING.md) → [DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md) - -**CI/CD Integration:** -[INTEGRATION_EXAMPLES.md#cicd-integration](INTEGRATION_EXAMPLES.md#cicd-integration) → [CLI_REFERENCE.md](CLI_REFERENCE.md) +[TROUBLESHOOTING.md#common-issues](TROUBLESHOOTING.md#common-issues) → [QUICK_REFERENCE.md](QUICK_REFERENCE.md) → [API_REFERENCE.md#environment-variables](API_REFERENCE.md#environment-variables) **Running in Production:** [DEPLOYMENT.md#makefile-targets](DEPLOYMENT.md#makefile-targets) → [DEPLOYMENT.md#offline-mode](DEPLOYMENT.md#offline-mode) @@ -202,7 +149,7 @@ Start with [QUICK_REFERENCE.md](QUICK_REFERENCE.md) → [CLI_REFERENCE.md](CLI_R [PRD.md](PRD.md) → [adr/README.md](adr/README.md) → [PHASE2_IMPLEMENTATION.md](PHASE2_IMPLEMENTATION.md) → [CONFIGURATION_SPEC.md](CONFIGURATION_SPEC.md) **Configuring Tool Installation:** -[CONFIGURATION_SPEC.md](CONFIGURATION_SPEC.md) → [INTEGRATION_EXAMPLES.md#configuration-patterns](INTEGRATION_EXAMPLES.md#configuration-patterns) +[CONFIGURATION_SPEC.md](CONFIGURATION_SPEC.md) → [ADR-006](adr/ADR-006-configuration-file-format.md) **Understanding Architectural Decisions:** [adr/README.md](adr/README.md) - Browse ADR index for specific decisions diff --git a/docs/INTEGRATION_EXAMPLES.md b/docs/INTEGRATION_EXAMPLES.md deleted file mode 100644 index 9e78123..0000000 --- a/docs/INTEGRATION_EXAMPLES.md +++ /dev/null @@ -1,1137 +0,0 @@ -# Integration Examples - -**Version:** 2.0.0-alpha.6 -**Last Updated:** 2025-10-13 - -Real-world integration patterns for CLI Audit tool in CI/CD pipelines, development workflows, and custom automation scenarios. 
- ---- - -## Table of Contents - -- [CI/CD Integration](#cicd-integration) -- [Development Workflows](#development-workflows) -- [Custom Toolchain Management](#custom-toolchain-management) -- [Python API Integration](#python-api-integration) -- [Configuration Patterns](#configuration-patterns) -- [Advanced Use Cases](#advanced-use-cases) - ---- - -## CI/CD Integration - -### GitHub Actions - -**Basic Tool Audit in CI:** - -```yaml -# .github/workflows/tool-audit.yml -name: Tool Audit -on: [push, pull_request] - -jobs: - audit: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Run CLI Audit - run: | - python cli_audit.py --format json --output tools.json - - - name: Upload Audit Results - uses: actions/upload-artifact@v4 - with: - name: tool-audit - path: tools.json -``` - -**Auto-Install Missing Tools:** - -```yaml -# .github/workflows/dev-setup.yml -name: Development Setup -on: [push] - -jobs: - setup-tools: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Cache Tool Installations - uses: actions/cache@v4 - with: - path: | - ~/.cargo/bin - ~/.local/bin - key: dev-tools-${{ hashFiles('.cli-audit.yml') }} - - - name: Install Missing Tools - run: | - python3 -c " - from cli_audit import bulk_install, load_config - - config = load_config('.cli-audit.yml') - result = bulk_install( - mode='missing', - config=config, - max_workers=8, - verbose=True - ) - - print(f'✅ Installed: {len(result.successes)}') - print(f'❌ Failed: {len(result.failures)}') - - if result.failures: - exit(1) - " - - - name: Verify Installation - run: | - rg --version - fd --version - hyperfine --version -``` - -**Tool Version Enforcement:** - -```yaml -# .github/workflows/version-check.yml -name: Tool Version Check -on: [pull_request] - -jobs: - version-check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Check Tool Versions - run: | - python3 -c " - from cli_audit import audit_tools, load_config, generate_snapshot - - config = load_config() - result = audit_tools(config=config, verbose=True) - - # Check for outdated tools - outdated = [ - entry for entry in result.entries - if entry.status == 'version_mismatch' - ] - - if outdated: - print('⚠️ Outdated tools detected:') - for entry in outdated: - print(f' {entry.tool_name}: {entry.found_version} ' - f'(expected: {entry.expected_version})') - exit(1) - " -``` - -### GitLab CI - -**Tool Audit Pipeline:** - -```yaml -# .gitlab-ci.yml -stages: - - audit - - install - - test - -audit-tools: - stage: audit - image: python:3.11-slim - script: - - python cli_audit.py --format json --output audit.json - - python cli_audit.py --format markdown > audit.md - artifacts: - paths: - - audit.json - - audit.md - reports: - dotenv: audit.env - -install-missing: - stage: install - image: python:3.11-slim - script: - - | - python3 -c " - from cli_audit import bulk_install, load_config - - config = load_config() - result = bulk_install( - mode='missing', - config=config, - max_workers=4, - verbose=True - ) - - if result.failures: - exit(1) - " - cache: - key: dev-tools-${CI_COMMIT_REF_SLUG} - paths: - - .cargo/ - - .local/ - -verify-tools: - stage: test - image: python:3.11-slim - script: - - python cli_audit.py --verify-only -``` - -**Parallel Tool Installation:** - -```yaml -# .gitlab-ci.yml -install-rust-tools: - stage: install - script: - - | - python3 -c " - from cli_audit import bulk_install, Config, Environment - 
from cli_audit.bulk import ToolSpec - - specs = [ - ToolSpec('ripgrep', 'ripgrep', 'latest', 'rust'), - ToolSpec('fd-find', 'fd-find', 'latest', 'rust'), - ToolSpec('bat', 'bat', 'latest', 'rust'), - ] - - result = bulk_install( - mode='explicit', - tool_names=[s.tool_name for s in specs], - max_workers=3, - verbose=True - ) - - print(result.summary() if hasattr(result, 'summary') else 'Done') - " - parallel: - matrix: - - TOOL_SET: [rust, python, node] -``` - ---- - -## Development Workflows - -### Local Development Setup - -**One-Command Setup:** - -```bash -#!/bin/bash -# scripts/setup-dev-env.sh -# Sets up complete development environment - -set -euo pipefail - -echo "🚀 Setting up development environment..." - -# 1. Audit current tools -python3 cli_audit.py --format compact - -# 2. Install missing tools -python3 -c " -from cli_audit import bulk_install, load_config, Environment, detect_environment - -env = detect_environment() -config = load_config('.cli-audit.yml') - -print(f'Environment: {env.mode} (confidence: {env.confidence:.0%})') - -result = bulk_install( - mode='missing', - config=config, - max_workers=8, - verbose=True -) - -print(f'\n✅ Installed: {len(result.successes)}') -for r in result.successes: - print(f' • {r.tool_name} v{r.installed_version}') - -if result.failures: - print(f'\n❌ Failed: {len(result.failures)}') - for r in result.failures: - print(f' • {r.tool_name}: {r.error_message}') - exit(1) -" - -echo "✨ Development environment ready!" -``` - -**Tool Upgrade Script:** - -```bash -#!/bin/bash -# scripts/upgrade-tools.sh -# Upgrades all development tools with backup - -python3 -c " -from cli_audit import bulk_upgrade, load_config, Environment - -config = load_config() - -print('🔍 Checking for available upgrades...') - -result = bulk_upgrade( - mode='outdated', - config=config, - max_workers=4, - force=False, # Prompt for breaking changes - skip_backup=False, # Create backups - verbose=True -) - -print(result.summary()) - -# Show what was upgraded -if result.upgrades: - print('\n✅ Upgraded:') - for upgrade in result.upgrades: - print(f' • {upgrade.tool_name}: ' - f'{upgrade.previous_version} → {upgrade.new_version}') - -# Show rollbacks -if result.rollbacks_executed > 0: - print(f'\n🔄 Automatic rollbacks: {result.rollbacks_executed}') -" -``` - -### Pre-Commit Hook - -**Tool Version Verification:** - -```bash -#!/bin/bash -# .git/hooks/pre-commit -# Verifies required tools are installed and at correct versions - -python3 -c " -from cli_audit import audit_tools, load_config - -config = load_config() -result = audit_tools(config=config, verbose=False) - -# Check for critical tools -critical_tools = {'ruff', 'mypy', 'black', 'pytest'} -missing_critical = [ - entry.tool_name - for entry in result.entries - if entry.tool_name in critical_tools and entry.status == 'not_found' -] - -if missing_critical: - print(f'❌ Critical tools missing: {missing_critical}') - print('Run: python cli_audit.py --install') - exit(1) - -# Check for outdated tools -outdated = [ - entry for entry in result.entries - if entry.status == 'version_mismatch' -] - -if outdated: - print('⚠️ Warning: Some tools are outdated') - for entry in outdated: - print(f' {entry.tool_name}: {entry.found_version} ' - f'(expected: {entry.expected_version})') - # Don't block commit, just warn -" -``` - -### Makefile Integration - -```makefile -# Makefile -.PHONY: audit install upgrade verify - -# Audit current tool state -audit: - @python3 cli_audit.py --format compact - -# Install missing tools -install: - @python3 
-c "from cli_audit import bulk_install, load_config; \ - config = load_config(); \ - result = bulk_install(mode='missing', config=config); \ - exit(0 if not result.failures else 1)" - -# Upgrade all tools -upgrade: - @python3 -c "from cli_audit import bulk_upgrade, load_config; \ - config = load_config(); \ - result = bulk_upgrade(mode='outdated', config=config, force=False); \ - print(result.summary())" - -# Verify tool installation -verify: - @python3 cli_audit.py --verify-only || \ - (echo "❌ Verification failed. Run 'make install'"; exit 1) - -# Full development setup -dev-setup: audit install verify - @echo "✨ Development environment ready!" -``` - ---- - -## Custom Toolchain Management - -### Language-Specific Toolchains - -**Python Development Tools:** - -```python -# scripts/setup-python-tools.py -"""Install Python development toolchain.""" - -from cli_audit import bulk_install, load_config, Environment -from cli_audit.bulk import ToolSpec - -def setup_python_toolchain(verbose: bool = True): - """Install complete Python development toolchain.""" - - # Define Python tool specs - specs = [ - ToolSpec("ruff", "ruff", "latest", "python"), - ToolSpec("black", "black", "latest", "python"), - ToolSpec("mypy", "mypy", "latest", "python"), - ToolSpec("pytest", "pytest", "latest", "python"), - ToolSpec("pytest-cov", "pytest-cov", "latest", "python"), - ToolSpec("ipython", "ipython", "latest", "python"), - ] - - tool_names = [spec.tool_name for spec in specs] - - result = bulk_install( - mode="explicit", - tool_names=tool_names, - max_workers=6, - atomic=True, # Rollback on any failure - verbose=verbose - ) - - print(f"\n✅ Installed: {len(result.successes)}") - print(f"❌ Failed: {len(result.failures)}") - - if result.rollback_script: - print(f"📜 Rollback script: {result.rollback_script}") - - return len(result.failures) == 0 - -if __name__ == "__main__": - import sys - sys.exit(0 if setup_python_toolchain() else 1) -``` - -**Rust Development Tools:** - -```python -# scripts/setup-rust-tools.py -"""Install Rust development toolchain.""" - -from cli_audit import bulk_install -from cli_audit.bulk import ToolSpec - -def setup_rust_toolchain(): - """Install complete Rust development toolchain.""" - - specs = [ - ToolSpec("ripgrep", "ripgrep", "latest", "rust"), - ToolSpec("fd-find", "fd-find", "latest", "rust"), - ToolSpec("bat", "bat", "latest", "rust"), - ToolSpec("exa", "exa", "latest", "rust"), - ToolSpec("hyperfine", "hyperfine", "latest", "rust"), - ToolSpec("tokei", "tokei", "latest", "rust"), - ] - - result = bulk_install( - mode="explicit", - tool_names=[s.tool_name for s in specs], - max_workers=6, - fail_fast=False, # Continue on failures - verbose=True - ) - - return result - -if __name__ == "__main__": - result = setup_rust_toolchain() - print(f"\n{'✅' if not result.failures else '⚠️'} Setup complete") -``` - -### Multi-Environment Configuration - -**Environment-Aware Configuration:** - -```yaml -# .cli-audit.yml -version: 1 - -# Environment detection -environment: - mode: auto # auto, ci, server, workstation - -# Tool configurations -tools: - # Core tools (all environments) - ruff: - version: ">=0.1.0" - install_on: [ci, server, workstation] - - mypy: - version: ">=1.7.0" - install_on: [ci, server, workstation] - - # Development-only tools - ipython: - version: "latest" - install_on: [workstation] - - hyperfine: - version: "latest" - install_on: [workstation] - - # CI-only tools - pytest-cov: - version: "latest" - install_on: [ci] - -# Environment-specific preferences -preferences: - 
reconciliation: parallel - breaking_changes: warn # warn in dev, reject in CI - auto_upgrade: true - timeout_seconds: 10 - max_workers: 16 - - # Package manager preferences - package_managers: - python: [uv, pipx, pip] - rust: [cargo] - node: [npm] - -# Tool presets -presets: - python-dev: [ruff, black, mypy, pytest, ipython] - rust-dev: [ripgrep, fd-find, bat, exa, hyperfine] - minimal: [ruff, mypy, pytest] -``` - -**Environment-Specific Installation:** - -```python -# scripts/install-by-env.py -"""Install tools based on current environment.""" - -from cli_audit import ( - bulk_install, - load_config, - detect_environment, -) - -def install_environment_tools(): - """Install tools appropriate for current environment.""" - - # Detect environment - env = detect_environment() - config = load_config() - - print(f"Environment: {env.mode} (confidence: {env.confidence:.0%})") - print(f"Indicators: {', '.join(env.indicators)}") - - # Filter tools by environment - tools_for_env = [] - for tool_name, tool_config in config.tools.items(): - install_on = getattr(tool_config, 'install_on', ['all']) - if env.mode in install_on or 'all' in install_on: - tools_for_env.append(tool_name) - - if not tools_for_env: - print("No tools to install for this environment") - return - - print(f"\nInstalling {len(tools_for_env)} tools: {tools_for_env}") - - result = bulk_install( - mode="explicit", - tool_names=tools_for_env, - config=config, - env=env, - max_workers=8, - verbose=True - ) - - print(f"\n✅ Installed: {len(result.successes)}") - print(f"❌ Failed: {len(result.failures)}") - -if __name__ == "__main__": - install_environment_tools() -``` - ---- - -## Python API Integration - -### Custom Tool Manager - -**Tool Manager Class:** - -```python -# tool_manager.py -"""Custom tool manager using CLI Audit API.""" - -from dataclasses import dataclass -from typing import Sequence -from cli_audit import ( - audit_tools, - bulk_install, - bulk_upgrade, - load_config, - detect_environment, - Config, - Environment, -) -from cli_audit.bulk import ProgressTracker - -@dataclass -class ToolManager: - """High-level tool management interface.""" - - config: Config - env: Environment - verbose: bool = False - - @classmethod - def create(cls, config_path: str | None = None, verbose: bool = False): - """Create tool manager with auto-detection.""" - config = load_config(config_path, verbose=verbose) - env = detect_environment(verbose=verbose) - return cls(config=config, env=env, verbose=verbose) - - def audit(self): - """Audit current tool state.""" - result = audit_tools(config=self.config, verbose=self.verbose) - - print(f"Tool Audit Results:") - print(f" ✅ Available: {result.summary['available']}") - print(f" ❌ Missing: {result.summary['not_found']}") - print(f" ⚠️ Version mismatch: {result.summary['version_mismatch']}") - - return result - - def install_missing(self): - """Install all missing tools.""" - result = bulk_install( - mode="missing", - config=self.config, - env=self.env, - max_workers=8, - verbose=self.verbose - ) - - print(f"\nInstallation Results:") - print(f" ✅ Installed: {len(result.successes)}") - print(f" ❌ Failed: {len(result.failures)}") - - return result - - def upgrade_all(self, force: bool = False): - """Upgrade all tools with available updates.""" - result = bulk_upgrade( - mode="outdated", - config=self.config, - env=self.env, - max_workers=4, - force=force, - verbose=self.verbose - ) - - print(result.summary()) - return result - - def install_preset(self, preset_name: str): - """Install tools from 
preset.""" - result = bulk_install( - mode="preset", - preset_name=preset_name, - config=self.config, - env=self.env, - verbose=self.verbose - ) - - return result - -# Example usage -if __name__ == "__main__": - manager = ToolManager.create(verbose=True) - - # Audit current state - audit_result = manager.audit() - - # Install missing tools - if audit_result.summary['not_found'] > 0: - manager.install_missing() - - # Upgrade outdated tools - manager.upgrade_all(force=False) -``` - -### Progress Tracking Integration - -**Real-Time Progress Display:** - -```python -# progress_example.py -"""Example of real-time progress tracking.""" - -import time -from cli_audit import bulk_install, load_config -from cli_audit.bulk import ProgressTracker - -def print_progress(tool_name: str, status: str, message: str): - """Progress callback for real-time updates.""" - icons = { - "pending": "⏳", - "in_progress": "🔄", - "success": "✅", - "failed": "❌", - "skipped": "⏭️", - } - icon = icons.get(status, "❓") - print(f"{icon} {tool_name}: {status} - {message}") - -def install_with_progress(): - """Install tools with real-time progress tracking.""" - - # Create progress tracker with callback - tracker = ProgressTracker() - tracker.register_callback(print_progress) - - config = load_config() - - print("Starting installation...\n") - - result = bulk_install( - mode="missing", - config=config, - max_workers=4, - progress_tracker=tracker, - verbose=False # Disable verbose to see clean progress - ) - - # Get final summary - summary = tracker.get_summary() - print(f"\nFinal Summary:") - print(f" Success: {summary['success']}") - print(f" Failed: {summary['failed']}") - print(f" Skipped: {summary['skipped']}") - - return result - -if __name__ == "__main__": - install_with_progress() -``` - -### Configuration Management - -**Dynamic Configuration:** - -```python -# config_builder.py -"""Build configuration programmatically.""" - -from cli_audit.config import Config, Preferences, ToolConfig - -def build_config(): - """Build configuration programmatically.""" - - # Define preferences - prefs = Preferences( - reconciliation="parallel", - breaking_changes="warn", - auto_upgrade=True, - timeout_seconds=10, - max_workers=16, - package_managers={ - "python": ["uv", "pipx", "pip"], - "rust": ["cargo"], - } - ) - - # Define tools - tools = { - "ruff": ToolConfig(version=">=0.1.0", priority=1), - "mypy": ToolConfig(version=">=1.7.0", priority=1), - "black": ToolConfig(version="latest", priority=2), - "pytest": ToolConfig(version="latest", priority=1), - } - - # Build config - config = Config( - version=1, - environment_mode="auto", - tools=tools, - preferences=prefs, - ) - - return config - -# Usage -if __name__ == "__main__": - from cli_audit import bulk_install - - config = build_config() - - result = bulk_install( - mode="all", - config=config, - verbose=True - ) -``` - ---- - -## Configuration Patterns - -### Minimal Configuration - -```yaml -# .cli-audit-minimal.yml -version: 1 - -tools: - ruff: {} - mypy: {} - pytest: {} - -preferences: - breaking_changes: warn -``` - -### Comprehensive Configuration - -```yaml -# .cli-audit-comprehensive.yml -version: 1 - -# Environment detection -environment: - mode: auto - -# Tool definitions -tools: - # Python tools - ruff: - version: ">=0.1.0" - priority: 1 - package_manager: uv - - mypy: - version: ">=1.7.0" - priority: 1 - package_manager: pipx - - black: - version: "latest" - priority: 2 - - pytest: - version: ">=7.4.0" - priority: 1 - - # Rust tools - ripgrep: - version: "latest" - 
priority: 2 - package_manager: cargo - - fd-find: - version: "latest" - priority: 2 - - bat: - version: "latest" - priority: 3 - -# Preferences -preferences: - reconciliation: parallel - breaking_changes: warn - auto_upgrade: true - timeout_seconds: 10 - max_workers: 16 - cache_ttl_seconds: 3600 - - package_managers: - python: [uv, pipx, pip] - rust: [cargo] - node: [npm, pnpm, yarn] - - bulk: - fail_fast: false - atomic: false - retry_failed: true - max_retries: 3 - -# Tool presets -presets: - essential: [ruff, mypy, pytest] - python-dev: [ruff, mypy, black, pytest, ipython] - rust-dev: [ripgrep, fd-find, bat, exa] - full: [ruff, mypy, black, pytest, ripgrep, fd-find, bat] -``` - -### Multi-Project Configuration - -**Project-Specific Configuration:** - -```yaml -# project-a/.cli-audit.yml -version: 1 - -tools: - ruff: - version: "0.1.6" # Pinned version for consistency - mypy: - version: "1.7.1" - pytest: - version: "7.4.3" - -preferences: - breaking_changes: reject # Strict for production -``` - -**User-Level Configuration:** - -```yaml -# ~/.config/cli-audit/config.yml -version: 1 - -# Global preferences -preferences: - reconciliation: parallel - breaking_changes: warn - max_workers: 16 - - package_managers: - python: [uv, pipx, pip] - rust: [cargo] - -# Global tool defaults -tools: - ruff: - priority: 1 - mypy: - priority: 1 -``` - ---- - -## Advanced Use Cases - -### Dependency Resolution - -**Tools with Dependencies:** - -```python -# install_with_deps.py -"""Install tools with dependency resolution.""" - -from cli_audit import bulk_install, Config -from cli_audit.bulk import ToolSpec - -# Define tools with dependencies -specs = [ - ToolSpec("gcc", "gcc", "latest", dependencies=()), - ToolSpec("make", "make", "latest", dependencies=("gcc",)), - ToolSpec("cmake", "cmake", "latest", dependencies=("make", "gcc")), - ToolSpec("ninja", "ninja", "latest", dependencies=()), -] - -# Bulk install will resolve dependencies automatically -result = bulk_install( - mode="explicit", - tool_names=[s.tool_name for s in specs], - verbose=True -) - -print(f"Installed in dependency order:") -for success in result.successes: - print(f" {success.tool_name} v{success.installed_version}") -``` - -### Atomic Operations - -**All-or-Nothing Installation:** - -```python -# atomic_install.py -"""Atomic installation with automatic rollback.""" - -from cli_audit import bulk_install, load_config - -config = load_config() - -print("Starting atomic installation...") - -result = bulk_install( - mode="preset", - preset_name="python-dev", - config=config, - atomic=True, # Rollback everything on any failure - verbose=True -) - -if result.failures: - print(f"\n❌ Installation failed. 
All changes rolled back.") - print(f"Rollback script: {result.rollback_script}") -else: - print(f"\n✅ All tools installed successfully!") -``` - -### Reconciliation Strategies - -**Parallel Installation Reconciliation:** - -```python -# reconcile_example.py -"""Reconcile multiple tool installations.""" - -from cli_audit import reconcile_tool, Config, load_config - -config = load_config() - -# Parallel mode: keep all installations -result = reconcile_tool( - tool_name="python", - mode="parallel", - config=config, - verbose=True -) - -print(f"Found {len(result.installations_found)} Python installations:") -for install in result.installations_found: - print(f" • {install.version} at {install.path} (via {install.package_manager})") - -print(f"\nKept all installations (parallel mode)") -``` - -**Aggressive Reconciliation:** - -```python -# aggressive_reconcile.py -"""Remove duplicate installations.""" - -from cli_audit import reconcile_tool, Config - -result = reconcile_tool( - tool_name="ripgrep", - mode="aggressive", # Remove non-preferred installations - config=Config(), - dry_run=False, - verbose=True -) - -print(f"Installations found: {len(result.installations_found)}") -print(f"Preferred: {result.preferred_installation}") -print(f"Removed: {len(result.installations_removed)}") -``` - -### Breaking Change Management - -**Breaking Change Policy:** - -```python -# breaking_changes_example.py -"""Handle breaking changes during upgrades.""" - -from cli_audit import upgrade_tool, Config, load_config - -config = load_config() - -# Upgrade with breaking change protection -result = upgrade_tool( - tool_name="ruff", - target_version="latest", - config=config, - force=False, # Don't force breaking changes - skip_backup=False, # Create backup for rollback - verbose=True -) - -if result.breaking_change: - if result.breaking_change_accepted: - print(f"✅ Breaking change upgrade: " - f"{result.previous_version} → {result.new_version}") - else: - print(f"❌ Breaking change rejected by policy") - -if result.rollback_executed: - print(f"🔄 Automatic rollback: {'success' if result.rollback_success else 'failed'}") -``` - -### Custom Package Manager Selection - -**Package Manager Hierarchy:** - -```python -# pm_selection.py -"""Custom package manager selection logic.""" - -from cli_audit.package_managers import select_package_manager -from cli_audit.config import Config, Preferences - -# Build config with custom PM hierarchy -config = Config( - version=1, - preferences=Preferences( - package_managers={ - "python": ["uv", "pipx", "pip"], # Try uv first - "rust": ["cargo"], - "node": ["pnpm", "npm", "yarn"], - } - ) -) - -# Select PM for Python tool -pm_name, reason = select_package_manager( - tool_name="ruff", - language="python", - config=config, - verbose=True -) - -print(f"Selected: {pm_name}") -print(f"Reason: {reason}") -``` - ---- - -## Related Documentation - -- **[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md)** - Complete API documentation -- **[CLI_REFERENCE.md](CLI_REFERENCE.md)** - Command-line reference -- **[ERROR_CATALOG.md](ERROR_CATALOG.md)** - Error reference and troubleshooting -- **[TESTING.md](TESTING.md)** - Testing guide -- **[ARCHITECTURE.md](../ARCHITECTURE.md)** - System architecture - ---- - -**Last Updated:** 2025-10-13 -**Maintainers:** See [CONTRIBUTING.md](../CONTRIBUTING.md) diff --git a/docs/PHASE2_COMPLETION_REPORT.md b/docs/PHASE2_COMPLETION_REPORT.md index 91a96d2..534456c 100644 --- a/docs/PHASE2_COMPLETION_REPORT.md +++ b/docs/PHASE2_COMPLETION_REPORT.md @@ -1,7 +1,7 @@ # 
Phase 2 Completion Report -**Version:** 2.0.0-alpha.6 -**Completion Date:** 2025-10-15 +**Version:** 2.0.0-alpha.5 +**Completion Date:** 2025-10-09 **Status:** ✅ COMPLETE - Ready for Beta --- diff --git a/docs/QUICK_REFERENCE.md b/docs/QUICK_REFERENCE.md index 13c4d8e..bcb5f2d 100644 --- a/docs/QUICK_REFERENCE.md +++ b/docs/QUICK_REFERENCE.md @@ -19,15 +19,6 @@ make audit-offline # Interactive upgrade guide make upgrade -# Complete system upgrade (5 stages: data → managers → runtimes → user managers → tools) -make upgrade-all - -# Preview system upgrade (dry-run) -make upgrade-all-dry-run - -# Check PATH configuration -make check-path - # Single tool check python3 cli_audit.py --only ripgrep | python3 smart_column.py -s "|" -t ``` diff --git a/docs/TESTING.md b/docs/TESTING.md deleted file mode 100644 index 614c7ee..0000000 --- a/docs/TESTING.md +++ /dev/null @@ -1,930 +0,0 @@ -# Testing Guide - -**Version:** 2.0.0-alpha.6 -**Last Updated:** 2025-10-13 - -Complete testing guide for AI CLI Preparation contributors, covering unit tests, integration tests, coverage requirements, and testing best practices. - ---- - -## Table of Contents - -- [Quick Start](#quick-start) -- [Test Organization](#test-organization) -- [Running Tests](#running-tests) -- [Writing Tests](#writing-tests) -- [Coverage Requirements](#coverage-requirements) -- [Mocking and Fixtures](#mocking-and-fixtures) -- [Testing Patterns](#testing-patterns) -- [CI/CD Integration](#cicd-integration) -- [Troubleshooting](#troubleshooting) - ---- - -## Quick Start - -```bash -# Install development dependencies -pip install -e ".[dev]" -# OR -pip install -r requirements-dev.txt - -# Run all tests -pytest - -# Run with coverage -pytest --cov=cli_audit --cov-report=term --cov-report=html - -# Run specific module tests -pytest tests/test_config.py - -# Run tests in parallel -pytest -n auto - -# Watch mode (requires pytest-watch) -ptw -``` - ---- - -## Test Organization - -### Directory Structure - -``` -tests/ -├── __init__.py # Test package initialization -├── fixtures/ # Test fixtures and sample data -│ ├── config_valid.yml # Valid config file -│ ├── config_minimal.yml # Minimal config -│ ├── config_invalid_*.yml # Invalid configs for error testing -│ └── ... 
-├── integration/              # End-to-end integration tests
-│   └── test_e2e_install.py   # E2E installation workflow
-├── test_config.py            # Configuration parsing tests
-├── test_environment.py       # Environment detection tests
-├── test_installer.py         # Single-tool installation tests
-├── test_bulk.py              # Bulk operations tests
-├── test_upgrade.py           # Upgrade management tests
-├── test_reconcile.py         # Reconciliation tests
-├── test_package_managers.py  # Package manager selection tests
-├── test_install_plan.py      # Installation plan generation tests
-└── test_logging.py           # Logging framework tests
-```
-
-### Test Types
-
-**Unit Tests** (tests/*.py)
-- Test individual functions and classes in isolation
-- Mock external dependencies (network, filesystem, subprocess)
-- Fast execution (<1s per module)
-- Target coverage: 80%+
-
-**Integration Tests** (tests/integration/*.py)
-- Test complete workflows end-to-end
-- May interact with real package managers (in isolated environments)
-- Slower execution (5-30s)
-- Target coverage: Key workflows
-
----
-
-## Running Tests
-
-### Basic Commands
-
-```bash
-# Run all tests
-pytest
-
-# Verbose output
-pytest -v
-
-# Very verbose (show test function names)
-pytest -vv
-
-# Stop on first failure
-pytest -x
-
-# Show local variables on failure
-pytest -l
-
-# Run only failed tests from last run
-pytest --lf
-
-# Run failed tests first, then others
-pytest --ff
-```
-
-### Running Specific Tests
-
-```bash
-# Single test file
-pytest tests/test_config.py
-
-# Single test class
-pytest tests/test_config.py::TestToolConfig
-
-# Single test method
-pytest tests/test_config.py::TestToolConfig::test_tool_config_defaults
-
-# Pattern matching
-pytest -k "test_config"
-pytest -k "test_tool_config_defaults or test_preferences_defaults"
-```
-
-### Coverage Reporting
-
-```bash
-# Terminal report
-pytest --cov=cli_audit --cov-report=term
-
-# HTML report (opens in browser)
-pytest --cov=cli_audit --cov-report=html
-open htmlcov/index.html  # macOS
-xdg-open htmlcov/index.html  # Linux
-
-# Missing lines report
-pytest --cov=cli_audit --cov-report=term-missing
-
-# Multiple formats
-pytest --cov=cli_audit --cov-report=term --cov-report=html --cov-report=xml
-
-# Minimum coverage threshold (fail if below)
-pytest --cov=cli_audit --cov-fail-under=80
-```
-
-### Parallel Execution
-
-```bash
-# Auto-detect CPU count (requires pytest-xdist)
-pytest -n auto
-
-# Specific number of workers
-pytest -n 4
-
-# Parallel with coverage (requires pytest-xdist and pytest-cov)
-pytest -n auto --cov=cli_audit
-```
-
-### Test Markers
-
-```bash
-# Run only unit tests
-pytest -m unit
-
-# Run only integration tests
-pytest -m integration
-
-# Skip slow tests
-pytest -m "not slow"
-
-# Run tests with specific marker
-pytest -m network     # Tests requiring network
-pytest -m subprocess  # Tests spawning processes
-```
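-
-Marker-based selection only works when the markers are registered with pytest;
-before filtering on one, list what the current configuration actually defines:
-
-```bash
-# Show all registered markers (built-in and project-defined)
-pytest --markers
-```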
-
----
-
-## Writing Tests
-
-### Test Structure
-
-```python
-"""
-Tests for <module_name>.
-
-Target coverage: 85%+
-"""
-
-import pytest
-from unittest.mock import patch, MagicMock
-
-from cli_audit.<module_name> import function_to_test
-
-
-class TestFunctionName:
-    """Tests for function_name function."""
-
-    def test_basic_case(self):
-        """Test basic functionality with valid input."""
-        result = function_to_test("input")
-        assert result == "expected"
-
-    def test_edge_case(self):
-        """Test edge case handling."""
-        result = function_to_test("")
-        assert result is None
-
-    def test_error_handling(self):
-        """Test that errors are raised correctly."""
-        with pytest.raises(ValueError, match="Invalid input"):
-            function_to_test(None)
-```
-
-### Naming Conventions
-
-**Test Classes:**
-- `TestFunctionName` - For testing functions
-- `TestClassName` - For testing classes
-- Group related tests in same class
-
-**Test Methods:**
-- `test_<function>_<scenario>` - Descriptive names
-- `test_function_with_valid_input`
-- `test_function_with_empty_string`
-- `test_function_raises_error_on_none`
-
-**Good Examples:**
-```python
-def test_install_python_tool_with_pipx()
-def test_config_merge_overrides_lower_priority()
-def test_environment_detect_ci_with_github_actions()
-```
-
-**Bad Examples:**
-```python
-def test_1()  # Too vague
-def test_install()  # Not specific enough
-def test_function()  # Missing scenario
-```
-
-### Test Documentation
-
-```python
-def test_install_tool_with_retry_logic(self):
-    """
-    Test that install_tool retries on transient failures.
-
-    Scenario:
-    - Network failure on first attempt
-    - Success on second attempt
-    - Verify retry count and backoff delay
-    """
-    # Test implementation
-```
-
-### Assertions
-
-```python
-# Basic assertions
-assert result == expected
-assert result is not None
-assert result in [1, 2, 3]
-assert len(items) == 5
-assert "substring" in text
-
-# Pytest assertions (more informative failures)
-assert result == expected, "Custom failure message"
-
-# Approximate comparisons
-assert result == pytest.approx(3.14, rel=0.01)
-
-# Exception assertions
-with pytest.raises(ValueError):
-    function_that_raises()
-
-with pytest.raises(ValueError, match="specific message"):
-    function_that_raises()
-
-# Warning assertions
-with pytest.warns(UserWarning):
-    function_that_warns()
-```
-
----
-
-## Coverage Requirements
-
-### Target Coverage
-
-| Module | Target | Notes |
-|--------|--------|-------|
-| `config.py` | 85%+ | Configuration is critical |
-| `environment.py` | 90%+ | Well-tested, few edge cases |
-| `installer.py` | 85%+ | Complex retry logic |
-| `bulk.py` | 80%+ | Parallel execution complexity |
-| `upgrade.py` | 85%+ | Breaking change detection |
-| `reconcile.py` | 80%+ | Multi-installation handling |
-| `package_managers.py` | 85%+ | PM selection logic |
-| `install_plan.py` | 85%+ | Plan generation |
-| `breaking_changes.py` | 90%+ | Policy enforcement |
-| `logging_config.py` | 80%+ | Logging setup |
-| `common.py` | 85%+ | Utility functions |
-
-### Excluded from Coverage
-
-Lines excluded via `# pragma: no cover`:
-- Abstract methods
-- `if __name__ == "__main__":`
-- Type checking blocks (`if TYPE_CHECKING:`)
-- Debug-only code paths
-- `__repr__` methods (unless critical)
-
-```python
-def debug_only_function():  # pragma: no cover
-    """Only used for debugging, not tested."""
-    pass
-```
-
-### Checking Coverage
-
-```bash
-# Overall coverage
-pytest --cov=cli_audit --cov-report=term
-
-# Per-module coverage
-pytest --cov=cli_audit.config --cov-report=term
-
-# Find untested code
-pytest --cov=cli_audit --cov-report=term-missing | grep -v "100%"
-
-# Coverage badge (for README)
-coverage-badge -o coverage.svg -f
-```
-
----
-
-## Mocking and Fixtures
-
-### 
unittest.mock Patterns - -```python -from unittest.mock import patch, MagicMock, mock_open - -# Mock function return value -@patch('cli_audit.installer.subprocess.run') -def test_install_success(mock_run): - mock_run.return_value = MagicMock(returncode=0, stdout="Success") - result = install_tool(...) - assert result.success - -# Mock multiple calls -mock_run.side_effect = [ - MagicMock(returncode=1), # First call fails - MagicMock(returncode=0), # Second succeeds -] - -# Mock environment variables -@patch.dict(os.environ, {"CI": "true"}, clear=True) -def test_detect_ci(): - env = detect_environment() - assert env.mode == "ci" - -# Mock file operations -@patch("builtins.open", mock_open(read_data="version: 1")) -def test_read_config(): - config = load_config("config.yml") - assert config.version == 1 - -# Mock network requests -@patch('urllib.request.urlopen') -def test_fetch_version(mock_urlopen): - mock_response = MagicMock() - mock_response.read.return_value = b'{"version": "1.2.3"}' - mock_urlopen.return_value.__enter__.return_value = mock_response - - version = fetch_latest_version("tool") - assert version == "1.2.3" -``` - -### pytest Fixtures - -```python -import pytest - -# Simple fixture -@pytest.fixture -def sample_config(): - """Provide sample configuration for tests.""" - return Config( - environment_mode="workstation", - tools={"python": ToolConfig(version="3.12.*")}, - ) - -def test_with_fixture(sample_config): - assert sample_config.environment_mode == "workstation" - -# Fixture with teardown -@pytest.fixture -def temp_config_file(tmp_path): - """Create temporary config file.""" - config_file = tmp_path / "config.yml" - config_file.write_text("version: 1") - yield str(config_file) - # Cleanup happens automatically with tmp_path - -# Parametrized fixture -@pytest.fixture(params=["ci", "server", "workstation"]) -def environment_mode(request): - """Provide different environment modes.""" - return request.param - -def test_modes(environment_mode): - # Runs 3 times with different modes - env = detect_environment(override=environment_mode) - assert env.mode == environment_mode - -# Module-scoped fixture (setup once per module) -@pytest.fixture(scope="module") -def expensive_setup(): - """Setup that's expensive to create.""" - data = load_large_dataset() - yield data - cleanup(data) -``` - -### Fixture Location - -**conftest.py** (shared fixtures): -```python -# tests/conftest.py -import pytest - -@pytest.fixture -def mock_network(): - """Mock network calls for all tests.""" - with patch('urllib.request.urlopen') as mock: - yield mock - -@pytest.fixture(autouse=True) -def reset_caches(): - """Auto-reset caches before each test.""" - from cli_audit import reconcile, upgrade - reconcile.clear_detection_cache() - upgrade.clear_version_cache() -``` - ---- - -## Testing Patterns - -### Testing Configuration - -```python -class TestConfigLoading: - """Test configuration loading and validation.""" - - def test_load_valid_config(self, tmp_path): - """Test loading valid YAML configuration.""" - config_file = tmp_path / "config.yml" - config_file.write_text(""" -version: 1 -environment: - mode: workstation -""") - - config = load_config_file(str(config_file)) - assert config is not None - assert config.environment_mode == "workstation" - - def test_config_validation_error(self): - """Test that invalid config raises ValueError.""" - with pytest.raises(ValueError, match="Unsupported config version"): - Config(version=999) -``` - -### Testing Environment Detection - -```python -class 
TestEnvironmentDetection:
-    """Test environment detection logic."""
-
-    @patch.dict(os.environ, {"CI": "true"}, clear=True)
-    def test_detect_ci(self):
-        """Test CI detection with CI environment variable."""
-        env = detect_environment()
-        assert env.mode == "ci"
-        assert env.confidence >= 0.9
-        assert any("CI" in ind for ind in env.indicators)
-
-    @patch("cli_audit.environment.get_active_user_count", return_value=5)
-    @patch("cli_audit.environment.get_system_uptime_days", return_value=60)
-    def test_detect_server(self, mock_uptime, mock_users):
-        """Test server detection with mocked system info."""
-        env = detect_environment()
-        assert env.mode == "server"
-```
-
-### Testing Installation
-
-```python
-class TestInstaller:
-    """Test installation logic."""
-
-    @patch('subprocess.run')
-    def test_install_success(self, mock_run):
-        """Test successful installation."""
-        mock_run.return_value = MagicMock(
-            returncode=0,
-            stdout="Successfully installed"
-        )
-
-        result = install_tool(
-            tool_name="ripgrep",
-            package_name="ripgrep",
-            target_version="latest",
-            config=Config(),
-            env=Environment(mode="workstation", confidence=1.0),
-            language="rust",
-        )
-
-        assert result.success
-        assert result.tool_name == "ripgrep"
-        mock_run.assert_called_once()
-
-    @patch('subprocess.run')
-    def test_install_retry_on_failure(self, mock_run):
-        """Test retry logic on transient failures."""
-        mock_run.side_effect = [
-            MagicMock(returncode=1),  # First attempt fails
-            MagicMock(returncode=0),  # Second succeeds
-        ]
-
-        result = install_tool(...)
-        assert result.success
-        assert mock_run.call_count == 2
-```
-
-### Testing Bulk Operations
-
-```python
-class TestBulkOperations:
-    """Test bulk installation operations."""
-
-    @patch('cli_audit.bulk.install_tool')
-    def test_bulk_install_parallel(self, mock_install):
-        """Test parallel bulk installation."""
-        mock_install.return_value = InstallResult(
-            tool_name="test",
-            success=True,
-            installed_version="1.0.0",
-            package_manager_used="cargo",
-            steps_completed=(),
-            duration_seconds=1.0,
-        )
-
-        result = bulk_install(
-            mode="explicit",
-            tool_names=["tool1", "tool2", "tool3"],
-            config=Config(),
-            env=Environment(mode="workstation", confidence=1.0),
-            max_workers=3,
-        )
-
-        assert len(result.successes) == 3
-        assert mock_install.call_count == 3
-```
-
-### Testing Error Handling
-
-```python
-class TestErrorHandling:
-    """Test error handling and recovery."""
-
-    def test_invalid_version_format(self):
-        """Test error on invalid version format."""
-        with pytest.raises(ValueError, match="Invalid version"):
-            validate_version("not-a-version")
-
-    def test_network_timeout(self):
-        """Test timeout handling."""
-        with patch('urllib.request.urlopen') as mock:
-            mock.side_effect = urllib.error.URLError("Timeout")
-
-            result = fetch_latest_version("tool", timeout=1)
-            assert result is None
-
-    def test_rollback_on_failure(self):
-        """Test automatic rollback on installation failure."""
-        with patch('cli_audit.installer.install_tool') as mock_install:
-            mock_install.return_value = InstallResult(success=False)
-
-            result = upgrade_tool("tool", config=Config())
-            assert result.rollback_executed
-```
-
-### Parametrized Tests
-
-```python
-@pytest.mark.parametrize("mode,expected", [
-    ("ci", "ci"),
-    ("server", "server"),
-    ("workstation", "workstation"),
-])
-def test_environment_modes(mode, expected):
-    """Test all environment modes."""
-    env = detect_environment(override=mode)
-    assert env.mode == expected
-
-@pytest.mark.parametrize("version,target,is_breaking", [
-    ("1.0.0", 
"2.0.0", True), # Major version bump - ("1.0.0", "1.1.0", False), # Minor version bump - ("1.0.0", "1.0.1", False), # Patch version bump -]) -def test_breaking_change_detection(version, target, is_breaking): - """Test breaking change detection for various version jumps.""" - result = is_major_upgrade(version, target) - assert result == is_breaking -``` - ---- - -## CI/CD Integration - -### GitHub Actions Workflow - -```yaml -# .github/workflows/ci.yml -name: CI - -on: - push: - branches: [main, develop] - pull_request: - branches: [main, develop] - -jobs: - test: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.9', '3.10', '3.11', '3.12'] - - steps: - - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install dependencies - run: | - pip install -e ".[dev]" - - - name: Lint with flake8 - run: flake8 cli_audit tests - - - name: Type check with mypy - run: mypy cli_audit - - - name: Test with pytest - run: | - pytest --cov=cli_audit --cov-report=xml --cov-report=term - - - name: Upload coverage - uses: codecov/codecov-action@v3 - with: - file: ./coverage.xml -``` - -### Running Tests Locally (Pre-commit) - -```bash -# Format code -black cli_audit tests -isort cli_audit tests - -# Lint -flake8 cli_audit tests - -# Type check -mypy cli_audit - -# Run tests with coverage -pytest --cov=cli_audit --cov-fail-under=80 - -# Security checks -bandit -r cli_audit -safety check -``` - -### Pre-commit Hook - -Create `.git/hooks/pre-commit`: - -```bash -#!/bin/bash -set -e - -echo "Running pre-commit checks..." - -# Format -black --check cli_audit tests || { echo "❌ black formatting failed"; exit 1; } - -# Lint -flake8 cli_audit tests || { echo "❌ flake8 linting failed"; exit 1; } - -# Type check -mypy cli_audit || { echo "❌ mypy type checking failed"; exit 1; } - -# Tests -pytest --cov=cli_audit --cov-fail-under=80 || { echo "❌ tests failed"; exit 1; } - -echo "✅ All checks passed" -``` - -Make executable: -```bash -chmod +x .git/hooks/pre-commit -``` - ---- - -## Troubleshooting - -### Common Issues - -**ImportError: No module named 'cli_audit'** -```bash -# Install package in editable mode -pip install -e . -``` - -**Tests can't find fixtures** -```bash -# Ensure pytest.ini testpaths is correct -# Check that fixtures directory exists -ls -la tests/fixtures/ -``` - -**Mocks not working** -```python -# Ensure you're patching the right location -# Patch where it's used, not where it's defined -@patch('cli_audit.installer.subprocess.run') # ✓ Correct -@patch('subprocess.run') # ✗ Wrong location -``` - -**Coverage too low** -```bash -# Find untested code -pytest --cov=cli_audit --cov-report=term-missing - -# Focus on critical paths first -pytest --cov=cli_audit.installer --cov-report=term-missing -``` - -**Slow tests** -```bash -# Profile slow tests -pytest --durations=10 - -# Run in parallel -pytest -n auto - -# Skip slow tests during development -pytest -m "not slow" -``` - -**Flaky tests (intermittent failures)** -```python -# Add retries for network tests -@pytest.mark.flaky(reruns=3, reruns_delay=1) -def test_network_operation(): - ... - -# Increase timeouts -@patch('cli_audit.config.TIMEOUT_SECONDS', 10) -def test_with_longer_timeout(): - ... 
-``` - -### Debugging Tests - -```bash -# Print output (use -s to see print statements) -pytest -s tests/test_config.py - -# Drop into debugger on failure -pytest --pdb - -# Drop into debugger on first failure -pytest -x --pdb - -# Show local variables on failure -pytest -l - -# Very verbose output -pytest -vv -``` - -**Using pdb:** -```python -def test_debug_example(): - import pdb; pdb.set_trace() # Breakpoint - result = complex_function() - assert result == expected -``` - -**Using pytest.set_trace():** -```python -def test_debug_example(): - pytest.set_trace() # Pytest-aware breakpoint - result = complex_function() - assert result == expected -``` - ---- - -## Best Practices - -### DO - -✅ **Write descriptive test names** -```python -def test_install_python_tool_with_pipx_retries_on_network_failure(): -``` - -✅ **Test one thing per test** -```python -def test_config_loads_from_yaml(): -def test_config_validates_version(): -def test_config_raises_on_invalid_mode(): -``` - -✅ **Use fixtures for common setup** -```python -@pytest.fixture -def sample_config(): - return Config(...) -``` - -✅ **Mock external dependencies** -```python -@patch('subprocess.run') -@patch('urllib.request.urlopen') -def test_install(...): -``` - -✅ **Test edge cases** -```python -def test_empty_string(): -def test_none_input(): -def test_very_long_input(): -``` - -✅ **Test error paths** -```python -def test_raises_on_invalid_input(): - with pytest.raises(ValueError): - ... -``` - -### DON'T - -❌ **Don't test multiple things in one test** -```python -# Bad: Tests loading, validation, and merging -def test_config_everything(): - config = load_config() - assert config.version == 1 - merged = config.merge_with(other) - ... -``` - -❌ **Don't use hard-coded paths** -```python -# Bad -config = load_config("/home/user/.config/app/config.yml") - -# Good -config = load_config(tmp_path / "config.yml") -``` - -❌ **Don't skip tests without good reason** -```python -# Bad -@pytest.mark.skip -def test_important_feature(): - ... - -# Good -@pytest.mark.skip(reason="Waiting for upstream fix #123") -def test_blocked_feature(): - ... -``` - -❌ **Don't test implementation details** -```python -# Bad: Tests internal variable names -assert obj._internal_cache == {...} - -# Good: Tests public API behavior -assert obj.get_cached_value() == expected -``` - ---- - -## Related Documentation - -- **[CONTRIBUTING.md](../CONTRIBUTING.md)** - Contribution guidelines -- **[PHASE2_API_REFERENCE.md](PHASE2_API_REFERENCE.md)** - API documentation -- **[DEVELOPER_GUIDE.md](DEVELOPER_GUIDE.md)** - Development guide -- **[CLI_REFERENCE.md](CLI_REFERENCE.md)** - Command-line reference - ---- - -**Last Updated:** 2025-10-13 -**Maintainers:** See [CONTRIBUTING.md](../CONTRIBUTING.md) diff --git a/docs/TOOL_ECOSYSTEM.md b/docs/TOOL_ECOSYSTEM.md index 4fb0749..f83d274 100644 --- a/docs/TOOL_ECOSYSTEM.md +++ b/docs/TOOL_ECOSYSTEM.md @@ -4,21 +4,20 @@ Complete catalog of all tools tracked by AI CLI Preparation, organized by catego ## Overview -AI CLI Preparation tracks **68 developer tools** across 11 categories, optimized for AI coding agent environments. Each tool is classified by installation method and tracked against upstream releases. +AI CLI Preparation tracks **50+ developer tools** across 10 categories, optimized for AI coding agent environments. Each tool is classified by installation method and tracked against upstream releases. 
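+
+Any single tracked tool can be spot-checked against its upstream release using
+the same audit invocation shown in the quick reference (ripgrep is just an
+example of a tracked executable):
+
+```bash
+# Compare one installed tool's version against its tracked upstream release
+python3 cli_audit.py --only ripgrep
+```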
## Categories - [Runtimes & Package Managers](#runtimes--package-managers) - Language runtimes and package managers (11 tools) -- [Search & Code Analysis](#search--code-analysis) - Code search and analysis tools (6 tools) +- [Search & Code Analysis](#search--code-analysis) - Code search and analysis tools (5 tools) - [Editors & Utilities](#editors--utilities) - Editing helpers and diffs (8 tools) - [JSON/YAML Processors](#jsonyaml-processors) - Data format tools (4 tools) - [HTTP Clients](#http-clients) - HTTP/API testing tools (2 tools) -- [Automation & Watch](#automation--watch) - File watching and automation (6 tools) -- [Security & Compliance](#security--compliance) - Security scanning tools (5 tools) -- [Git Tools](#git-tools) - Version control and Git helpers (8 tools) -- [Formatters & Linters](#formatters--linters) - Code formatting and linting (12 tools) +- [Automation & Watch](#automation--watch) - File watching and automation (4 tools) +- [Security & Compliance](#security--compliance) - Security scanning tools (4 tools) +- [Git Tools](#git-tools) - Version control and Git helpers (5 tools) +- [Formatters & Linters](#formatters--linters) - Code formatting and linting (7 tools) - [Cloud & Infrastructure](#cloud--infrastructure) - Cloud and container tools (5 tools) -- [Build Systems](#build-systems) - Build automation and compilation tools (1 tool) ## Role-Based Presets @@ -159,14 +158,6 @@ make audit-offline-data-core - **Install:** `cargo install ast-grep` - **Upgrade:** `cargo install --force ast-grep` -### cscope -- **Purpose:** C source code browser and navigation tool -- **Executable:** `cscope` -- **Upstream:** SourceForge (cscope) -- **Use Case:** Navigate C/C++ codebases, find function definitions and callers -- **Install:** `apt install cscope` or `brew install cscope` -- **Upgrade:** Update via system package manager - ### fzf - **Purpose:** Command-line fuzzy finder - **Executable:** `fzf` @@ -411,14 +402,6 @@ make audit-offline-data-core - **Install:** `pipx install pre-commit` - **Upgrade:** `pipx upgrade pre-commit` -### tfsec -- **Purpose:** Security scanner for Terraform code -- **Executable:** `tfsec` -- **Upstream:** GitHub (aquasecurity/tfsec) -- **Use Case:** Finding security issues in Terraform/IaC configurations -- **Install:** `scripts/install_core.sh reconcile tfsec` or download from releases -- **Upgrade:** Re-download latest release - --- ## Git Tools @@ -463,30 +446,6 @@ make audit-offline-data-core - **Install:** `cargo install git-branchless` - **Upgrade:** `cargo install --force git-branchless` -### just -- **Purpose:** Command runner for project-specific tasks -- **Executable:** `just` -- **Upstream:** GitHub (casey/just) -- **Use Case:** Project task automation with justfile recipes -- **Install:** `cargo install just` or `apt install just` -- **Upgrade:** `cargo install --force just` or `scripts/auto_update.sh` - -### gam -- **Purpose:** Google Workspace Admin command-line management -- **Executable:** `gam` -- **Upstream:** GitHub (GAM-team/GAM) -- **Use Case:** Automate Google Workspace administration -- **Install:** Download from GitHub releases -- **Upgrade:** Re-download latest release - -### git-lfs -- **Purpose:** Git Large File Storage extension -- **Executable:** `git-lfs` -- **Upstream:** GitHub (git-lfs/git-lfs) -- **Use Case:** Version control for large files (models, datasets, media) -- **Install:** `scripts/install_core.sh reconcile git-lfs` or `apt install git-lfs` -- **Upgrade:** Re-download latest release or update system package - 
--- ## Formatters & Linters @@ -547,30 +506,6 @@ make audit-offline-data-core - **Install:** `apt install shellcheck` or download binary - **Upgrade:** Update system package -### golangci-lint -- **Purpose:** Fast Go linters aggregator -- **Executable:** `golangci-lint` -- **Upstream:** GitHub (golangci/golangci-lint) -- **Use Case:** Running multiple Go linters in parallel -- **Install:** `go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest` -- **Upgrade:** `go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest` - -### codex -- **Purpose:** OpenAI Codex CLI (if available) -- **Executable:** `codex` -- **Upstream:** npm (@openai/codex) -- **Use Case:** AI code generation from CLI -- **Install:** `npm install -g @openai/codex` -- **Upgrade:** `npm update -g @openai/codex` - -### claude -- **Purpose:** Anthropic Claude Code CLI -- **Executable:** `claude` -- **Upstream:** npm (@anthropic-ai/claude-code) -- **Use Case:** AI coding assistant from command line -- **Install:** `npm install -g @anthropic-ai/claude-code` -- **Upgrade:** `npm update -g @anthropic-ai/claude-code` - --- ## Cloud & Infrastructure @@ -625,18 +560,6 @@ make audit-offline-data-core --- -## Build Systems - -### ninja -- **Purpose:** Small build system with focus on speed -- **Executable:** `ninja` -- **Upstream:** GitHub (ninja-build/ninja) -- **Use Case:** Fast incremental builds, used by CMake and Meson -- **Install:** `scripts/install_core.sh reconcile ninja` or `apt install ninja-build` -- **Upgrade:** Re-download latest release or update system package - ---- - ## Upgrade Strategies ### By Installation Method diff --git a/latest_versions.json b/latest_versions.json index cd8a5cf..63de3ef 100644 --- a/latest_versions.json +++ b/latest_versions.json @@ -1,29 +1,24 @@ { "__hints__": { "gh:BurntSushi/ripgrep": "latest_redirect", - "gh:GAM-team/GAM": "latest_redirect", "gh:TomWright/dasel": "latest_redirect", "gh:antonmedv/fx": "latest_redirect", - "gh:aquasecurity/tfsec": "latest_redirect", "gh:aquasecurity/trivy": "latest_redirect", "gh:arxanas/git-branchless": "latest_redirect", "gh:ast-grep/ast-grep": "latest_redirect", "gh:astral-sh/uv": "latest_redirect", - "gh:aws/aws-cli": "tags_api", + "gh:aws/aws-cli": "atom", "gh:casey/just": "latest_redirect", "gh:cli/cli": "latest_redirect", - "gh:composer/composer": "latest_redirect", "gh:dandavison/delta": "latest_redirect", "gh:direnv/direnv": "latest_redirect", - "gh:docker/cli": "tags_api", + "gh:docker/cli": "atom", "gh:docker/compose": "latest_redirect", - "gh:eradman/entr": "tags_api", + "gh:eradman/entr": "atom", "gh:eslint/eslint": "latest_redirect", - "gh:git-lfs/git-lfs": "latest_redirect", - "gh:git/git": "tags_api", + "gh:git/git": "atom", "gh:gitleaks/gitleaks": "latest_redirect", - "gh:golang/go": "tags_api", - "gh:golangci/golangci-lint": "latest_redirect", + "gh:golang/go": "atom", "gh:hashicorp/terraform": "latest_redirect", "gh:jqlang/jq": "latest_redirect", "gh:junegunn/fzf": "latest_redirect", @@ -31,15 +26,12 @@ "gh:kubernetes/kubernetes": "latest_redirect", "gh:mikefarah/yq": "latest_redirect", "gh:mvdan/sh": "latest_redirect", - "gh:ninja-build/ninja": "latest_redirect", "gh:nodejs/node": "latest_redirect", "gh:phiresky/ripgrep-all": "latest_redirect", "gh:prettier/prettier": "latest_redirect", "gh:profclems/glab": "latest_redirect", - "gh:python/cpython": "tags_api", + "gh:python/cpython": "atom", "gh:rs/curlie": "latest_redirect", - "gh:ruby/ruby": "latest_redirect", - "gh:rubygems/rubygems": "latest_redirect", 
"gh:rust-lang/rust": "latest_redirect", "gh:sharkdp/bat": "latest_redirect", "gh:sharkdp/fd": "latest_redirect", @@ -47,44 +39,34 @@ "gh:universal-ctags/ctags": "latest_redirect", "gh:wagoodman/dive": "latest_redirect", "gh:watchexec/watchexec": "latest_redirect", - "gitlab:gitlab-org/cli": "releases_api", "local_dc:docker-compose": "plugin", "local_flag:ast-grep": "--version", "local_flag:aws": "--version", "local_flag:bandit": "--version", "local_flag:bat": "--version", "local_flag:black": "--version", - "local_flag:claude": "-v", - "local_flag:codex": "--version", "local_flag:ctags": "--version", "local_flag:curlie": "version", "local_flag:dasel": "--version", "local_flag:delta": "--version", "local_flag:direnv": "--version", "local_flag:dive": "--version", - "local_flag:docker": "version --format {{.Client.Version}}", + "local_flag:docker": "--version", "local_flag:eslint": "--version", "local_flag:fd": "--version", "local_flag:flake8": "--version", - "local_flag:fx": "--version", "local_flag:fzf": "--version", - "local_flag:gam": "--version", - "local_flag:gem": "-v", "local_flag:gh": "--version", "local_flag:git": "--version", "local_flag:git-absorb": "--version", - "local_flag:git-branchless": "--version", - "local_flag:git-lfs": "-v", - "local_flag:gitleaks": "version", + "local_flag:gitleaks": "--version", "local_flag:glab": "--version", "local_flag:go": "version", - "local_flag:golangci-lint": "--version", "local_flag:httpie": "--version", "local_flag:isort": "version", "local_flag:jq": "--version", "local_flag:just": "--version", "local_flag:kubectl": "version --client", - "local_flag:ninja": "--version", "local_flag:node": "--version", "local_flag:npm": "--version", "local_flag:parallel": "--version", @@ -98,7 +80,6 @@ "local_flag:rename": "--version", "local_flag:rga": "--version", "local_flag:ripgrep": "-V", - "local_flag:ruby": "-v", "local_flag:rust": "--version", "local_flag:sd": "--version", "local_flag:shfmt": "--version", @@ -125,7 +106,7 @@ "migrated:git-branchless": "v0.10.0", "migrated:gitleaks": "v8.28.0", "migrated:glab": "v1.22.0", - "migrated:go": "go1.25.2", + "migrated:go": "go1.25.1", "migrated:jq": "jq-1.8.1", "migrated:kubernetes": "v1.34.1", "migrated:node": "v24.8.0", @@ -135,81 +116,20 @@ "migrated:terraform": "v1.13.2", "migrated:trivy": "v0.66.0", "migrated:watchexec": "v2.3.2", - "migrated:yarn": "{'stable': '4.10.3', 'canary': '4.10.3'}", + "migrated:yarn": "{'stable': '4.9.4', 'canary': '4.9.4'}", "migrated:yq": "v4.47.2" }, "__methods__": { - "ansible": "pypi", - "ansible-core": "pypi", - "ast-grep": "github", - "aws": "github", - "bandit": "pypi", - "bat": "github", - "black": "pypi", - "claude": "npm", - "codex": "npm", - "composer": "github", - "ctags": "github", - "curlie": "github", - "dasel": "github", - "delta": "github", - "direnv": "github", - "dive": "github", - "docker": "github", - "docker-compose": "github", - "entr": "github", - "eslint": "github", - "fd": "github", - "flake8": "pypi", - "fx": "github", - "fzf": "github", - "gam": "github", - "gem": "github", - "gh": "github", - "git": "github", - "git-absorb": "github", - "git-branchless": "github", - "git-lfs": "github", - "gitleaks": "github", - "glab": "gitlab", - "go": "github", - "golangci-lint": "github", - "httpie": "pypi", - "isort": "pypi", - "jq": "github", - "just": "github", - "kubectl": "github", - "ninja": "github", - "node": "github", "npm": "npm", - "parallel": "gnu-ftp", "pip": "pypi", "pipx": "pypi", - "pnpm": "npm", "poetry": "pypi", - "pre-commit": "pypi", - 
"prettier": "github", - "python": "github", - "rga": "github", "ripgrep": "github", - "ruby": "github", - "rust": "github", - "sd": "crates", - "semgrep": "pypi", - "shellcheck": "github", - "shfmt": "github", - "terraform": "github", - "tfsec": "github", - "trivy": "github", - "uv": "github", - "watchexec": "github", - "xsv": "crates", - "yarn": "yarn-tags", - "yq": "github" + "sd": "crates" }, "ansible": "12.0.0", "ansible-core": "2.19.2", - "ast-grep": "0.39.6", + "ast-grep": "0.39.5", "aws-cli": "2.30.2", "bandit": "1.8.6", "bat": "0.25.0", @@ -224,7 +144,7 @@ "direnv": "2.37.1", "dive": "0.13.1", "entr": "5.7", - "eslint": "9.37.0", + "eslint": "9.35.0", "fd": "10.3.0", "flake8": "7.3.0", "fx": "39.1.0", @@ -234,7 +154,7 @@ "git-branchless": "0.10.0", "gitleaks": "8.28.0", "glab": "1.22.0", - "go": "1.25.2", + "go": "1.25.1", "httpie": "3.2.4", "isort": "6.0.1", "jq": "1.8.1", @@ -242,26 +162,25 @@ "kubernetes": "1.34.1", "node": "24.8.0", "npm": "11.6.2", - "parallel": "20250922", + "parallel": "20250822", "pip": "25.2", "pipx": "1.8.0", "pnpm": "10.16.1", "poetry": "2.2.1", "pre-commit": "4.3.0", "prettier": "3.6.2", - "python": "3.14.0", "ripgrep": "14.1.1", "ripgrep-all": "0.10.9", - "rust": "1.90.0", + "rust": "1.89.0", "sd": "1.0.0", "semgrep": "1.136.0", "sh": "3.12.0", "shellcheck": "0.11.0", "terraform": "1.13.2", - "trivy": "0.67.2", - "uv": "0.9.1", + "trivy": "0.66.0", + "uv": "0.8.17", "watchexec": "2.3.2", "xsv": "0.13.0", - "yarn": "4.10.3", + "yarn": "4.9.4", "yq": "4.47.2" } \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 181c7fa..a561e27 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,18 +1,17 @@ { - "name": "coding_agent_cli_toolset", + "name": "ai_cli_preparation", "lockfileVersion": 3, "requires": true, "packages": { "": { "dependencies": { - "24": "^0.0.0", - "@anthropic-ai/claude-code": "^2.0.22" + "@anthropic-ai/claude-code": "^2.0.11" } }, "node_modules/@anthropic-ai/claude-code": { - "version": "2.0.22", - "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-code/-/claude-code-2.0.22.tgz", - "integrity": "sha512-3FVySgr2zYrHnQ0deOeCzXdLSa/JkCBm1QR7x4j07puNnvHrFhBo6whv/IMOcSvxL62RRW3csRcGsQhUGPNc/w==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-code/-/claude-code-2.0.11.tgz", + "integrity": "sha512-DBQd5znEJn11JsmqFOhjTeSO6oihxPkl9KiHxsvc1kbyQKUNVRLFnaAjUfGUOktmWLN+d7e3vbIRKhHtXoA0BQ==", "license": "SEE LICENSE IN README.md", "bin": { "claude": "cli.js" @@ -237,12 +236,6 @@ "funding": { "url": "https://opencollective.com/libvips" } - }, - "node_modules/24": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/24/-/24-0.0.0.tgz", - "integrity": "sha512-dgRd1Jm77CFbawoAu4/37jJCl4LjQVzfACgUVyGeN7OG1qHKsEeTEyvPvzsK0FF59F/uJ1eq2wsrO+U+Sa028Q==", - "license": "MIT" } } } diff --git a/package.json b/package.json index bfecb3f..4af236f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,5 @@ { "dependencies": { - "24": "^0.0.0", - "@anthropic-ai/claude-code": "^2.0.22" + "@anthropic-ai/claude-code": "^2.0.11" } } diff --git a/scripts/README.md b/scripts/README.md index 337c2b0..5eafdb7 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -2,232 +2,654 @@ ## Overview -The AI CLI Preparation project uses a **catalog-based installation system** that provides consistent, declarative management of 65+ developer tools. All tools are defined in individual catalog entries (`catalog/.json`) with installation delegated to generic, reusable installers. 
+The AI CLI Preparation project includes 14 automated installation scripts for setting up developer tools and language environments. Each script supports four standard actions: `install`, `update`, `uninstall`, and `reconcile`. -## Key Concepts +### Script Inventory -### Catalog System -- **Single Source of Truth**: Every tool has a `catalog/.json` entry with metadata -- **Generic Installers**: One installer script per method handles all tools of that type -- **Tag-Based Grouping**: Tools tagged for logical grouping (`core`, `runtime`, `security`, etc.) -- **No Duplication**: Tool-specific code eliminated in favor of data-driven configuration +| Script | Category | Purpose | +|--------|----------|---------| +| `install_ansible.sh` | Infrastructure | Ansible automation platform | +| `install_aws.sh` | Cloud CLI | AWS command-line interface | +| `install_brew.sh` | Package Manager | Homebrew (macOS/Linux) | +| `install_core.sh` | Developer Tools | Core CLI tools (fd, fzf, ripgrep, jq, yq, bat, delta, just) | +| `install_docker.sh` | Containers | Docker engine and docker-compose | +| `install_go.sh` | Language Runtime | Go programming language | +| `install_kubectl.sh` | Cloud CLI | Kubernetes command-line tool | +| `install_node.sh` | Language Runtime | Node.js via nvm (Node Version Manager) | +| `install_python.sh` | Language Runtime | Python runtime and package managers | +| `install_rust.sh` | Language Runtime | Rust via rustup | +| `install_terraform.sh` | Infrastructure | Terraform infrastructure-as-code | +| `install_uv.sh` | Package Manager | uv Python package manager | -### Installation Strategy -Configurable via `INSTALL_STRATEGY` in `.env`: -- **USER** (default): Install to `~/.local/bin` (no sudo required) -- **GLOBAL**: Install to `/usr/local/bin` (requires sudo) -- **CURRENT**: Keep tool where currently installed -- **PROJECT**: Install to `./.local/bin` (project-local) +Plus 2 shared library scripts: +- `lib/common.sh` - Shared utilities and helper functions +- `lib/distro.sh` - Distribution detection and package manager wrappers -## Core Scripts +## Actions Supported -| Script | Purpose | -|--------|---------| -| `install_tool.sh` | **Main orchestrator** - reads catalog, delegates to installers | -| `install_group.sh` | **Group installer** - installs all tools with a specific tag | -| `installers/*.sh` | **Generic installers** - one per installation method | +All installation scripts support four standard actions: -## Usage Examples +### 1. Install + +Installs tools from the latest upstream sources when available, falling back to package managers. -### Install Single Tool ```bash -./scripts/install_tool.sh ripgrep -./scripts/install_tool.sh kubectl -./scripts/install_tool.sh python +./scripts/install_core.sh install +./scripts/install_python.sh install +./scripts/install_rust.sh install ``` -### Install by Group/Tag +**Behavior:** +- Checks if tool already exists (idempotent) +- Prefers vendor-specific installers (rustup, nvm) over package managers +- Downloads latest releases from GitHub/official sources when possible +- Falls back to apt/brew if vendor installer unavailable +- Creates necessary directories (`~/.local/bin`, etc.) + +### 2. Update + +Updates tools to latest versions using their native update mechanisms. 
+ ```bash -# Install all core tools (fd, fzf, ripgrep, jq, yq, bat, delta, just) -./scripts/install_group.sh core +./scripts/install_core.sh update +./scripts/install_python.sh update +./scripts/install_rust.sh update +``` + +**Behavior:** +- Uses tool-specific update commands (`rustup update`, `brew upgrade`) +- For apt-installed tools, runs `apt-get install --only-upgrade` +- No-op if tool not installed +- Preserves existing configuration + +### 3. Uninstall -# List available tags -./scripts/install_group.sh +Removes tools and cleans up installation artifacts. + +```bash +./scripts/install_core.sh uninstall +./scripts/install_python.sh uninstall +./scripts/install_rust.sh uninstall ``` -### Configure Installation Location +**Behavior:** +- Removes binaries from standard locations (`~/.local/bin`, `/usr/local/bin`) +- Uses tool-specific uninstallers when available (`rustup self uninstall`) +- Removes package manager installations (`apt-get remove`) +- Does not remove user configuration files + +### 4. Reconcile + +Ensures tools are installed using preferred methods, migrating from package managers to upstream installers. + ```bash -# Install to user directory (default) -INSTALL_STRATEGY=USER ./scripts/install_tool.sh fd +./scripts/install_core.sh reconcile +./scripts/install_core.sh reconcile ripgrep # Single tool +./scripts/install_python.sh reconcile +``` + +**Behavior:** +- Removes package manager versions (apt, snap) +- Reinstalls using preferred upstream method +- Useful for migrating from distro packages to vendor installers +- Reports before/after versions and paths + +## Installation Methods + +### Vendor Installers (Preferred) + +Scripts prefer official vendor installers for better version control and updates: + +| Tool | Installer | Method | +|------|-----------|--------| +| Rust | rustup | `curl https://sh.rustup.rs \| sh` | +| Node | nvm | `curl https://raw.githubusercontent.com/nvm-sh/nvm/...` | +| Python | deadsnakes PPA | `add-apt-repository ppa:deadsnakes/ppa` | +| Go | Official tarball | `wget https://go.dev/dl/go*.tar.gz` | +| uv | Official installer | `curl -LsSf https://astral.sh/uv/install.sh` | -# Install to system directory (requires sudo) -INSTALL_STRATEGY=GLOBAL ./scripts/install_tool.sh fd +### GitHub Releases -# Keep where currently installed -INSTALL_STRATEGY=CURRENT ./scripts/install_tool.sh fd +For tools without vendor installers, scripts download from GitHub releases: + +- **Binary assets:** Direct download (fd, ripgrep, fzf, gh, shellcheck) +- **Source builds:** Clone → build → install (entr, parallel, ctags) +- **Fallback to distro packages** when GitHub unavailable + +### Package Managers + +Used as fallback when upstream sources unavailable: + +- **apt/dpkg:** Debian/Ubuntu systems +- **Homebrew:** macOS and Linuxbrew +- **snap:** Containerized packages (discouraged, removed during reconcile) + +## Script-Specific Documentation + +### install_core.sh + +Manages essential developer CLI tools. 
+ +**Tools Installed:** +- **fd** - Fast find replacement +- **fzf** - Fuzzy finder +- **ripgrep** - Fast grep replacement +- **jq** - JSON processor +- **yq** - YAML processor +- **bat** - Cat with syntax highlighting +- **delta** - Git diff viewer +- **just** - Command runner + +**Additional Tools (via dedicated functions):** +- git, gh (GitHub CLI), glab (GitLab CLI) +- ctags (universal-ctags from source) +- entr (file watcher) +- parallel (GNU parallel) +- shellcheck, shfmt (shell linters/formatters) +- eslint, prettier (JS linters/formatters) +- trivy, gitleaks (security scanners) +- dive (Docker image analyzer) +- direnv, ast-grep, fx, curlie + +**Special Handling:** + +1. **ctags**: Built from source via checkinstall for clean uninstall + ```bash + # Reads target version from latest_versions.json + # Builds universal-ctags with autoconf + # Registers with update-alternatives + ``` + +2. **entr**: Builds from GitHub source tarball + ```bash + # Discovers latest via redirect + # Downloads release tarball + # Compiles with make + ``` + +3. **parallel**: Downloads from GNU FTP mirror + ```bash + # Supports tar.bz2, tar.xz, tar.gz + # Configures with --prefix + # Fallback to apt if build fails + ``` + +### install_rust.sh + +Installs Rust via rustup with all components. + +**Components:** +- rustc (compiler) +- cargo (package manager) +- rust-std (standard library) +- rust-docs (offline documentation) +- rustfmt, clippy (formatters/linters) + +**Actions:** +```bash +install_rust # Runs rustup installer +update_rust # Runs rustup update +uninstall_rust # Runs rustup self uninstall +reconcile_rust # Removes apt rustc/cargo, installs via rustup ``` -## Catalog Entry Format +**Detection:** +- Checks `cargo` command availability +- Sources `~/.cargo/env` after installation +- Prefers rustup over apt packages + +### install_python.sh -Each tool has a JSON catalog entry with: +Manages Python runtimes and package managers. -```json -{ - "name": "tool-name", - "install_method": "github_release_binary", - "description": "Tool description", - "homepage": "https://...", - "binary_name": "tool", - "download_url_template": "https://.../download/{version}/{os}-{arch}", - "arch_map": { - "x86_64": "amd64", - "aarch64": "arm64" - }, - "tags": ["core", "search"] -} +**Components:** +- Python 3.x (via deadsnakes PPA on Ubuntu) +- pip (Python package installer) +- pipx (isolated CLI tools) +- poetry (dependency management) +- uv (fast Python package manager) + +**Actions:** +```bash +install_python # Installs Python + pip + pipx + poetry +update_python # Updates all Python tools +uninstall_python # Removes Python environments +reconcile_python # Migrates to preferred methods ``` -## Installation Methods +**Version Selection:** +- Prioritizes Python 3.12+ +- Uses deadsnakes PPA for latest versions on Ubuntu +- Installs python3-venv for virtual environments + +### install_node.sh + +Installs Node.js via nvm for version management. + +**Components:** +- nvm (Node Version Manager) +- Node.js LTS (via nvm) +- npm (bundled with Node) +- yarn, pnpm (alternative package managers) + +**Actions:** +```bash +install_node # Installs nvm + Node LTS +update_node # Updates nvm + Node to latest LTS +uninstall_node # Removes nvm and Node +reconcile_node # Migrates to nvm-managed installation +``` + +**Features:** +- Installs latest LTS version by default +- Configures shell initialization (`~/.bashrc`, `~/.zshrc`) +- Supports multiple Node versions via nvm + +### install_go.sh + +Installs Go from official tarballs. 
+ +**Installation Path:** `/usr/local/go` (system) or `~/.local/go` (user) + +**Actions:** +```bash +install_go # Downloads + extracts Go tarball +update_go # Removes old + installs latest +uninstall_go # Removes Go installation +reconcile_go # Migrates from package manager to tarball +``` + +**Version Discovery:** +- Queries https://go.dev/dl/?mode=json +- Selects latest stable release +- Validates SHA256 checksums + +### install_docker.sh + +Installs Docker Engine and docker-compose. + +**Components:** +- Docker Engine (CE) +- docker-compose v2 (plugin) +- Docker Buildx (plugin) + +**Installation Methods:** +1. **Docker Desktop** (WSL detection) +2. **Docker APT repository** (official) +3. **Convenience script** (fallback) + +**Actions:** +```bash +install_docker # Installs Docker + compose +update_docker # Updates Docker packages +uninstall_docker # Removes Docker completely +reconcile_docker # Migrates to preferred method +``` + +**Post-Install:** +- Adds user to `docker` group +- Enables Docker service +- Validates installation with `docker run hello-world` + +### install_terraform.sh + +Installs Terraform from HashiCorp releases. + +**Installation Method:** +- Downloads official ZIP from releases.hashicorp.com +- Extracts to `/usr/local/bin` or `~/.local/bin` +- Validates binary hash + +**Actions:** +```bash +install_terraform # Downloads + installs latest +update_terraform # Removes old + installs latest +uninstall_terraform # Removes binary +reconcile_terraform # Migrates from package manager +``` + +### install_kubectl.sh + +Installs kubectl from Kubernetes releases. + +**Installation Method:** +- Downloads from https://dl.k8s.io/release/ +- Validates with SHA256 checksum +- Installs to `/usr/local/bin` or `~/.local/bin` + +**Actions:** +```bash +install_kubectl # Downloads + installs latest stable +update_kubectl # Removes old + installs latest +uninstall_kubectl # Removes binary +reconcile_kubectl # Migrates from package manager +``` + +**Version Discovery:** +- Queries https://dl.k8s.io/release/stable.txt +- Supports specific version pins via environment variable -The system supports 9 installation methods: +### install_aws.sh -| Method | Tools | Example | -|--------|-------|---------| -| `github_release_binary` | 30+ tools | fd, fzf, ripgrep, kubectl, terraform | -| `dedicated_script` | 10 tools | go, rust, python, node, docker | -| `package_manager` | 10 tools | pip, npm, yarn, gem, composer | -| `uv_tool` | 10 tools | black, ruff, bandit, isort | -| `hashicorp_zip` | 1 tool | terraform (alternative method) | -| `aws_installer` | 1 tool | aws (official AWS CLI installer) | -| `npm_global` | 1 tool | prettier | -| `script` | 1 tool | parallel (GNU) | -| `pipx_tool` | 1 tool | ansible (via pipx) | +Installs AWS CLI v2. -### github_release_binary -Downloads pre-compiled binaries from GitHub releases. 
Handles: -- Architecture mapping (x86_64 → amd64, aarch64 → arm64) -- Archive extraction (.tar.gz, .zip) -- Binary renaming and permission setting -- Version detection from GitHub API +**Installation Method:** +- Downloads official installer from awscli.amazonaws.com +- Runs bundled install script +- Installs to `/usr/local/aws-cli` by default -**Installer:** `scripts/installers/github_release_binary.sh` +**Actions:** +```bash +install_aws # Downloads + runs AWS installer +update_aws # Runs installer with --update flag +uninstall_aws # Removes AWS CLI installation +reconcile_aws # Migrates from v1 or package manager to v2 +``` + +### install_ansible.sh + +Installs Ansible via pipx for isolated environment. + +**Installation Method:** +- Uses pipx to install ansible-core +- Ensures Python 3.x available +- Installs to `~/.local/bin` via pipx -### dedicated_script -For complex tools with existing installation scripts: -- Runtime environments (go, rust, python, node) -- Docker (official install script) -- System tools (git, ctags, gam) +**Actions:** +```bash +install_ansible # pipx install ansible-core +update_ansible # pipx upgrade ansible-core +uninstall_ansible # pipx uninstall ansible-core +reconcile_ansible # Migrates from pip/apt to pipx +``` + +### install_brew.sh -**Installer:** `scripts/installers/dedicated_script.sh` (delegates to existing scripts) +Installs Homebrew package manager. -### package_manager -Installs via system package managers (apt/brew/dnf/pacman): -- Package managers themselves (pip, npm, yarn, gem, composer) -- System utilities (sponge from moreutils) +**Installation Method:** +- Runs official Homebrew installer script +- Supports macOS and Linux +- Configures shell initialization + +**Actions:** +```bash +install_brew # Installs Homebrew +update_brew # brew update && brew upgrade +uninstall_brew # Runs Homebrew uninstaller +reconcile_brew # Re-installs to fix issues +``` -**Installer:** `scripts/installers/package_manager.sh` +**Post-Install:** +- Adds Homebrew to PATH in shell profile +- Runs `brew doctor` to validate installation -### uv_tool -Python CLI tools installed via `uv tool install`: -- Python formatters/linters (black, ruff, isort, flake8) -- Security scanners (bandit) -- Build tools (poetry) +### install_uv.sh -**Installer:** `scripts/installers/uv_tool.sh` +Installs uv Python package manager. 
-## Tool Tags +**Installation Method:** +- Uses official installer: `curl -LsSf https://astral.sh/uv/install.sh | sh` +- Installs to `~/.local/bin/uv` +- Much faster than pip/pipx -Tools are tagged for logical grouping: +**Actions:** +```bash +install_uv # Runs official installer +update_uv # uv self update +uninstall_uv # Removes uv binary +reconcile_uv # Migrates from pipx to official installer +``` -| Tag | Tools | -|-----|-------| -| `core` | fd, fzf, ripgrep, jq, yq, bat, delta, just | -| `runtime` | go, rust, python, node | -| `security` | trivy, gitleaks, bandit, semgrep, tfsec | -| `git` | git, gh, glab, git-lfs, git-absorb, git-branchless | -| `cloud` | aws, kubectl, terraform, docker | -| `text-utils` | bat, yq, jq, fx, dasel | -| `search` | fd, ripgrep, rga, ast-grep | +## Common Workflows -## Migration Guide +### Initial Environment Setup -### From Old install_core.sh ```bash -# Old way +# Install core developer tools ./scripts/install_core.sh install -# New way (install by tag) -./scripts/install_group.sh core +# Install language runtimes +./scripts/install_python.sh install +./scripts/install_rust.sh install +./scripts/install_node.sh install +./scripts/install_go.sh install + +# Install cloud tools +./scripts/install_docker.sh install +./scripts/install_kubectl.sh install +./scripts/install_terraform.sh install +./scripts/install_aws.sh install +``` + +### Keep Tools Updated -# Or individual tools -./scripts/install_tool.sh fd -./scripts/install_tool.sh ripgrep +```bash +# Update all tools +for script in scripts/install_*.sh; do + "$script" update +done + +# Or selectively update +./scripts/install_core.sh update +./scripts/install_python.sh update +./scripts/install_rust.sh update ``` -### From Dedicated Scripts +### Migrate from Package Managers to Vendor Installers + ```bash -# Old way -./scripts/install_kubectl.sh -./scripts/install_terraform.sh +# Reconcile removes package manager versions and installs from upstream +./scripts/install_core.sh reconcile +./scripts/install_python.sh reconcile +./scripts/install_rust.sh reconcile +./scripts/install_node.sh reconcile + +# Reconcile single tool +./scripts/install_core.sh reconcile ripgrep +``` -# New way -./scripts/install_tool.sh kubectl -./scripts/install_tool.sh terraform +### Clean Uninstall + +```bash +# Remove all tools +for script in scripts/install_*.sh; do + "$script" uninstall +done + +# Or selectively uninstall +./scripts/install_docker.sh uninstall +./scripts/install_terraform.sh uninstall ``` -## Adding New Tools +## Environment Variables -1. Create catalog entry `catalog/newtool.json`: -```json -{ - "name": "newtool", - "install_method": "github_release_binary", - "description": "Description", - "homepage": "https://...", - "github_repo": "owner/repo", - "binary_name": "newtool", - "download_url_template": "https://github.com/owner/repo/releases/download/{version}/newtool-{os}-{arch}", - "tags": ["category"] -} +Installation scripts respect these environment variables: + +| Variable | Purpose | Example | +|----------|---------|---------| +| `PREFIX` | Installation prefix | `PREFIX=$HOME/.local` | +| `BIN_DIR` | Binary directory | `BIN_DIR=/usr/local/bin` | +| `GITHUB_TOKEN` | GitHub API token for rate limits | `export GITHUB_TOKEN=ghp_xxx` | +| `FORCE` | Force reinstallation | `FORCE=1 ./install_core.sh install` | +| `*_VERSION` | Pin specific version | `GO_VERSION=1.22.0` | + +## Best Practices + +### 1. 
+
+## Best Practices
+
+### 1. Use Vendor Installers When Available
+
+Vendor installers provide better update mechanisms and version management:
+
+✅ **Preferred:**
+- Rust via rustup
+- Node via nvm
+- Python via deadsnakes PPA + uv
+- uv via official installer
+
+❌ **Avoid:**
+- Distro-packaged Rust (often outdated)
+- System Node (use nvm for version control)
+- pip install --user (use pipx or uv)
+
+### 2. Run Reconcile to Migrate
+
+After initial setup with package managers, run `reconcile` to migrate:
+
+```bash
+# Check current installation
+./scripts/../cli_audit.py | grep ripgrep
+# ripgrep|14.0.0|apt/dpkg|14.1.1|github|OUTDATED
+
+# Reconcile to vendor method
+./scripts/install_core.sh reconcile ripgrep
+
+# Verify upgrade
+./scripts/../cli_audit.py | grep ripgrep
+# ripgrep|14.1.1|rustup/cargo|14.1.1|github|UP-TO-DATE
 ```
-2. Install:
+### 3. Regularly Run Updates
+
+Keep tools current to avoid security vulnerabilities:
+
 ```bash
-./scripts/install_tool.sh newtool
+# Weekly update routine
+./scripts/install_python.sh update
+./scripts/install_rust.sh update
+./scripts/install_node.sh update
+./scripts/install_core.sh update
 ```
-That's it! No code changes needed.
+### 4. Use GitHub Token for Rate Limits
+
+Avoid GitHub API rate limiting during installations:
+
+```bash
+export GITHUB_TOKEN=ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxx
+./scripts/install_core.sh install
+```
-## Architecture Benefits
+### 5. Test in Disposable Environments
-✅ **Single Source of Truth**: All tool metadata in catalog
-✅ **95% Code Reduction**: Eliminated 1150+ lines of duplicated code
-✅ **Easy to Add Tools**: Just add catalog entry, no code changes
-✅ **Consistent Interface**: `install_tool.sh TOOL` for everything
-✅ **Tag-Based Grouping**: No hard-coded tool lists
-✅ **Configurable Locations**: INSTALL_STRATEGY controls install paths
-✅ **Generic Installers**: Reusable logic for common patterns
+Test installation scripts in containers before running them on the host:
+
+```bash
+docker run -it --rm ubuntu:24.04 bash
+# Inside container:
+apt-get update && apt-get install -y curl git
+curl -fsSL https://raw.githubusercontent.com/.../install_core.sh | bash -s install
+```
 ## Troubleshooting
-### Tool Not Found
+### Script Fails with "Permission Denied"
+
+**Cause:** Script trying to write to a protected directory
+
+**Solution:**
+```bash
+# Option 1: Allow passwordless sudo
+sudo visudo
+# Add: youruser ALL=(ALL) NOPASSWD: ALL
+
+# Option 2: Use user prefix
+PREFIX=$HOME/.local ./scripts/install_core.sh install
+```
+
+### GitHub Download Fails (403/429)
+
+**Cause:** GitHub API rate limiting
+
+**Solution:**
+```bash
+# Create personal access token at https://github.com/settings/tokens
+export GITHUB_TOKEN=ghp_xxxxxxxxxxxxxxxxxxxx
+./scripts/install_core.sh install
+```
+
+### Tool Not Found After Installation
+
+**Cause:** Binary directory not in PATH
+
+**Solution:**
+```bash
+# Check installation location
+./scripts/install_core.sh install 2>&1 | grep "may need to add"
+
+# Add to PATH (example for ~/.local/bin)
+echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
+source ~/.bashrc
+```
+
+### Build Fails for entr/parallel/ctags
+
+**Cause:** Missing build dependencies
+
+**Solution:**
 ```bash
-./scripts/install_tool.sh unknown
-# Error: No catalog entry found
-# Available tools: fd fzf ripgrep kubectl ...
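+# Note: the package names below assume a Debian/Ubuntu host; on other
+# distros, install the equivalent toolchain (e.g. on Fedora, roughly:
+# sudo dnf groupinstall "Development Tools").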
+# Install build essentials +sudo apt-get update +sudo apt-get install -y build-essential autoconf automake libtool pkg-config git ``` -### List Available Tags +### Rust/Node Not Available After Install + +**Cause:** Shell environment not reloaded + +**Solution:** ```bash -./scripts/install_group.sh -# Available tags: -# - core -# - runtime -# - security -# - git +# For Rust +source "$HOME/.cargo/env" + +# For Node (nvm) +export NVM_DIR="$HOME/.nvm" +[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" + +# Or restart shell +exec $SHELL -l ``` -### Installation Fails +### Docker Permission Denied + +**Cause:** User not in docker group + +**Solution:** ```bash -# Check catalog entry -cat catalog/toolname.json +sudo usermod -aG docker $USER +newgrp docker # Refresh groups without logout +docker run hello-world +``` + +## Integration with cli_audit.py + +Installation scripts are designed to work with `cli_audit.py` for version tracking: + +```bash +# Before installation +python3 cli_audit.py | grep ripgrep +# ripgrep|X|NOT INSTALLED|14.1.1|github|NOT INSTALLED -# Verify installer exists -ls scripts/installers/github_release_binary.sh +# Install +./scripts/install_core.sh install + +# After installation +python3 cli_audit.py | grep ripgrep +# ripgrep|14.1.1 (150ms)|rustup/cargo|14.1.1 (220ms)|github|UP-TO-DATE ``` +**Version Discovery:** +- `cli_audit.py` detects installation methods (rustup/cargo, nvm/npm, etc.) +- Scripts install to locations `cli_audit.py` expects +- Both use same upstream sources (GitHub, PyPI, crates.io) + +**Reconcile Workflow:** +1. Run `cli_audit.py` to identify outdated/mismatched tools +2. Run script with `reconcile` action to fix +3. Run `cli_audit.py` again to verify + ## See Also -- **[catalog/README.md](../catalog/README.md)** - Catalog format and conventions -- **[catalog/COVERAGE.md](../catalog/COVERAGE.md)** - Complete tool inventory -- **[ARCHITECTURE.md](../docs/ARCHITECTURE.md)** - System design details +- **[ARCHITECTURE.md](../docs/ARCHITECTURE.md)** - System design and data flow +- **[API_REFERENCE.md](../docs/API_REFERENCE.md)** - Function signatures +- **[DEVELOPER_GUIDE.md](../docs/DEVELOPER_GUIDE.md)** - Development practices +- **[TROUBLESHOOTING.md](../docs/TROUBLESHOOTING.md)** - Debugging guide diff --git a/scripts/auto_update.sh b/scripts/auto_update.sh deleted file mode 100755 index 6d7dc8f..0000000 --- a/scripts/auto_update.sh +++ /dev/null @@ -1,1044 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Auto-update all package managers and their packages -# Detects installed package managers and runs their native update tools - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$DIR/lib/common.sh" -. 
"$DIR/lib/scope_detection.sh" - -DRY_RUN="${DRY_RUN:-0}" -VERBOSE="${VERBOSE:-0}" -SKIP_SYSTEM="${SKIP_SYSTEM:-0}" -SCOPE="${SCOPE:-}" # Can be: system, user, project, all, or auto-detect if empty - -log() { - printf "[auto-update] %s\n" "$*" >&2 -} - -vlog() { - if [ "$VERBOSE" = "1" ]; then - printf "[auto-update] %s\n" "$*" >&2 - fi -} - -run_cmd() { - local desc="$1" - shift - if [ "$DRY_RUN" = "1" ]; then - log "DRY-RUN: $desc" - log " Command: $*" - else - log "$desc" - if [ "$VERBOSE" = "1" ]; then - "$@" - else - "$@" >/dev/null 2>&1 || true - fi - fi -} - -# ============================================================================ -# Package Manager Detection -# ============================================================================ - -detect_apt() { - command -v apt-get >/dev/null 2>&1 -} - -detect_brew() { - command -v brew >/dev/null 2>&1 -} - -detect_cargo() { - command -v cargo >/dev/null 2>&1 -} - -detect_pip() { - command -v pip3 >/dev/null 2>&1 || command -v pip >/dev/null 2>&1 -} - -detect_pipx() { - command -v pipx >/dev/null 2>&1 -} - -detect_uv() { - command -v uv >/dev/null 2>&1 -} - -detect_npm() { - command -v npm >/dev/null 2>&1 -} - -detect_pnpm() { - command -v pnpm >/dev/null 2>&1 -} - -detect_yarn() { - command -v yarn >/dev/null 2>&1 -} - -detect_go() { - command -v go >/dev/null 2>&1 -} - -detect_gem() { - command -v gem >/dev/null 2>&1 -} - -detect_snap() { - command -v snap >/dev/null 2>&1 -} - -detect_flatpak() { - command -v flatpak >/dev/null 2>&1 -} - -detect_rustup() { - command -v rustup >/dev/null 2>&1 -} - -detect_nvm() { - [ -s "$HOME/.nvm/nvm.sh" ] && return 0 - command -v nvm >/dev/null 2>&1 -} - -detect_gcloud() { - command -v gcloud >/dev/null 2>&1 -} - -detect_az() { - command -v az >/dev/null 2>&1 -} - -detect_composer() { - command -v composer >/dev/null 2>&1 -} - -detect_poetry() { - command -v poetry >/dev/null 2>&1 -} - -detect_conda() { - command -v conda >/dev/null 2>&1 -} - -detect_mamba() { - command -v mamba >/dev/null 2>&1 -} - -detect_bundler() { - command -v bundle >/dev/null 2>&1 -} - -detect_jspm() { - command -v jspm >/dev/null 2>&1 -} - -detect_nuget() { - command -v nuget >/dev/null 2>&1 || command -v dotnet >/dev/null 2>&1 -} - -# ============================================================================ -# System Package Managers (requires sudo) -# ============================================================================ - -update_apt() { - if ! detect_apt; then return; fi - log "APT: Updating package lists and upgrading packages" - - if [ "$DRY_RUN" = "1" ]; then - log "DRY-RUN: sudo apt-get update && sudo apt-get upgrade -y" - else - if [ "$VERBOSE" = "1" ]; then - sudo apt-get update && sudo apt-get upgrade -y - else - sudo apt-get update >/dev/null 2>&1 || true - sudo apt-get upgrade -y >/dev/null 2>&1 || true - fi - log "APT: Complete" - fi -} - -update_brew() { - if ! detect_brew; then return; fi - log "Homebrew: Updating and upgrading all packages" - - run_cmd "Brew: Update package index" brew update - run_cmd "Brew: Upgrade packages" brew upgrade - run_cmd "Brew: Cleanup old versions" brew cleanup - - log "Homebrew: Complete" -} - -update_snap() { - if ! detect_snap; then return; fi - log "Snap: Refreshing all snaps" - - run_cmd "Snap: Refresh all" sudo snap refresh - - log "Snap: Complete" -} - -update_flatpak() { - if ! 
detect_flatpak; then return; fi - log "Flatpak: Updating all applications" - - run_cmd "Flatpak: Update" flatpak update -y - - log "Flatpak: Complete" -} - -# ============================================================================ -# Language-Specific Package Managers -# ============================================================================ - -update_cargo() { - if ! detect_cargo; then return; fi - log "Cargo: Updating installed packages" - - # Update rustup first - if detect_rustup; then - run_cmd "Rustup: Update toolchains" rustup update - - # Update rustup components (clippy, rustfmt, rust-analyzer, etc.) - vlog "Rustup: Updating components" - for component in clippy rustfmt rust-analyzer rust-src; do - if rustup component list 2>/dev/null | grep -q "^${component}.*installed"; then - vlog "Rustup: Component $component is installed" - # Components are updated with rustup update, no separate update needed - fi - done - fi - - # Install cargo-update if not present - if ! command -v cargo-install-update >/dev/null 2>&1; then - vlog "Installing cargo-update for package upgrades" - run_cmd "Cargo: Install cargo-update" cargo install cargo-update - fi - - # Update all cargo-installed packages - if command -v cargo-install-update >/dev/null 2>&1; then - run_cmd "Cargo: Upgrade all packages" cargo install-update -a - fi - - log "Cargo: Complete" -} - -update_uv() { - if ! detect_uv; then return; fi - log "UV: Updating UV tools" - - # Update uv itself - run_cmd "UV: Self-update" uv self update - - # Update all uv-managed tools - if [ "$DRY_RUN" = "0" ]; then - local tools - # Filter out binary lines (starting with dash) and keep only tool names - tools="$(uv tool list 2>/dev/null | grep -v '^-' | awk 'NF > 0 {print $1}' || true)" - if [ -n "$tools" ]; then - log "UV: Upgrading $(echo "$tools" | wc -l) installed tools" - while IFS= read -r tool; do - [ -z "$tool" ] && continue - run_cmd "UV: Upgrade $tool" uv tool upgrade "$tool" - done <<< "$tools" - fi - else - log "DRY-RUN: uv self update" - log "DRY-RUN: uv tool upgrade " - fi - - log "UV: Complete" -} - -update_pipx() { - if ! detect_pipx; then return; fi - log "Pipx: Updating all packages" - - run_cmd "Pipx: Upgrade pipx" pip3 install --user --upgrade pipx - run_cmd "Pipx: Upgrade all packages" pipx upgrade-all - - # Explicitly list important dev tools we track via pipx - local important_tools=("semgrep" "pre-commit" "coverage" "tox" "checkov" "black" "flake8" "pylint" "mypy") - for tool in "${important_tools[@]}"; do - if pipx list 2>/dev/null | grep -q "package $tool"; then - vlog "Pipx: $tool is installed" - fi - done - - log "Pipx: Complete" -} - -update_pip() { - if ! 
detect_pip; then return; fi - log "Pip: Updating user-installed packages" - - # Update pip itself - run_cmd "Pip: Self-update" python3 -m pip install --user --upgrade pip - - # List and upgrade user packages - if [ "$DRY_RUN" = "0" ]; then - local outdated - outdated="$(python3 -m pip list --user --outdated --format=json 2>/dev/null || echo '[]')" - if [ "$outdated" != "[]" ] && [ -n "$outdated" ]; then - vlog "Found outdated pip packages" - # Extract package names and upgrade them - echo "$outdated" | python3 -c " -import sys, json -try: - data = json.load(sys.stdin) - for pkg in data: - print(pkg['name']) -except: - pass -" | while IFS= read -r pkg; do - [ -z "$pkg" ] && continue - run_cmd "Pip: Upgrade $pkg" python3 -m pip install --user --upgrade "$pkg" - done - fi - else - log "DRY-RUN: pip list --outdated and upgrade packages" - fi - - log "Pip: Complete" -} - -update_npm() { - if ! detect_npm; then return; fi - log "NPM: Updating global packages" - - # Update npm itself - run_cmd "NPM: Self-update" npm install -g npm@latest - - # Update all global packages - run_cmd "NPM: Upgrade global packages" npm update -g - - log "NPM: Complete" -} - -update_pnpm() { - if ! detect_pnpm; then return; fi - log "PNPM: Updating global packages" - - # Update pnpm itself via corepack if available - if command -v corepack >/dev/null 2>&1; then - run_cmd "PNPM: Update via corepack" corepack prepare pnpm@latest --activate - else - run_cmd "PNPM: Self-update" npm install -g pnpm@latest - fi - - # Update global packages - run_cmd "PNPM: Upgrade global packages" pnpm update -g - - log "PNPM: Complete" -} - -update_yarn() { - if ! detect_yarn; then return; fi - log "Yarn: Updating global packages" - - # Update yarn itself via corepack if available - if command -v corepack >/dev/null 2>&1; then - run_cmd "Yarn: Update via corepack" corepack prepare yarn@stable --activate - else - run_cmd "Yarn: Self-update" npm install -g yarn@latest - fi - - # Yarn doesn't have a built-in global package upgrade command - # Users typically manage this per-project - vlog "Yarn: Global package upgrades managed per-project" - - log "Yarn: Complete" -} - -update_go() { - if ! detect_go; then return; fi - log "Go: Updating installed binaries" - - # Go doesn't have a built-in package manager for updating binaries - # List common go-installed tools and suggest updating - local gobin gopath - gobin="$(go env GOBIN 2>/dev/null || true)" - gopath="$(go env GOPATH 2>/dev/null || true)" - - if [ -z "$gobin" ] && [ -n "$gopath" ]; then - gobin="$gopath/bin" - fi - - if [ -n "$gobin" ] && [ -d "$gobin" ]; then - vlog "Go: Binaries in $gobin (manual upgrade needed: go install @latest)" - log "Go: Update via go install @latest for each tool" - fi - - log "Go: Manual updates required" -} - -update_gem() { - if ! detect_gem; then return; fi - log "RubyGems: Updating all gems" - - run_cmd "Gem: Update system" gem update --system - run_cmd "Gem: Upgrade all gems" gem update - run_cmd "Gem: Cleanup old versions" gem cleanup - - log "RubyGems: Complete" -} - -update_composer() { - if ! detect_composer; then return; fi - log "Composer: Updating" - - run_cmd "Composer: Self-update" composer self-update - run_cmd "Composer: Update global packages" composer global update - - log "Composer: Complete" -} - -update_poetry() { - if ! 
detect_poetry; then return; fi - log "Poetry: Updating" - - # Try poetry self update first (Poetry 1.2+) - if poetry self update --help >/dev/null 2>&1; then - run_cmd "Poetry: Self-update" poetry self update - # Fallback to uv tool upgrade if poetry is managed by uv - elif command -v uv >/dev/null 2>&1 && uv tool list 2>/dev/null | grep -q "^poetry"; then - run_cmd "Poetry: Upgrade via UV" uv tool upgrade poetry - # Fallback to pipx upgrade if poetry is managed by pipx - elif command -v pipx >/dev/null 2>&1 && pipx list 2>/dev/null | grep -q "poetry"; then - run_cmd "Poetry: Upgrade via pipx" pipx upgrade poetry - else - vlog "Poetry: No automatic update method available" - log "Poetry: Manual update required (see https://python-poetry.org/docs/#updating-poetry)" - fi - - log "Poetry: Complete" -} - -update_gcloud() { - if ! detect_gcloud; then return; fi - log "Google Cloud SDK: Updating components" - - run_cmd "gcloud: Update all components" gcloud components update --quiet - - log "Google Cloud SDK: Complete" -} - -update_az() { - if ! detect_az; then return; fi - log "Azure CLI: Updating" - - # Azure CLI update method depends on installation type - if command -v apt-get >/dev/null 2>&1 && dpkg -l azure-cli >/dev/null 2>&1; then - # Installed via apt - run_cmd "Azure CLI: Update via apt" sudo apt-get update && sudo apt-get install --only-upgrade -y azure-cli - elif command -v brew >/dev/null 2>&1 && brew list azure-cli >/dev/null 2>&1; then - # Installed via brew - run_cmd "Azure CLI: Update via brew" brew upgrade azure-cli - else - # Try az upgrade command (available in az CLI 2.11.0+) - run_cmd "Azure CLI: Self-upgrade" az upgrade --yes - fi - - log "Azure CLI: Complete" -} - -# ============================================================================ -# Main Orchestration -# ============================================================================ - -get_manager_stats() { - local mgr="$1" - local location version pkg_count - - case "$mgr" in - apt) - location="$(command -v apt-get 2>/dev/null || echo "N/A")" - version="$(apt-get --version 2>/dev/null | head -n1 | awk '{print $2}' || echo "unknown")" - pkg_count="$(dpkg -l 2>/dev/null | grep '^ii' | wc -l | tr -d '[:space:]' || echo "0")" - ;; - brew) - location="$(command -v brew 2>/dev/null || echo "N/A")" - version="$(brew --version 2>/dev/null | head -n1 | awk '{print $2}' || echo "unknown")" - pkg_count="$(brew list --formula 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - snap) - location="$(command -v snap 2>/dev/null || echo "N/A")" - version="$(snap version 2>/dev/null | grep '^snap' | awk '{print $2}' || echo "unknown")" - pkg_count="$(snap list 2>/dev/null | tail -n +2 | wc -l | tr -d '[:space:]' || echo "0")" - ;; - flatpak) - location="$(command -v flatpak 2>/dev/null || echo "N/A")" - version="$(flatpak --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(flatpak list --app 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - cargo) - location="$(command -v cargo 2>/dev/null || echo "N/A")" - version="$(cargo --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(cargo install --list 2>/dev/null | grep -c '^[^ ]' | tr -d '[:space:]' || echo "0")" - ;; - rustup) - location="$(command -v rustup 2>/dev/null || echo "N/A")" - version="$(rustup --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(rustup toolchain list 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - uv) - location="$(command -v uv 2>/dev/null || echo 
"N/A")" - version="$(uv --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(uv tool list 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - pipx) - location="$(command -v pipx 2>/dev/null || echo "N/A")" - version="$(pipx --version 2>/dev/null || echo "unknown")" - pkg_count="$(pipx list --short 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - pip) - location="$(command -v pip3 2>/dev/null || command -v pip 2>/dev/null || echo "N/A")" - version="$(/usr/bin/python3 -m pip --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(/usr/bin/python3 -m pip list --user 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]' || echo "0")" - pkg_count="${pkg_count:-0}" - ;; - npm) - location="$(command -v npm 2>/dev/null || echo "N/A")" - version="$(npm --version 2>/dev/null || echo "unknown")" - pkg_count="$(npm list -g --depth=0 2>/dev/null | grep -c '^[├└]' | tr -d '[:space:]' || echo "0")" - ;; - pnpm) - location="$(command -v pnpm 2>/dev/null || echo "N/A")" - version="$(pnpm --version 2>/dev/null || echo "unknown")" - pkg_count="$(pnpm list -g --depth=0 2>/dev/null | grep -c '^[├└]' | tr -d '[:space:]' || echo "0")" - ;; - yarn) - location="$(command -v yarn 2>/dev/null || echo "N/A")" - version="$(yarn --version 2>/dev/null || echo "unknown")" - pkg_count="$(yarn global list 2>/dev/null | grep -c '^info' | tr -d '[:space:]' || echo "0")" - ;; - go) - location="$(command -v go 2>/dev/null || echo "N/A")" - version="$(go version 2>/dev/null | awk '{print $3}' | sed 's/go//' || echo "unknown")" - local gobin="$(go env GOBIN 2>/dev/null || echo "$(go env GOPATH 2>/dev/null)/bin")" - pkg_count="$([ -d "$gobin" ] && ls -1 "$gobin" 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - gem) - location="$(command -v gem 2>/dev/null || echo "N/A")" - version="$(gem --version 2>/dev/null || echo "unknown")" - pkg_count="$(gem list --no-versions 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - composer) - location="$(command -v composer 2>/dev/null || echo "N/A")" - version="$(composer --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "unknown")" - pkg_count="$(composer global show 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - poetry) - location="$(command -v poetry 2>/dev/null || echo "N/A")" - version="$(poetry --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "unknown")" - pkg_count="N/A" - ;; - conda) - location="$(command -v conda 2>/dev/null || echo "N/A")" - version="$(conda --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(conda list 2>/dev/null | tail -n +4 | wc -l | tr -d '[:space:]' || echo "0")" - ;; - mamba) - location="$(command -v mamba 2>/dev/null || echo "N/A")" - version="$(mamba --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(mamba list 2>/dev/null | tail -n +4 | wc -l | tr -d '[:space:]' || echo "0")" - ;; - bundler) - location="$(command -v bundle 2>/dev/null || echo "N/A")" - version="$(bundle --version 2>/dev/null | awk '{print $3}' || echo "unknown")" - pkg_count="N/A" - ;; - jspm) - location="$(command -v jspm 2>/dev/null || echo "N/A")" - version="$(jspm --version 2>/dev/null || echo "unknown")" - pkg_count="N/A" - ;; - nuget) - if command -v nuget >/dev/null 2>&1; then - location="$(command -v nuget)" - version="$(nuget help 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "unknown")" - else - location="$(command -v dotnet 2>/dev/null || echo "N/A")" - 
version="$(dotnet --version 2>/dev/null || echo "unknown")" - fi - pkg_count="N/A" - ;; - gcloud) - location="$(command -v gcloud 2>/dev/null || echo "N/A")" - version="$(gcloud version 2>/dev/null | grep 'Google Cloud SDK' | awk '{print $4}' || echo "unknown")" - pkg_count="$(gcloud components list --filter='State.name:Installed' --format='value(id)' 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - ;; - az) - location="$(command -v az 2>/dev/null || echo "N/A")" - version="$(az version --output tsv 2>/dev/null | grep '^azure-cli' | awk '{print $2}' || echo "unknown")" - pkg_count="$(az extension list 2>/dev/null | grep -c '"name":' | tr -d '[:space:]' || echo "0")" - ;; - *) - location="unknown" - version="unknown" - pkg_count="0" - ;; - esac - - printf "%s|%s|%s" "$location" "$version" "$pkg_count" -} - -# Check if a package manager is outdated by querying the snapshot -check_manager_outdated() { - local mgr="$1" - local current_version="$2" - - # Skip if version is unknown - [ "$current_version" = "unknown" ] && return 1 - - # Use tools_snapshot.json directly (fast, no subprocess needed) - local snapshot_file="${CLI_AUDIT_SNAPSHOT_FILE:-tools_snapshot.json}" - - # Check if snapshot exists - [ ! -f "$snapshot_file" ] && return 1 - - # Extract status for this tool from JSON snapshot - local status - status="$(python3 -c " -import json, sys -try: - with open('$snapshot_file') as f: - data = json.load(f) - for tool in data.get('tools', []): - if tool.get('tool') == '$mgr': - print(tool.get('status', '')) - sys.exit(0) -except: - pass -" 2>/dev/null)" || status="" - - # Check if status is OUTDATED - if [ "$status" = "OUTDATED" ]; then - return 0 # Outdated - else - return 1 # Up-to-date or unknown - fi -} - -# Show update hint for a specific manager -show_manager_update_hint() { - local mgr="$1" - - case "$mgr" in - apt) - echo " • $mgr: Run 'sudo apt-get update && sudo apt-get upgrade -y' or 'make auto-update-system'" - ;; - snap) - echo " • $mgr: Run 'sudo snap refresh' or 'make auto-update-system'" - ;; - brew) - echo " • $mgr: Run 'brew update && brew upgrade' or 'make auto-update'" - ;; - flatpak) - echo " • $mgr: Run 'flatpak update -y' or 'make auto-update'" - ;; - cargo) - echo " • $mgr: Run 'cargo install cargo-update && cargo install-update -a' or 'make auto-update'" - ;; - rustup) - echo " • $mgr: Run 'rustup update' or 'make auto-update'" - ;; - uv) - echo " • $mgr: Run 'uv self update' or './scripts/auto_update.sh uv'" - ;; - pipx) - echo " • $mgr: Run 'pip3 install --user --upgrade pipx' or './scripts/auto_update.sh pipx'" - ;; - pip) - echo " • $mgr: Run 'python3 -m pip install --user --upgrade pip' or './scripts/auto_update.sh pip'" - ;; - npm) - echo " • $mgr: Run 'npm install -g npm@latest' or './scripts/auto_update.sh npm'" - ;; - pnpm) - echo " • $mgr: Run 'npm install -g pnpm@latest' or './scripts/auto_update.sh pnpm'" - ;; - yarn) - echo " • $mgr: Run 'npm install -g yarn@latest' or './scripts/auto_update.sh yarn'" - ;; - go) - echo " • $mgr: Download latest from https://go.dev/dl/ and install" - ;; - gem) - echo " • $mgr: Run 'gem update --system' or './scripts/auto_update.sh gem'" - ;; - composer) - echo " • $mgr: Run 'composer self-update'" - ;; - poetry) - echo " • $mgr: Run 'poetry self update' or 'uv tool upgrade poetry'" - ;; - conda) - echo " • $mgr: Run 'conda update -n base conda'" - ;; - mamba) - echo " • $mgr: Run 'conda update -n base mamba' or 'mamba update mamba'" - ;; - gcloud) - echo " • $mgr: Run 'gcloud components update' or 
'./scripts/auto_update.sh gcloud'" - ;; - az) - echo " • $mgr: Run 'az upgrade' or './scripts/auto_update.sh az'" - ;; - *) - echo " • $mgr: Check official documentation for update instructions" - ;; - esac -} - -show_detected() { - log "Detecting installed package managers with scope information..." - echo "" - - local all_managers=(apt snap brew flatpak cargo rustup uv pipx pip npm pnpm yarn go gem composer poetry conda mamba bundler jspm nuget gcloud az) - local found_managers=0 - local found_scopes=0 - local outdated_managers=() - - # First pass: detect which managers are installed - local managers=() - for mgr in "${all_managers[@]}"; do - if command -v "$mgr" >/dev/null 2>&1 || \ - ([ "$mgr" = "apt" ] && command -v apt-get >/dev/null 2>&1) || \ - ([ "$mgr" = "bundler" ] && command -v bundle >/dev/null 2>&1) || \ - ([ "$mgr" = "nuget" ] && command -v dotnet >/dev/null 2>&1); then - managers+=("$mgr") - found_managers=$((found_managers + 1)) - fi - done - - if [ $found_managers -eq 0 ]; then - echo "No package managers detected." - return - fi - - echo "Found $found_managers package managers:" - echo "" - printf "%-12s %-8s %-8s %-8s %s\n" "MANAGER" "VERSION" "SCOPE" "PACKAGES" "LOCATION" - printf "%-12s %-8s %-8s %-8s %s\n" "-------" "-------" "-----" "--------" "--------" - - # Second pass: display one line per scope and check for updates - for mgr in "${managers[@]}"; do - # Get scopes for this manager - local scopes - scopes="$(get_manager_scopes "$mgr")" - - # Skip if no scopes detected - [ -z "$scopes" ] && continue - - # Get version and location once (reuse for all scopes) - local version location stats - stats="$(get_manager_stats "$mgr")" - IFS='|' read -r location version _ <<< "$stats" - - # Check if manager itself is outdated (only once per manager) - # Wrap in subshell to prevent pipefail from exiting on check failure - if ( check_manager_outdated "$mgr" "$version" ); then - outdated_managers+=("$mgr") - fi - - # Split scopes and print one line per scope - IFS=',' read -ra SCOPE_ARRAY <<< "$scopes" - for scope in "${SCOPE_ARRAY[@]}"; do - local pkg_count - pkg_count="$(get_manager_packages_by_scope "$mgr" "$scope")" - - printf "%-12s %-8s %-8s %-8s %s\n" "$mgr" "$version" "$scope" "$pkg_count" "$location" - found_scopes=$((found_scopes + 1)) - done - done - - echo "" - log "$found_scopes total scopes across $found_managers managers" - echo "" - - # Show outdated package managers if any - if [ ${#outdated_managers[@]} -gt 0 ]; then - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - echo "⚠️ Outdated Package Managers Detected" - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - echo "" - echo "The following package managers have updates available:" - echo "" - for mgr in "${outdated_managers[@]}"; do - show_manager_update_hint "$mgr" - done - echo "" - echo "Run 'make auto-update' or './scripts/auto_update.sh update' to update all." - echo "" - fi -} - -confirm_project_update() { - local mgr="$1" - local project_file="$2" - - echo "" - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - echo "📦 PROJECT SCOPE UPDATE: $mgr" - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - echo "" - echo "Project: $(pwd)" - echo "File: $project_file" - echo "" - echo "This will update project dependencies" - echo "" - echo "⚠️ WARNING: This may break your project if dependencies are" - echo " version-pinned or have breaking changes" - echo "" - - read -p "Continue with project update? 
[y/N] " -n 1 -r - echo "" - - if [[ $REPLY =~ ^[Yy]$ ]]; then - return 0 - else - log "$mgr: Project update cancelled" - return 1 - fi -} - -run_all_updates() { - # Determine target scope - local target_scope="${SCOPE:-$(determine_default_scope)}" - - log "Starting auto-update for scope: $target_scope" - echo "" - - # System scope updates - if [ "$target_scope" = "system" ] || [ "$target_scope" = "all" ]; then - if [ "$SKIP_SYSTEM" = "0" ]; then - update_apt - update_snap - # Check if system-scoped - [ "$(get_brew_scopes)" = "system" ] && update_brew - [[ "$(get_flatpak_scopes)" == *"system"* ]] && update_flatpak - [[ "$(get_gem_scopes)" == *"system"* ]] && update_gem - else - log "Skipping system package managers (SKIP_SYSTEM=1)" - fi - fi - - # User scope updates - if [ "$target_scope" = "user" ] || [ "$target_scope" = "all" ]; then - # User-only managers - update_cargo - update_uv - update_pipx - update_pip - update_npm - update_pnpm - update_yarn - update_go - update_composer - update_poetry - update_gcloud - - # Check if user-scoped (conditional updates based on scope detection) - [ "$(get_brew_scopes)" = "user" ] && update_brew - [[ "$(get_flatpak_scopes)" == *"user"* ]] && update_flatpak - [[ "$(get_gem_scopes)" == *"user"* ]] && update_gem - [ "$(get_az_scopes)" = "user" ] && update_az - fi - - # Project scope updates (require confirmation) - if [ "$target_scope" = "project" ]; then - log "Project scope update - checking for project dependencies..." - - # NPM/PNPM/Yarn - if [ -f "./package.json" ]; then - if command -v npm >/dev/null 2>&1 && confirm_project_update "npm" "./package.json"; then - run_cmd "NPM: Update project dependencies" npm update - fi - fi - - # Pip/UV - if [ -f "./pyproject.toml" ] || [ -d "./.venv" ]; then - if command -v pip3 >/dev/null 2>&1 && [ -n "${VIRTUAL_ENV:-}" ] && confirm_project_update "pip" "./.venv"; then - run_cmd "Pip: Update project dependencies" python3 -m pip install --upgrade -r requirements.txt 2>/dev/null || true - fi - fi - - # Bundler/Gem - if [ -f "./Gemfile" ] && command -v bundle >/dev/null 2>&1 && confirm_project_update "bundler" "./Gemfile"; then - run_cmd "Bundler: Update project dependencies" bundle update - fi - - # Composer - if [ -f "./composer.json" ] && command -v composer >/dev/null 2>&1 && confirm_project_update "composer" "./composer.json"; then - run_cmd "Composer: Update project dependencies" composer update - fi - fi - - echo "" - log "Auto-update complete for scope: $target_scope" -} - -# ============================================================================ -# CLI Interface -# ============================================================================ - -usage() { - cat < Set update scope: system, user, project, all - (Default: auto-detect based on current directory) - -Scope Behavior: - - In project directory (has package.json, Gemfile, etc.): defaults to 'project' - - Outside project directory: defaults to 'user' - - 'system' scope: Updates system-wide packages (requires sudo) - - 'all' scope: Updates system + user (skips project for safety) - - Project updates always require explicit confirmation - -Examples: - $0 detect # List detected managers with scopes - $0 update # Update based on directory context - SCOPE=user $0 update # Update only user-scoped packages - SCOPE=system $0 update # Update only system-scoped packages - SCOPE=project $0 update # Update project dependencies (with confirmation) - SCOPE=all $0 update # Update system + user scopes - $0 --dry-run update # Show what would be updated - $0 cargo # 
Update only Cargo packages - DRY_RUN=1 SCOPE=user $0 update # Dry-run user-scope updates - -EOF -} - -# Parse options -while [[ $# -gt 0 ]]; do - case "$1" in - --dry-run) - DRY_RUN=1 - shift - ;; - --verbose) - VERBOSE=1 - shift - ;; - --skip-system) - SKIP_SYSTEM=1 - shift - ;; - -h|--help) - usage - exit 0 - ;; - detect) - show_detected - exit 0 - ;; - update) - run_all_updates - exit 0 - ;; - apt) - update_apt - exit 0 - ;; - brew) - update_brew - exit 0 - ;; - cargo) - update_cargo - exit 0 - ;; - uv) - update_uv - exit 0 - ;; - pipx) - update_pipx - exit 0 - ;; - pip) - update_pip - exit 0 - ;; - npm) - update_npm - exit 0 - ;; - pnpm) - update_pnpm - exit 0 - ;; - yarn) - update_yarn - exit 0 - ;; - go) - update_go - exit 0 - ;; - gem) - update_gem - exit 0 - ;; - snap) - update_snap - exit 0 - ;; - flatpak) - update_flatpak - exit 0 - ;; - gcloud) - update_gcloud - exit 0 - ;; - az) - update_az - exit 0 - ;; - *) - echo "Error: Unknown command '$1'" >&2 - echo "Run '$0 --help' for usage information." >&2 - exit 1 - ;; - esac -done - -# Default: show detected managers -show_detected diff --git a/scripts/auto_update_scope_prototype.sh b/scripts/auto_update_scope_prototype.sh deleted file mode 100755 index 7a9c4e6..0000000 --- a/scripts/auto_update_scope_prototype.sh +++ /dev/null @@ -1,538 +0,0 @@ -#!/usr/bin/env bash -# Prototype: Package Manager Scope Detection System -# This is a working proof-of-concept for scope distinction (system/user/project) - -set -euo pipefail - -# ============================================================================ -# Core Scope Detection Functions -# ============================================================================ - -get_manager_scope() { - local mgr="$1" - local scopes="" - - case "$mgr" in - apt) - # APT is always system-level - scopes="system" - ;; - - brew) - # Detect Homebrew installation type by checking installation directory - if [ -d "/home/linuxbrew" ] || [ -d "/opt/homebrew" ]; then - scopes="system" - elif [ -d "$HOME/.linuxbrew" ] || [ -d "$HOME/.brew" ]; then - scopes="user" - else - # Fallback: check brew --prefix ownership - local brew_prefix="$(brew --prefix 2>/dev/null || echo "")" - if [ -n "$brew_prefix" ]; then - if [[ "$brew_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - fi - ;; - - snap) - # Snap is always system-level - scopes="system" - ;; - - flatpak) - # Flatpak supports both system and user scopes - local system_count user_count - system_count="$(flatpak list --system --app 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - user_count="$(flatpak list --user --app 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - - if [ "${system_count:-0}" -gt 0 ] && [ "${user_count:-0}" -gt 0 ]; then - scopes="system,user" - elif [ "${system_count:-0}" -gt 0 ]; then - scopes="system" - elif [ "${user_count:-0}" -gt 0 ]; then - scopes="user" - fi - ;; - - cargo) - # Cargo is always user-scoped (installs to ~/.cargo) - scopes="user" - ;; - - rustup) - # Rustup is always user-scoped (manages user toolchains) - scopes="user" - ;; - - uv) - # UV tools are user-scoped, but can manage project venvs - scopes="user" - if [ -d "./.venv" ]; then - scopes="user,project" - fi - ;; - - pipx) - # Pipx is always user-scoped - scopes="user" - ;; - - pip) - # PIP supports user and project (venv) scopes - scopes="user" - # Check for active venv or project venv directory - if [ -n "${VIRTUAL_ENV:-}" ] || [ -d "./.venv" ] || [ -d "./venv" ]; then - scopes="user,project" - fi - ;; - - npm) - # NPM 
supports all three scopes - local npm_prefix="$(npm config get prefix 2>/dev/null || echo "")" - - if [ -n "$npm_prefix" ]; then - if [[ "$npm_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - # Check for project scope - if [ -f "./package.json" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - pnpm) - # PNPM similar to NPM - local pnpm_prefix="$(pnpm config get prefix 2>/dev/null || echo "")" - - if [ -n "$pnpm_prefix" ]; then - if [[ "$pnpm_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - if [ -f "./package.json" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - yarn) - # Yarn similar to NPM - local yarn_prefix="$(yarn global dir 2>/dev/null | head -n1 || echo "")" - - if [ -n "$yarn_prefix" ]; then - if [[ "$yarn_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - if [ -f "./package.json" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - go) - # Go is typically user-scoped (GOPATH/GOBIN in home directory) - scopes="user" - ;; - - gem) - # RubyGems can be system or user - local gem_dir="$(gem environment gemdir 2>/dev/null || echo "")" - - if [ -n "$gem_dir" ]; then - if [[ "$gem_dir" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - # Check for project scope (Gemfile) - if [ -f "./Gemfile" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - composer) - # Composer: global (user) + project - scopes="user" - if [ -f "./composer.json" ]; then - scopes="user,project" - fi - ;; - - poetry) - # Poetry is project-only (manages project venvs) - scopes="project" - ;; - - conda) - # Conda: can have base (system/user) + environments (user/project) - local conda_prefix="${CONDA_PREFIX:-}" - if [ -n "$conda_prefix" ]; then - if [[ "$conda_prefix" == *"/base"* ]]; then - scopes="system" - else - scopes="user" - fi - else - scopes="user" - fi - - # Check for environment.yml (project) - if [ -f "./environment.yml" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - mamba) - # Mamba same as conda - local mamba_prefix="${MAMBA_PREFIX:-${CONDA_PREFIX:-}}" - if [ -n "$mamba_prefix" ]; then - if [[ "$mamba_prefix" == *"/base"* ]]; then - scopes="system" - else - scopes="user" - fi - else - scopes="user" - fi - - if [ -f "./environment.yml" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - bundler) - # Bundler is project-only (Gemfile) - if [ -f "./Gemfile" ]; then - scopes="project" - fi - ;; - - jspm) - # JSPM: global (user) + project - scopes="user" - if [ -f "./package.json" ]; then - scopes="user,project" - fi - ;; - - nuget) - # NuGet: global (system/user) + project - if command -v nuget >/dev/null 2>&1; then - scopes="system" - elif command -v dotnet >/dev/null 2>&1; then - scopes="user" - fi - - if [ -f "./*.csproj" ] 2>/dev/null || [ -f "./*.sln" ] 2>/dev/null; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - ;; - - gcloud) - # Google Cloud SDK is typically user-scoped - scopes="user" - ;; - - az) - # Azure CLI can be system or user - if command -v apt-get >/dev/null 2>&1 && dpkg -l azure-cli >/dev/null 2>&1; then - scopes="system" - else - scopes="user" - fi - ;; - - *) - scopes="unknown" - ;; - esac - - echo "$scopes" -} - -# ============================================================================ -# Enhanced Stats 
with Scope-Specific Counts -# ============================================================================ - -get_scope_details() { - local mgr="$1" - local scope_details="" - - case "$mgr" in - flatpak) - local sys_count usr_count - sys_count="$(flatpak list --system --app 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - usr_count="$(flatpak list --user --app 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - if [ "${sys_count:-0}" -gt 0 ] || [ "${usr_count:-0}" -gt 0 ]; then - scope_details="sys:$sys_count,usr:$usr_count" - fi - ;; - - npm) - local global_count="$(npm list -g --depth=0 2>/dev/null | grep -c '^[├└]' || echo "0")" - scope_details="global:$global_count" - - if [ -f "./package.json" ]; then - local project_count="$(npm list --depth=0 2>/dev/null | grep -c '^[├└]' || echo "0")" - scope_details="$scope_details,project:$project_count" - fi - ;; - - pip) - local user_count venv_count - user_count="$(python3 -m pip list --user 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]')" - user_count="${user_count:-0}" - scope_details="user:$user_count" - - if [ -n "${VIRTUAL_ENV:-}" ]; then - venv_count="$(python3 -m pip list 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]')" - venv_count="${venv_count:-0}" - scope_details="$scope_details,venv:$venv_count" - fi - ;; - - uv) - local tools_count - tools_count="$(uv tool list 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - scope_details="tools:$tools_count" - ;; - - gem) - local gem_count bundle_count - gem_count="$(gem list --no-versions 2>/dev/null | wc -l | tr -d '[:space:]' || echo "0")" - scope_details="gems:$gem_count" - - if [ -f "./Gemfile" ]; then - bundle_count="$(bundle list 2>/dev/null | grep -c '^\s*\*' || echo "0")" - scope_details="$scope_details,bundle:$bundle_count" - fi - ;; - - *) - scope_details="" - ;; - esac - - echo "$scope_details" -} - -get_manager_stats_with_scope() { - local mgr="$1" - local location version pkg_count scope scope_details - - # Detect location - case "$mgr" in - apt) - location="$(command -v apt-get 2>/dev/null || echo "N/A")" - version="$(apt-get --version 2>/dev/null | head -n1 | awk '{print $2}' || echo "unknown")" - pkg_count="$(dpkg -l 2>/dev/null | grep '^ii' | wc -l || echo "0")" - ;; - brew) - location="$(command -v brew 2>/dev/null || echo "N/A")" - version="$(brew --version 2>/dev/null | head -n1 | awk '{print $2}' || echo "unknown")" - pkg_count="$(brew list --formula 2>/dev/null | wc -l || echo "0")" - ;; - npm) - location="$(command -v npm 2>/dev/null || echo "N/A")" - version="$(npm --version 2>/dev/null || echo "unknown")" - pkg_count="$(npm list -g --depth=0 2>/dev/null | grep -c '^[├└]' || echo "0")" - ;; - pip) - location="$(command -v pip3 2>/dev/null || command -v pip 2>/dev/null || echo "N/A")" - version="$(python3 -m pip --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(python3 -m pip list --user 2>/dev/null | tail -n +3 | wc -l || echo "0")" - ;; - cargo) - location="$(command -v cargo 2>/dev/null || echo "N/A")" - version="$(cargo --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(cargo install --list 2>/dev/null | grep -c '^[^ ]' || echo "0")" - ;; - flatpak) - location="$(command -v flatpak 2>/dev/null || echo "N/A")" - version="$(flatpak --version 2>/dev/null | awk '{print $2}' || echo "unknown")" - pkg_count="$(flatpak list --app 2>/dev/null | wc -l || echo "0")" - ;; - *) - location="unknown" - version="unknown" - pkg_count="0" - ;; - esac - - # Get scope - scope="$(get_manager_scope "$mgr")" - 
- # Get scope-specific details - scope_details="$(get_scope_details "$mgr")" - - # Build display string for packages - local pkg_display="$pkg_count" - if [ -n "$scope_details" ]; then - pkg_display="$pkg_count ($scope_details)" - fi - - printf "%s|%s|%s|%s|%s" "$location" "$version" "$pkg_display" "$scope" "$scope_details" -} - -# ============================================================================ -# Display Functions -# ============================================================================ - -show_scope_detection() { - echo "Package Manager Scope Detection (Prototype)" - echo "============================================" - echo "" - - local managers=("apt" "brew" "npm" "pip" "cargo" "flatpak") - - echo "Detected Package Managers:" - echo "" - printf "%-12s %-8s %-12s %-20s %s\n" "MANAGER" "VERSION" "SCOPE" "PACKAGES" "LOCATION" - printf "%-12s %-8s %-12s %-20s %s\n" "-------" "-------" "-----" "--------" "--------" - - for mgr in "${managers[@]}"; do - # Check if manager exists - if ! command -v "$mgr" >/dev/null 2>&1 && ! command -v "${mgr}-get" >/dev/null 2>&1 && ! command -v "apt-get" >/dev/null 2>&1; then - continue - fi - - local stats location version pkg_display scope scope_details - stats="$(get_manager_stats_with_scope "$mgr")" - IFS='|' read -r location version pkg_display scope scope_details <<< "$stats" - - printf "%-12s %-8s %-12s %-20s %s\n" "$mgr" "$version" "$scope" "$pkg_display" "$location" - done - echo "" -} - -show_scope_analysis() { - echo "" - echo "Scope Analysis:" - echo "===============" - echo "" - - local managers=("apt" "brew" "npm" "pip" "cargo" "flatpak") - - for mgr in "${managers[@]}"; do - if ! command -v "$mgr" >/dev/null 2>&1 && ! command -v "${mgr}-get" >/dev/null 2>&1 && ! command -v "apt-get" >/dev/null 2>&1; then - continue - fi - - local scope="$(get_manager_scope "$mgr")" - local scope_details="$(get_scope_details "$mgr")" - - echo "[$mgr]" - echo " Scope: $scope" - - if [ -n "$scope_details" ]; then - echo " Details: $scope_details" - fi - - # Provide context-specific insights - if [[ "$scope" == *"project"* ]]; then - echo " ⚠️ Project scope detected - updates should be done manually in project context" - fi - - if [[ "$scope" == *"system"* ]]; then - echo " 🔒 System scope - updates may require sudo/administrator access" - fi - - echo "" - done -} - -# ============================================================================ -# Scope-Aware Update Example -# ============================================================================ - -update_npm_scope_aware() { - echo "Example: Scope-Aware NPM Update" - echo "================================" - echo "" - - local scope="$(get_manager_scope "npm")" - echo "Detected NPM scope: $scope" - echo "" - - if [[ "$scope" == *"user"* ]] || [[ "$scope" == *"system"* ]]; then - echo "Would update: Global NPM packages" - echo " Command: npm install -g npm@latest" - echo " Command: npm update -g" - fi - - if [[ "$scope" == *"project"* ]]; then - echo "" - echo "📦 Project scope detected (./package.json)" - echo " To update project dependencies, run:" - echo " - npm update (update within version ranges)" - echo " - npm outdated (check for newer versions)" - echo " - npx npm-check-updates -u (update to latest)" - fi - echo "" -} - -update_flatpak_scope_aware() { - echo "Example: Scope-Aware Flatpak Update" - echo "====================================" - echo "" - - local scope="$(get_manager_scope "flatpak")" - echo "Detected Flatpak scope: $scope" - echo "" - - if [[ "$scope" == *"system"* 
]]; then - echo "Would update: System Flatpak applications" - echo " Command: flatpak update --system -y" - fi - - if [[ "$scope" == *"user"* ]]; then - echo "Would update: User Flatpak applications" - echo " Command: flatpak update --user -y" - fi - echo "" -} - -# ============================================================================ -# Main -# ============================================================================ - -main() { - show_scope_detection - show_scope_analysis - - echo "Scope-Aware Update Examples:" - echo "============================" - echo "" - - if command -v npm >/dev/null 2>&1; then - update_npm_scope_aware - fi - - if command -v flatpak >/dev/null 2>&1; then - update_flatpak_scope_aware - fi - - echo "Prototype Test Complete!" - echo "" - echo "Next Steps:" - echo " 1. Review scope detection accuracy" - echo " 2. Test edge cases (nvm, virtual envs, etc.)" - echo " 3. Integrate into main auto_update.sh" - echo " 4. Add --scope flag for selective updates" -} - -main "$@" diff --git a/scripts/check_node_package_managers.sh b/scripts/check_node_package_managers.sh deleted file mode 100755 index d3914ca..0000000 --- a/scripts/check_node_package_managers.sh +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env bash -# Check for multiple Node.js package managers and recommend consolidation -set -euo pipefail - -# Colors for output -RED='\033[0;31m' -YELLOW='\033[1;33m' -GREEN='\033[0;32m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Detect installed package managers -has_npm=false -has_yarn=false -has_pnpm=false -has_bun=false - -if command -v npm >/dev/null 2>&1; then - has_npm=true -fi - -if command -v yarn >/dev/null 2>&1; then - has_yarn=true -fi - -if command -v pnpm >/dev/null 2>&1; then - has_pnpm=true -fi - -if command -v bun >/dev/null 2>&1; then - has_bun=true -fi - -# Count how many are installed -count=0 -managers=() -if $has_npm; then count=$((count + 1)); managers+=("npm"); fi -if $has_yarn; then count=$((count + 1)); managers+=("yarn"); fi -if $has_pnpm; then count=$((count + 1)); managers+=("pnpm"); fi -if $has_bun; then count=$((count + 1)); managers+=("bun"); fi - -# If only one is installed, check if it's the recommended one -if [ "$count" -eq 1 ]; then - if $has_pnpm; then - echo -e "${GREEN}✓ Only pnpm is installed - recommended configuration!${NC}" - echo " pnpm is fast, disk-efficient, and strictly follows package.json" - exit 0 - elif $has_bun; then - echo -e "${GREEN}✓ Only bun is installed - excellent choice!${NC}" - echo " bun is extremely fast and includes runtime + bundler" - exit 0 - elif $has_npm; then - echo -e "${YELLOW}ℹ npm is installed (bundled with Node.js)${NC}" - echo "" - echo -e "${BLUE}Recommendation: Consider pnpm for better performance and disk efficiency${NC}" - echo "" - echo "Why pnpm?" 
- echo " • 2x faster than npm" - echo " • Saves disk space with content-addressable storage" - echo " • Strict dependency resolution (no phantom dependencies)" - echo " • Drop-in replacement for npm" - echo "" - echo "Install pnpm:" - echo " npm install -g pnpm" - echo "" - exit 0 - else - echo -e "${GREEN}✓ Only ${managers[0]} is installed${NC}" - exit 0 - fi -fi - -# If multiple managers are installed, warn -if [ "$count" -gt 1 ]; then - echo -e "${YELLOW}⚠ Multiple Node.js package managers detected:${NC}" - for mgr in "${managers[@]}"; do - version="" - case $mgr in - npm) version=$($mgr --version 2>/dev/null || echo "unknown") ;; - yarn) version=$($mgr --version 2>/dev/null || echo "unknown") ;; - pnpm) version=$($mgr --version 2>/dev/null || echo "unknown") ;; - bun) version=$($mgr --version 2>/dev/null || echo "unknown") ;; - esac - echo " - $mgr ($version)" - done - echo "" - echo -e "${RED}Problems with multiple managers:${NC}" - echo " • Lock file conflicts (package-lock.json vs pnpm-lock.yaml vs yarn.lock)" - echo " • Different dependency resolution algorithms" - echo " • Wasted disk space from multiple caches" - echo " • Team confusion about which manager to use" - echo "" - echo -e "${BLUE}Recommendation: Choose ONE package manager per project${NC}" - echo "" - echo "Recommended priority:" - echo " 1. pnpm - Fast, disk-efficient, strict (recommended for most projects)" - echo " 2. bun - Extremely fast, includes runtime (good for new projects)" - echo " 3. npm - Default, bundled with Node.js (keep for compatibility)" - echo " 4. yarn - Classic choice (consider migrating to pnpm or bun)" - echo "" - echo "Project-specific guidance:" - echo " • Check for existing lock files to see what your project uses" - echo " • Use .npmrc or package.json 'packageManager' field to enforce choice" - echo " • Consider 'pnpm' as default for new projects" - echo "" - echo "Note: npm comes bundled with Node.js and should typically be kept installed." - echo "You can use other managers alongside npm, but choose ONE for each project." - echo "" - - exit 1 -fi - -# If none are installed (unlikely if Node.js is installed) -echo -e "${RED}⚠ No Node.js package manager found${NC}" -echo "Install Node.js to get npm, then optionally install pnpm:" -echo " nvm install --lts" -echo " npm install -g pnpm" -exit 1 diff --git a/scripts/check_python_package_managers.sh b/scripts/check_python_package_managers.sh deleted file mode 100755 index 681ef68..0000000 --- a/scripts/check_python_package_managers.sh +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env bash -# Check for multiple Python package managers and recommend consolidation to uv -set -euo pipefail - -# Colors for output -RED='\033[0;31m' -YELLOW='\033[1;33m' -GREEN='\033[0;32m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Detect installed package managers -has_pip=false -has_pipx=false -has_uv=false - -if command -v pip >/dev/null 2>&1 || command -v pip3 >/dev/null 2>&1; then - has_pip=true -fi - -if command -v pipx >/dev/null 2>&1; then - has_pipx=true -fi - -if command -v uv >/dev/null 2>&1; then - has_uv=true -fi - -# Count how many are installed -count=0 -managers=() -if $has_pip; then count=$((count + 1)); managers+=("pip"); fi -if $has_pipx; then count=$((count + 1)); managers+=("pipx"); fi -if $has_uv; then count=$((count + 1)); managers+=("uv"); fi - -# If only uv is installed, all good -if $has_uv && ! $has_pip && ! 
$has_pipx; then - echo -e "${GREEN}✓ Only uv is installed - optimal configuration!${NC}" - exit 0 -fi - -# If multiple managers are installed, warn -if [ "$count" -gt 1 ]; then - echo -e "${YELLOW}⚠ Multiple Python package managers detected:${NC}" - for mgr in "${managers[@]}"; do - echo " - $mgr" - done - echo "" - echo -e "${BLUE}Recommendation: Consolidate to 'uv' for better performance and simplicity${NC}" - echo "" - echo "Why uv?" - echo " • 10-100x faster than pip/pipx" - echo " • Replaces both pip and pipx functionality" - echo " • Better dependency resolution" - echo " • Built-in virtual environment management" - echo "" - - if ! $has_uv; then - echo -e "${YELLOW}Install uv:${NC}" - echo " make install-uv" - echo "" - fi - - if $has_pipx; then - echo -e "${YELLOW}Migrate pipx tools to uv:${NC}" - echo " make reconcile-pipx-to-uv" - echo "" - fi - - if $has_pip; then - echo -e "${YELLOW}Migrate pip packages to uv:${NC}" - echo " make reconcile-pip-to-uv" - echo "" - fi - - echo -e "${BLUE}After migration, you can optionally remove old package managers.${NC}" - echo "Note: pip is often bundled with Python and can be left installed for compatibility." - echo "" - - exit 1 -fi - -# If only pip or pipx (but not uv), recommend installing uv -if ! $has_uv; then - echo -e "${YELLOW}⚠ Using legacy Python package manager(s):${NC}" - for mgr in "${managers[@]}"; do - echo " - $mgr" - done - echo "" - echo -e "${BLUE}Recommendation: Install 'uv' for 10-100x better performance${NC}" - echo "" - echo "Install uv:" - echo " make install-uv" - echo "" - exit 1 -fi - -echo -e "${GREEN}✓ Python package manager configuration looks good${NC}" diff --git a/scripts/guide.sh b/scripts/guide.sh index 621e198..7f46c86 100755 --- a/scripts/guide.sh +++ b/scripts/guide.sh @@ -1,281 +1,432 @@ #!/usr/bin/env bash set -euo pipefail +# Avoid aborting on SIGPIPE if any downstream reader closes early trap '' PIPE DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" ROOT="$(cd "$DIR/.." && pwd)" VERBOSE="${VERBOSE:-0}" OFFLINE="${OFFLINE:-0}" -CLI="${PYTHON:-python3}" -# Load catalog query functions -. "$DIR/lib/catalog.sh" +CLI="${PYTHON:-python3}" -ensure_perms() { - chmod +x "$ROOT"/scripts/*.sh 2>/dev/null || true - chmod +x "$ROOT"/scripts/lib/*.sh 2>/dev/null || true +run_audit() { + (cd "$ROOT" && CLI_AUDIT_OFFLINE="$OFFLINE" CLI_AUDIT_LINKS=0 CLI_AUDIT_EMOJI=0 CLI_AUDIT_TIMEOUT_SECONDS="${CLI_AUDIT_TIMEOUT_SECONDS:-3}" "$CLI" cli_audit.py) } -ensure_perms - -# Check cache age -SNAP_FILE="${CLI_AUDIT_SNAPSHOT_FILE:-$ROOT/tools_snapshot.json}" -CACHE_MAX_AGE_HOURS="${CACHE_MAX_AGE_HOURS:-24}" +get_field() { + # usage: get_field TOOL field_index + # fields: 1:state 2:tool 3:installed 4:installed_method 5:latest 6:upstream_method + local tool="$1" idx="$2" + awk -F'[|]' -v t="$tool" -v i="$idx" 'NR>1 && $2==t {gsub(/^ +| +$/,"",$i); print $i; exit}' <<< "$AUDIT_OUTPUT" +} -check_cache_age() { - [ ! 
-f "$SNAP_FILE" ] && { echo "⚠️ Warning: Snapshot cache missing" >&2; return 1; } - local now=$(date +%s) - local snap_time=$(stat -c %Y "$SNAP_FILE" 2>/dev/null || stat -f %m "$SNAP_FILE" 2>/dev/null || echo 0) - local age_hours=$(( (now - snap_time) / 3600 )) - if [ $age_hours -gt $CACHE_MAX_AGE_HOURS ]; then - echo "⚠️ Warning: Snapshot cache is ${age_hours}h old (threshold: ${CACHE_MAX_AGE_HOURS}h)" >&2 - return 2 +prompt_action() { + local title="$1" current="$2" method="$3" latest="$4" planned="$5" tool="$6" + printf "\n" + printf "==> %s\n" "$title" + printf " installed: %s via %s\n" "${current:-}" "${method:-unknown}" + printf " target: %s via %s\n" "${latest:-}" "${planned:-unknown}" + # Preview command to be executed + case "$tool" in + rust) printf " will run: scripts/install_rust.sh reconcile\n" ;; + core) printf " will run: scripts/install_core.sh update\n" ;; + python) printf " will run: scripts/install_python.sh update\n" ;; + pip|pipx|poetry|httpie|semgrep) + printf " will run: uv tool install --force --upgrade %s\n" "$tool" ;; + node) printf " will run: scripts/install_node.sh reconcile\n" ;; + go) printf " will run: scripts/install_go.sh\n" ;; + docker) printf " will run: scripts/install_docker.sh\n" ;; + docker-compose) printf " will run: echo 'Ensure Docker is up to date (Compose v2 plugin)'\n" ;; + aws) printf " will run: scripts/install_aws.sh\n" ;; + kubectl) printf " will run: scripts/install_kubectl.sh update\n" ;; + terraform) printf " will run: scripts/install_terraform.sh\n" ;; + ansible) printf " will run: scripts/install_ansible.sh update\n" ;; + *) printf " will run: scripts/install_core.sh reconcile %s\n" "$tool" ;; + esac + local ans + # Read from the real TTY to avoid broken pipes when stdout/stderr are piped + if [ -t 0 ]; then + read -r -p "Install/update? [y/N] " ans || true + else + # Fallback: read from /dev/tty if available + if [ -r /dev/tty ]; then + read -r -p "Install/update? 
[y/N] " ans /dev/null || true)" if [ "$VERBOSE" = "1" ]; then + # Pretty print the audit output for context printf "%s\n" "$AUDIT_OUTPUT" | "$CLI" smart_column.py -s '|' -t --right 3,5 --header || printf "%s\n" "$AUDIT_OUTPUT" fi -# JSON helper functions +# Helpers to read JSON and render links json_field() { + # usage: json_field TOOL KEY local tool="$1" key="$2" AUDIT_JSON="$AUDIT_JSON" "$CLI" - "$tool" "$key" <<'PY' import sys, json, os -data = os.environ.get("AUDIT_JSON", "").strip() -tool, key = sys.argv[1], sys.argv[2] +data = os.environ.get("AUDIT_JSON", "") +data = data.strip() +tool = sys.argv[1] +key = sys.argv[2] try: - for item in json.loads(data): + arr = json.loads(data) + for item in arr: if item.get("tool") == tool: - print(item.get(key, "")) + v = item.get(key, "") + if v is None: + v = "" + print(v) break -except: pass +except Exception: + print("") PY } json_bool() { + # usage: json_bool TOOL KEY -> prints 1 if true, else empty local tool="$1" key="$2" AUDIT_JSON="$AUDIT_JSON" "$CLI" - "$tool" "$key" <<'PY' import sys, json, os data = os.environ.get("AUDIT_JSON", "").strip() -tool, key = sys.argv[1], sys.argv[2] +tool = sys.argv[1] +key = sys.argv[2] try: - for item in json.loads(data): - if item.get("tool") == tool and item.get(key): - print("1") + arr = json.loads(data) + for item in arr: + if item.get("tool") == tool: + v = item.get(key) + if isinstance(v, bool) and v: + print("1") break -except: pass +except Exception: + pass PY } osc8() { local url="$1"; shift local text="$*" - [ -n "$url" ] && printf '\e]8;;%s\e\\%s\e]8;;\e\\' "$url" "$text" || printf '%s' "$text" + if [ -n "$url" ]; then + printf '\e]8;;%s\e\\%s\e]8;;\e\\' "$url" "$text" + else + printf '%s' "$text" + fi } -# Generic tool processing function - reads ALL metadata from catalog -process_tool() { - local tool="$1" - - # Get tool data from audit JSON - local icon="$(json_field "$tool" state_icon)" - local installed="$(json_field "$tool" installed)" - local latest="$(json_field "$tool" latest_upstream)" - local url="$(json_field "$tool" latest_url)" - local method="$(json_field "$tool" installed_method)" - local is_up_to_date="$(json_bool "$tool" is_up_to_date)" - - # Get metadata from catalog (with defaults) - local display="$(catalog_get_guide_property "$tool" display_name "$tool")" - local install_action="$(catalog_get_guide_property "$tool" install_action "")" - local description="$(catalog_get_property "$tool" description)" - local homepage="$(catalog_get_property "$tool" homepage)" - - # Check if up-to-date - if [ -n "$is_up_to_date" ] && [ -n "$installed" ]; then - printf "\n==> %s %s\n" "$icon" "$display" - printf " installed: %s via %s\n" "$installed" "$method" - printf " target: %s (same)\n" "$(osc8 "$url" "$latest")" - printf " up-to-date; skipping.\n" - return 0 +# Rust first (for cargo-based tools) - use JSON for accuracy +RUST_ICON="$(json_field rust state_icon)" +RUST_INSTALLED="$(json_field rust installed)" +RUST_METHOD="$(json_field rust installed_method)" +RUST_LATEST="$(json_field rust latest_upstream)" +RUST_URL="$(json_field rust latest_url)" +if [ -n "$(json_bool rust is_up_to_date)" ]; then + printf "\n" + printf "==> %s %s\n" "$RUST_ICON" "Rust (cargo)" + printf " installed: %s via %s\n" "${RUST_INSTALLED:-}" "${RUST_METHOD:-unknown}" + printf " target: %s via %s\n" "$(osc8 "$RUST_URL" "${RUST_LATEST:-}")" "$(json_field rust upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${RUST_ICON} Rust (cargo)" "$RUST_INSTALLED" "$RUST_METHOD" "$(osc8 
"$RUST_URL" "$RUST_LATEST")" "$(json_field rust upstream_method)" rust; then + "$ROOT"/scripts/install_rust.sh reconcile || true fi +fi - # Prompt for installation/update - printf "\n==> %s %s\n" "$icon" "$display" - [ -n "$description" ] && printf " %s\n" "$description" - [ -n "$homepage" ] && printf " Homepage: %s\n" "$(osc8 "$homepage" "$homepage")" - printf " installed: %s via %s\n" "${installed:-}" "${method:-unknown}" - printf " target: %s\n" "$(osc8 "$url" "${latest:-}")" +# UV (ensure official binary) + Python stack (before Node/core tools) +UV_ICON="$(json_field uv state_icon)" +UV_CURR="$(json_field uv installed)" +UV_LATEST="$(json_field uv latest_upstream)" +UV_URL="$(json_field uv latest_url)" +if [ -n "$(json_bool uv is_up_to_date)" ] && [ -n "$UV_CURR" ]; then + printf "\n" + printf "==> %s %s\n" "$UV_ICON" "uv" + printf " installed: %s via %s\n" "${UV_CURR:-}" "$(json_field uv installed_method)" + printf " target: %s via %s\n" "$(osc8 "$UV_URL" "${UV_LATEST:-}")" "$(json_field uv upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${UV_ICON} uv" "$UV_CURR" "$(json_field uv installed_method)" "$(osc8 "$UV_URL" "$UV_LATEST")" "$(json_field uv upstream_method)" core; then + "$ROOT"/scripts/install_uv.sh reconcile || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" + fi +fi - # Build install command from catalog metadata - local install_cmd="install_tool.sh $tool" - if [ -n "$install_action" ]; then - install_cmd="install_tool.sh $tool $install_action" - elif [ -n "$installed" ]; then - # Tool is already installed, use "update" action - install_cmd="install_tool.sh $tool update" +# Python stack (after ensuring uv) +PY_ICON="$(json_field python state_icon)" +PY_CURR="$(json_field python installed)" +PY_LATEST="$(json_field python latest_upstream)" +PY_URL="$(json_field python latest_url)" +if [ -n "$(json_bool python is_up_to_date)" ]; then + printf "\n" + printf "==> %s %s\n" "$PY_ICON" "Python stack" + printf " installed: %s via %s\n" "${PY_CURR:-}" "$(json_field python installed_method)" + printf " target: %s via %s\n" "$(osc8 "$PY_URL" "${PY_LATEST:-}")" "$(json_field python upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${PY_ICON} Python stack" "$PY_CURR" "$(json_field python installed_method)" "$(osc8 "$PY_URL" "$PY_LATEST")" "$(json_field python upstream_method)" python; then + UV_PYTHON_SPEC="$PY_LATEST" "$ROOT"/scripts/install_python.sh update || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" fi - printf " will run: scripts/%s\n" "$install_cmd" +fi - # Prompt with options explained - printf " Options:\n" - printf " y = Install/upgrade now\n" - printf " N = Skip (ask again next time)\n" - if [ -n "$installed" ]; then - printf " s = Skip version %s (ask again if newer available)\n" "$latest" - printf " p = Pin to %s (don't ask for upgrades)\n" "$installed" - else - printf " s = Skip version %s (ask again if newer available)\n" "$latest" - printf " p = Never install (permanently skip this tool)\n" +# Node stack (Node + package managers) +NODE_ICON="$(json_field node state_icon)" +NODE_CURR="$(json_field node installed)" +NODE_LATEST="$(json_field node latest_upstream)" +NODE_URL="$(json_field node latest_url)" +# Treat stack as up-to-date only if node, npm, pnpm, and yarn are all up-to-date +NODE_ALL_OK="" +if [ -n "$(json_bool node is_up_to_date)" ] && [ -n "$(json_bool npm is_up_to_date)" ] && [ -n "$(json_bool pnpm 
is_up_to_date)" ] && [ -n "$(json_bool yarn is_up_to_date)" ]; then + NODE_ALL_OK="1" +fi +if [ -n "$NODE_ALL_OK" ]; then + printf "\n"; printf "==> %s %s\n" "$NODE_ICON" "Node.js stack"; printf " installed: %s via %s\n" "${NODE_CURR:-}" "$(json_field node installed_method)"; printf " target: %s via %s\n" "$(osc8 "$NODE_URL" "${NODE_LATEST:-}")" "$(json_field node upstream_method)"; printf " up-to-date; skipping.\n" +else + if prompt_action "${NODE_ICON} Node.js stack" "$NODE_CURR" "$(json_field node installed_method)" "$(osc8 "$NODE_URL" "$NODE_LATEST")" "$(json_field node upstream_method)" node; then + "$ROOT"/scripts/install_node.sh reconcile || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" fi +fi - local prompt_text="Install/update? [y/N/s/p] " +# Offer explicit package manager updates only when not up-to-date +if [ -z "$(json_bool npm is_up_to_date)" ]; then + if prompt_action "$(json_field npm state_icon) npm (global)" "$(json_field npm installed)" "$(json_field npm installed_method)" "$(osc8 "$(json_field npm latest_url)" "$(json_field npm latest_upstream)")" "$(json_field npm upstream_method)" npm; then + corepack enable >/dev/null 2>&1 || true + npm install -g npm@latest >/dev/null 2>&1 || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" + fi +fi +if [ -z "$(json_bool pnpm is_up_to_date)" ]; then + if prompt_action "$(json_field pnpm state_icon) pnpm" "$(json_field pnpm installed)" "$(json_field pnpm installed_method)" "$(osc8 "$(json_field pnpm latest_url)" "$(json_field pnpm latest_upstream)")" "$(json_field pnpm upstream_method)" pnpm; then + corepack prepare pnpm@latest --activate >/dev/null 2>&1 || npm install -g pnpm@latest >/dev/null 2>&1 || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" + fi +fi +if [ -z "$(json_bool yarn is_up_to_date)" ]; then + if prompt_action "$(json_field yarn state_icon) yarn" "$(json_field yarn installed)" "$(json_field yarn installed_method)" "$(osc8 "$(json_field yarn latest_url)" "$(json_field yarn latest_upstream)")" "$(json_field yarn upstream_method)" yarn; then + # Prefer stable tag for Yarn (Berry) + corepack prepare yarn@stable --activate >/dev/null 2>&1 || npm install -g yarn@latest >/dev/null 2>&1 || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" + fi +fi - local ans="" - if [ -t 0 ]; then - read -r -p "$prompt_text" ans || true - elif [ -r /dev/tty ]; then - read -r -p "$prompt_text" ans %s %s\n" "$GO_ICON" "Go toolchain"; printf " installed: %s via %s\n" "$GO_CURR" "$GO_METHOD"; printf " target: %s via %s\n" "$(osc8 "$GO_URL" "${GO_LATE:-}")" "brew"; printf " up-to-date; skipping.\n" +else + if prompt_action "${GO_ICON} Go toolchain" "$GO_CURR" "$GO_METHOD" "$(osc8 "$GO_URL" "$GO_LATE")" "brew" go; then + "$ROOT"/scripts/install_go.sh || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" fi +fi - case "$ans" in - [Yy]) - # Handle tool-specific version environment variables - local upgrade_success=0 - if [ "$tool" = "python" ]; then - UV_PYTHON_SPEC="$latest" "$ROOT"/scripts/$install_cmd && upgrade_success=1 || true - elif [ "$tool" = "ruby" ]; then - RUBY_VERSION="$latest" "$ROOT"/scripts/$install_cmd && upgrade_success=1 || true - else - "$ROOT"/scripts/$install_cmd && upgrade_success=1 || true +# Prefer uv for Python CLI tools: offer migration from pipx/user when detected +if command -v uv >/dev/null 2>&1 || 
"$ROOT"/scripts/install_uv.sh reconcile >/dev/null 2>&1; then + # Include all Python console CLIs we track (expandable). ansible is handled separately below. + for t in pip pipx poetry httpie pre-commit bandit semgrep black isort flake8; do + METHOD="$(json_field "$t" installed_method)" + if [ -n "$METHOD" ] && [ -z "$(json_bool "$t" is_up_to_date)" ]; then + : # keep normal outdated prompts elsewhere + fi + # Migrate only when pipx is the current method + if [ -n "$METHOD" ] && printf "%s" "$METHOD" | grep -Eqi "pipx|pip/user|pip"; then + ICON="$(json_field "$t" state_icon)" + CURR="$(json_field "$t" installed)" + LATE="$(json_field "$t" latest_upstream)" + URL="$(json_field "$t" latest_url)" + TITLE="$ICON $t (migrate to uv tool)" + if prompt_action "$TITLE" "$CURR" "$METHOD" "$(osc8 "$URL" "$LATE")" "uv tool" "$t"; then + # Install via uv tool, then remove pipx version to avoid shim conflicts + uv tool install --force --upgrade "$t" >/dev/null 2>&1 || true + if command -v pipx >/dev/null 2>&1; then pipx uninstall "$t" >/dev/null 2>&1 || true; fi + # Remove user pip scripts that might shadow uv tools + rm -f "$HOME/.local/bin/$t" >/dev/null 2>&1 || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" fi + fi + done +fi - # Re-audit - AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 CLI_AUDIT_RENDER=1 "$CLI" cli_audit.py || true)" +# Core tools (fd, fzf, rg, jq, yq, bat, delta, just, and npm/cargo/go tools) +CORE_TOOLS=(fd fzf ripgrep jq yq bat delta just curlie dive trivy gitleaks git-absorb git-branchless eslint prettier shfmt shellcheck fx glab ctags entr parallel ast-grep direnv git gh) +for t in "${CORE_TOOLS[@]}"; do + ICON="$(json_field "$t" state_icon)" + CURR="$(json_field "$t" installed)" + LATE="$(json_field "$t" latest_upstream)" + URL="$(json_field "$t" latest_url)" + if [ -n "$(json_bool "$t" is_up_to_date)" ]; then + printf "\n"; printf "==> %s %s\n" "$ICON" "$t"; printf " installed: %s via %s\n" "${CURR:-}" "$(json_field "$t" installed_method)"; printf " target: %s via %s\n" "$(osc8 "$URL" "${LATE:-}")" "$(json_field "$t" upstream_method)"; printf " up-to-date; skipping.\n"; continue + fi + if prompt_action "${ICON} $t" "$CURR" "$(json_field "$t" installed_method)" "$(osc8 "$URL" "$LATE")" "$(json_field "$t" upstream_method)" "$t"; then + "$ROOT"/scripts/install_core.sh reconcile "$t" || true + # Re-audit the single tool to reflect updated status inline + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" + fi +done - # Check if upgrade succeeded by comparing versions - local new_installed="$(json_field "$tool" installed)" - if [ "$upgrade_success" = "0" ] || [ "$new_installed" = "$installed" ]; then - # Upgrade failed or version didn't change - printf "\n ⚠️ Upgrade did not succeed (version unchanged)\n" - prompt_pin_version "$tool" "$installed" - fi - ;; - [Ss]) - # Skip this specific version - printf " Skipping version %s (will prompt again if newer version available)\n" "$latest" - "$ROOT"/scripts/pin_version.sh "$tool" "$latest" || true - ;; - [Pp]) - if [ -n "$installed" ]; then - # Pin to current version - printf " Pinning to current version %s\n" "$installed" - "$ROOT"/scripts/pin_version.sh "$tool" "$installed" || true +# Python utilities (managed via pipx): prompt individually if outdated/missing +for t in pip pipx poetry httpie semgrep black; do + if [ -z "$(json_bool "$t" is_up_to_date)" ]; then + ICON="$(json_field "$t" state_icon)" + CURR="$(json_field "$t" installed)" + 
LATE="$(json_field "$t" latest_upstream)" + URL="$(json_field "$t" latest_url)" + if prompt_action "${ICON} $t" "$CURR" "$(json_field "$t" installed_method)" "$(osc8 "$URL" "$LATE")" "$(json_field "$t" upstream_method)" "$t"; then + if command -v uv >/dev/null 2>&1; then + uv tool install --force --upgrade "$t" >/dev/null 2>&1 || true else - # Never install - pin to "never" - printf " Marking as 'never install' (permanently skip this tool)\n" - "$ROOT"/scripts/pin_version.sh "$tool" "never" || true + "$ROOT"/scripts/install_uv.sh reconcile || true + if command -v uv >/dev/null 2>&1; then + uv tool install --force --upgrade "$t" >/dev/null 2>&1 || true + else + if command -v pipx >/dev/null 2>&1; then + pipx upgrade "$t" >/dev/null 2>&1 || pipx install "$t" >/dev/null 2>&1 || true + else + if [ "$t" = pip ]; then + python3 -m pip install --user -U pip >/dev/null 2>&1 || true + elif [ "$t" = pipx ]; then + python3 -m pip install --user -U pipx >/dev/null 2>&1 || true + else + python3 -m pip install --user -U "$t" >/dev/null 2>&1 || true + fi + fi + fi fi - ;; - *) - # User declined (N or empty) - ;; - esac -} - -# Prompt user to pin version when upgrade is declined or fails -prompt_pin_version() { - local tool="$1" - local current_version="$2" - - [ -z "$current_version" ] && current_version="" - - printf " Pin to version %s to stop upgrade prompts? [y/N] " "$current_version" - - local pin_ans="" - if [ -t 0 ]; then - read -r pin_ans || true - elif [ -r /dev/tty ]; then - read -r pin_ans /dev/null || true)" + fi fi +done - if [[ "$pin_ans" =~ ^[Yy]$ ]]; then - "$ROOT"/scripts/pin_version.sh "$tool" "$current_version" || true +# Docker +DK_ICON="$(json_field docker state_icon)" +DK_CURR="$(json_field docker installed)" +DK_LATEST="$(json_field docker latest_upstream)" +DK_URL="$(json_field docker latest_url)" +if [ -n "$(json_bool docker is_up_to_date)" ]; then + printf "\n" + printf "==> %s %s\n" "$DK_ICON" "Docker Engine" + printf " installed: %s via %s\n" "${DK_CURR:-}" "$(json_field docker installed_method)" + printf " target: %s via %s\n" "$(osc8 "$DK_URL" "${DK_LATEST:-}")" "$(json_field docker upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${DK_ICON} Docker Engine" "$DK_CURR" "$(json_field docker installed_method)" "$(osc8 "$DK_URL" "$DK_LATEST")" "$(json_field docker upstream_method)" docker; then + "$ROOT"/scripts/install_docker.sh || true fi -} - -# Build tool list from audit output with catalog entries -TOOLS_TO_PROCESS=() -while read -r line; do - [[ "$line" =~ ^state ]] && continue - tool_name="$(echo "$line" | awk -F'|' '{gsub(/^ +| +$/,"",$2); print $2}')" - [ -z "$tool_name" ] && continue - - # Only process tools with catalog entries - if catalog_has_tool "$tool_name"; then - # Check if tool is pinned to a version >= latest available or "never" - pinned_version="$(catalog_get_property "$tool_name" pinned_version)" - - # Skip if pinned to "never" (permanently skip installation) - if [ "$pinned_version" = "never" ]; then - continue - fi +fi - latest_version="$(json_field "$tool_name" latest_version)" +# Docker Compose (plugin) +DC_ICON="$(json_field docker-compose state_icon)" +DC_CURR="$(json_field docker-compose installed)" +DC_LATEST="$(json_field docker-compose latest_upstream)" +DC_URL="$(json_field docker-compose latest_url)" +if [ -n "$(json_bool docker-compose is_up_to_date)" ]; then + printf "\n" + printf "==> %s %s\n" "$DC_ICON" "Docker Compose" + printf " installed: %s via %s\n" "${DC_CURR:-}" "$(json_field docker-compose 
installed_method)" + printf " target: %s via %s\n" "$(osc8 "$DC_URL" "${DC_LATEST:-}")" "$(json_field docker-compose upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${DC_ICON} Docker Compose" "$DC_CURR" "$(json_field docker-compose installed_method)" "$(osc8 "$DC_URL" "$DC_LATEST")" "$(json_field docker-compose upstream_method)" docker-compose; then + echo "Note: Docker Compose v2 is bundled as Docker plugin; ensure Docker is up to date." + fi +fi - if [ -n "$pinned_version" ] && [ -n "$latest_version" ]; then - # Compare versions: skip if latest <= pinned - # Simple numeric comparison for semantic versions - if "$CLI" - "$pinned_version" "$latest_version" <<'PY' -import sys -try: - pinned, latest = sys.argv[1], sys.argv[2] - # Strip 'v' prefix if present - pinned = pinned.lstrip('v') - latest = latest.lstrip('v') - # Split into parts and compare - p_parts = [int(x) for x in pinned.split('.')[:3]] - l_parts = [int(x) for x in latest.split('.')[:3]] - # Pad with zeros if needed - while len(p_parts) < 3: p_parts.append(0) - while len(l_parts) < 3: l_parts.append(0) - # Exit 0 (success) if latest <= pinned (should skip) - sys.exit(0 if tuple(l_parts) <= tuple(p_parts) else 1) -except Exception: - # On error, don't skip (exit 1) - sys.exit(1) -PY - then - # Skip this tool - pinned version is >= latest available - continue - fi - fi +# AWS CLI (ask if missing or outdated) +AWS_ICON="$(json_field aws state_icon)" +AWS_CURR="$(json_field aws installed)" +AWS_LATEST="$(json_field aws latest_upstream)" +AWS_URL="$(json_field aws latest_url)" +if [ -n "$(json_bool aws is_up_to_date)" ] && [ -n "$AWS_CURR" ]; then + printf "\n" + printf "==> %s %s\n" "$AWS_ICON" "AWS CLI" + printf " installed: %s via %s\n" "${AWS_CURR:-}" "$(json_field aws installed_method)" + printf " target: %s via %s\n" "$(osc8 "$AWS_URL" "${AWS_LATEST:-}")" "$(json_field aws upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${AWS_ICON} AWS CLI" "$AWS_CURR" "$(json_field aws installed_method)" "$(osc8 "$AWS_URL" "$AWS_LATEST")" "$(json_field aws upstream_method)" aws; then + "$ROOT"/scripts/install_aws.sh || true + fi +fi - TOOLS_TO_PROCESS+=("$tool_name") +# kubectl +K8S_ICON="$(json_field kubectl state_icon)" +K8S_CURR="$(json_field kubectl installed)" +K8S_LATEST="$(json_field kubectl latest_upstream)" +K8S_URL="$(json_field kubectl latest_url)" +if [ -n "$(json_bool kubectl is_up_to_date)" ]; then + printf "\n" + printf "==> %s %s\n" "$K8S_ICON" "kubectl" + printf " installed: %s via %s\n" "${K8S_CURR:-}" "$(json_field kubectl installed_method)" + printf " target: %s via %s\n" "$(osc8 "$K8S_URL" "${K8S_LATEST:-}")" "$(json_field kubectl upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${K8S_ICON} kubectl" "$K8S_CURR" "$(json_field kubectl installed_method)" "$(osc8 "$K8S_URL" "$K8S_LATEST")" "$(json_field kubectl upstream_method)" kubectl; then + "$ROOT"/scripts/install_kubectl.sh update || true fi -done <<< "$AUDIT_OUTPUT" +fi -# Sort tools by processing order from catalog -declare -A TOOL_LIST -for tool in "${TOOLS_TO_PROCESS[@]}"; do - order="$(catalog_get_guide_property "$tool" order "1000")" - TOOL_LIST[$order]="${TOOL_LIST[$order]:-} $tool" -done +# Terraform +TF_ICON="$(json_field terraform state_icon)" +TF_CURR="$(json_field terraform installed)" +TF_LATEST="$(json_field terraform latest_upstream)" +TF_URL="$(json_field terraform latest_url)" +if [ -n "$(json_bool terraform is_up_to_date)" ]; then + printf "\n" + printf "==> %s 
%s\n" "$TF_ICON" "Terraform" + printf " installed: %s via %s\n" "${TF_CURR:-}" "$(json_field terraform installed_method)" + printf " target: %s via %s\n" "$(osc8 "$TF_URL" "${TF_LATEST:-}")" "$(json_field terraform upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${TF_ICON} Terraform" "$TF_CURR" "$(json_field terraform installed_method)" "$(osc8 "$TF_URL" "$TF_LATEST")" "$(json_field terraform upstream_method)" terraform; then + "$ROOT"/scripts/install_terraform.sh || true + fi +fi -# Process tools in order -for order in $(printf '%s\n' "${!TOOL_LIST[@]}" | sort -n); do - for tool in ${TOOL_LIST[$order]}; do - process_tool "$tool" - done -done +# Ansible +ANS_ICON="$(json_field ansible state_icon)" +ANS_CURR="$(json_field ansible installed)" +ANS_LATEST="$(json_field ansible latest_upstream)" +ANS_URL="$(json_field ansible latest_url)" +if [ -n "$(json_bool ansible is_up_to_date)" ]; then + printf "\n" + printf "==> %s %s\n" "$ANS_ICON" "Ansible" + printf " installed: %s via %s\n" "${ANS_CURR:-}" "$(json_field ansible installed_method)" + printf " target: %s via %s\n" "$(osc8 "$ANS_URL" "${ANS_LATEST:-}")" "$(json_field ansible upstream_method)" + printf " up-to-date; skipping.\n" +else + if prompt_action "${ANS_ICON} Ansible" "$ANS_CURR" "$(json_field ansible installed_method)" "$(osc8 "$ANS_URL" "$ANS_LATEST")" "$(json_field ansible upstream_method)" ansible; then + "$ROOT"/scripts/install_ansible.sh update || true + AUDIT_JSON="$(cd "$ROOT" && CLI_AUDIT_JSON=1 "$CLI" cli_audit.py 2>/dev/null || true)" + fi +fi echo echo "All done. Re-run: make audit" + + + diff --git a/scripts/install_aws.sh b/scripts/install_aws.sh new file mode 100755 index 0000000..9b563a8 --- /dev/null +++ b/scripts/install_aws.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -euo pipefail + +TOOL="aws" +before="$(command -v aws >/dev/null 2>&1 && aws --version || true)" +TMP="$(mktemp -d)" +cd "$TMP" +curl -fsSL "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o awscliv2.zip +unzip -q awscliv2.zip +sudo ./aws/install --update || true +after="$(command -v aws >/dev/null 2>&1 && aws --version || true)" +path="$(command -v aws 2>/dev/null || true)" +printf "[%s] before: %s\n" "$TOOL" "${before:-}" +printf "[%s] after: %s\n" "$TOOL" "${after:-}" +if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi + + diff --git a/scripts/install_core.sh b/scripts/install_core.sh new file mode 100755 index 0000000..8e87e62 --- /dev/null +++ b/scripts/install_core.sh @@ -0,0 +1,887 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Install/update/uninstall simple, language-agnostic tools. + +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +. "$DIR/lib/common.sh" + +ACTION="${1:-install}" +ONLY_TOOL="${2:-}" + +OS="$(uname -s | tr '[:upper:]' '[:lower:]')" +ARCH="$(uname -m)" +PREFIX="${PREFIX:-$HOME/.local}" +# Prefer /usr/local/bin to override system binaries when possible, but fall back +# to user bin when we cannot write and passwordless sudo isn't available. 
+if [ -d "/usr/local/bin" ] && [ -w "/usr/local/bin" ]; then + BIN_DIR="/usr/local/bin" +elif [ -d "/usr/local/bin" ] && command -v sudo >/dev/null 2>&1 && sudo -n true 2>/dev/null; then + BIN_DIR="/usr/local/bin" +else + BIN_DIR="$PREFIX/bin" +fi +mkdir -p "$BIN_DIR" 2>/dev/null || true +# Installer helper (use sudo only if BIN_DIR is /usr/local/bin and passwordless sudo available) +if [ "$BIN_DIR" = "/usr/local/bin" ]; then + if [ -w "$BIN_DIR" ]; then INSTALL="install -m 0755"; else INSTALL="sudo install -m 0755"; fi + if [ -w "$BIN_DIR" ]; then RM="rm -f"; else RM="sudo rm -f"; fi +else + INSTALL="install -m 0755" + RM="rm -f" +fi + +get_version() { + local t="$1" cmd vers + case "$t" in + git) cmd="$(command -v git || true)" ;; + gh) cmd="$(command -v gh || true)" ;; + fd) cmd="$(command -v fd || command -v fdfind || true)" ;; + ripgrep) cmd="$(command -v rg || true)" ;; + jq) cmd="$(command -v jq || true)" ;; + yq) cmd="$(command -v yq || true)" ;; + bat) cmd="$(command -v bat || command -v batcat || true)" ;; + delta) cmd="$(command -v delta || true)" ;; + just) cmd="$(command -v just || true)" ;; + fzf) cmd="$(command -v fzf || true)" ;; + curlie) cmd="$(command -v curlie || true)" ;; + dive) cmd="$(command -v dive || true)" ;; + trivy) cmd="$(command -v trivy || true)" ;; + gitleaks) cmd="$(command -v gitleaks || true)" ;; + git-absorb) cmd="$(command -v git-absorb || true)" ;; + git-branchless) cmd="$(command -v git-branchless || true)" ;; + eslint) cmd="$(command -v eslint || true)" ;; + prettier) cmd="$(command -v prettier || true)" ;; + shfmt) cmd="$(command -v shfmt || true)" ;; + shellcheck) cmd="$(command -v shellcheck || true)" ;; + fx) cmd="$(command -v fx || true)" ;; + entr) cmd="$(command -v entr || true)" ;; + glab) cmd="$(command -v glab || true)" ;; + *) cmd="" ;; + esac + if [ -z "$cmd" ]; then return 0; fi + case "$t" in + git) + # Emit semantic version only (e.g., 2.51.0) + "$cmd" --version 2>/dev/null | awk 'NR==1{print $3}' | head -n1; return 0 ;; + gh) + # gh --version first line includes "gh version X.Y.Z (...)" + "$cmd" --version 2>/dev/null | awk 'NR==1 && $1=="gh" && $2=="version"{print $3}'; return 0 ;; + fx) + # Prefer reading version from adjacent package.json (Node variant), else fallback to CLI flags + local real dir pkg + real="$(readlink -f "$cmd" 2>/dev/null || echo "$cmd")" + dir="$(dirname "$real")" + pkg="$dir/package.json" + if [ -f "$pkg" ] && command -v jq >/dev/null 2>&1; then + jq -r .version "$pkg" 2>/dev/null | head -n1; return 0 + fi + "$cmd" -v 2>/dev/null | head -n1 && return 0 + "$cmd" --version 2>/dev/null | head -n1 && return 0 + "$cmd" version 2>/dev/null | head -n1 && return 0 + return 0 ;; + curlie) + "$cmd" version 2>/dev/null | head -n1; return 0 ;; + shellcheck) + # ShellCheck prints multi-line version output; extract numeric from the 'version:' line + if out="$($cmd -V 2>/dev/null)"; then + printf '%s\n' "$out" | awk -F': ' '/^version:/ {print $2; exit}'; return 0 + fi + # Fallback: scan --version output for a semantic version number + "$cmd" --version 2>/dev/null | grep -Eo '[0-9]+(\.[0-9]+)+' | head -n1; return 0 ;; + entr) + # entr prints version as "release: X.Y" on usage output; no stable --version flag + "$cmd" 2>&1 | awk '/^release:/ {print $2; exit}'; return 0 ;; + *) + "$cmd" --version 2>/dev/null | head -n1; return 0 ;; + esac +} + +go_bin_path() { local p; p="$(go env GOBIN 2>/dev/null || true)"; if [ -z "$p" ]; then p="$(go env GOPATH 2>/dev/null || true)/bin"; fi; printf '%s' "$p"; } + +install_ctags() { + # 
Prefer upstream universal-ctags built from source and packaged via checkinstall + # to ensure clean uninstall and latest features. Fallback to package managers. + if have brew; then brew install universal-ctags || brew install ctags; return; fi + + # If using apt-based systems, attempt source build with checkinstall + if have apt-get; then + local target_ver pkg_ver bin ctags_ver tmp builddir prefix + target_ver="${CTAGS_VERSION:-${CTAGS_VERSION_PIN:-}}" + if [ -z "$target_ver" ]; then + # Derive from latest_versions.json if present; otherwise use fallback v6.2.0 + if [ -f "$DIR/../latest_versions.json" ] && command -v jq >/dev/null 2>&1; then + target_ver="$(jq -r '.ctags' "$DIR/../latest_versions.json" 2>/dev/null | sed 's/^v//')" + fi + [ -n "$target_ver" ] || target_ver="6.2.0" + fi + + bin="$(command -v ctags || true)" + ctags_ver="$(ctags --version 2>/dev/null | sed -n 's/^Universal Ctags \([0-9.][0-9.]*\).*/\1/p')" + if [ -n "$ctags_ver" ] && [ "$ctags_ver" = "$target_ver" ] && [ "$bin" = "/usr/local/bin/ctags" ]; then + # Already at desired version under /usr/local; ensure alternatives are set and return + sudo update-alternatives --install /usr/bin/ctags ctags /usr/local/bin/ctags 100 >/dev/null 2>&1 || true + sudo update-alternatives --set ctags /usr/local/bin/ctags >/dev/null 2>&1 || true + return + fi + + sudo apt-get update || true + # Minimal build deps + sudo apt-get install -y \ + build-essential autoconf automake libtool pkg-config \ + git libxml2-dev libyaml-dev libjansson-dev libpcre2-dev libssl-dev \ + checkinstall jq || true + + tmp="$(mktemp -d)" + builddir="$tmp/ctags" + prefix="/usr/local" + if [ ! -d "$builddir" ]; then + git clone https://github.com/universal-ctags/ctags.git "$builddir" >/dev/null 2>&1 || true + fi + if [ -d "$builddir" ]; then + ( + cd "$builddir" && + git fetch --tags >/dev/null 2>&1 || true && + git checkout "v${target_ver}" >/dev/null 2>&1 || true && + ./autogen.sh >/dev/null 2>&1 || true && + ./configure --prefix="$prefix" >/dev/null 2>&1 || true && + make -j"$(nproc)" >/dev/null 2>&1 || true + ) + if [ -f "$builddir/ctags" ] || [ -f "$builddir/ctags.exe" ] || [ -x "$builddir/ctags" ]; then + # Package and install via checkinstall for clean uninstall + ( + cd "$builddir" && + sudo checkinstall -y \ + --pkgname=universal-ctags \ + --pkgversion="${target_ver}" \ + --provides=ctags \ + --backup=no \ + --install=yes \ + --fstrans=no \ + make install >/dev/null 2>&1 || true + ) + # Register with update-alternatives + sudo update-alternatives --install /usr/bin/ctags ctags "$prefix/bin/ctags" 100 >/dev/null 2>&1 || true + sudo update-alternatives --set ctags "$prefix/bin/ctags" >/dev/null 2>&1 || true + # Validate; if not present, fall back to apt package + if command -v ctags >/dev/null 2>&1 && ctags --version >/dev/null 2>&1; then + return + fi + fi + fi + + # Fallback to distro packages if source build path failed + (sudo apt-get install -y universal-ctags || sudo apt-get install -y exuberant-ctags ctags) && return + fi +} + +install_entr() { + if have brew; then brew install entr; return; fi + # Try to build latest from GitHub to ensure up-to-date version + local tmp tag url srcdir cores rel html file prefix_dir SUDO_CMD + tmp="$(mktemp -d)" + # 1) Try to discover latest tag via redirect + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/eradman/entr/releases/latest | awk -F'/' '{print $NF}')" + # 2) Prefer an explicit release asset like entr-<version>.tar.gz (parse HTML) to avoid tag name quirks + html="$(curl -fsSL 
https://github.com/eradman/entr/releases/latest 2>/dev/null || true)" + if [ -n "$html" ]; then + rel="$(printf '%s' "$html" | grep -Eo '/eradman/entr/releases/download/[^" ]+/entr-[0-9.]+\.tar\.gz' | head -n1)" + fi + if [ -n "$rel" ]; then + url="https://github.com${rel}" + elif [ -n "$tag" ]; then + # Fallback to tag tarball + url="https://github.com/eradman/entr/archive/refs/tags/${tag}.tar.gz" + else + url="" + fi + if [ -n "$url" ] && curl -fsSL "$url" -o "$tmp/entr.tgz"; then + if tar -C "$tmp" -xzf "$tmp/entr.tgz" >/dev/null 2>&1; then + # Extracted dir could be entr-<version> or entr-<tag> + srcdir="$(find "$tmp" -maxdepth 1 -type d -name 'entr-*' | head -n1)" + if [ -z "$srcdir" ]; then srcdir="$(find "$tmp" -maxdepth 2 -type d -name 'entr*' | head -n1)"; fi + if [ -n "$srcdir" ] && [ -d "$srcdir" ]; then + # Ensure basic build deps + if have apt-get; then + sudo apt-get update >/dev/null 2>&1 || true + sudo apt-get install -y build-essential pkg-config libbsd-dev >/dev/null 2>&1 || true + fi + cores="$(getconf _NPROCESSORS_ONLN 2>/dev/null || echo 2)" + ( + cd "$srcdir" && ( [ -x ./configure ] && sh ./configure || true ) && make -j"$cores" + ) >/dev/null 2>&1 || true + # Try make install when possible + prefix_dir="$(dirname "$BIN_DIR")" + if [ "$BIN_DIR" = "/usr/local/bin" ] && [ ! -w "$BIN_DIR" ]; then SUDO_CMD="sudo"; else SUDO_CMD=""; fi + ( + cd "$srcdir" && ${SUDO_CMD} make install PREFIX="$prefix_dir" + ) >/dev/null 2>&1 || true + # If not installed by make, locate binary and install manually + file="$(find "$srcdir" -type f -name entr -perm -111 | head -n1)" + if [ -n "$file" ] && [ -f "$file" ]; then $INSTALL "$file" "$BIN_DIR/entr" && return; fi + # If installed into prefix/bin already, accept success + if command -v entr >/dev/null 2>&1; then return; fi + fi + fi + fi + # Fallback to system package if build path failed + if have apt-get; then sudo apt-get update && sudo apt-get install -y entr; return; fi +} + +install_parallel() { + # Prefer upstream GNU FTP to get latest/pinned release; fall back to package manager + if have brew; then brew install parallel; return; fi + local tmp url name ext srcdir tarflag ver prefix_dir SUDO_CMD + tmp="$(mktemp -d)" + ver="${PARALLEL_VERSION:-}" + # Discover latest version if not pinned via PARALLEL_VERSION + if [ -z "$ver" ]; then + if curl -fsSL "https://ftp.gnu.org/gnu/parallel/" -o "$tmp/index.html"; then + name="$(grep -Eo 'parallel-[0-9]{8}\.tar\.(bz2|xz|gz)' "$tmp/index.html" | sort -V | tail -n1)" || true + if [ -n "$name" ]; then + ver="$(printf '%s' "$name" | sed -E 's/parallel-([0-9]{8})\.tar\.(bz2|xz|gz)/\1/')" + fi + fi + fi + # Try to download from GNU ftpmirror first + if [ -n "$ver" ]; then + for ext in tar.bz2 tar.xz tar.gz; do + name="parallel-${ver}.${ext}" + url="https://ftpmirror.gnu.org/parallel/${name}" + if curl -fsSL "$url" -o "$tmp/${name}"; then + case "$ext" in + tar.bz2) tarflag="-xjf" ;; + tar.xz) tarflag="-xJf" ;; + tar.gz) tarflag="-xzf" ;; + *) tarflag="-xjf" ;; + esac + if tar -C "$tmp" $tarflag "$tmp/${name}" >/dev/null 2>&1; then + srcdir="$(find "$tmp" -maxdepth 1 -type d -name "parallel-*" | head -n1)" + if [ -n "$srcdir" ] && [ -d "$srcdir" ]; then + # Determine prefix from BIN_DIR (e.g., /usr/local/bin -> /usr/local) + prefix_dir="$(dirname "$BIN_DIR")" + # Use sudo for system prefix if needed (only when passwordless sudo available per earlier logic) + if [ "$BIN_DIR" = "/usr/local/bin" ] && [ ! 
-w "$BIN_DIR" ]; then SUDO_CMD="sudo"; else SUDO_CMD=""; fi + # Ensure minimal build deps on apt systems + if have apt-get; then sudo apt-get update >/dev/null 2>&1 || true; sudo apt-get install -y make perl build-essential >/dev/null 2>&1 || true; fi + ( + cd "$srcdir" && + ./configure --prefix="$prefix_dir" >/dev/null 2>&1 || true && + make >/dev/null 2>&1 || true && + ${SUDO_CMD} make install >/dev/null 2>&1 || true + ) + # If make install failed for any reason, fall back to copying the script + if ! command -v parallel >/dev/null 2>&1 || ! parallel --version >/dev/null 2>&1; then + if [ -f "$srcdir/src/parallel" ]; then + $INSTALL "$srcdir/src/parallel" "$BIN_DIR/parallel" && return + fi + else + return + fi + fi + fi + fi + done + fi + # Fallback to system package if all else fails + if have apt-get; then sudo apt-get update && sudo apt-get install -y parallel; return; fi +} + +install_ast_grep() { + if have brew; then brew install ast-grep; return; fi + if command -v cargo >/dev/null 2>&1; then cargo install ast-grep; return; fi +} + +install_direnv() { + if have brew; then brew install direnv; return; fi + # Try latest GitHub release (prefer official binary over distro package) + local tmp tag os arch name url + tmp="$(mktemp -d)" + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/direnv/direnv/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + case "$OS" in + linux) os="linux" ;; + darwin) os="darwin" ;; + *) os="linux" ;; + esac + case "$ARCH" in + x86_64|amd64) arch="amd64" ;; + aarch64|arm64) arch="arm64" ;; + *) arch="amd64" ;; + esac + name="direnv.${os}-${arch}" + url="https://github.com/direnv/direnv/releases/download/${tag}/${name}" + if curl -fsSL "$url" -o "$tmp/direnv"; then + chmod +x "$tmp/direnv" || true + $INSTALL "$tmp/direnv" "$BIN_DIR/direnv" && return + fi + fi + # Fallback to apt if GitHub download fails + if have apt-get; then sudo apt-get update && sudo apt-get install -y direnv; return; fi +} + +install_git() { + if have brew; then brew install git || brew upgrade git || true; return; fi + if have apt-get; then + # Prefer official git-core PPA on Ubuntu-family to get newer Git than distro + case "$(os_id)" in + ubuntu|linuxmint|pop) + sudo apt-get update || true + sudo apt-get install -y software-properties-common ca-certificates gnupg || true + sudo add-apt-repository -y ppa:git-core/ppa || true + sudo apt-get update || true + sudo apt-get install -y git || true + ;; + *) + sudo apt-get update && (sudo apt-get install -y --only-upgrade git || sudo apt-get install -y git) + ;; + esac + # If still behind upstream, build latest from source into /usr/local + local installed tag latest tmp src nproc_val + installed="$(get_version git || true)" + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/git/git/releases/latest | awk -F'/' '{print $NF}')" + latest="${tag#v}" + if [ -n "$installed" ] && [ -n "$latest" ]; then + if command -v dpkg >/dev/null 2>&1; then + if dpkg --compare-versions "$installed" ge "$latest"; then return; fi + else + if [ "$(printf '%s\n%s\n' "$latest" "$installed" | sort -V | tail -n1)" = "$installed" ]; then return; fi + fi + fi + sudo apt-get update || true + sudo apt-get install -y build-essential dh-autoreconf libssl-dev libcurl4-gnutls-dev libexpat1-dev gettext zlib1g-dev tcl libpcre2-dev libzstd-dev || true + tmp="$(mktemp -d)" + if curl -fsSL "https://github.com/git/git/archive/refs/tags/${tag}.tar.gz" -o "$tmp/git.tar.gz"; then + if tar -C "$tmp" -xzf "$tmp/git.tar.gz" 
>/dev/null 2>&1; then + src="$(find "$tmp" -maxdepth 1 -type d -name 'git-*' | head -n1)" + if [ -n "$src" ] && [ -d "$src" ]; then + nproc_val="$(command -v nproc >/dev/null 2>&1 && nproc || echo 2)" + ( + cd "$src" && + make configure >/dev/null 2>&1 || true && + ./configure --prefix=/usr/local >/dev/null 2>&1 || true && + make -j"$nproc_val" all >/dev/null 2>&1 && + sudo make install >/dev/null 2>&1 + ) || true + fi + fi + fi + return + fi +} + +install_gh() { + if have brew; then brew install gh || brew upgrade gh || true; return; fi + # Try GitHub release binary (cli/cli) + local tmp tag ver url name file + tmp="$(mktemp -d)" + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/cli/cli/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + ver="${tag#v}" + case "$ARCH" in + x86_64|amd64) name="gh_${ver}_linux_amd64.tar.gz" ;; + aarch64|arm64) name="gh_${ver}_linux_arm64.tar.gz" ;; + *) name="gh_${ver}_linux_amd64.tar.gz" ;; + esac + url="https://github.com/cli/cli/releases/download/${tag}/${name}" + if curl -fsSL "$url" -o "$tmp/gh.tgz"; then + if tar -C "$tmp" -xzf "$tmp/gh.tgz" >/dev/null 2>&1; then + file="$(find "$tmp" -type f -path "*/bin/gh" -perm -111 | head -n1)" + if [ -n "$file" ] && [ -f "$file" ]; then $INSTALL "$file" "$BIN_DIR/gh" && return; fi + fi + fi + fi + # Fallback to apt (available if GitHub apt repo configured) + if have apt-get; then sudo apt-get update && sudo apt-get install -y gh || true; fi +} + +install_fd() { + if have fd || have fdfind; then return; fi + if have brew; then brew install fd; return; fi + if have cargo; then cargo install fd-find; return; fi + if have apt-get; then sudo apt-get update && sudo apt-get install -y fd-find; return; fi +} + +install_fzf() { + # Prefer latest from Homebrew or GitHub release; avoid apt unless no alternative + if have brew; then brew install fzf; return; fi + # Install latest binary from GitHub + local tag ver tmp url + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/junegunn/fzf/releases/latest | awk -F'/' '{print $NF}')" + ver="${tag#v}" + if [ -n "$ver" ]; then + tmp="$(mktemp -d)" + url="https://github.com/junegunn/fzf/releases/download/${tag}/fzf-${ver}-linux_amd64.tar.gz" + curl -fsSL "$url" -o "$tmp/fzf.tar.gz" + tar -C "$tmp" -xzf "$tmp/fzf.tar.gz" || true + if [ -f "$tmp/fzf" ]; then + install -m 0755 "$tmp/fzf" "$BIN_DIR/fzf" + return + fi + fi + # Fallbacks + if have apt-get; then sudo apt-get update && sudo apt-get install -y fzf; return; fi + git clone --depth 1 https://github.com/junegunn/fzf.git "$HOME/.fzf" && "$HOME/.fzf/install" --no-update-rc --key-bindings --completion --no-bash --no-fish --no-zsh || true +} + +install_rg() { + if have rg; then return; fi + if have brew; then brew install ripgrep; return; fi + if have cargo; then cargo install ripgrep; return; fi + if have apt-get; then sudo apt-get update && sudo apt-get install -y ripgrep; return; fi +} + +install_jq() { + if have jq; then return; fi + if have brew; then brew install jq; return; fi + # Try latest GitHub release binary (jqlang/jq) + local tag url tmp + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/jqlang/jq/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + tmp="$(mktemp -d)" + for name in jq-linux-amd64 jq-linux64 jq-linux-x86_64; do + url="https://github.com/jqlang/jq/releases/download/${tag}/${name}" + if curl -fsSL "$url" -o "$tmp/jq"; then + chmod +x "$tmp/jq" || true + $INSTALL "$tmp/jq" "$BIN_DIR/jq" && return + 
fi + done + fi + # Fallback to apt if nothing else worked + if have apt-get; then sudo apt-get update && sudo apt-get install -y jq; return; fi +} + +install_yq() { + if [ "${FORCE:-0}" != "1" ] && have yq; then return; fi + if have brew; then brew install yq; return; fi + # Prefer GitHub binary (map arch names to upstream asset names) + local tmp os arch url + tmp="$(mktemp -d)" + os="$(uname -s | tr '[:upper:]' '[:lower:]')" + case "$ARCH" in + x86_64|amd64) arch="amd64" ;; + aarch64|arm64) arch="arm64" ;; + *) arch="amd64" ;; + esac + url="https://github.com/mikefarah/yq/releases/latest/download/yq_${os}_${arch}" + if curl -fsSL "$url" -o "$tmp/yq"; then + chmod +x "$tmp/yq" || true + $INSTALL "$tmp/yq" "$BIN_DIR/yq" && return + fi + # Fallback to apt + if have apt-get; then sudo apt-get update && sudo apt-get install -y yq; return; fi +} + +install_bat() { + if have bat || have batcat; then return; fi + if have brew; then brew install bat; return; fi + if have cargo; then cargo install bat; return; fi + if have apt-get; then sudo apt-get update && sudo apt-get install -y bat || true; fi +} + +install_delta() { + if have delta; then return; fi + if have brew; then brew install git-delta; return; fi + if have cargo; then cargo install git-delta; return; fi +} + +install_curlie() { + if have brew; then brew install curlie; return; fi + # Try GitHub latest release via redirect to get tag, then download correct asset + local tmp tag ver arch url name file AUTH + tmp="$(mktemp -d)" + if [ -n "${GITHUB_TOKEN:-}" ]; then AUTH=( -H "Authorization: Bearer ${GITHUB_TOKEN}" ); else AUTH=(); fi + tag="$(curl -fsSIL ${AUTH[@]} -H "User-Agent: cli-audit" -o /dev/null -w '%{url_effective}' https://github.com/rs/curlie/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + ver="${tag#v}" + case "$ARCH" in + x86_64|amd64) arch="x86_64|amd64" ;; + aarch64|arm64) arch="arm64|aarch64" ;; + *) arch="x86_64|amd64" ;; + esac + # Try a set of common asset name patterns produced by goreleaser + for name in \ + "curlie_${ver}_Linux_x86_64.tar.gz" \ + "curlie_${ver}_linux_x86_64.tar.gz" \ + "curlie_${ver}_linux_amd64.tar.gz" \ + "curlie_${ver}_Linux_amd64.tar.gz" \ + "curlie_${ver}_Linux_arm64.tar.gz" \ + "curlie_${ver}_linux_arm64.tar.gz" \ + "curlie_${ver}_linux_${ARCH}.tar.gz"; do + url="https://github.com/rs/curlie/releases/download/${tag}/${name}" + if curl -fsSL ${AUTH[@]} -H "User-Agent: cli-audit" "$url" -o "$tmp/curlie.tgz"; then + tar -C "$tmp" -xzf "$tmp/curlie.tgz" || true + if [ -f "$tmp/curlie" ]; then $INSTALL "$tmp/curlie" "$BIN_DIR/curlie"; return; fi + # If unpacked into a directory, locate the binary + file="$(tar -tzf "$tmp/curlie.tgz" 2>/dev/null | awk -F/ '/(^|\/)curlie$/{print $0; exit}')" + if [ -n "$file" ] && [ -f "$tmp/$file" ]; then $INSTALL "$tmp/$file" "$BIN_DIR/curlie"; return; fi + fi + done + fi + # Fallback: go install (may report 0.0.0-LOCAL) + if command -v go >/dev/null 2>&1; then GO111MODULE=on go install github.com/rs/curlie@latest && $INSTALL "$(go_bin_path)/curlie" "$BIN_DIR/curlie"; return; fi +} + +install_dive() { + if have brew; then brew install dive; return; fi + # GitHub release binary + local tag url tmp name + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/wagoodman/dive/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + tmp="$(mktemp -d)" + name="dive_${tag#v}_linux_amd64.tar.gz" + url="https://github.com/wagoodman/dive/releases/download/${tag}/${name}" + if curl -fsSL "$url" -o "$tmp/dive.tgz"; then + 
tar -C "$tmp" -xzf "$tmp/dive.tgz" || true + if [ -f "$tmp/dive" ]; then $INSTALL "$tmp/dive" "$BIN_DIR/dive"; return; fi + fi + fi +} + +install_trivy() { + if have brew; then brew install trivy; return; fi + # GitHub release binary + local tag tmp url name + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/aquasecurity/trivy/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + tmp="$(mktemp -d)" + name="trivy_${tag#v}_Linux-64bit.tar.gz" + url="https://github.com/aquasecurity/trivy/releases/download/${tag}/${name}" + if curl -fsSL "$url" -o "$tmp/trivy.tgz"; then + tar -C "$tmp" -xzf "$tmp/trivy.tgz" || true + if [ -f "$tmp/trivy" ]; then $INSTALL "$tmp/trivy" "$BIN_DIR/trivy"; return; fi + fi + fi +} + +install_gitleaks() { + if have brew; then brew install gitleaks; return; fi + # GitHub release binary + local tag tmp url name + tag="$(curl -fsSLI -o /dev/null -w '%{url_effective}' https://github.com/gitleaks/gitleaks/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + tmp="$(mktemp -d)" + name="gitleaks_${tag#v}_linux_x64.tar.gz" + url="https://github.com/gitleaks/gitleaks/releases/download/${tag}/${name}" + if curl -fsSL "$url" -o "$tmp/gitleaks.tgz"; then + tar -C "$tmp" -xzf "$tmp/gitleaks.tgz" || true + if [ -f "$tmp/gitleaks" ]; then $INSTALL "$tmp/gitleaks" "$BIN_DIR/gitleaks"; return; fi + fi + fi +} + +install_git_absorb() { + if have brew; then brew install git-absorb; return; fi + if command -v cargo >/dev/null 2>&1; then cargo install git-absorb; return; fi +} + +install_git_branchless() { + if have brew; then brew install git-branchless; return; fi + if command -v cargo >/dev/null 2>&1; then cargo install git-branchless; return; fi +} + +install_eslint() { + ensure_nvm_loaded || true + if command -v npm >/dev/null 2>&1; then + if env -u PREFIX npm install -g eslint >/dev/null 2>&1; then return; fi + env -u PREFIX npm install -g --prefix "$HOME/.local" eslint || true + if [ -x "$HOME/.local/bin/eslint" ]; then $INSTALL "$HOME/.local/bin/eslint" "$BIN_DIR/eslint"; return; fi + fi + if have brew; then brew install eslint; return; fi +} + +install_prettier() { + ensure_nvm_loaded || true + if command -v npm >/dev/null 2>&1; then + if env -u PREFIX npm install -g prettier >/dev/null 2>&1; then return; fi + env -u PREFIX npm install -g --prefix "$HOME/.local" prettier || true + if [ -x "$HOME/.local/bin/prettier" ]; then $INSTALL "$HOME/.local/bin/prettier" "$BIN_DIR/prettier"; return; fi + fi + if have brew; then brew install prettier; return; fi +} + +install_shfmt() { + if command -v go >/dev/null 2>&1; then GO111MODULE=on go install mvdan.cc/sh/v3/cmd/shfmt@latest && $INSTALL "$(go_bin_path)/shfmt" "$BIN_DIR/shfmt"; return; fi + if have brew; then brew install shfmt; return; fi +} + +install_shellcheck() { + if have brew; then brew install shellcheck; return; fi + # Try GitHub release binary first to get latest version + local tmp tag arch name url file AUTH + tmp="$(mktemp -d)" + if [ -n "${GITHUB_TOKEN:-}" ]; then AUTH=( -H "Authorization: Bearer ${GITHUB_TOKEN}" ); else AUTH=(); fi + tag="$(curl -fsSIL ${AUTH[@]} -H "User-Agent: cli-audit" -o /dev/null -w '%{url_effective}' https://github.com/koalaman/shellcheck/releases/latest | awk -F'/' '{print $NF}')" + case "$ARCH" in + x86_64|amd64) arch="x86_64" ;; + aarch64|arm64) arch="aarch64" ;; + armv6*|armv7*|arm*) arch="armv6" ;; + *) arch="x86_64" ;; + esac + if [ -n "$tag" ]; then + name="shellcheck-${tag}.linux.${arch}.tar.xz" + 
url="https://github.com/koalaman/shellcheck/releases/download/${tag}/${name}" + if curl -fsSL ${AUTH[@]} -H "User-Agent: cli-audit" "$url" -o "$tmp/sc.tar.xz"; then + # Extract and install the 'shellcheck' binary + if tar -C "$tmp" -xJf "$tmp/sc.tar.xz" >/dev/null 2>&1; then + file="$(tar -tJf "$tmp/sc.tar.xz" 2>/dev/null | awk -F/ '/(^|\/)shellcheck$/{print $0; exit}')" + if [ -n "$file" ] && [ -f "$tmp/$file" ]; then + $INSTALL "$tmp/$file" "$BIN_DIR/shellcheck" && return + fi + # Fallback: search extracted tree + file="$(find "$tmp" -type f -name shellcheck -perm -111 | head -n1)" + if [ -n "$file" ]; then $INSTALL "$file" "$BIN_DIR/shellcheck" && return; fi + fi + fi + fi + # Fallback to apt if GitHub binary not available + if have apt-get; then sudo apt-get update && sudo apt-get install -y shellcheck; return; fi +} + +install_fx() { + # Prefer Go implementation + if command -v go >/dev/null 2>&1; then + GO111MODULE=on go install github.com/antonmedv/fx@latest && $INSTALL "$(go_bin_path)/fx" "$BIN_DIR/fx" && return + fi + # Fallback + if have brew; then brew install fx; return; fi +} + +install_glab() { + if have brew; then brew install glab; return; fi + # Try GitHub release binary (profclems/glab) + local tmp tag ver url name file AUTH + tmp="$(mktemp -d)" + if [ -n "${GITHUB_TOKEN:-}" ]; then AUTH=( -H "Authorization: Bearer ${GITHUB_TOKEN}" ); else AUTH=(); fi + tag="$(curl -fsSIL ${AUTH[@]} -H "User-Agent: cli-audit" -o /dev/null -w '%{url_effective}' https://github.com/profclems/glab/releases/latest | awk -F'/' '{print $NF}')" + if [ -n "$tag" ]; then + ver="${tag#v}" + # Try a set of common asset name patterns produced by goreleaser + for name in \ + "glab_${ver}_Linux_x86_64.tar.gz" \ + "glab_${ver}_linux_x86_64.tar.gz" \ + "glab_${ver}_linux_amd64.tar.gz" \ + "glab_${ver}_Linux_amd64.tar.gz" \ + "glab_${ver}_Linux_arm64.tar.gz" \ + "glab_${ver}_linux_arm64.tar.gz" \ + "glab_${ver}_Linux_${ARCH}.tar.gz" \ + "glab_${ver}_linux_${ARCH}.tar.gz"; do + url="https://github.com/profclems/glab/releases/download/${tag}/${name}" + if curl -fsSL ${AUTH[@]} -H "User-Agent: cli-audit" "$url" -o "$tmp/glab.tgz"; then + if tar -C "$tmp" -xzf "$tmp/glab.tgz" >/dev/null 2>&1; then + # Look for an extracted 'glab' binary + file="$(tar -tzf "$tmp/glab.tgz" 2>/dev/null | awk -F/ '/(^|\/)glab$/{print $0; exit}')" + if [ -n "$file" ] && [ -f "$tmp/$file" ]; then + $INSTALL "$tmp/$file" "$BIN_DIR/glab" && return + fi + file="$(find "$tmp" -type f -name glab -perm -111 | head -n1)" + if [ -n "$file" ]; then + $INSTALL "$file" "$BIN_DIR/glab" && return + fi + fi + fi + done + fi + # Fallback: install via Go if available + if command -v go >/dev/null 2>&1; then + # Prefer the canonical module path if present + GO111MODULE=on go install gitlab.com/gitlab-org/cli/cmd/glab@latest 2>/dev/null || GO111MODULE=on go install github.com/profclems/glab@latest || true + if [ -x "$(go_bin_path)/glab" ]; then + $INSTALL "$(go_bin_path)/glab" "$BIN_DIR/glab" && return + fi + fi +} + +install_just() { + if have just; then return; fi + if have brew; then brew install just; return; fi + if have cargo; then cargo install just; return; fi +} + +install_core_tools() { + install_fd + install_fzf + install_rg + install_jq + install_yq + install_bat + install_delta + install_just +} + +update_core_tools() { + if have brew; then brew upgrade fd fzf ripgrep jq yq bat git-delta just || true; fi + # On apt systems, try to upgrade via apt-get if packages exist + if have apt-get; then sudo apt-get update || true; sudo apt-get 
install -y --only-upgrade fzf ripgrep jq yq bat || true; fi +} + +reconcile_one() { + local t="$1" + local before after path + before="$(get_version "$t" || true)" + case "$t" in + fd) + sudo apt-get remove -y fd-find >/dev/null 2>&1 || true + install_fd + ;; + ripgrep) + sudo apt-get remove -y ripgrep >/dev/null 2>&1 || true + install_rg + ;; + jq) + sudo apt-get remove -y jq >/dev/null 2>&1 || true + install_jq + ;; + yq) + sudo apt-get remove -y yq >/dev/null 2>&1 || true + $RM "/usr/local/bin/yq" >/dev/null 2>&1 || true + rm -f "$HOME/.local/bin/yq" >/dev/null 2>&1 || true + FORCE=1 install_yq + ;; + bat) + sudo apt-get remove -y bat >/dev/null 2>&1 || true + install_bat + ;; + delta) + sudo apt-get remove -y git-delta >/dev/null 2>&1 || true + install_delta + ;; + just) + sudo apt-get remove -y just >/dev/null 2>&1 || true + install_just + ;; + fzf) + sudo apt-get remove -y fzf >/dev/null 2>&1 || true + install_fzf + ;; + curlie) + sudo apt-get remove -y curlie >/dev/null 2>&1 || true + rm -f "/usr/local/bin/curlie" "$HOME/.local/bin/curlie" "$(go_bin_path)/curlie" >/dev/null 2>&1 || true + install_curlie + ;; + dive) + sudo apt-get remove -y dive >/dev/null 2>&1 || true + install_dive + ;; + trivy) + sudo apt-get remove -y trivy >/dev/null 2>&1 || true + install_trivy + ;; + gitleaks) + sudo apt-get remove -y gitleaks >/dev/null 2>&1 || true + install_gitleaks + ;; + git-absorb) + install_git_absorb + ;; + git-branchless) + install_git_branchless + ;; + eslint) + install_eslint + ;; + prettier) + install_prettier + ;; + shfmt) + install_shfmt + ;; + shellcheck) + sudo apt-get remove -y shellcheck >/dev/null 2>&1 || true + $RM "/usr/local/bin/shellcheck" >/dev/null 2>&1 || true + install_shellcheck + ;; + fx) + # Remove Node variant if present, then install Go variant + ensure_nvm_loaded || true + if command -v npm >/dev/null 2>&1; then + env -u PREFIX npm uninstall -g fx >/dev/null 2>&1 || true + env -u PREFIX npm uninstall -g --prefix "$HOME/.local" fx >/dev/null 2>&1 || true + node_root="$(npm root -g 2>/dev/null || true)"; if [ -n "$node_root" ]; then rm -rf "$node_root/fx" >/dev/null 2>&1 || true; fi + fi + rm -f "$HOME/.local/bin/fx" >/dev/null 2>&1 || true + install_fx + ;; + glab) + install_glab + ;; + ctags) + # Record baseline presence of distro ctags packages + base_ctags_pkg=0; base_exuberant_pkg=0; base_universal_pkg=0 + if have apt-get; then + dpkg -s ctags >/dev/null 2>&1 && base_ctags_pkg=1 || true + dpkg -s exuberant-ctags >/dev/null 2>&1 && base_exuberant_pkg=1 || true + dpkg -s universal-ctags >/dev/null 2>&1 && base_universal_pkg=1 || true + fi + install_ctags + # If we successfully installed to /usr/local and any distro ctags packages were not present before + # but are present now (unlikely unless installed earlier in this run), remove them to avoid confusion. 
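+      # dpkg -s exits 0 only for an installed package, so the base_*/cur_* flags
+      # below take a before/after snapshot; a distro ctags package is removed only
+      # when this run introduced it and 'command -v ctags' resolves to /usr/local/bin/ctags.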
+ if have apt-get; then + cur_ctags_pkg=0; cur_exuberant_pkg=0; cur_universal_pkg=0 + dpkg -s ctags >/dev/null 2>&1 && cur_ctags_pkg=1 || true + dpkg -s exuberant-ctags >/dev/null 2>&1 && cur_exuberant_pkg=1 || true + dpkg -s universal-ctags >/dev/null 2>&1 && cur_universal_pkg=1 || true + ctags_path="$(command -v ctags 2>/dev/null || true)" + if [ "$ctags_path" = "/usr/local/bin/ctags" ]; then + if [ "$base_ctags_pkg" -eq 0 ] && [ "$cur_ctags_pkg" -eq 1 ]; then sudo apt-get remove -y ctags >/dev/null 2>&1 || true; fi + if [ "$base_exuberant_pkg" -eq 0 ] && [ "$cur_exuberant_pkg" -eq 1 ]; then sudo apt-get remove -y exuberant-ctags >/dev/null 2>&1 || true; fi + if [ "$base_universal_pkg" -eq 0 ] && [ "$cur_universal_pkg" -eq 1 ]; then sudo apt-get remove -y universal-ctags >/dev/null 2>&1 || true; fi + fi + fi + ;; + entr) + sudo apt-get remove -y entr >/dev/null 2>&1 || true + install_entr + ;; + parallel) + sudo apt-get remove -y parallel >/dev/null 2>&1 || true + install_parallel + ;; + ast-grep) + rm -f "/usr/local/bin/ast-grep" "$HOME/.local/bin/ast-grep" "$(go_bin_path)/ast-grep" >/dev/null 2>&1 || true + install_ast_grep + ;; + direnv) + sudo apt-get remove -y direnv >/dev/null 2>&1 || true + install_direnv + ;; + git) + install_git + ;; + gh) + install_gh + ;; + *) echo "Unknown tool: $t" ;; + esac + path="$(command -v "$t" 2>/dev/null || true)" + after="$(get_version "$t" || true)" + printf "[%s] before: %s\n" "$t" "${before:-}" + printf "[%s] after: %s\n" "$t" "${after:-}" + if [ -n "$path" ]; then printf "[%s] path: %s\n" "$t" "$path"; fi +} + +uninstall_core_tools() { + if have brew; then brew uninstall -f fd fzf ripgrep jq yq bat git-delta just || true; fi + apt_remove_if_present fd-find fzf ripgrep jq yq bat || true +} + +case "$ACTION" in + install) install_core_tools ;; + update) update_core_tools ;; + uninstall) uninstall_core_tools ;; + reconcile) + if [ -n "$ONLY_TOOL" ]; then + reconcile_one "$ONLY_TOOL" + else + for t in fd fzf ripgrep jq yq bat delta just; do reconcile_one "$t"; done + fi + ;; + *) echo "Usage: $0 {install|update|uninstall|reconcile}" ; exit 2 ;; +esac + +case ":$PATH:" in + *":$BIN_DIR:"*) ;; + *) echo "core: $ACTION complete (or attempted). You may need to add $BIN_DIR to PATH." ;; +esac + + diff --git a/scripts/install_gem.sh b/scripts/install_gem.sh deleted file mode 100755 index 93bcc2d..0000000 --- a/scripts/install_gem.sh +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$DIR/lib/common.sh" - -ACTION="${1:-update}" - -ensure_rbenv_loaded() { - # Add rbenv to PATH and initialize if available - if [ -d "$HOME/.rbenv" ]; then - export PATH="$HOME/.rbenv/bin:$PATH" - if command -v rbenv >/dev/null 2>&1; then - eval "$(rbenv init - bash)" || true - fi - fi -} - -check_rbenv_ruby() { - ensure_rbenv_loaded - - # Check if Ruby is rbenv-managed - local ruby_path - ruby_path="$(command -v ruby 2>/dev/null || echo '')" - - case "$ruby_path" in - "$HOME/.rbenv/"*) - return 0 # rbenv-managed - ;; - *) - return 1 # not rbenv-managed (apt, system, or missing) - ;; - esac -} - -update_gem() { - ensure_rbenv_loaded - - if ! command -v gem >/dev/null 2>&1; then - echo "[gem] Error: gem not found. Install Ruby first via 'make install-ruby' or 'scripts/install_ruby.sh'" >&2 - return 1 - fi - - # Check if Ruby is rbenv-managed before trying to update gem - if ! 
check_rbenv_ruby; then - echo "[gem] Error: Ruby is not rbenv-managed (currently apt/system)" >&2 - echo "[gem] Cannot update gem via 'gem update --system' for apt-managed Ruby" >&2 - echo "[gem] Please install Ruby via rbenv first:" >&2 - echo "[gem] make install-ruby" >&2 - echo "[gem] or: scripts/install_ruby.sh reconcile" >&2 - return 1 - fi - - local before after path - before="$(gem --version 2>/dev/null || echo '')" - - # Update RubyGems itself - gem update --system || true - - # Update bundler and rake - gem update bundler rake || true - - # Rehash rbenv shims if using rbenv - if command -v rbenv >/dev/null 2>&1; then - rbenv rehash || true - fi - - after="$(gem --version 2>/dev/null || echo '')" - path="$(command -v gem 2>/dev/null || echo '')" - - printf "[%s] before: %s\n" "gem" "$before" - printf "[%s] after: %s\n" "gem" "$after" - printf "[%s] path: %s\n" "gem" "$path" -} - -install_gem() { - echo "[gem] gem comes bundled with Ruby. Installing/updating Ruby instead..." - "$DIR/install_ruby.sh" install || true - update_gem -} - -reconcile_gem() { - ensure_rbenv_loaded - - if ! command -v gem >/dev/null 2>&1; then - echo "[gem] gem not found. Installing Ruby (which includes gem)..." - "$DIR/install_ruby.sh" reconcile || true - elif ! check_rbenv_ruby; then - echo "[gem] Ruby is not rbenv-managed. Installing Ruby via rbenv first..." - "$DIR/install_ruby.sh" reconcile || true - fi - - update_gem -} - -uninstall_gem() { - echo "[gem] gem is bundled with Ruby. To remove gem, uninstall Ruby via 'scripts/install_ruby.sh uninstall'" >&2 -} - -case "$ACTION" in - install) install_gem ;; - update) update_gem ;; - uninstall) uninstall_gem ;; - reconcile) reconcile_gem ;; - *) echo "Usage: $0 {install|update|uninstall|reconcile}" ; exit 2 ;; -esac diff --git a/scripts/install_go.sh b/scripts/install_go.sh index c90a60f..37cd855 100755 --- a/scripts/install_go.sh +++ b/scripts/install_go.sh @@ -5,65 +5,11 @@ have() { command -v "$1" >/dev/null 2>&1; } TOOL="go" before="$(have go && go version || true)" - if have brew; then - # Use homebrew for installation/upgrade - if have go; then brew upgrade go || brew install go || true; else brew install go || true; fi + if have go; then brew upgrade go || true; else brew install go; fi else - # Manual installation from official Go downloads - # Determine OS and architecture - OS="$(uname -s | tr '[:upper:]' '[:lower:]')" - ARCH="$(uname -m)" - case "$ARCH" in - x86_64|amd64) GOARCH="amd64" ;; - aarch64|arm64) GOARCH="arm64" ;; - armv6*) GOARCH="armv6l" ;; - *) GOARCH="amd64" ;; - esac - - # Get latest version from Go download page - TMP="$(mktemp -d)" - VERSION_URL="https://go.dev/VERSION?m=text" - if curl -fsSL "$VERSION_URL" -o "$TMP/version.txt" 2>/dev/null; then - # VERSION file contains lines like "go1.25.2" - take first line - TARGET_VERSION="$(head -n1 "$TMP/version.txt" | tr -d '\n\r')" - if [ -n "$TARGET_VERSION" ]; then - # Download archive - ARCHIVE="${TARGET_VERSION}.${OS}-${GOARCH}.tar.gz" - DOWNLOAD_URL="https://go.dev/dl/${ARCHIVE}" - - echo "Downloading ${ARCHIVE}..." - if curl -fsSL "$DOWNLOAD_URL" -o "$TMP/${ARCHIVE}"; then - # Remove existing installation - if [ -d "/usr/local/go" ]; then - echo "Removing existing /usr/local/go..." - sudo rm -rf /usr/local/go - fi - - # Extract new version - echo "Extracting to /usr/local/go..." - sudo tar -C /usr/local -xzf "$TMP/${ARCHIVE}" - - # Ensure /usr/local/go/bin is in PATH - if [ ! 
-f "/usr/local/bin/go" ]; then - sudo ln -sf /usr/local/go/bin/go /usr/local/bin/go 2>/dev/null || true - sudo ln -sf /usr/local/go/bin/gofmt /usr/local/bin/gofmt 2>/dev/null || true - fi - else - echo "Failed to download ${DOWNLOAD_URL}" - echo "Please install Go manually from https://go.dev/dl/" - fi - else - echo "Failed to determine latest Go version" - echo "Please install Go from https://go.dev/dl/" - fi - else - echo "Failed to fetch Go version information" - echo "Please install Go from https://go.dev/dl/" - fi - rm -rf "$TMP" 2>/dev/null || true + echo "Please install Go from https://go.dev/dl/" fi - after="$(have go && go version || true)" path="$(command -v go 2>/dev/null || true)" printf "[%s] before: %s\n" "$TOOL" "${before:-}" diff --git a/scripts/install_group.sh b/scripts/install_group.sh deleted file mode 100755 index 77e8953..0000000 --- a/scripts/install_group.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Install tools by catalog tag -# Usage: install_group.sh TAG [install|update|uninstall] - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$DIR/lib/common.sh" - -TAG="${1:-}" -ACTION="${2:-install}" - -if [ -z "$TAG" ]; then - echo "Usage: $0 TAG [install|update|uninstall]" >&2 - echo "" >&2 - echo "Available tags:" >&2 - # Extract all unique tags from catalog - find "$DIR/../catalog" -name "*.json" -exec jq -r '.tags[]? // empty' {} \; 2>/dev/null | sort -u | sed 's/^/ - /' - exit 1 -fi - -# Check if jq is available -if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq is required but not found" >&2 - exit 1 -fi - -# Find all tools with the specified tag -TOOLS=() -for json in "$DIR"/../catalog/*.json; do - if [ -f "$json" ]; then - # Check if this tool has the tag - if jq -e --arg tag "$TAG" '.tags[]? | select(. == $tag)' "$json" >/dev/null 2>&1; then - tool_name="$(jq -r '.name' "$json")" - TOOLS+=("$tool_name") - fi - fi -done - -if [ ${#TOOLS[@]} -eq 0 ]; then - echo "No tools found with tag: $TAG" >&2 - exit 1 -fi - -echo "[$TAG] Found ${#TOOLS[@]} tools: ${TOOLS[*]}" - -# Install/update/uninstall each tool -for tool in "${TOOLS[@]}"; do - case "$ACTION" in - install|reconcile) - "$DIR/install_tool.sh" "$tool" - ;; - update) - "$DIR/install_tool.sh" "$tool" - ;; - uninstall) - # For uninstall, we'd need to implement uninstall support in installers - echo "[$tool] Uninstall not yet implemented" >&2 - ;; - *) - echo "Unknown action: $ACTION" >&2 - exit 2 - ;; - esac -done - -echo "[$TAG] Completed $ACTION for ${#TOOLS[@]} tools" diff --git a/scripts/install_kubectl.sh b/scripts/install_kubectl.sh new file mode 100755 index 0000000..6ba89f0 --- /dev/null +++ b/scripts/install_kubectl.sh @@ -0,0 +1,63 @@ +#!/usr/bin/env bash +set -euo pipefail + +DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +. 
"$DIR/lib/common.sh" + +TOOL="kubectl" +before="$(kubectl version --client --short 2>/dev/null || true)" + +# Detect OS and architecture +OS="linux" +case "$(uname -m)" in + x86_64|amd64) ARCH="amd64" ;; + aarch64|arm64) ARCH="arm64" ;; + armv7l) ARCH="armv7" ;; + s390x) ARCH="s390x" ;; + ppc64le) ARCH="ppc64le" ;; + *) ARCH="amd64" ;; +esac + +# Resolve latest stable version from primary and fallback endpoints +LATEST="$(curl -fsSL https://dl.k8s.io/release/stable.txt 2>/dev/null || true)" +if [ -z "$LATEST" ]; then + LATEST="$(curl -fsSL https://storage.googleapis.com/kubernetes-release/release/stable.txt 2>/dev/null || true)" +fi + +# If still empty, do not proceed blindly +if [ -z "$LATEST" ]; then + printf "[%s] error: unable to resolve latest version from upstream.\n" "$TOOL" >&2 + printf "[%s] before: %s\n" "$TOOL" "${before:-}" + printf "[%s] after: %s\n" "$TOOL" "" + path="$(command -v kubectl 2>/dev/null || true)" + if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi + exit 1 +fi + +PRIMARY_URL="https://dl.k8s.io/release/${LATEST}/bin/${OS}/${ARCH}/kubectl" +FALLBACK_URL="https://storage.googleapis.com/kubernetes-release/release/${LATEST}/bin/${OS}/${ARCH}/kubectl" + +# Download with retry and fallback +tmpfile="/tmp/kubectl" +rm -f "$tmpfile" +if ! curl -fL --retry 3 --retry-delay 1 --connect-timeout 10 -o "$tmpfile" "$PRIMARY_URL"; then + curl -fL --retry 3 --retry-delay 1 --connect-timeout 10 -o "$tmpfile" "$FALLBACK_URL" +fi + +# Basic validation: ensure file is not empty and looks like ELF/Mach-O +if ! [ -s "$tmpfile" ]; then + printf "[%s] error: downloaded file is empty from both endpoints.\n" "$TOOL" >&2 + exit 1 +fi + +chmod +x "$tmpfile" +# Install atomically with proper permissions +sudo install -m 0755 -T "$tmpfile" /usr/local/bin/kubectl + +after="$(kubectl version --client --short 2>/dev/null || true)" +path="$(command -v kubectl 2>/dev/null || true)" +printf "[%s] before: %s\n" "$TOOL" "${before:-}" +printf "[%s] after: %s\n" "$TOOL" "${after:-}" +if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi + + diff --git a/scripts/install_node.sh b/scripts/install_node.sh index eeaf95b..de7b25e 100755 --- a/scripts/install_node.sh +++ b/scripts/install_node.sh @@ -29,10 +29,10 @@ install_node() { nvm alias default "$NODE_CHANNEL" || true nvm use default || true fi - corepack enable 2>/dev/null || true + corepack enable || true npm install -g npm@latest || true - corepack prepare pnpm@latest --activate 2>/dev/null || true - corepack prepare yarn@1 --activate 2>/dev/null || true + corepack prepare pnpm@latest --activate || true + corepack prepare yarn@1 --activate || true npm install -g eslint prettier || true } @@ -50,11 +50,11 @@ update_node() { nvm use default || true fi # Ensure corepack shims are present - corepack enable 2>/dev/null || true + corepack enable || true npm install -g npm@latest || true # Update pnpm and yarn via corepack; fall back to npm global if corepack unavailable - corepack prepare pnpm@latest --activate 2>/dev/null || npm install -g pnpm@latest || true - corepack prepare yarn@1 --activate 2>/dev/null || npm install -g yarn@latest || true + corepack prepare pnpm@latest --activate || npm install -g pnpm@latest || true + corepack prepare yarn@1 --activate || npm install -g yarn@latest || true npm update -g eslint prettier || true } diff --git a/scripts/install_ruby.sh b/scripts/install_ruby.sh deleted file mode 100755 index b7af0ba..0000000 --- a/scripts/install_ruby.sh +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env 
bash -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$DIR/lib/common.sh" - -ACTION="${1:-reconcile}" -# Target Ruby version (default: latest stable) -RUBY_VERSION="${RUBY_VERSION:-3.3.6}" - -ensure_rbenv() { - if [ ! -d "$HOME/.rbenv" ]; then - echo "Installing rbenv..." - git clone https://github.com/rbenv/rbenv.git "$HOME/.rbenv" - cd "$HOME/.rbenv" && src/configure && make -C src || true - fi - - # Install ruby-build plugin if missing - if [ ! -d "$HOME/.rbenv/plugins/ruby-build" ]; then - echo "Installing ruby-build plugin..." - git clone https://github.com/rbenv/ruby-build.git "$HOME/.rbenv/plugins/ruby-build" - fi - - ensure_rbenv_loaded -} - -ensure_rbenv_loaded() { - # Add rbenv to PATH and initialize if not already done - export PATH="$HOME/.rbenv/bin:$PATH" - if command -v rbenv >/dev/null 2>&1; then - eval "$(rbenv init - bash)" || true - fi -} - -get_latest_ruby_version() { - ensure_rbenv - # Get latest stable Ruby version from rbenv - rbenv install --list 2>/dev/null | grep -E '^\s*[0-9]+\.[0-9]+\.[0-9]+$' | tail -1 | tr -d ' ' || echo "3.3.6" -} - -install_ruby() { - ensure_rbenv - - # Use latest if RUBY_VERSION not specified - if [ "$RUBY_VERSION" = "latest" ]; then - RUBY_VERSION=$(get_latest_ruby_version) - fi - - echo "Installing Ruby $RUBY_VERSION via rbenv..." - rbenv install --skip-existing "$RUBY_VERSION" || true - rbenv global "$RUBY_VERSION" || true - rbenv rehash || true - - # Update gem itself - gem update --system || true - - # Install common gems - gem install bundler rake || true - rbenv rehash || true -} - -update_ruby() { - ensure_rbenv - - # Get current version - local current_version target_version - current_version=$(rbenv global 2>/dev/null || echo "") - - # Use RUBY_VERSION if set, otherwise get latest from rbenv - if [ -n "${RUBY_VERSION:-}" ] && [ "$RUBY_VERSION" != "latest" ]; then - target_version="$RUBY_VERSION" - else - target_version=$(get_latest_ruby_version) - RUBY_VERSION="$target_version" - fi - - echo "Current Ruby: $current_version" - echo "Target Ruby: $target_version" - - # Install target if different - if [ "$current_version" != "$target_version" ]; then - install_ruby - else - # Just update gems - gem update --system || true - gem update bundler rake || true - rbenv rehash || true - fi -} - -uninstall_ruby() { - # Remove rbenv-managed Ruby - if [ -d "$HOME/.rbenv" ]; then - rm -rf "$HOME/.rbenv" - fi - - # Remove apt Ruby if present - apt_remove_if_present ruby ruby-dev ruby-rubygems || true -} - -prefers_rbenv_ruby() { - local ruby_path - ruby_path="$(command -v ruby 2>/dev/null || true)" - - # Prefer rbenv if Ruby is under ~/.rbenv - case "$ruby_path" in - "$HOME/.rbenv/"*) return 0 ;; - *) return 1 ;; - esac -} - -reconcile_ruby() { - local before after path - before="$(command -v ruby >/dev/null 2>&1 && ruby --version || true)" - - if ! prefers_rbenv_ruby; then - echo "Removing apt-managed Ruby in favor of rbenv..." 
-    apt_remove_if_present ruby ruby-dev ruby-rubygems || true
-    install_ruby
-  else
-    update_ruby
-  fi
-
-  after="$(command -v ruby >/dev/null 2>&1 && ruby --version || true)"
-  path="$(command -v ruby 2>/dev/null || true)"
-
-  printf "[%s] before: %s\n" "ruby" "${before:-}"
-  printf "[%s] after: %s\n" "ruby" "${after:-}"
-  if [ -n "$path" ]; then printf "[%s] path: %s\n" "ruby" "$path"; fi
-}
-
-case "$ACTION" in
-  install) install_ruby ;;
-  update) update_ruby ;;
-  uninstall) uninstall_ruby ;;
-  reconcile) reconcile_ruby ;;
-  *) echo "Usage: $0 {install|update|uninstall|reconcile}" ; exit 2 ;;
-esac
diff --git a/scripts/install_rust.sh b/scripts/install_rust.sh
index 4e027aa..ec5e45d 100755
--- a/scripts/install_rust.sh
+++ b/scripts/install_rust.sh
@@ -29,8 +29,6 @@ reconcile_rust() {
     uninstall_rust || true
   fi
   install_rust
-  # Always update to latest after ensuring rustup installation
-  update_rust
 }

 case "$ACTION" in
diff --git a/scripts/install_terraform.sh b/scripts/install_terraform.sh
new file mode 100755
index 0000000..4e7f736
--- /dev/null
+++ b/scripts/install_terraform.sh
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+. "$DIR/lib/common.sh"
+
+OS="linux"
+ARCH_RAW="$(uname -m)"
+case "$ARCH_RAW" in
+  x86_64|amd64) ARCH="amd64" ;;
+  aarch64|arm64) ARCH="arm64" ;;
+  *) ARCH="amd64" ;;
+esac
+
+# Prefer the latest official release from HashiCorp; fall back to brew instead of exiting early
+
+before="$(command -v terraform >/dev/null 2>&1 && terraform version 2>/dev/null | head -n1 || true)"
+
+# Remove distro package first so new binary takes precedence
+apt_remove_if_present terraform || true
+
+TMP="$(mktemp -d)"
+
+# Get latest version from GitHub releases redirect
+LATEST_TAG="$(curl -fsSIL -H "User-Agent: cli-audit" -o /dev/null -w '%{url_effective}' https://github.com/hashicorp/terraform/releases/latest | awk -F'/' '{print $NF}')"
+VER="${LATEST_TAG#v}"
+if [ -z "$VER" ]; then
+  echo "Could not resolve latest Terraform version; will try brew fallback" >&2
+fi
+
+if [ -n "$VER" ]; then
+  URL="https://releases.hashicorp.com/terraform/${VER}/terraform_${VER}_${OS}_${ARCH}.zip"
+  if curl -fsSL "$URL" -o "$TMP/terraform.zip"; then
+    unzip -q "$TMP/terraform.zip" -d "$TMP" || true
+    if [ -f "$TMP/terraform" ]; then
+      # Install to /usr/local/bin with sudo if needed
+      if [ -w "/usr/local/bin" ]; then INST="install -m 0755"; else INST="sudo install -m 0755"; fi
+      $INST "$TMP/terraform" "/usr/local/bin/terraform"
+    fi
+  fi
+fi
+rm -rf "$TMP" 2>/dev/null || true
+
+# Fallbacks
+if ! command -v terraform >/dev/null 2>&1; then
+  if have brew; then brew install terraform || true; fi
+fi
+
+after="$(command -v terraform >/dev/null 2>&1 && terraform version 2>/dev/null | head -n1 || true)"
+path="$(command -v terraform 2>/dev/null || true)"
+printf "[%s] before: %s\n" "terraform" "${before:-}"
+printf "[%s] after: %s\n" "terraform" "${after:-}"
+if [ -n "$path" ]; then printf "[%s] path: %s\n" "terraform" "$path"; fi
+
+
diff --git a/scripts/install_tool.sh b/scripts/install_tool.sh
deleted file mode 100755
index 5a20aca..0000000
--- a/scripts/install_tool.sh
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env bash
-# Main orchestrator for tool installation
-# Reads catalog and delegates to appropriate installer or reconciliation system
-set -euo pipefail
-
-DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-
-# Source reconciliation libraries
-. 
"$DIR/lib/reconcile.sh" - -TOOL="${1:-}" -ACTION="${2:-install}" - -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME [ACTION]" >&2 - echo "Actions: install, update, reconcile, status, uninstall" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" - -# Check if tool has catalog entry -if [ ! -f "$CATALOG_FILE" ]; then - echo "[$TOOL] Error: No catalog entry found" >&2 - echo "[$TOOL] Available tools: $(find "$DIR/../catalog" -name '*.json' -exec basename {} .json \; | tr '\n' ' ')" >&2 - exit 1 -fi - -# Check if jq is available -if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq is required but not found" >&2 - exit 1 -fi - -# Read install method from catalog -INSTALL_METHOD="$(jq -r '.install_method' "$CATALOG_FILE")" - -if [ -z "$INSTALL_METHOD" ] || [ "$INSTALL_METHOD" = "null" ]; then - echo "[$TOOL] Error: No install_method specified in catalog" >&2 - exit 1 -fi - -# Check if tool uses reconciliation system (install_method == "auto") -if [ "$INSTALL_METHOD" = "auto" ]; then - # Use reconciliation system - case "$ACTION" in - install|update|reconcile) - # Pass the actual action to reconcile_tool - reconcile_tool "$CATALOG_FILE" "$ACTION" - exit $? - ;; - status) - reconcile_tool "$CATALOG_FILE" "status" - exit $? - ;; - uninstall) - # Get current method and remove it - binary_name="$(jq -r '.binary_name // ""' "$CATALOG_FILE" 2>/dev/null || echo "$TOOL")" - current_method="$(detect_install_method "$TOOL" "$binary_name")" - if [ "$current_method" != "none" ]; then - remove_installation "$TOOL" "$current_method" "$binary_name" - echo "[$TOOL] Uninstalled (was via $current_method)" - else - echo "[$TOOL] Not installed" - fi - exit 0 - ;; - *) - echo "[$TOOL] Error: Unknown action: $ACTION" >&2 - exit 1 - ;; - esac -fi - -# Traditional path: delegate to appropriate installer -INSTALLER_SCRIPT="$DIR/installers/${INSTALL_METHOD}.sh" - -if [ ! -f "$INSTALLER_SCRIPT" ]; then - echo "[$TOOL] Error: Installer not found: $INSTALLER_SCRIPT" >&2 - echo "[$TOOL] install_method: $INSTALL_METHOD" >&2 - exit 1 -fi - -if [ ! -x "$INSTALLER_SCRIPT" ]; then - echo "[$TOOL] Error: Installer not executable: $INSTALLER_SCRIPT" >&2 - exit 1 -fi - -# Execute installer with all remaining arguments -shift # Remove TOOL from $@ -exec "$INSTALLER_SCRIPT" "$TOOL" "$@" diff --git a/scripts/install_uv.sh b/scripts/install_uv.sh index a5e5d37..6ba7331 100755 --- a/scripts/install_uv.sh +++ b/scripts/install_uv.sh @@ -48,20 +48,11 @@ install_official_uv() { self_update_uv() { command -v uv >/dev/null 2>&1 || return 0 - local before after - before="$(uv --version 2>/dev/null | awk '{print $2}' || echo 'unknown')" - uv self update --no-progress 2>&1 | grep -v "^info:" || true - after="$(uv --version 2>/dev/null | awk '{print $2}' || echo 'unknown')" - if [ "$before" != "$after" ]; then - echo "uv upgraded: $before → $after" - else - echo "uv already at latest version: $after" - fi + uv self update --no-progress >/dev/null 2>&1 || true } upgrade_uv_tools() { command -v uv >/dev/null 2>&1 || return 0 - echo "Checking uv-managed tools..." uv tool upgrade --all || true } diff --git a/scripts/install_yarn.sh b/scripts/install_yarn.sh deleted file mode 100755 index 43bbb43..0000000 --- a/scripts/install_yarn.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. 
"$DIR/lib/common.sh" - -ACTION="${1:-update}" - -check_nvm_node() { - # Check if Node.js is nvm-managed - local node_path - node_path="$(command -v node 2>/dev/null || echo '')" - - case "$node_path" in - "$HOME/.nvm/"*) - return 0 # nvm-managed - ;; - *) - return 1 # not nvm-managed (apt, system, or missing) - ;; - esac -} - -update_yarn() { - ensure_nvm_loaded - - if ! command -v node >/dev/null 2>&1; then - echo "[yarn] Error: Node.js not found. Install Node.js first via 'make install-node' or 'scripts/install_node.sh'" >&2 - return 1 - fi - - # Check if Node.js is nvm-managed before trying to update yarn - if ! check_nvm_node; then - echo "[yarn] Error: Node.js is not nvm-managed (currently apt/system)" >&2 - echo "[yarn] Cannot update yarn for system Node.js" >&2 - echo "[yarn] Please install Node.js via nvm first:" >&2 - echo "[yarn] make install-node" >&2 - echo "[yarn] or: scripts/install_node.sh reconcile" >&2 - return 1 - fi - - local before after path - before="$(yarn --version 2>/dev/null || echo '')" - - # Try corepack first (modern approach) - if command -v corepack >/dev/null 2>&1; then - echo "[yarn] Updating yarn via corepack..." >&2 - corepack enable || true - corepack prepare yarn@stable --activate || true - else - # Fallback to npm global install - echo "[yarn] Updating yarn via npm..." >&2 - npm install -g yarn@latest || true - fi - - after="$(yarn --version 2>/dev/null || echo '')" - path="$(command -v yarn 2>/dev/null || echo '')" - - printf "[%s] before: %s\n" "yarn" "$before" - printf "[%s] after: %s\n" "yarn" "$after" - printf "[%s] path: %s\n" "yarn" "$path" -} - -install_yarn() { - echo "[yarn] yarn should be installed via Node.js corepack/npm. Installing/updating Node.js first..." - "$DIR/install_node.sh" install || true - update_yarn -} - -reconcile_yarn() { - ensure_nvm_loaded - - if ! command -v node >/dev/null 2>&1; then - echo "[yarn] Node.js not found. Installing Node.js (which includes yarn via corepack)..." - "$DIR/install_node.sh" reconcile || true - elif ! check_nvm_node; then - echo "[yarn] Node.js is not nvm-managed. Installing Node.js via nvm first..." - "$DIR/install_node.sh" reconcile || true - fi - - # Remove apt-installed cmdtest if present (Ubuntu's yarn package conflict) - if command -v dpkg >/dev/null 2>&1 && dpkg -l | grep -q "^ii.*cmdtest"; then - echo "[yarn] Removing apt package 'cmdtest' (conflicts with yarn)..." >&2 - apt_remove_if_present cmdtest yarnpkg || true - fi - - update_yarn -} - -uninstall_yarn() { - echo "[yarn] yarn is managed by Node.js/npm. To remove:" >&2 - echo "[yarn] npm uninstall -g yarn" >&2 - echo "[yarn] or: corepack disable" >&2 -} - -case "$ACTION" in - install) install_yarn ;; - update) update_yarn ;; - uninstall) uninstall_yarn ;; - reconcile) reconcile_yarn ;; - *) echo "Usage: $0 {install|update|uninstall|reconcile}" ; exit 2 ;; -esac diff --git a/scripts/installers/aws_installer.sh b/scripts/installers/aws_installer.sh deleted file mode 100755 index dfc95d2..0000000 --- a/scripts/installers/aws_installer.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash -# AWS CLI installer -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -. "$DIR/lib/install_strategy.sh" - -TOOL="${1:-aws}" -CATALOG_FILE="$DIR/../catalog/$TOOL.json" - -if [ ! 
-f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Parse catalog -INSTALLER_URL="$(jq -r '.installer_url' "$CATALOG_FILE")" -BINARY_NAME="$(jq -r '.binary_name' "$CATALOG_FILE")" - -# Get current version -before="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && "$BINARY_NAME" --version || true)" - -# Determine installation directory -BIN_DIR="$(get_install_dir "$BINARY_NAME")" -get_install_cmd "$BIN_DIR" -mkdir -p "$BIN_DIR" 2>/dev/null || true - -# Download and install -TMP="$(mktemp -d)" -cd "$TMP" -curl -fsSL "$INSTALLER_URL" -o awscliv2.zip -unzip -q awscliv2.zip - -# AWS installer supports --bin-dir and --install-dir options -./aws/install --bin-dir "$BIN_DIR" --install-dir "${BIN_DIR%/bin}/aws-cli" --update 2>/dev/null || \ - ./aws/install --bin-dir "$BIN_DIR" --install-dir "${BIN_DIR%/bin}/aws-cli" 2>/dev/null || true - -cd - >/dev/null -rm -rf "$TMP" - -# Report -after="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && "$BINARY_NAME" --version || true)" -path="$(command -v "$BINARY_NAME" 2>/dev/null || true)" -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -# Refresh snapshot after successful installation -refresh_snapshot "$TOOL" diff --git a/scripts/installers/dedicated_script.sh b/scripts/installers/dedicated_script.sh deleted file mode 100755 index b74b999..0000000 --- a/scripts/installers/dedicated_script.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash -# Delegator for tools with dedicated installation scripts -# Reads catalog to find which script to run, then delegates -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! -f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Check if jq is available -if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq is required but not found" >&2 - exit 1 -fi - -# Read script name from catalog -SCRIPT_NAME="$(jq -r '.script' "$CATALOG_FILE")" - -if [ -z "$SCRIPT_NAME" ] || [ "$SCRIPT_NAME" = "null" ]; then - echo "[$TOOL] Error: No script specified in catalog" >&2 - exit 1 -fi - -SCRIPT_PATH="$DIR/$SCRIPT_NAME" - -if [ ! -f "$SCRIPT_PATH" ]; then - echo "[$TOOL] Error: Script not found: $SCRIPT_PATH" >&2 - exit 1 -fi - -if [ ! -x "$SCRIPT_PATH" ]; then - echo "[$TOOL] Error: Script not executable: $SCRIPT_PATH" >&2 - exit 1 -fi - -# Delegate to dedicated script (skip TOOL argument, pass only ACTION) -shift # Remove TOOL from $@ -exec "$SCRIPT_PATH" "$@" diff --git a/scripts/installers/github_release_binary.sh b/scripts/installers/github_release_binary.sh deleted file mode 100755 index 89a4247..0000000 --- a/scripts/installers/github_release_binary.sh +++ /dev/null @@ -1,254 +0,0 @@ -#!/usr/bin/env bash -# Generic installer for GitHub release binaries -# Reads tool metadata from catalog and installs binary -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -. "$DIR/lib/common.sh" -. "$DIR/lib/install_strategy.sh" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! 
-f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Parse catalog -BINARY_NAME="$(jq -r '.binary_name' "$CATALOG_FILE")" -VERSION_URL="$(jq -r '.version_url // empty' "$CATALOG_FILE")" -DOWNLOAD_URL_TEMPLATE="$(jq -r '.download_url_template' "$CATALOG_FILE")" -FALLBACK_URL_TEMPLATE="$(jq -r '.fallback_url_template // empty' "$CATALOG_FILE")" -GITHUB_REPO="$(jq -r '.github_repo // empty' "$CATALOG_FILE")" -PRESERVE_DIR="$(jq -r '.preserve_directory // empty' "$CATALOG_FILE")" - -# Get current version (try multiple version command formats) -before="" -if command -v "$BINARY_NAME" >/dev/null 2>&1; then - before="$(timeout 2 "$BINARY_NAME" --version /dev/null || \ - timeout 2 "$BINARY_NAME" version --client /dev/null | head -1 || \ - timeout 2 "$BINARY_NAME" version /dev/null | head -1 || true)" -fi - -# Detect OS and architecture -OS="linux" -ARCH_RAW="$(uname -m)" -ARCH="$ARCH_RAW" - -# Apply architecture mapping if present -if jq -e ".arch_map.\"$ARCH_RAW\"" "$CATALOG_FILE" >/dev/null 2>&1; then - ARCH="$(jq -r ".arch_map.\"$ARCH_RAW\"" "$CATALOG_FILE")" -fi - -# Determine installation directory -BIN_DIR="$(get_install_dir "$BINARY_NAME")" -get_install_cmd "$BIN_DIR" -mkdir -p "$BIN_DIR" 2>/dev/null || true - -# Resolve latest version -LATEST="" -if [ -n "$VERSION_URL" ]; then - LATEST="$(curl -fsSL "$VERSION_URL" 2>/dev/null || true)" -fi - -# Try GitLab project if available -GITLAB_PROJECT="$(jq -r '.gitlab_project // empty' "$CATALOG_FILE")" -if [ -z "$LATEST" ] && [ -n "$GITLAB_PROJECT" ]; then - ENCODED_PROJECT="${GITLAB_PROJECT//\//%2F}" - LATEST="$(curl -fsSL "https://gitlab.com/api/v4/projects/${ENCODED_PROJECT}/releases?per_page=1" 2>/dev/null | \ - jq -r '.[0].tag_name // empty' 2>/dev/null || true)" -fi - -# Fallback to GitHub releases if no version URL -if [ -z "$LATEST" ] && [ -n "$GITHUB_REPO" ]; then - LATEST="$(curl -fsSIL -H "User-Agent: cli-audit" -o /dev/null -w '%{url_effective}' \ - "https://github.com/$GITHUB_REPO/releases/latest" 2>/dev/null | awk -F'/' '{print $NF}')" -fi - -if [ -z "$LATEST" ]; then - echo "[$TOOL] Error: Unable to resolve latest version" >&2 - echo "[$TOOL] before: ${before:-}" >&2 - exit 1 -fi - -# Normalize version: strip tool name prefix if present -# Some projects tag releases as "toolname-version" (e.g., jq-1.8.1) -# but their download URLs expect just the version number -# Example: jq tags as "jq-1.8.1" but URL is "...jq-{version}/..." where version=1.8.1 -if [[ "$LATEST" == "${BINARY_NAME}-"* ]]; then - LATEST="${LATEST#${BINARY_NAME}-}" -fi - -# Build download URL -# Support {version_nov} for version without 'v' prefix -# Support {arch_suffix} for tools like ninja that use empty suffix for x86_64 -LATEST_NOV="${LATEST#v}" -DOWNLOAD_URL="${DOWNLOAD_URL_TEMPLATE//\{version\}/$LATEST}" -DOWNLOAD_URL="${DOWNLOAD_URL//\{version_nov\}/$LATEST_NOV}" -DOWNLOAD_URL="${DOWNLOAD_URL//\{os\}/$OS}" -DOWNLOAD_URL="${DOWNLOAD_URL//\{arch\}/$ARCH}" -DOWNLOAD_URL="${DOWNLOAD_URL//\{arch_suffix\}/$ARCH}" - -# Download with retry and fallback -tmpfile="/tmp/$BINARY_NAME.$$" -rm -f "$tmpfile" - -if ! 
curl -fL --retry 3 --retry-delay 1 --connect-timeout 10 -o "$tmpfile" "$DOWNLOAD_URL" 2>/dev/null; then - if [ -n "$FALLBACK_URL_TEMPLATE" ]; then - FALLBACK_URL="${FALLBACK_URL_TEMPLATE//\{version\}/$LATEST}" - FALLBACK_URL="${FALLBACK_URL//\{os\}/$OS}" - FALLBACK_URL="${FALLBACK_URL//\{arch\}/$ARCH}" - curl -fL --retry 3 --retry-delay 1 --connect-timeout 10 -o "$tmpfile" "$FALLBACK_URL" - else - echo "[$TOOL] Error: Download failed" >&2 - exit 1 - fi -fi - -# Validate download -if ! [ -s "$tmpfile" ]; then - echo "[$TOOL] Error: Downloaded file is empty" >&2 - rm -f "$tmpfile" - exit 1 -fi - -# Extract if archive, otherwise use directly -BINARY_PATH="$tmpfile" -EXTRACT_DIR="" - -if [[ "$DOWNLOAD_URL" == *.tar.gz ]] || [[ "$DOWNLOAD_URL" == *.tgz ]]; then - # Extract tar.gz - EXTRACT_DIR="/tmp/${BINARY_NAME}-extract.$$" - mkdir -p "$EXTRACT_DIR" - - if ! tar -xzf "$tmpfile" -C "$EXTRACT_DIR" 2>/dev/null; then - echo "[$TOOL] Error: Failed to extract tar.gz archive" >&2 - rm -rf "$tmpfile" "$EXTRACT_DIR" - exit 1 - fi - - # Find the binary in extracted files - BINARY_PATH="$(find "$EXTRACT_DIR" -type f -name "$BINARY_NAME" -executable 2>/dev/null | head -1)" - - if [ -z "$BINARY_PATH" ] || [ ! -f "$BINARY_PATH" ]; then - # Try without executable check (some archives don't preserve execute bit) - BINARY_PATH="$(find "$EXTRACT_DIR" -type f -name "$BINARY_NAME" 2>/dev/null | head -1)" - fi - - if [ -z "$BINARY_PATH" ] || [ ! -f "$BINARY_PATH" ]; then - echo "[$TOOL] Error: Binary '$BINARY_NAME' not found in archive" >&2 - echo "[$TOOL] Archive contents:" >&2 - find "$EXTRACT_DIR" -type f 2>/dev/null | head -10 >&2 - rm -rf "$tmpfile" "$EXTRACT_DIR" - exit 1 - fi - - rm -f "$tmpfile" -elif [[ "$DOWNLOAD_URL" == *.tar.xz ]]; then - # Extract tar.xz - EXTRACT_DIR="/tmp/${BINARY_NAME}-extract.$$" - mkdir -p "$EXTRACT_DIR" - - if ! tar -xJf "$tmpfile" -C "$EXTRACT_DIR" 2>/dev/null; then - echo "[$TOOL] Error: Failed to extract tar.xz archive" >&2 - rm -rf "$tmpfile" "$EXTRACT_DIR" - exit 1 - fi - - # Find the binary in extracted files - BINARY_PATH="$(find "$EXTRACT_DIR" -type f -name "$BINARY_NAME" -executable 2>/dev/null | head -1)" - - if [ -z "$BINARY_PATH" ] || [ ! -f "$BINARY_PATH" ]; then - # Try without executable check (some archives don't preserve execute bit) - BINARY_PATH="$(find "$EXTRACT_DIR" -type f -name "$BINARY_NAME" 2>/dev/null | head -1)" - fi - - if [ -z "$BINARY_PATH" ] || [ ! -f "$BINARY_PATH" ]; then - echo "[$TOOL] Error: Binary '$BINARY_NAME' not found in archive" >&2 - echo "[$TOOL] Archive contents:" >&2 - find "$EXTRACT_DIR" -type f 2>/dev/null | head -10 >&2 - rm -rf "$tmpfile" "$EXTRACT_DIR" - exit 1 - fi - - rm -f "$tmpfile" -elif [[ "$DOWNLOAD_URL" == *.zip ]]; then - # Extract zip - EXTRACT_DIR="/tmp/${BINARY_NAME}-extract.$$" - mkdir -p "$EXTRACT_DIR" - - if ! unzip -q "$tmpfile" -d "$EXTRACT_DIR" 2>/dev/null; then - echo "[$TOOL] Error: Failed to extract zip archive" >&2 - rm -rf "$tmpfile" "$EXTRACT_DIR" - exit 1 - fi - - # Find the binary in extracted files - BINARY_PATH="$(find "$EXTRACT_DIR" -type f -name "$BINARY_NAME" -executable 2>/dev/null | head -1)" - - if [ -z "$BINARY_PATH" ] || [ ! -f "$BINARY_PATH" ]; then - BINARY_PATH="$(find "$EXTRACT_DIR" -type f -name "$BINARY_NAME" 2>/dev/null | head -1)" - fi - - if [ -z "$BINARY_PATH" ] || [ ! 
-f "$BINARY_PATH" ]; then - echo "[$TOOL] Error: Binary '$BINARY_NAME' not found in archive" >&2 - echo "[$TOOL] Archive contents:" >&2 - find "$EXTRACT_DIR" -type f 2>/dev/null | head -10 >&2 - rm -rf "$tmpfile" "$EXTRACT_DIR" - exit 1 - fi - - rm -f "$tmpfile" -fi - -# Note: We intentionally do NOT remove existing installations from apt/brew/cargo -# The new version in ~/.local/bin will take precedence via PATH ordering -# This allows: -# - No sudo password prompts -# - No disruption to package manager state -# - Clean fallback if ~/.local/bin version is removed -# - System packages can still satisfy dependencies for other tools - -# Install -if [ -n "$PRESERVE_DIR" ] && [ -n "$EXTRACT_DIR" ]; then - # Tool requires full directory structure (e.g., GAM with bundled Python) - LIB_DIR="$(dirname "$BIN_DIR")/lib" - mkdir -p "$LIB_DIR" - - # Remove old installation - rm -rf "$LIB_DIR/$PRESERVE_DIR" - - # Move entire directory to ~/.local/lib - mv "$EXTRACT_DIR/$PRESERVE_DIR" "$LIB_DIR/" - - # Create symlink in bin directory - ln -sf "$LIB_DIR/$PRESERVE_DIR/$BINARY_NAME" "$BIN_DIR/$BINARY_NAME" -else - # Standard binary installation - chmod +x "$BINARY_PATH" - $INSTALL -T "$BINARY_PATH" "$BIN_DIR/$BINARY_NAME" -fi - -# Cleanup -rm -f "$tmpfile" -if [ -n "$EXTRACT_DIR" ] && [ -d "$EXTRACT_DIR" ]; then - rm -rf "$EXTRACT_DIR" -fi - -# Report -after="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && \ - (timeout 2 "$BINARY_NAME" --version /dev/null || \ - timeout 2 "$BINARY_NAME" version --client /dev/null | head -1 || \ - timeout 2 "$BINARY_NAME" version /dev/null | head -1 || true))" -path="$(command -v "$BINARY_NAME" 2>/dev/null || true)" -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -# Refresh snapshot after successful installation -refresh_snapshot "$TOOL" diff --git a/scripts/installers/hashicorp_zip.sh b/scripts/installers/hashicorp_zip.sh deleted file mode 100755 index 6f218b4..0000000 --- a/scripts/installers/hashicorp_zip.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env bash -# Generic installer for HashiCorp zip releases -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -. "$DIR/lib/common.sh" -. "$DIR/lib/install_strategy.sh" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! 
-f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Parse catalog -PRODUCT_NAME="$(jq -r '.product_name' "$CATALOG_FILE")" -BINARY_NAME="$(jq -r '.binary_name' "$CATALOG_FILE")" -GITHUB_REPO="$(jq -r '.github_repo // empty' "$CATALOG_FILE")" - -# Get current version -before="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && "$BINARY_NAME" version 2>/dev/null | head -n1 || true)" - -# Detect OS and architecture -OS="linux" -ARCH_RAW="$(uname -m)" -ARCH="$ARCH_RAW" - -# Apply architecture mapping if present -if jq -e ".arch_map.\"$ARCH_RAW\"" "$CATALOG_FILE" >/dev/null 2>&1; then - ARCH="$(jq -r ".arch_map.\"$ARCH_RAW\"" "$CATALOG_FILE")" -fi - -# Determine installation directory -BIN_DIR="$(get_install_dir "$BINARY_NAME")" -get_install_cmd "$BIN_DIR" -mkdir -p "$BIN_DIR" 2>/dev/null || true - -# Remove distro package first if it exists -apt_remove_if_present "$BINARY_NAME" || true - -# Get latest version from GitHub releases -LATEST_TAG="" -if [ -n "$GITHUB_REPO" ]; then - LATEST_TAG="$(curl -fsSIL -H "User-Agent: cli-audit" -o /dev/null -w '%{url_effective}' \ - "https://github.com/$GITHUB_REPO/releases/latest" 2>/dev/null | awk -F'/' '{print $NF}')" -fi - -VER="${LATEST_TAG#v}" -if [ -z "$VER" ]; then - echo "[$TOOL] Error: Could not resolve latest version" >&2 - echo "[$TOOL] before: ${before:-}" >&2 - exit 1 -fi - -# Download and install -TMP="$(mktemp -d)" -URL="https://releases.hashicorp.com/${PRODUCT_NAME}/${VER}/${PRODUCT_NAME}_${VER}_${OS}_${ARCH}.zip" - -if curl -fsSL "$URL" -o "$TMP/${PRODUCT_NAME}.zip"; then - unzip -q "$TMP/${PRODUCT_NAME}.zip" -d "$TMP" || true - if [ -f "$TMP/$BINARY_NAME" ]; then - $INSTALL "$TMP/$BINARY_NAME" "$BIN_DIR/$BINARY_NAME" - else - echo "[$TOOL] Error: Binary not found in zip" >&2 - rm -rf "$TMP" - exit 1 - fi -else - echo "[$TOOL] Error: Download failed from $URL" >&2 - rm -rf "$TMP" - exit 1 -fi - -rm -rf "$TMP" - -# Report -after="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && "$BINARY_NAME" version 2>/dev/null | head -n1 || true)" -path="$(command -v "$BINARY_NAME" 2>/dev/null || true)" -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -# Refresh snapshot after successful installation -refresh_snapshot "$TOOL" diff --git a/scripts/installers/npm_global.sh b/scripts/installers/npm_global.sh deleted file mode 100755 index 3fa9f45..0000000 --- a/scripts/installers/npm_global.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash -# Generic installer for npm global packages -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -. "$DIR/lib/install_strategy.sh" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! -f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Parse catalog -PACKAGE_NAME="$(jq -r '.package_name // .name' "$CATALOG_FILE")" - -# Ensure npm is available -if ! command -v npm >/dev/null 2>&1; then - echo "[$TOOL] Error: npm not found. Please install Node.js and npm first." 
>&2 - exit 1 -fi - -# Get current version -before="" -if command -v "$TOOL" >/dev/null 2>&1; then - before="$("$TOOL" --version 2>/dev/null || true)" -fi - -# Install or upgrade globally -echo "[$TOOL] Installing npm package globally: $PACKAGE_NAME" >&2 -npm install -g "$PACKAGE_NAME" || { - echo "[$TOOL] Error: npm install failed" >&2 - exit 1 -} - -# Report -after="" -if command -v "$TOOL" >/dev/null 2>&1; then - after="$("$TOOL" --version 2>/dev/null || true)" -fi - -path="$(command -v "$TOOL" 2>/dev/null || true)" -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -# Refresh snapshot after successful installation -refresh_snapshot "$TOOL" diff --git a/scripts/installers/npm_self_update.sh b/scripts/installers/npm_self_update.sh deleted file mode 100755 index c7138ea..0000000 --- a/scripts/installers/npm_self_update.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env bash -# npm installer - upgrades npm independently from Node.js -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -. "$DIR/lib/common.sh" -. "$DIR/lib/install_strategy.sh" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! -f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -BINARY_NAME="npm" - -# Get current version -before="$(timeout 2 npm --version /dev/null || echo '')" - -# Check if npm is available -if ! command -v npm >/dev/null 2>&1; then - echo "[$TOOL] Error: npm not found. Install Node.js first." >&2 - exit 1 -fi - -# Upgrade npm to latest version -# npm can be upgraded independently from Node.js -echo "[$TOOL] Upgrading npm to latest version..." -npm install -g npm@latest 2>&1 | grep -v "^npm " || true - -# Get new version -after="$(timeout 2 npm --version /dev/null || echo '')" -path="$(command -v npm 2>/dev/null || true)" - -# Report -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -if [ "$before" != "$after" ]; then - echo "[$TOOL] Successfully upgraded: $before → $after" -else - echo "[$TOOL] Already at latest version: $after" -fi - -# Refresh snapshot after successful installation -refresh_snapshot "$TOOL" diff --git a/scripts/installers/package_manager.sh b/scripts/installers/package_manager.sh deleted file mode 100755 index 283e996..0000000 --- a/scripts/installers/package_manager.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env bash -# Generic installer for package manager tools -# Installs tools via system package managers (apt, brew, etc.) -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -. "$DIR/lib/common.sh" -. "$DIR/lib/install_strategy.sh" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! 
-f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Parse catalog -BINARY_NAME="$(jq -r '.binary_name // .name' "$CATALOG_FILE")" -PACKAGES="$(jq -r '.packages // {}' "$CATALOG_FILE")" -NOTES="$(jq -r '.notes // empty' "$CATALOG_FILE")" -VERSION_CMD="$(jq -r '.version_command // empty' "$CATALOG_FILE")" - -# Get current version -if [ -n "$VERSION_CMD" ]; then - # Use custom version command if specified - before="$(eval "$VERSION_CMD" 2>/dev/null || true)" -else - # Default version detection - before="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && timeout 2 "$BINARY_NAME" --version /dev/null | head -1 || true)" -fi - -# Check if tool is already available (e.g., comes with runtime) -if command -v "$BINARY_NAME" >/dev/null 2>&1; then - if [ -n "$NOTES" ] && echo "$NOTES" | grep -q "comes with\|bundled with"; then - # Tool is already available and comes bundled - after="$before" - path="$(command -v "$BINARY_NAME" 2>/dev/null || true)" - printf "[%s] before: %s\n" "$TOOL" "${before:-}" - printf "[%s] after: %s\n" "$TOOL" "${after:-}" - if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - printf "[%s] note: %s\n" "$TOOL" "Already available (bundled with runtime)" - - # Refresh snapshot to record current version - refresh_snapshot "$TOOL" - exit 0 - fi -fi - -# Install via appropriate package manager -installed=false - -if have brew; then - pkg="$(echo "$PACKAGES" | jq -r '.brew // empty')" - if [ "$pkg" != "null" ] && [ -n "$pkg" ]; then - brew install "$pkg" || brew upgrade "$pkg" || true - installed=true - fi -fi - -if ! $installed && have apt-get; then - pkg="$(echo "$PACKAGES" | jq -r '.apt // empty')" - if [ "$pkg" != "null" ] && [ -n "$pkg" ]; then - sudo apt-get update && sudo apt-get install -y "$pkg" || true - installed=true - fi -fi - -if ! $installed && have dnf; then - pkg="$(echo "$PACKAGES" | jq -r '.dnf // .rpm // empty')" - if [ "$pkg" != "null" ] && [ -n "$pkg" ]; then - sudo dnf install -y "$pkg" || true - installed=true - fi -fi - -if ! $installed && have pacman; then - pkg="$(echo "$PACKAGES" | jq -r '.pacman // .arch // empty')" - if [ "$pkg" != "null" ] && [ -n "$pkg" ]; then - sudo pacman -S --noconfirm "$pkg" || true - installed=true - fi -fi - -if ! $installed; then - echo "[$TOOL] No supported package manager found (tried: brew, apt, dnf, pacman)" >&2 - exit 1 -fi - -# Report -if [ -n "$VERSION_CMD" ]; then - # Use custom version command if specified - after="$(eval "$VERSION_CMD" 2>/dev/null || true)" -else - # Default version detection - after="$(command -v "$BINARY_NAME" >/dev/null 2>&1 && timeout 2 "$BINARY_NAME" --version /dev/null | head -1 || true)" -fi -path="$(command -v "$BINARY_NAME" 2>/dev/null || true)" -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -# Refresh snapshot after successful installation -# Need to source install_strategy.sh for refresh_snapshot function -. "$(dirname "${BASH_SOURCE[0]}")/../lib/install_strategy.sh" -refresh_snapshot "$TOOL" diff --git a/scripts/installers/uv_tool.sh b/scripts/installers/uv_tool.sh deleted file mode 100755 index f9ebd60..0000000 --- a/scripts/installers/uv_tool.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash -# Generic installer for uv tools -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" - -TOOL="${1:-}" -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - exit 1 -fi - -CATALOG_FILE="$DIR/../catalog/$TOOL.json" -if [ ! -f "$CATALOG_FILE" ]; then - echo "Error: Catalog file not found: $CATALOG_FILE" >&2 - exit 1 -fi - -# Parse catalog -PACKAGE_NAME="$(jq -r '.package_name' "$CATALOG_FILE")" -PYTHON_VERSION="$(jq -r '.python_version // empty' "$CATALOG_FILE")" - -# Ensure uv is available -if ! command -v uv >/dev/null 2>&1; then - echo "[$TOOL] Error: uv not found. Please install uv first." >&2 - exit 1 -fi - -# Get current version -before="$(command -v "$TOOL" >/dev/null 2>&1 && timeout 2 "$TOOL" --version /dev/null || true)" - -# Install or upgrade with optional Python version pinning -if [ -n "$PYTHON_VERSION" ]; then - echo "[$TOOL] Installing with Python $PYTHON_VERSION..." - uv tool install --force --upgrade --python "$PYTHON_VERSION" "$PACKAGE_NAME" || true -else - uv tool install --force --upgrade "$PACKAGE_NAME" || true -fi - -# Report -after="$(command -v "$TOOL" >/dev/null 2>&1 && "$TOOL" --version 2>/dev/null || true)" -path="$(command -v "$TOOL" 2>/dev/null || true)" -printf "[%s] before: %s\n" "$TOOL" "${before:-}" -printf "[%s] after: %s\n" "$TOOL" "${after:-}" -if [ -n "$path" ]; then printf "[%s] path: %s\n" "$TOOL" "$path"; fi - -# Refresh snapshot after successful installation -# Source install_strategy.sh for refresh_snapshot function -. "$(dirname "${BASH_SOURCE[0]}")/../lib/install_strategy.sh" -refresh_snapshot "$TOOL" diff --git a/scripts/lib/capability.sh b/scripts/lib/capability.sh deleted file mode 100755 index 05c7e6d..0000000 --- a/scripts/lib/capability.sh +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env bash -# capability.sh - Installation method detection and availability checking -# -# This library provides capability detection for the reconciliation system: -# 1. Detect current installation method for a tool -# 2. Check if installation methods are available on the system -# 3. Get detailed information about current installations - -set -euo pipefail - -# Detect which installation method was used for a tool -# Args: tool_name, binary_name -# Returns: apt|cargo|npm|gem|pip|pipx|brew|github_release_binary|unknown|none -detect_install_method() { - local tool="$1" - local binary="${2:-$tool}" - - # Check if binary exists - if ! command -v "$binary" >/dev/null 2>&1; then - echo "none" - return 0 - fi - - local binary_path - binary_path="$(command -v "$binary")" - - # Detect by path patterns - case "$binary_path" in - "$HOME/.cargo/bin/"*) - echo "cargo" - return 0 - ;; - "$HOME/.local/bin/"*) - # Could be github_release_binary or pipx - # Check if pipx knows about it - if command -v pipx >/dev/null 2>&1 && pipx list 2>/dev/null | grep -q "package $tool"; then - echo "pipx" - else - echo "github_release_binary" - fi - return 0 - ;; - "$HOME/.rbenv/"*) - echo "gem" - return 0 - ;; - "$HOME/.nvm/"*) - echo "npm" - return 0 - ;; - "/usr/local/bin/"*) - # Could be brew or manual install - if command -v brew >/dev/null 2>&1 && brew list --formula 2>/dev/null | grep -q "^${tool}\$"; then - echo "brew" - else - echo "unknown" - fi - return 0 - ;; - "/usr/bin/"*|"/bin/"*) - # Check if it's an apt package - if command -v dpkg >/dev/null 2>&1; then - if dpkg -S "$binary_path" >/dev/null 2>&1; then - echo "apt" - return 0 - fi - fi - # Check if it's pip-installed - if command -v pip >/dev/null 2>&1 || command -v pip3 >/dev/null 2>&1; then - local pip_cmd="${PIP:-pip3}" - if ! 
command -v "$pip_cmd" >/dev/null 2>&1; then - pip_cmd="pip" - fi - if "$pip_cmd" show "$tool" >/dev/null 2>&1; then - echo "pip" - return 0 - fi - fi - echo "unknown" - return 0 - ;; - *) - echo "unknown" - return 0 - ;; - esac -} - -# Check if an installation method is available on this system -# Args: method_name -# Returns: 0 if available, 1 if not -is_method_available() { - local method="$1" - - case "$method" in - apt) - # Check if dpkg exists and user has sudo access (or is root) - if ! command -v dpkg >/dev/null 2>&1; then - return 1 - fi - # Check sudo access (try non-interactively) - if [ "$(id -u)" -eq 0 ]; then - return 0 # root user - fi - if sudo -n true 2>/dev/null; then - return 0 # Has cached sudo credentials - fi - # Can't determine sudo access without prompting, assume available - # The actual operation will fail if sudo isn't available - return 0 - ;; - cargo) - command -v cargo >/dev/null 2>&1 - return $? - ;; - npm) - command -v npm >/dev/null 2>&1 - return $? - ;; - gem) - command -v gem >/dev/null 2>&1 - return $? - ;; - pip) - command -v pip >/dev/null 2>&1 || command -v pip3 >/dev/null 2>&1 - return $? - ;; - pipx) - command -v pipx >/dev/null 2>&1 - return $? - ;; - brew) - command -v brew >/dev/null 2>&1 - return $? - ;; - github_release_binary) - # Check if we have curl or wget, and can write to ~/.local/bin - if command -v curl >/dev/null 2>&1 || command -v wget >/dev/null 2>&1; then - if [ -d "$HOME/.local/bin" ] || mkdir -p "$HOME/.local/bin" 2>/dev/null; then - return 0 - fi - fi - return 1 - ;; - dedicated_script) - # Dedicated scripts are always "available" as they handle their own logic - return 0 - ;; - *) - echo "Unknown method: $method" >&2 - return 1 - ;; - esac -} - -# Get detailed information about current installation -# Args: tool_name, binary_name -# Returns: JSON-like output with path, method, package info -get_current_method_details() { - local tool="$1" - local binary="${2:-$tool}" - - if ! command -v "$binary" >/dev/null 2>&1; then - echo "method=none" - return 0 - fi - - local binary_path - binary_path="$(command -v "$binary")" - local method - method="$(detect_install_method "$tool" "$binary")" - - echo "path=$binary_path" - echo "method=$method" - - # Get additional details based on method - case "$method" in - apt) - if command -v dpkg >/dev/null 2>&1; then - local pkg - pkg="$(dpkg -S "$binary_path" 2>/dev/null | cut -d: -f1 || echo "unknown")" - echo "package=$pkg" - local version - version="$(dpkg-query -W -f='${Version}' "$pkg" 2>/dev/null || echo "unknown")" - echo "version=$version" - fi - ;; - cargo) - if command -v cargo >/dev/null 2>&1; then - # Try to get version from cargo - local version - version="$("$binary" --version 2>/dev/null | head -1 || echo "unknown")" - echo "version=$version" - fi - ;; - npm) - if command -v npm >/dev/null 2>&1; then - local version - version="$(npm list -g --depth=0 2>/dev/null | grep "$tool@" | sed 's/.*@//' || echo "unknown")" - echo "version=$version" - fi - ;; - pip|pipx) - local pip_cmd="${PIP:-pip3}" - if ! 
command -v "$pip_cmd" >/dev/null 2>&1; then - pip_cmd="pip" - fi - if command -v "$pip_cmd" >/dev/null 2>&1; then - local version - version="$("$pip_cmd" show "$tool" 2>/dev/null | grep "^Version:" | awk '{print $2}' || echo "unknown")" - echo "version=$version" - fi - ;; - brew) - if command -v brew >/dev/null 2>&1; then - local version - version="$(brew info "$tool" 2>/dev/null | head -1 | awk '{print $3}' || echo "unknown")" - echo "version=$version" - fi - ;; - github_release_binary) - local version - version="$("$binary" --version 2>/dev/null | head -1 || echo "unknown")" - echo "version=$version" - ;; - esac -} - -# List all available installation methods on this system -list_available_methods() { - local methods=("apt" "cargo" "npm" "gem" "pip" "pipx" "brew" "github_release_binary") - local available=() - - for method in "${methods[@]}"; do - if is_method_available "$method"; then - available+=("$method") - fi - done - - printf '%s\n' "${available[@]}" -} - -# Check if a specific tool can be installed via a method -# This checks both method availability AND tool-specific requirements -# Args: tool_name, method, catalog_config (JSON string) -can_install_via_method() { - local tool="$1" - local method="$2" - local config="${3:-{}}" - - # First check if method is available - if ! is_method_available "$method"; then - return 1 - fi - - # Method-specific checks could go here - # For now, if method is available, assume tool can be installed - return 0 -} diff --git a/scripts/lib/catalog.sh b/scripts/lib/catalog.sh deleted file mode 100755 index 75f1cbe..0000000 --- a/scripts/lib/catalog.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash -# Catalog query functions for reading tool metadata -# Assumes: Scripts are run from app root, catalog is at $ROOT/catalog - -# Get all tools with a specific tag -catalog_get_tools_by_tag() { - local tag="$1" - local catalog_dir="$ROOT/catalog" - - if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq required for catalog operations" >&2 - return 1 - fi - - for json in "$catalog_dir"/*.json; do - [ -f "$json" ] || continue - if jq -e --arg tag "$tag" '.tags[]? | select(. == $tag)' "$json" >/dev/null 2>&1; then - jq -r '.name' "$json" - fi - done -} - -# Get all available tags -catalog_get_all_tags() { - local catalog_dir="$ROOT/catalog" - - if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq required for catalog operations" >&2 - return 1 - fi - - find "$catalog_dir" -name "*.json" -exec jq -r '.tags[]? // empty' {} \; 2>/dev/null | sort -u -} - -# Check if tool has catalog entry -catalog_has_tool() { - local tool="$1" - local catalog_dir="$ROOT/catalog" - [ -f "$catalog_dir/$tool.json" ] -} - -# Get tool property from catalog -catalog_get_property() { - local tool="$1" - local property="$2" - local catalog_dir="$ROOT/catalog" - - if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq required for catalog operations" >&2 - return 1 - fi - - local json="$catalog_dir/$tool.json" - if [ -f "$json" ]; then - jq -r ".$property // empty" "$json" - fi -} - -# Get guide-specific metadata from catalog -catalog_get_guide_property() { - local tool="$1" - local property="$2" - local default="${3:-}" - local catalog_dir="$ROOT/catalog" - - if ! 
command -v jq >/dev/null 2>&1; then - echo "$default" - return - fi - - local json="$catalog_dir/$tool.json" - if [ -f "$json" ]; then - local value="$(jq -r ".guide.$property // empty" "$json")" - if [ -n "$value" ] && [ "$value" != "null" ]; then - echo "$value" - else - echo "$default" - fi - else - echo "$default" - fi -} diff --git a/scripts/lib/common.sh b/scripts/lib/common.sh index c19e7f9..4a1e319 100755 --- a/scripts/lib/common.sh +++ b/scripts/lib/common.sh @@ -65,21 +65,4 @@ prefers_rustup() { is_path_under "$p" "$HOME/.cargo" || return 1 } -# rbenv helpers -ensure_rbenv_loaded() { - # Add rbenv to PATH and initialize if available - if [ -d "$HOME/.rbenv" ]; then - export PATH="$HOME/.rbenv/bin:$PATH" - if command -v rbenv >/dev/null 2>&1; then - eval "$(rbenv init - bash)" || true - fi - fi -} - -prefers_rbenv_ruby() { - local p - p="$(command -v ruby || true)" - is_path_under "$p" "$HOME/.rbenv" || return 1 -} - diff --git a/scripts/lib/dependency.sh b/scripts/lib/dependency.sh deleted file mode 100755 index cef40aa..0000000 --- a/scripts/lib/dependency.sh +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/env bash -# dependency.sh - Dependency resolution and ordering -# -# This library handles tool dependencies: -# - Check if dependencies are satisfied -# - Resolve installation order via topological sort -# - Detect circular dependencies -# - Validate catalog order field consistency - -set -euo pipefail - -# Check if dependencies for a tool are satisfied -# Args: catalog_file -# Returns: 0 if satisfied, 1 if not -check_dependencies() { - local catalog_file="$1" - local tool - tool="$(basename "$catalog_file" .json)" - - if ! command -v jq >/dev/null 2>&1; then - echo "[$tool] Warning: jq not available, cannot check dependencies" >&2 - return 0 # Assume satisfied - fi - - # Get requires array - local requires_count - requires_count="$(jq '.requires // [] | length' "$catalog_file" 2>/dev/null || echo "0")" - - if [ "$requires_count" -eq 0 ]; then - return 0 # No dependencies - fi - - local missing=() - for ((i=0; i/dev/null || echo "")" - [ -z "$dep" ] && continue - - # Check if dependency is installed - if ! command -v "$dep" >/dev/null 2>&1; then - missing+=("$dep") - fi - done - - if [ ${#missing[@]} -gt 0 ]; then - echo "[$tool] Missing dependencies: ${missing[*]}" >&2 - return 1 - fi - - return 0 -} - -# Get list of dependencies for a tool -# Args: catalog_file -# Returns: space-separated list of dependencies -get_dependencies() { - local catalog_file="$1" - - if ! command -v jq >/dev/null 2>&1; then - echo "" - return 0 - fi - - local deps - deps="$(jq -r '.requires // [] | join(" ")' "$catalog_file" 2>/dev/null || echo "")" - echo "$deps" -} - -# Topological sort for installation order -# Args: catalog_dir -# Returns: ordered list of tools (one per line) -topological_sort() { - local catalog_dir="$1" - - if [ ! -d "$catalog_dir" ]; then - echo "Error: Catalog directory not found: $catalog_dir" >&2 - return 1 - fi - - if ! 
-# Topological sort for installation order -# Args: catalog_dir -# Returns: ordered list of tools (one per line) -topological_sort() { - local catalog_dir="$1" - - if [ ! -d "$catalog_dir" ]; then - echo "Error: Catalog directory not found: $catalog_dir" >&2 - return 1 - fi - - if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq not available for topological sort" >&2 - return 1 - fi - - # Build dependency graph - declare -A deps # tool -> space-separated dependencies - declare -A in_degree # tool -> number of unmet in-catalog dependencies - declare -a all_tools - - for catalog_file in "$catalog_dir"/*.json; do - [ -f "$catalog_file" ] || continue - - local tool - tool="$(basename "$catalog_file" .json)" - all_tools+=("$tool") - - # Get dependencies - local tool_deps - tool_deps="$(get_dependencies "$catalog_file")" - deps[$tool]="$tool_deps" - - # Initialize in_degree - in_degree[$tool]=0 - done - - # Calculate in_degree (count only dependencies that exist in the catalog) - for tool in "${all_tools[@]}"; do - for dep in ${deps[$tool]}; do - if [ -n "${in_degree[$dep]+x}" ]; then - in_degree[$tool]=$((in_degree[$tool] + 1)) - fi - done - done - - # Find tools with no dependencies (in_degree == 0) - local queue=() - for tool in "${all_tools[@]}"; do - if [ "${in_degree[$tool]}" -eq 0 ]; then - queue+=("$tool") - fi - done - - # Process queue - local sorted=() - while [ ${#queue[@]} -gt 0 ]; do - # Pop from queue - local current="${queue[0]}" - queue=("${queue[@]:1}") - sorted+=("$current") - - # Reduce in_degree for dependents - for tool in "${all_tools[@]}"; do - if [[ " ${deps[$tool]} " == *" $current "* ]]; then - in_degree[$tool]=$((in_degree[$tool] - 1)) - if [ "${in_degree[$tool]}" -eq 0 ]; then - queue+=("$tool") - fi - fi - done - done - - # Check for cycles - if [ ${#sorted[@]} -ne ${#all_tools[@]} ]; then - echo "Error: Circular dependency detected" >&2 - # Find tools not in sorted (they're part of cycle) - for tool in "${all_tools[@]}"; do - if [[ ! " ${sorted[*]} " =~ " ${tool} " ]]; then - echo " Tool in cycle: $tool (depends on: ${deps[$tool]})" >&2 - fi - done - return 1 - fi - - # Output sorted list - printf '%s\n' "${sorted[@]}" -} -
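A worked ordering example using the same hypothetical catalog, extended with a second dependent (all names illustrative): git has no requires, so it starts at in_degree 0 and is emitted first; delta and lazygit each drop to 0 once git is popped from the queue:

    # catalog/git.json     { "name": "git" }
    # catalog/delta.json   { "name": "delta",   "requires": ["git"] }
    # catalog/lazygit.json { "name": "lazygit", "requires": ["git"] }
    topological_sort catalog/
    # Output:
    #   git
    #   delta
    #   lazygit
    # A two-tool cycle (a requires b, b requires a) leaves both with nonzero
    # in_degree, so the sorted list comes up short and the cycle error fires.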
-f "$dep_catalog" ]; then - echo "Warning: Dependency $dep for $tool not found in catalog" >&2 - continue - fi - - # Get dependency's order - local dep_order - dep_order="$(jq -r '.guide.order // 999' "$dep_catalog" 2>/dev/null || echo "999")" - - # Check if dependency has lower order (installed first) - if [ "$dep_order" -ge "$tool_order" ]; then - echo "Error: Order inconsistency: $tool (order=$tool_order) depends on $dep (order=$dep_order)" >&2 - echo " Dependency $dep should have lower order number (installed before $tool)" >&2 - errors=$((errors + 1)) - fi - done - done - - if [ $errors -gt 0 ]; then - echo "" - echo "Found $errors order consistency errors" >&2 - return 1 - fi - - echo "✓ All order fields are consistent with dependencies" - return 0 -} - -# Get installation order respecting dependencies -# Args: catalog_dir, tool_list (optional, space-separated) -# Returns: ordered list of tools -get_install_order() { - local catalog_dir="$1" - local tool_list="${2:-}" - - if [ -z "$tool_list" ]; then - # No specific tools requested, sort all tools - topological_sort "$catalog_dir" - else - # Build subgraph for requested tools + their dependencies - declare -A needed - declare -a queue - - # Add requested tools to queue - for tool in $tool_list; do - queue+=("$tool") - done - - # BFS to collect all dependencies - while [ ${#queue[@]} -gt 0 ]; do - local current="${queue[0]}" - queue=("${queue[@]:1}") - - # Skip if already processed - [ -n "${needed[$current]+x}" ] && continue - needed[$current]=1 - - # Get dependencies - local catalog_file="$catalog_dir/$current.json" - if [ -f "$catalog_file" ]; then - local deps - deps="$(get_dependencies "$catalog_file")" - for dep in $deps; do - queue+=("$dep") - done - fi - done - - # Now run topological sort on full catalog, filter to needed tools - local sorted - sorted="$(topological_sort "$catalog_dir")" - - # Filter to only needed tools - while IFS= read -r tool; do - if [ -n "${needed[$tool]+x}" ]; then - echo "$tool" - fi - done <<< "$sorted" - fi -} diff --git a/scripts/lib/install_strategy.sh b/scripts/lib/install_strategy.sh deleted file mode 100755 index 88b3e7a..0000000 --- a/scripts/lib/install_strategy.sh +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env bash -# Shared installation strategy logic for all install scripts - -# Determine installation directory based on INSTALL_STRATEGY -# Usage: get_install_dir TOOL_NAME -# Returns: Directory path where tool should be installed -get_install_dir() { - local tool_name="${1:-}" - local strategy="${INSTALL_STRATEGY:-USER}" - local prefix="${PREFIX:-$HOME/.local}" - local bin_dir="" - - case "$strategy" in - CURRENT) - # Keep tool where it is currently installed - if [ -n "$tool_name" ]; then - local current_path="$(command -v "$tool_name" 2>/dev/null || true)" - if [ -n "$current_path" ]; then - bin_dir="$(dirname "$current_path")" - else - # Not installed, fall back to USER - bin_dir="$prefix/bin" - fi - else - # No specific tool, fall back to USER - bin_dir="$prefix/bin" - fi - ;; - GLOBAL) - bin_dir="/usr/local/bin" - ;; - PROJECT) - bin_dir="./.local/bin" - ;; - USER|*) - bin_dir="$prefix/bin" - ;; - esac - - echo "$bin_dir" -} - -# Get install command based on target directory -# Usage: get_install_cmd BIN_DIR -# Sets: INSTALL and RM variables -get_install_cmd() { - local bin_dir="$1" - - if [ "$bin_dir" = "/usr/local/bin" ]; then - if [ -w "$bin_dir" ]; then - INSTALL="install -m 0755" - RM="rm -f" - else - INSTALL="sudo install -m 0755" - RM="sudo rm -f" - fi - else - INSTALL="install 
-m 0755" - RM="rm -f" - fi -} - -# Refresh snapshot for a specific tool after installation -# Usage: refresh_snapshot TOOL_NAME -# Updates tools_snapshot.json with latest version of installed tool -refresh_snapshot() { - local tool_name="${1:-}" - - if [ -z "$tool_name" ]; then - echo "# Warning: No tool name provided to refresh_snapshot" >&2 - return 1 - fi - - # Path to project root (scripts/lib -> scripts -> root) - local project_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" - local audit_script="$project_root/cli_audit.py" - - if [ ! -f "$audit_script" ]; then - echo "# Warning: cli_audit.py not found at $audit_script" >&2 - return 1 - fi - - echo "# Refreshing snapshot for $tool_name..." >&2 - - # Brief delay to ensure binary is fully updated and PATH is refreshed - sleep 0.5 - - # Run audit in merge mode for this specific tool - CLI_AUDIT_COLLECT=1 CLI_AUDIT_MERGE=1 python3 "$audit_script" --only "$tool_name" >/dev/null 2>&1 || { - echo "# Warning: Failed to refresh snapshot for $tool_name" >&2 - return 1 - } - - echo "# ✓ Snapshot updated for $tool_name" >&2 - return 0 -} diff --git a/scripts/lib/path_check.sh b/scripts/lib/path_check.sh deleted file mode 100755 index 9565711..0000000 --- a/scripts/lib/path_check.sh +++ /dev/null @@ -1,506 +0,0 @@ -#!/usr/bin/env bash -# path_check.sh - PATH validation and auto-fix for package managers and language environments - -set -euo pipefail - -PATH_CHECK_LIB_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - -# Detect user's shell RC file -detect_shell_rc() { - local shell_name="${SHELL##*/}" - - case "$shell_name" in - bash) - # Prefer .bashrc for interactive shells - if [ -f "$HOME/.bashrc" ]; then - echo "$HOME/.bashrc" - elif [ -f "$HOME/.bash_profile" ]; then - echo "$HOME/.bash_profile" - else - echo "$HOME/.bashrc" # Create it - fi - ;; - zsh) - if [ -f "$HOME/.zshrc" ]; then - echo "$HOME/.zshrc" - else - echo "$HOME/.zshrc" # Create it - fi - ;; - fish) - echo "$HOME/.config/fish/config.fish" - ;; - *) - # Fallback to .profile - echo "$HOME/.profile" - ;; - esac -} - -# Check if a PATH entry exists -path_contains() { - local dir="$1" - case ":$PATH:" in - *":$dir:"*) return 0 ;; - *) return 1 ;; - esac -} - -# Check if a PATH entry comes before another -path_order_ok() { - local earlier="$1" - local later="$2" - - # Get positions - local pos_earlier=-1 - local pos_later=-1 - local pos=0 - - IFS=':' read -ra PATHS <<< "$PATH" - for p in "${PATHS[@]}"; do - if [ "$p" = "$earlier" ]; then - pos_earlier=$pos - fi - if [ "$p" = "$later" ]; then - pos_later=$pos - fi - pos=$((pos + 1)) - done - - # If earlier not found, bad - [ $pos_earlier -eq -1 ] && return 1 - - # If later not found, order is OK (nothing to conflict with) - [ $pos_later -eq -1 ] && return 0 - - # Earlier should have lower position number - [ $pos_earlier -lt $pos_later ] -} - -# Package manager PATH requirements -# Format: name|path|init_command|description|priority(1=highest) -declare -A PATH_REQUIREMENTS=( - # Python/UV - ["uv"]="$HOME/.local/bin|export PATH=\"\$HOME/.local/bin:\$PATH\"|UV package manager binaries|1" - - # Ruby/rbenv - ["rbenv"]="$HOME/.rbenv/bin|export PATH=\"\$HOME/.rbenv/bin:\$PATH\"\neval \"\$(rbenv init - bash)\"|rbenv Ruby version manager|2" - ["rbenv-shims"]="$HOME/.rbenv/shims||rbenv shims (auto-managed)|1" - - # Node/nvm (requires sourcing, not just PATH) - ["nvm"]="$HOME/.nvm|export NVM_DIR=\"\$HOME/.nvm\"\n[ -s \"\$NVM_DIR/nvm.sh\" ] && . 
\"\$NVM_DIR/nvm.sh\"|nvm Node.js version manager|1" - - # Python/pyenv - ["pyenv"]="$HOME/.pyenv/bin|export PYENV_ROOT=\"\$HOME/.pyenv\"\nexport PATH=\"\$PYENV_ROOT/bin:\$PATH\"\neval \"\$(pyenv init --path)\"\neval \"\$(pyenv init -)\"|pyenv Python version manager|2" - ["pyenv-shims"]="$HOME/.pyenv/shims||pyenv shims (auto-managed)|1" - - # Rust/cargo - ["cargo"]="$HOME/.cargo/bin|. \"\$HOME/.cargo/env\"|Rust cargo package manager|3" - - # Go - ["go-bin"]="$HOME/go/bin|export PATH=\"\$HOME/go/bin:\$PATH\"|Go installed binaries|3" - - # General user binaries - ["local-bin"]="$HOME/.local/bin|export PATH=\"\$HOME/.local/bin:\$PATH\"|User-installed binaries|2" -) - -# Shell integration requirements (tools that need hooks/eval, not just PATH) -# Format: name|command_to_check|hook_command|description -declare -A SHELL_INTEGRATIONS=( - # Environment managers - ["direnv"]="direnv|eval \"\$(direnv hook bash)\"|direnv environment variable manager" - - # Version managers (check for the manager command itself) - ["nvm"]="nvm|[ -s \"\$NVM_DIR/nvm.sh\" ] && . \"\$NVM_DIR/nvm.sh\"|nvm Node.js version manager" - ["pyenv"]="pyenv|eval \"\$(pyenv init -)\"|pyenv Python version manager" - ["rbenv"]="rbenv|eval \"\$(rbenv init - bash)\"|rbenv Ruby version manager" - ["asdf"]="asdf|. \"\$HOME/.asdf/asdf.sh\"|asdf universal version manager" - - # Shell enhancements - ["starship"]="starship|eval \"\$(starship init bash)\"|starship cross-shell prompt" - ["zoxide"]="zoxide|eval \"\$(zoxide init bash)\"|zoxide smarter cd" - ["atuin"]="atuin|eval \"\$(atuin init bash)\"|atuin shell history sync" - - # Completions - ["kubectl"]="kubectl|source <(kubectl completion bash)|Kubernetes CLI completion" -) - -# Priority order rules: lower number = should come earlier in PATH -# This ensures language version managers take precedence over system packages - -# Check if a tool is properly configured in RC file -is_configured_in_rc() { - local init_cmd="$1" - local rc_file - rc_file=$(detect_shell_rc) - - # Extract first significant line from init command - local first_line - first_line=$(echo -e "$init_cmd" | head -1) - - # Check if it's in the RC file - grep -qF "${first_line}" "$rc_file" 2>/dev/null -} - -# Check if shell integration is properly configured -check_shell_integration() { - local name="$1" - local integration="${SHELL_INTEGRATIONS[$name]}" - - IFS='|' read -r command hook_cmd description <<< "$integration" - - local result="" - local warning="" - local fix="" - - # Check if tool is installed (special handling for nvm which is a function) - if [ "$name" = "nvm" ]; then - # Check if nvm directory exists instead of command - if [ ! -d "$HOME/.nvm" ]; then - result="not_installed" - echo "$result|$warning|$fix" - return 0 - fi - elif ! 
command -v "$command" >/dev/null 2>&1; then - result="not_installed" - echo "$result|$warning|$fix" - return 0 - fi - - # Check if hook is in RC file - local rc_file - rc_file=$(detect_shell_rc) - - # Extract key part of hook command for matching - local hook_pattern - case "$name" in - direnv) - hook_pattern="direnv hook" - ;; - nvm) - hook_pattern="NVM_DIR/nvm.sh" - ;; - pyenv) - hook_pattern="pyenv init" - ;; - rbenv) - hook_pattern="rbenv init" - ;; - asdf) - hook_pattern="asdf.sh" - ;; - starship) - hook_pattern="starship init" - ;; - zoxide) - hook_pattern="zoxide init" - ;; - atuin) - hook_pattern="atuin init" - ;; - kubectl) - hook_pattern="kubectl completion" - ;; - *) - hook_pattern="$name" - ;; - esac - - if grep -q "$hook_pattern" "$rc_file" 2>/dev/null; then - result="ok" - else - result="missing" - warning="⚠️ $description needs shell integration (hook not configured)" - fix="add_shell_hook|$name|$hook_cmd" - fi - - echo "$result|$warning|$fix" -} - -# Check all shell integrations -check_all_shell_integrations() { - local issues=() - - for name in "${!SHELL_INTEGRATIONS[@]}"; do - local check_result - check_result=$(check_shell_integration "$name") - - IFS='|' read -r result warning fix <<< "$check_result" - - if [ "$result" = "missing" ]; then - issues+=("$name|$warning|$fix") - fi - done - - # Print issues if found - if [ ${#issues[@]} -gt 0 ]; then - echo "# Shell Integration Issues Found:" >&2 - echo "#" >&2 - for issue in "${issues[@]}"; do - IFS='|' read -r name warning fix <<< "$issue" - echo "# $warning" >&2 - done - echo "#" >&2 - echo "# Run 'make fix-path' to automatically configure shell integrations" >&2 - return 1 - fi - - return 0 -} - -# Check a single PATH requirement -check_path_requirement() { - local name="$1" - local requirement="${PATH_REQUIREMENTS[$name]}" - - IFS='|' read -r dir init_cmd description priority <<< "$requirement" - - # Expand home directory - dir="${dir/#\~/$HOME}" - - local result="" - local warning="" - local fix="" - - # Check if directory exists - if [ ! -d "$dir" ]; then - result="not_installed" - return 0 - fi - - # Check if already configured in RC file (more reliable than checking current PATH) - if is_configured_in_rc "$init_cmd"; then - result="ok" - echo "$result|$warning|$fix" - return 0 - fi - - # Not in RC file - check if in current PATH (might be manually set) - if ! path_contains "$dir"; then - result="missing" - warning="⚠️ $description not configured in shell RC file" - fix="add_to_path|$name|$dir|$init_cmd" - else - result="ok" - - # Check priority ordering (e.g., ~/.local/bin should come before /usr/bin) - if [ "$name" = "local-bin" ] || [ "$name" = "cargo" ] || [ "$name" = "rbenv-shims" ]; then - if ! 
path_order_ok "$dir" "/usr/bin"; then - result="wrong_order" - warning="⚠️ $description should come before /usr/bin in PATH" - fix="reorder_path|$name|$dir|$init_cmd" - fi - fi - fi - - echo "$result|$warning|$fix" -} - -# Check all PATH requirements -check_all_paths() { - local path_issues=() - local shell_issues=() - - # Check PATH requirements - for name in "${!PATH_REQUIREMENTS[@]}"; do - local check_result - check_result=$(check_path_requirement "$name") - - IFS='|' read -r result warning fix <<< "$check_result" - - if [ "$result" = "missing" ] || [ "$result" = "wrong_order" ]; then - path_issues+=("$name|$warning|$fix") - fi - done - - # Check shell integrations - for name in "${!SHELL_INTEGRATIONS[@]}"; do - local check_result - check_result=$(check_shell_integration "$name") - - IFS='|' read -r result warning fix <<< "$check_result" - - if [ "$result" = "missing" ]; then - shell_issues+=("$name|$warning|$fix") - fi - done - - # Print PATH issues - if [ ${#path_issues[@]} -gt 0 ]; then - echo "# PATH Configuration Issues Found:" >&2 - echo "#" >&2 - for issue in "${path_issues[@]}"; do - IFS='|' read -r name warning fix <<< "$issue" - echo "# $warning" >&2 - done - echo "#" >&2 - fi - - # Print shell integration issues - if [ ${#shell_issues[@]} -gt 0 ]; then - echo "# Shell Integration Issues Found:" >&2 - echo "#" >&2 - for issue in "${shell_issues[@]}"; do - IFS='|' read -r name warning fix <<< "$issue" - echo "# $warning" >&2 - done - echo "#" >&2 - fi - - # Summary - if [ ${#path_issues[@]} -gt 0 ] || [ ${#shell_issues[@]} -gt 0 ]; then - echo "# Run 'make fix-path' to automatically fix these issues" >&2 - return 1 - fi - - return 0 -} - -# Add init commands to shell RC file -add_to_shell_rc() { - local name="$1" - local init_cmd="$2" - local rc_file - rc_file=$(detect_shell_rc) - - # Check if already present - local first_line - first_line=$(echo -e "$init_cmd" | head -1) - - if grep -qF "${first_line}" "$rc_file" 2>/dev/null; then - echo "# [$name] Already configured in $rc_file" >&2 - return 0 - fi - - echo "# [$name] Adding to $rc_file..." >&2 - - # Add with clear section marker - { - echo "" - echo "# $name initialization (added by ai_cli_preparation)" - echo -e "$init_cmd" - } >> "$rc_file" - - echo "# [$name] ✓ Added to $rc_file" >&2 -} - -# Add shell hook to RC file -add_shell_hook() { - local name="$1" - local hook_cmd="$2" - local rc_file - rc_file=$(detect_shell_rc) - - # Check if _eval_if helper exists - if ! grep -q "^_eval_if()" "$rc_file" 2>/dev/null; then - echo "# [shell-helper] Adding _eval_if helper function..." >&2 - { - echo "" - echo "# Shell integration helper (added by ai_cli_preparation)" - echo "_eval_if() { command -v \"\$1\" >/dev/null 2>&1 && eval \"\$2\"; }" - } >> "$rc_file" - fi - - # Extract command name from hook_cmd for pattern matching - local hook_pattern - case "$name" in - direnv) hook_pattern="direnv hook" ;; - nvm) hook_pattern="NVM_DIR/nvm.sh" ;; - pyenv) hook_pattern="pyenv init" ;; - rbenv) hook_pattern="rbenv init" ;; - asdf) hook_pattern="asdf.sh" ;; - starship) hook_pattern="starship init" ;; - zoxide) hook_pattern="zoxide init" ;; - atuin) hook_pattern="atuin init" ;; - kubectl) hook_pattern="kubectl completion" ;; - *) hook_pattern="$name" ;; - esac - - # Check if already present - if grep -q "$hook_pattern" "$rc_file" 2>/dev/null; then - echo "# [$name] Shell integration already configured in $rc_file" >&2 - return 0 - fi - - echo "# [$name] Adding shell integration to $rc_file..." 
>&2 - - # Add with clear section marker - { - echo "" - echo "# $name shell integration (added by ai_cli_preparation)" - echo -e "$hook_cmd" - } >> "$rc_file" - - echo "# [$name] ✓ Shell integration added to $rc_file" >&2 -} - -# Fix all PATH issues -fix_all_paths() { - local rc_file - rc_file=$(detect_shell_rc) - local path_fixed=0 - local shell_fixed=0 - - echo "# Fixing PATH and shell integration issues..." >&2 - echo "# Target RC file: $rc_file" >&2 - echo "#" >&2 - - # Fix PATH requirements - for name in "${!PATH_REQUIREMENTS[@]}"; do - local check_result - check_result=$(check_path_requirement "$name") - - IFS='|' read -r result warning fix <<< "$check_result" - - if [ "$result" = "missing" ] || [ "$result" = "wrong_order" ]; then - IFS='|' read -r fix_type fix_name fix_dir fix_init_cmd <<< "$fix" - - if [ -n "$fix_init_cmd" ]; then - add_to_shell_rc "$fix_name" "$fix_init_cmd" - path_fixed=$((path_fixed + 1)) - fi - fi - done - - # Fix shell integrations - for name in "${!SHELL_INTEGRATIONS[@]}"; do - local check_result - check_result=$(check_shell_integration "$name") - - IFS='|' read -r result warning fix <<< "$check_result" - - if [ "$result" = "missing" ]; then - IFS='|' read -r fix_type fix_name fix_hook_cmd <<< "$fix" - - if [ -n "$fix_hook_cmd" ]; then - add_shell_hook "$fix_name" "$fix_hook_cmd" - shell_fixed=$((shell_fixed + 1)) - fi - fi - done - - # Summary - if [ $path_fixed -gt 0 ] || [ $shell_fixed -gt 0 ]; then - echo "#" >&2 - [ $path_fixed -gt 0 ] && echo "# ✓ Fixed $path_fixed PATH issues" >&2 - [ $shell_fixed -gt 0 ] && echo "# ✓ Fixed $shell_fixed shell integration issues" >&2 - echo "#" >&2 - echo "# To apply changes, run:" >&2 - echo "# source $rc_file" >&2 - echo "# Or start a new shell session" >&2 - else - echo "# No PATH or shell integration issues found" >&2 - fi -} - -# Export functions for use in other scripts -export -f detect_shell_rc -export -f path_contains -export -f path_order_ok -export -f check_path_requirement -export -f check_shell_integration -export -f check_all_shell_integrations -export -f check_all_paths -export -f add_to_shell_rc -export -f add_shell_hook -export -f fix_all_paths diff --git a/scripts/lib/policy.sh b/scripts/lib/policy.sh deleted file mode 100755 index be2fc3e..0000000 --- a/scripts/lib/policy.sh +++ /dev/null @@ -1,284 +0,0 @@ -#!/usr/bin/env bash -# policy.sh - Installation method policy resolution -# -# This library resolves which installation method to use by evaluating: -# 1. Catalog available methods + priorities (maintainer knowledge) -# 2. User preferences (user configuration) -# 3. System capabilities (what's actually available) -# -# Decision: best_method = highest priority from (catalog ∩ user ∩ available) - -set -euo pipefail - -POLICY_LIB_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$POLICY_LIB_DIR/capability.sh" - -# Default config location -CONFIG_FILE="${AI_CLI_PREP_CONFIG:-$HOME/.ai_cli_prep/config.json}" - -# Get user's preferred strategy from config -# Returns: auto|github_first|cargo_first|apt_first|npm_first -get_user_strategy() { - if [ ! 
-f "$CONFIG_FILE" ]; then - echo "auto" - return 0 - fi - - # Parse JSON config for preferred_strategy - if command -v jq >/dev/null 2>&1; then - local strategy - strategy="$(jq -r '.preferred_strategy // "auto"' "$CONFIG_FILE" 2>/dev/null || echo "auto")" - echo "$strategy" - else - # Fallback without jq - simple grep - if grep -q '"preferred_strategy"' "$CONFIG_FILE" 2>/dev/null; then - grep '"preferred_strategy"' "$CONFIG_FILE" | sed 's/.*: *"\([^"]*\)".*/\1/' || echo "auto" - else - echo "auto" - fi - fi -} - -# Get user override for a specific tool -# Args: tool_name -# Returns: method name or empty string -get_user_override() { - local tool="$1" - - if [ ! -f "$CONFIG_FILE" ]; then - echo "" - return 0 - fi - - # Parse JSON config for overrides.tool - if command -v jq >/dev/null 2>&1; then - local override - override="$(jq -r ".overrides.\"$tool\" // empty" "$CONFIG_FILE" 2>/dev/null || echo "")" - echo "$override" - else - # Fallback without jq - if grep -q "\"$tool\"" "$CONFIG_FILE" 2>/dev/null; then - grep "\"$tool\"" "$CONFIG_FILE" | sed 's/.*: *"\([^"]*\)".*/\1/' || echo "" - else - echo "" - fi - fi -} - -# Check if user config allows sudo operations -# Returns: 0 if allowed, 1 if not -is_sudo_allowed() { - if [ ! -f "$CONFIG_FILE" ]; then - return 0 # Default: allow sudo - fi - - if command -v jq >/dev/null 2>&1; then - local allow_sudo - allow_sudo="$(jq -r '.allow_sudo // true' "$CONFIG_FILE" 2>/dev/null || echo "true")" - [ "$allow_sudo" = "true" ] - else - # Fallback: check for explicit "allow_sudo": false - if grep -q '"allow_sudo" *: *false' "$CONFIG_FILE" 2>/dev/null; then - return 1 - fi - return 0 - fi -} - -# Apply user strategy to adjust method priorities -# Args: method, base_priority, strategy -# Returns: adjusted priority -apply_strategy_to_priority() { - local method="$1" - local base_priority="$2" - local strategy="$3" - - case "$strategy" in - auto) - # Use catalog priorities as-is - echo "$base_priority" - ;; - github_first) - case "$method" in - github_release_binary) echo 1 ;; - cargo) echo 2 ;; - npm) echo 3 ;; - apt) echo 4 ;; - brew) echo 5 ;; - *) echo "$base_priority" ;; - esac - ;; - cargo_first) - case "$method" in - cargo) echo 1 ;; - github_release_binary) echo 2 ;; - npm) echo 3 ;; - apt) echo 4 ;; - brew) echo 5 ;; - *) echo "$base_priority" ;; - esac - ;; - npm_first) - case "$method" in - npm) echo 1 ;; - github_release_binary) echo 2 ;; - cargo) echo 3 ;; - apt) echo 4 ;; - brew) echo 5 ;; - *) echo "$base_priority" ;; - esac - ;; - apt_first) - case "$method" in - apt) echo 1 ;; - brew) echo 2 ;; - github_release_binary) echo 3 ;; - cargo) echo 4 ;; - npm) echo 5 ;; - *) echo "$base_priority" ;; - esac - ;; - *) - echo "$base_priority" - ;; - esac -} - -# Parse catalog available_methods and resolve best method -# Args: catalog_json_file -# Returns: method name or empty string if error -resolve_best_method() { - local catalog_file="$1" - local tool - tool="$(basename "$catalog_file" .json)" - - if [ ! 
-f "$catalog_file" ]; then - echo "Error: Catalog file not found: $catalog_file" >&2 - return 1 - fi - - # Check if tool uses reconciliation (install_method == "auto") - local install_method - if command -v jq >/dev/null 2>&1; then - install_method="$(jq -r '.install_method // ""' "$catalog_file" 2>/dev/null || echo "")" - else - install_method="$(grep '"install_method"' "$catalog_file" | head -1 | sed 's/.*: *"\([^"]*\)".*/\1/' || echo "")" - fi - - if [ "$install_method" != "auto" ]; then - echo "Error: Tool $tool does not use reconciliation (install_method != 'auto')" >&2 - return 1 - fi - - # Get user preferences - local user_strategy - user_strategy="$(get_user_strategy)" - local user_override - user_override="$(get_user_override "$tool")" - - # If user has an override, use it (if available) - if [ -n "$user_override" ]; then - if is_method_available "$user_override"; then - echo "$user_override" - return 0 - else - echo "Error: User override method '$user_override' not available for $tool" >&2 - return 1 - fi - fi - - # Parse available_methods from catalog - if ! command -v jq >/dev/null 2>&1; then - echo "Error: jq not available, cannot parse catalog" >&2 - return 1 - fi - - # Get all available methods with priorities - local best_method="" - local best_priority=9999 - - # Read available_methods array - local methods_count - methods_count="$(jq '.available_methods | length' "$catalog_file" 2>/dev/null || echo "0")" - - if [ "$methods_count" -eq 0 ]; then - echo "Error: No available_methods defined in catalog for $tool" >&2 - return 1 - fi - - for ((i=0; i/dev/null || echo "")" - [ -z "$method" ] && continue - - local catalog_priority - catalog_priority="$(jq -r ".available_methods[$i].priority // 999" "$catalog_file" 2>/dev/null || echo "999")" - - # Check if method is available on system - if ! is_method_available "$method"; then - continue - fi - - # Skip apt if sudo not allowed - if [ "$method" = "apt" ] && ! is_sudo_allowed; then - continue - fi - - # Apply user strategy to adjust priority - local adjusted_priority - adjusted_priority="$(apply_strategy_to_priority "$method" "$catalog_priority" "$user_strategy")" - - # Track best (lowest priority number) - if [ "$adjusted_priority" -lt "$best_priority" ]; then - best_priority="$adjusted_priority" - best_method="$method" - fi - done - - if [ -z "$best_method" ]; then - echo "Error: No available installation method found for $tool" >&2 - return 1 - fi - - echo "$best_method" - return 0 -} - -# Get configuration for a specific method from catalog -# Args: catalog_file, method -# Returns: JSON config object or empty -get_method_config() { - local catalog_file="$1" - local method="$2" - - if ! 
command -v jq >/dev/null 2>&1; then - echo "{}" - return 0 - fi - - # Find the method in available_methods array and return its config - local config - config="$(jq -r ".available_methods[] | select(.method == \"$method\") | .config // {}" "$catalog_file" 2>/dev/null || echo "{}")" - echo "$config" -} - -# Explain the decision made for a tool -# Args: catalog_file -# Returns: human-readable explanation -explain_method_decision() { - local catalog_file="$1" - local tool - tool="$(basename "$catalog_file" .json)" - - local best_method - best_method="$(resolve_best_method "$catalog_file" 2>/dev/null || echo "none")" - - echo "[$tool] Policy decision:" - echo "[$tool] User strategy: $(get_user_strategy)" - local override - override="$(get_user_override "$tool")" - if [ -n "$override" ]; then - echo "[$tool] User override: $override" - fi - echo "[$tool] Best available method: $best_method" -} diff --git a/scripts/lib/reconcile.sh b/scripts/lib/reconcile.sh deleted file mode 100755 index f12b004..0000000 --- a/scripts/lib/reconcile.sh +++ /dev/null @@ -1,342 +0,0 @@ -#!/usr/bin/env bash -# reconcile.sh - Installation method reconciliation -# -# This library aligns reality with policy: -# - Detects current installation method -# - Resolves best method via policy -# - Removes current installation if it doesn't match best -# - Installs via best method - -set -euo pipefail - -RECONCILE_LIB_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$RECONCILE_LIB_DIR/common.sh" -. "$RECONCILE_LIB_DIR/capability.sh" -. "$RECONCILE_LIB_DIR/policy.sh" - -# Remove an installation via a specific method -# Args: tool_name, method, binary_name -remove_installation() { - local tool="$1" - local method="$2" - local binary="${3:-$tool}" - - echo "[$tool] Removing installation via $method..." >&2 - - case "$method" in - apt) - # Find package name - local binary_path - binary_path="$(command -v "$binary" 2>/dev/null || echo "")" - if [ -z "$binary_path" ]; then - echo "[$tool] Binary not found, nothing to remove" >&2 - return 0 - fi - - if command -v dpkg >/dev/null 2>&1; then - local pkg - pkg="$(dpkg -S "$binary_path" 2>/dev/null | cut -d: -f1 || echo "")" - if [ -n "$pkg" ]; then - echo "[$tool] Removing apt package: $pkg" >&2 - apt_remove_if_present "$pkg" || true - fi - fi - ;; - cargo) - if command -v cargo >/dev/null 2>&1; then - echo "[$tool] Uninstalling cargo package: $tool" >&2 - cargo uninstall "$tool" 2>/dev/null || true - fi - ;; - npm) - if command -v npm >/dev/null 2>&1; then - echo "[$tool] Uninstalling npm global package: $tool" >&2 - npm uninstall -g "$tool" 2>/dev/null || true - fi - ;; - gem) - if command -v gem >/dev/null 2>&1; then - echo "[$tool] Uninstalling gem: $tool" >&2 - gem uninstall -x "$tool" 2>/dev/null || true - fi - ;; - pip|pipx) - if command -v pipx >/dev/null 2>&1 && pipx list 2>/dev/null | grep -q "package $tool"; then - echo "[$tool] Uninstalling pipx package: $tool" >&2 - pipx uninstall "$tool" 2>/dev/null || true - elif command -v pip >/dev/null 2>&1 || command -v pip3 >/dev/null 2>&1; then - local pip_cmd="${PIP:-pip3}" - if ! 
command -v "$pip_cmd" >/dev/null 2>&1; then - pip_cmd="pip" - fi - echo "[$tool] Uninstalling pip package: $tool" >&2 - "$pip_cmd" uninstall -y "$tool" 2>/dev/null || true - fi - ;; - brew) - if command -v brew >/dev/null 2>&1; then - echo "[$tool] Uninstalling brew formula: $tool" >&2 - brew uninstall "$tool" 2>/dev/null || true - fi - ;; - github_release_binary) - local binary_path - binary_path="$(command -v "$binary" 2>/dev/null || echo "")" - if [ -n "$binary_path" ] && [ -f "$binary_path" ]; then - echo "[$tool] Removing binary: $binary_path" >&2 - rm -f "$binary_path" || true - fi - ;; - unknown|none) - echo "[$tool] Unknown installation method, cannot remove automatically" >&2 - return 1 - ;; - *) - echo "[$tool] Unsupported removal method: $method" >&2 - return 1 - ;; - esac - - return 0 -} - -# Install a tool via a specific method -# Args: tool_name, method, config_json, binary_name -install_via_method() { - local tool="$1" - local method="$2" - local config="${3:-{}}" - local binary="${4:-$tool}" - - echo "[$tool] Installing via $method..." >&2 - - case "$method" in - apt) - local package - if command -v jq >/dev/null 2>&1; then - package="$(echo "$config" | jq -r '.package // ""')" - fi - package="${package:-$tool}" - - echo "[$tool] Installing apt package: $package" >&2 - apt_install_if_missing "$package" || return 1 - ;; - cargo) - local crate - if command -v jq >/dev/null 2>&1; then - crate="$(echo "$config" | jq -r '.crate // ""')" - fi - crate="${crate:-$tool}" - - echo "[$tool] Installing cargo crate: $crate" >&2 - cargo install "$crate" || return 1 - ;; - npm) - local package - if command -v jq >/dev/null 2>&1; then - package="$(echo "$config" | jq -r '.package // ""')" - fi - package="${package:-$tool}" - - echo "[$tool] Installing npm global package: $package" >&2 - npm install -g "$package" || return 1 - ;; - gem) - local gem_name - if command -v jq >/dev/null 2>&1; then - gem_name="$(echo "$config" | jq -r '.gem // ""')" - fi - gem_name="${gem_name:-$tool}" - - echo "[$tool] Installing gem: $gem_name" >&2 - gem install "$gem_name" || return 1 - ;; - pip) - local package - if command -v jq >/dev/null 2>&1; then - package="$(echo "$config" | jq -r '.package // ""')" - fi - package="${package:-$tool}" - - local pip_cmd="${PIP:-pip3}" - if ! command -v "$pip_cmd" >/dev/null 2>&1; then - pip_cmd="pip" - fi - - echo "[$tool] Installing pip package: $package" >&2 - "$pip_cmd" install "$package" || return 1 - ;; - pipx) - local package - if command -v jq >/dev/null 2>&1; then - package="$(echo "$config" | jq -r '.package // ""')" - fi - package="${package:-$tool}" - - echo "[$tool] Installing pipx package: $package" >&2 - pipx install "$package" || return 1 - ;; - brew) - local formula - if command -v jq >/dev/null 2>&1; then - formula="$(echo "$config" | jq -r '.formula // ""')" - fi - formula="${formula:-$tool}" - - echo "[$tool] Installing brew formula: $formula" >&2 - brew install "$formula" || return 1 - ;; - github_release_binary) - # Use existing github_release_binary.sh installer - local installer="$RECONCILE_LIB_DIR/../installers/github_release_binary.sh" - if [ ! 
-f "$installer" ]; then - echo "[$tool] Error: github_release_binary installer not found" >&2 - return 1 - fi - - # The github_release_binary installer expects catalog file as input - # We need to temporarily set up environment for it - echo "[$tool] Installing via GitHub release binary installer" >&2 - "$installer" "$tool" install || return 1 - ;; - *) - echo "[$tool] Unsupported installation method: $method" >&2 - return 1 - ;; - esac - - return 0 -} - -# Main reconciliation function -# Args: catalog_file, action (reconcile|status) -reconcile_tool() { - local catalog_file="$1" - local action="${2:-reconcile}" - local tool - tool="$(basename "$catalog_file" .json)" - - # Load catalog metadata - if ! command -v jq >/dev/null 2>&1; then - echo "[$tool] Error: jq not available, cannot parse catalog" >&2 - return 1 - fi - - local install_method - install_method="$(jq -r '.install_method // ""' "$catalog_file" 2>/dev/null || echo "")" - - # Check if tool uses reconciliation - if [ "$install_method" != "auto" ]; then - if [ "$action" = "status" ]; then - echo "[$tool] Uses dedicated installer (install_method: $install_method), reconciliation not applicable" - fi - return 0 - fi - - local binary_name - binary_name="$(jq -r '.binary_name // ""' "$catalog_file" 2>/dev/null || echo "$tool")" - - # Detect current installation - local current_method - current_method="$(detect_install_method "$tool" "$binary_name")" - - # Resolve best method via policy - local best_method - best_method="$(resolve_best_method "$catalog_file" 2>&1)" - if [ $? -ne 0 ]; then - echo "[$tool] Error resolving best method: $best_method" >&2 - return 1 - fi - - # Status mode: just report - if [ "$action" = "status" ]; then - echo "[$tool] Current method: $current_method" - echo "[$tool] Best method: $best_method" - if [ "$current_method" = "$best_method" ]; then - echo "[$tool] Status: ✓ Already using best method" - elif [ "$current_method" = "none" ]; then - echo "[$tool] Status: Not installed, would install via $best_method" - else - echo "[$tool] Status: ⚠ Reconciliation needed (current: $current_method → best: $best_method)" - fi - return 0 - fi - - # Reconcile mode: align current with best - if [ "$current_method" = "$best_method" ]; then - # If action is "reconcile" (not update/install), skip if already via best method - if [ "$action" = "reconcile" ]; then - echo "[$tool] ✓ Already installed via best method: $best_method" >&2 - return 0 - fi - # For update/install action, continue to reinstall/upgrade even if via best method - echo "[$tool] Upgrading (currently via $best_method)" >&2 - fi - - if [ "$current_method" = "none" ]; then - echo "[$tool] Not installed, installing via best method: $best_method" >&2 - else - echo "[$tool] Reconciliation needed: current=$current_method, best=$best_method" >&2 - - # Remove current installation - if ! remove_installation "$tool" "$current_method" "$binary_name"; then - echo "[$tool] Error: Failed to remove current installation via $current_method" >&2 - return 1 - fi - fi - - # Get method config - local method_config - method_config="$(get_method_config "$catalog_file" "$best_method")" - - # Install via best method - if ! 
install_via_method "$tool" "$best_method" "$method_config" "$binary_name"; then - echo "[$tool] Error: Failed to install via $best_method" >&2 - return 1 - fi - - # Verify installation - if command -v "$binary_name" >/dev/null 2>&1; then - local new_method - new_method="$(detect_install_method "$tool" "$binary_name")" - echo "[$tool] ✓ Reconciliation complete: now installed via $new_method" >&2 - return 0 - else - echo "[$tool] Error: Installation via $best_method completed but binary not found" >&2 - return 1 - fi -} - -# Batch reconciliation for multiple tools -# Args: catalog_dir, action (reconcile|status) -reconcile_all_tools() { - local catalog_dir="$1" - local action="${2:-reconcile}" - - if [ ! -d "$catalog_dir" ]; then - echo "Error: Catalog directory not found: $catalog_dir" >&2 - return 1 - fi - - local tool_count=0 - local reconciled_count=0 - local error_count=0 - - for catalog_file in "$catalog_dir"/*.json; do - [ -f "$catalog_file" ] || continue - - local tool - tool="$(basename "$catalog_file" .json)" - tool_count=$((tool_count + 1)) - - if reconcile_tool "$catalog_file" "$action"; then - reconciled_count=$((reconciled_count + 1)) - else - error_count=$((error_count + 1)) - fi - done - - echo "" - echo "Summary: $tool_count tools processed, $reconciled_count succeeded, $error_count errors" - return 0 -} diff --git a/scripts/lib/scope_detection.sh b/scripts/lib/scope_detection.sh deleted file mode 100755 index ed20607..0000000 --- a/scripts/lib/scope_detection.sh +++ /dev/null @@ -1,739 +0,0 @@ -#!/usr/bin/env bash -# Scope Detection Library for Package Managers -# Provides functions to detect and count packages by scope (system/user/project) - -# ============================================================================ -# Project Marker Detection -# ============================================================================ - -has_project_markers() { - # Check for common project markers - [ -f "./package.json" ] || \ - [ -f "./Gemfile" ] || \ - [ -f "./composer.json" ] || \ - [ -f "./Cargo.toml" ] || \ - [ -f "./pyproject.toml" ] || \ - [ -f "./environment.yml" ] || \ - [ -d "./.venv" ] || \ - [ -d "./venv" ] || \ - [ -d "./node_modules" ] || \ - [ -d "./vendor" ] -} - -determine_default_scope() { - # Determine default scope based on current directory context - if has_project_markers; then - echo "project" - else - echo "user" - fi -} - -# ============================================================================ -# System-Only Managers (2) -# ============================================================================ - -get_apt_scopes() { - echo "system" -} - -get_apt_packages_by_scope() { - local scope="$1" - if [ "$scope" = "system" ]; then - dpkg -l 2>/dev/null | grep '^ii' | wc -l | tr -d '[:space:]' - else - echo "0" - fi -} - -get_snap_scopes() { - echo "system" -} - -get_snap_packages_by_scope() { - local scope="$1" - if [ "$scope" = "system" ]; then - snap list 2>/dev/null | tail -n +2 | wc -l | tr -d '[:space:]' - else - echo "0" - fi -} - -# ============================================================================ -# User-Only Managers (7) -# ============================================================================ - -get_cargo_scopes() { - echo "user" -} - -get_cargo_packages_by_scope() { - local scope="$1" - if [ "$scope" = "user" ]; then - cargo install --list 2>/dev/null | grep -c '^[^ ]' | tr -d '[:space:]' || echo "0" - else - echo "0" - fi -} - -get_rustup_scopes() { - echo "user" -} - -get_rustup_packages_by_scope() { - local scope="$1" - if [ 
"$scope" = "user" ]; then - rustup toolchain list 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi -} - -get_pipx_scopes() { - echo "user" -} - -get_pipx_packages_by_scope() { - local scope="$1" - if [ "$scope" = "user" ]; then - pipx list --short 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi -} - -get_go_scopes() { - echo "user" -} - -get_go_packages_by_scope() { - local scope="$1" - if [ "$scope" = "user" ]; then - local gobin - gobin="$(go env GOBIN 2>/dev/null || echo "$(go env GOPATH 2>/dev/null)/bin")" - if [ -d "$gobin" ]; then - ls -1 "$gobin" 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi - else - echo "0" - fi -} - -get_gcloud_scopes() { - echo "user" -} - -get_gcloud_packages_by_scope() { - local scope="$1" - if [ "$scope" = "user" ]; then - gcloud components list --filter='State.name:Installed' --format='value(id)' 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi -} - -get_poetry_scopes() { - echo "user" -} - -get_poetry_packages_by_scope() { - local scope="$1" - if [ "$scope" = "user" ]; then - echo "N/A" - else - echo "0" - fi -} - -# ============================================================================ -# System or User Managers (5) -# ============================================================================ - -get_brew_scopes() { - local brew_prefix - brew_prefix="$(brew --prefix 2>/dev/null || echo "")" - - if [ -z "$brew_prefix" ]; then - echo "" - return - fi - - # Check if in user's home directory - if [[ "$brew_prefix" == "$HOME"* ]] || [ -d "$HOME/.linuxbrew" ]; then - echo "user" - else - echo "system" - fi -} - -get_brew_packages_by_scope() { - local scope="$1" - local detected_scope - detected_scope="$(get_brew_scopes)" - - if [ "$scope" = "$detected_scope" ]; then - brew list --formula 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi -} - -get_flatpak_scopes() { - local scopes="" - local sys_count usr_count - - sys_count="$(flatpak list --system --app 2>/dev/null | wc -l | tr -d '[:space:]')" - usr_count="$(flatpak list --user --app 2>/dev/null | wc -l | tr -d '[:space:]')" - - [ "${sys_count:-0}" -gt 0 ] && scopes="system" - [ "${usr_count:-0}" -gt 0 ] && { [ -n "$scopes" ] && scopes="$scopes,user" || scopes="user"; } - - echo "$scopes" -} - -get_flatpak_packages_by_scope() { - local scope="$1" - case "$scope" in - system) - flatpak list --system --app 2>/dev/null | wc -l | tr -d '[:space:]' - ;; - user) - flatpak list --user --app 2>/dev/null | wc -l | tr -d '[:space:]' - ;; - *) - echo "0" - ;; - esac -} - -get_gem_scopes() { - local scopes="" - local gem_dir - - gem_dir="$(gem environment gemdir 2>/dev/null || echo "")" - - if [ -n "$gem_dir" ]; then - if [[ "$gem_dir" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - if [ -f "./Gemfile" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_gem_packages_by_scope() { - local scope="$1" - case "$scope" in - user|system) - gem list --no-versions 2>/dev/null | wc -l | tr -d '[:space:]' - ;; - project) - if [ -f "./Gemfile" ]; then - bundle list 2>/dev/null | grep -c '^\s*\*' | tr -d '[:space:]' || echo "0" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_nuget_scopes() { - local scopes="" - - if command -v nuget >/dev/null 2>&1; then - scopes="system" - elif command -v dotnet >/dev/null 2>&1; then - scopes="user" - fi - - if compgen -G "./*.csproj" >/dev/null 2>&1 || compgen -G "./*.sln" >/dev/null 2>&1; then - [ -n "$scopes" ] && 
scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_nuget_packages_by_scope() { - local scope="$1" - case "$scope" in - system|user) - echo "N/A" - ;; - project) - if command -v dotnet >/dev/null 2>&1; then - dotnet list package 2>/dev/null | grep -c '>' | tr -d '[:space:]' || echo "0" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_az_scopes() { - if command -v apt-get >/dev/null 2>&1 && dpkg -l azure-cli >/dev/null 2>&1; then - echo "system" - else - echo "user" - fi -} - -get_az_packages_by_scope() { - local scope="$1" - local detected_scope - detected_scope="$(get_az_scopes)" - - if [ "$scope" = "$detected_scope" ]; then - az extension list 2>/dev/null | grep -c '"name":' | tr -d '[:space:]' || echo "0" - else - echo "0" - fi -} - -# ============================================================================ -# User + Project Managers (7) -# ============================================================================ - -get_uv_scopes() { - local scopes="user" - - if [ -d "./.venv" ] || [ -f "./pyproject.toml" ]; then - scopes="user,project" - fi - - echo "$scopes" -} - -get_uv_packages_by_scope() { - local scope="$1" - case "$scope" in - user) - uv tool list 2>/dev/null | wc -l | tr -d '[:space:]' - ;; - project) - if [ -d "./.venv" ]; then - ./.venv/bin/python -m pip list 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]' - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_pip_scopes() { - local scopes="user" - - if [ -n "${VIRTUAL_ENV:-}" ] || [ -d "./.venv" ] || [ -d "./venv" ] || [ -f "./pyproject.toml" ]; then - scopes="user,project" - fi - - echo "$scopes" -} - -get_pip_packages_by_scope() { - local scope="$1" - local count - case "$scope" in - user) - # Use system python3 explicitly (not virtualenv) - count=$(/usr/bin/python3 -m pip list --user 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]') || count="0" - echo "${count:-0}" - ;; - project) - if [ -n "${VIRTUAL_ENV:-}" ]; then - count=$(python3 -m pip list 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]') || count="0" - echo "${count:-0}" - elif [ -d "./.venv" ]; then - count=$(./.venv/bin/python -m pip list 2>/dev/null | tail -n +3 | wc -l | tr -d '[:space:]') || count="0" - echo "${count:-0}" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_npm_scopes() { - local scopes="" - local npm_prefix - - npm_prefix="$(npm config get prefix 2>/dev/null || echo "")" - - if [ -n "$npm_prefix" ]; then - if [[ "$npm_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - if [ -f "./package.json" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_npm_packages_by_scope() { - local scope="$1" - case "$scope" in - user|system) - npm list -g --depth=0 2>/dev/null | grep -c '^[├└]' | tr -d '[:space:]' || echo "0" - ;; - project) - if [ -f "./package.json" ]; then - npm list --depth=0 2>/dev/null | grep -c '^[├└]' | tr -d '[:space:]' || echo "0" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_pnpm_scopes() { - local scopes="" - local pnpm_prefix - - pnpm_prefix="$(pnpm config get prefix 2>/dev/null || echo "")" - - if [ -n "$pnpm_prefix" ]; then - if [[ "$pnpm_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - if [ -f "./package.json" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_pnpm_packages_by_scope() { - local scope="$1" - local count - case "$scope" in - 
user|system) - count=$(pnpm list -g --depth=0 2>/dev/null | grep -c '^[├└]' | tr -d '[:space:]') || count="0" - echo "${count:-0}" - ;; - project) - if [ -f "./package.json" ]; then - count=$(pnpm list --depth=0 2>/dev/null | grep -c '^[├└]' | tr -d '[:space:]') || count="0" - echo "${count:-0}" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_yarn_scopes() { - local scopes="" - local yarn_prefix - - yarn_prefix="$(yarn global dir 2>/dev/null | head -n1 || echo "")" - - if [ -n "$yarn_prefix" ]; then - if [[ "$yarn_prefix" == "$HOME"* ]]; then - scopes="user" - else - scopes="system" - fi - fi - - if [ -f "./package.json" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_yarn_packages_by_scope() { - local scope="$1" - local count - case "$scope" in - user|system) - count=$(yarn global list 2>/dev/null | grep -c '^info' | tr -d '[:space:]') || count="0" - echo "${count:-0}" - ;; - project) - if [ -f "./package.json" ]; then - count=$(yarn list --depth=0 2>/dev/null | grep -c '^├─' | tr -d '[:space:]') || count="0" - echo "${count:-0}" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_composer_scopes() { - local scopes="user" - - if [ -f "./composer.json" ]; then - scopes="user,project" - fi - - echo "$scopes" -} - -get_composer_packages_by_scope() { - local scope="$1" - case "$scope" in - user) - composer global show 2>/dev/null | wc -l | tr -d '[:space:]' - ;; - project) - if [ -f "./composer.json" ]; then - composer show 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_jspm_scopes() { - local scopes="user" - - if [ -f "./package.json" ]; then - scopes="user,project" - fi - - echo "$scopes" -} - -get_jspm_packages_by_scope() { - local scope="$1" - case "$scope" in - user) - echo "N/A" - ;; - project) - if [ -f "./package.json" ] && [ -d "./jspm_packages" ]; then - find ./jspm_packages -maxdepth 2 -type d 2>/dev/null | wc -l | tr -d '[:space:]' - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -# ============================================================================ -# System + User + Project Managers (3) -# ============================================================================ - -get_conda_scopes() { - local scopes="" - local conda_prefix="${CONDA_PREFIX:-}" - - if [ -n "$conda_prefix" ]; then - if [[ "$conda_prefix" == *"/base"* ]]; then - scopes="system" - else - scopes="user" - fi - else - scopes="user" - fi - - if [ -f "./environment.yml" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_conda_packages_by_scope() { - local scope="$1" - case "$scope" in - system|user) - conda list 2>/dev/null | tail -n +4 | wc -l | tr -d '[:space:]' - ;; - project) - if [ -f "./environment.yml" ]; then - echo "N/A" - else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_mamba_scopes() { - local scopes="" - local mamba_prefix="${MAMBA_PREFIX:-${CONDA_PREFIX:-}}" - - if [ -n "$mamba_prefix" ]; then - if [[ "$mamba_prefix" == *"/base"* ]]; then - scopes="system" - else - scopes="user" - fi - else - scopes="user" - fi - - if [ -f "./environment.yml" ]; then - [ -n "$scopes" ] && scopes="$scopes,project" || scopes="project" - fi - - echo "$scopes" -} - -get_mamba_packages_by_scope() { - local scope="$1" - case "$scope" in - system|user) - mamba list 2>/dev/null | tail -n +4 | wc -l | tr -d '[:space:]' - ;; - project) - if [ -f "./environment.yml" ]; then - echo "N/A" - 
else - echo "0" - fi - ;; - *) - echo "0" - ;; - esac -} - -get_bundler_scopes() { - if [ -f "./Gemfile" ]; then - echo "project" - else - echo "" - fi -} - -get_bundler_packages_by_scope() { - local scope="$1" - if [ "$scope" = "project" ] && [ -f "./Gemfile" ]; then - bundle list 2>/dev/null | grep -c '^\s*\*' | tr -d '[:space:]' || echo "0" - else - echo "0" - fi -} - -# ============================================================================ -# Unified Scope Detection -# ============================================================================ - -get_manager_scopes() { - local mgr="$1" - - case "$mgr" in - # System-only - apt) get_apt_scopes ;; - snap) get_snap_scopes ;; - - # User-only - cargo) get_cargo_scopes ;; - rustup) get_rustup_scopes ;; - pipx) get_pipx_scopes ;; - go) get_go_scopes ;; - gcloud) get_gcloud_scopes ;; - poetry) get_poetry_scopes ;; - - # System or User - brew) get_brew_scopes ;; - flatpak) get_flatpak_scopes ;; - gem) get_gem_scopes ;; - nuget) get_nuget_scopes ;; - az) get_az_scopes ;; - - # User + Project - uv) get_uv_scopes ;; - pip) get_pip_scopes ;; - npm) get_npm_scopes ;; - pnpm) get_pnpm_scopes ;; - yarn) get_yarn_scopes ;; - composer) get_composer_scopes ;; - jspm) get_jspm_scopes ;; - - # System + User + Project - conda) get_conda_scopes ;; - mamba) get_mamba_scopes ;; - bundler) get_bundler_scopes ;; - - *) - echo "unknown" - ;; - esac -} - -get_manager_packages_by_scope() { - local mgr="$1" - local scope="$2" - - case "$mgr" in - apt) get_apt_packages_by_scope "$scope" ;; - snap) get_snap_packages_by_scope "$scope" ;; - cargo) get_cargo_packages_by_scope "$scope" ;; - rustup) get_rustup_packages_by_scope "$scope" ;; - pipx) get_pipx_packages_by_scope "$scope" ;; - go) get_go_packages_by_scope "$scope" ;; - gcloud) get_gcloud_packages_by_scope "$scope" ;; - poetry) get_poetry_packages_by_scope "$scope" ;; - brew) get_brew_packages_by_scope "$scope" ;; - flatpak) get_flatpak_packages_by_scope "$scope" ;; - gem) get_gem_packages_by_scope "$scope" ;; - nuget) get_nuget_packages_by_scope "$scope" ;; - az) get_az_packages_by_scope "$scope" ;; - uv) get_uv_packages_by_scope "$scope" ;; - pip) get_pip_packages_by_scope "$scope" ;; - npm) get_npm_packages_by_scope "$scope" ;; - pnpm) get_pnpm_packages_by_scope "$scope" ;; - yarn) get_yarn_packages_by_scope "$scope" ;; - composer) get_composer_packages_by_scope "$scope" ;; - jspm) get_jspm_packages_by_scope "$scope" ;; - conda) get_conda_packages_by_scope "$scope" ;; - mamba) get_mamba_packages_by_scope "$scope" ;; - bundler) get_bundler_packages_by_scope "$scope" ;; - *) - echo "0" - ;; - esac -}
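A usage sketch for the two dispatchers (outputs are machine-dependent; the values shown assume a checkout containing a package.json and an npm prefix under $HOME):

    . scripts/lib/scope_detection.sh
    get_manager_scopes npm                      # -> user,project
    get_manager_packages_by_scope npm user      # -> count of global npm packages
    get_manager_packages_by_scope npm project   # -> count from ./package.json
    get_manager_scopes apt                      # -> system (apt is system-only)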
-f "$CATALOG_FILE" ]; then - echo "Error: Tool '$TOOL' not found in catalog" >&2 - exit 1 -fi - -# If no version provided, get current installed version -if [ -z "$VERSION" ]; then - # Try to get version from tool - BINARY_NAME="$(jq -r '.binary_name // .name' "$CATALOG_FILE")" - - if command -v "$BINARY_NAME" >/dev/null 2>&1; then - # Try various version command formats - VERSION="$(timeout 2 "$BINARY_NAME" --version /dev/null | head -1 | grep -oE '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -1 || true)" - - if [ -z "$VERSION" ]; then - echo "Error: Could not detect current version for '$TOOL'" >&2 - echo "Please specify version explicitly: $0 $TOOL VERSION" >&2 - exit 1 - fi - else - echo "Error: '$BINARY_NAME' not installed, cannot detect version" >&2 - echo "Please specify version explicitly: $0 $TOOL VERSION" >&2 - exit 1 - fi -fi - -echo "Pinning '$TOOL' to version $VERSION..." - -# Update catalog with pinned_version field -TMP_FILE=$(mktemp) -jq --arg version "$VERSION" '. + {pinned_version: $version}' "$CATALOG_FILE" > "$TMP_FILE" -mv "$TMP_FILE" "$CATALOG_FILE" - -echo "✓ Pinned '$TOOL' to version $VERSION" -echo "" -echo "This tool will no longer appear in upgrade prompts unless:" -echo " - A version newer than $VERSION is available AND" -echo " - You remove the pin with: ./scripts/unpin_version.sh $TOOL" diff --git a/scripts/reconcile_pip_to_uv.sh b/scripts/reconcile_pip_to_uv.sh deleted file mode 100755 index 8fca348..0000000 --- a/scripts/reconcile_pip_to_uv.sh +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# reconcile_pip_to_uv.sh - Migrate user-installed pip packages to UV -# This script moves Python packages from pip (user-installed) to UV tool management - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$DIR/lib/common.sh" - -DRY_RUN="${DRY_RUN:-0}" - -echo "════════════════════════════════════════════════════════" -echo " Reconcile: pip → UV" -echo "════════════════════════════════════════════════════════" -echo "" - -# Check prerequisites -if ! command -v uv >/dev/null 2>&1; then - echo "❌ UV not installed. Install UV first:" - echo " curl -LsSf https://astral.sh/uv/install.sh | sh" - exit 1 -fi - -if ! command -v python3 >/dev/null 2>&1; then - echo "❌ python3 not found" - exit 1 -fi - -# Get list of user-installed packages -echo "📋 Scanning user-installed pip packages..." -user_packages=$(python3 -m pip list --user --format=freeze 2>/dev/null | grep -v "^#" || true) - -if [ -z "$user_packages" ]; then - echo "✓ No user-installed pip packages found" - exit 0 -fi - -package_count=$(echo "$user_packages" | wc -l) -echo "Found $package_count user-installed packages:" -echo "$user_packages" | sed 's/^/ - /' -echo "" - -if [ "$DRY_RUN" = "1" ]; then - echo "🔍 DRY-RUN MODE: Would migrate these packages" - exit 0 -fi - -# Ask for confirmation -read -p "Migrate these packages to UV? (y/N): " -n 1 -r -echo -if [[ ! $REPLY =~ ^[Yy]$ ]]; then - echo "❌ Migration cancelled" - exit 0 -fi - -echo "" -echo "🔄 Migrating packages to UV..." -echo "" - -# Process each package -migrated=0 -failed=0 -skipped=0 - -while IFS= read -r package_spec; do - [ -z "$package_spec" ] && continue - - # Extract package name (before ==, >=, etc.) - package_name=$(echo "$package_spec" | sed 's/[=<>].*//') - - echo "→ Processing: $package_name" - - # Check if it's a tool candidate (has CLI entry point) - # Try to find the package in PATH - if command -v "$package_name" >/dev/null 2>&1; then - echo " Installing as UV tool..." 
- if uv tool install "$package_name" >/dev/null 2>&1; then - echo " ✓ Installed: $package_name (as UV tool)" - - # Uninstall from pip - echo " Removing from pip..." - if python3 -m pip uninstall -y "$package_name" >/dev/null 2>&1; then - echo " ✓ Removed from pip" - migrated=$((migrated + 1)) - else - echo " ⚠ Warning: Failed to remove from pip (but UV tool installed)" - migrated=$((migrated + 1)) - fi - else - echo " ❌ Failed to install as UV tool" - failed=$((failed + 1)) - fi - else - # Not a tool, just a library - keep in pip or skip - echo " ⏭ Skipped: $package_name (library, not a tool)" - skipped=$((skipped + 1)) - fi - echo "" -done <<< "$user_packages" - -echo "════════════════════════════════════════════════════════" -echo "Migration Summary:" -echo " ✓ Migrated: $migrated packages" -echo " ⏭ Skipped: $skipped packages (libraries)" -echo " ❌ Failed: $failed packages" -echo "════════════════════════════════════════════════════════" - -if [ "$skipped" -gt 0 ]; then - echo "" - echo "Note: Library packages (non-CLI tools) were skipped." - echo " These should remain managed by pip or moved to project requirements." -fi - -if [ "$migrated" -gt 0 ]; then - echo "" - echo "✓ Migration complete! UV is now managing your Python tools." - echo " Run 'uv tool list' to see installed tools" -fi diff --git a/scripts/reconcile_pipx_to_uv.sh b/scripts/reconcile_pipx_to_uv.sh deleted file mode 100755 index ddc8929..0000000 --- a/scripts/reconcile_pipx_to_uv.sh +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# reconcile_pipx_to_uv.sh - Migrate pipx tools to UV -# This script moves all pipx-installed tools to UV tool management - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -. "$DIR/lib/common.sh" - -DRY_RUN="${DRY_RUN:-0}" - -echo "════════════════════════════════════════════════════════" -echo " Reconcile: pipx → UV" -echo "════════════════════════════════════════════════════════" -echo "" - -# Check prerequisites -if ! command -v uv >/dev/null 2>&1; then - echo "❌ UV not installed. Install UV first:" - echo " curl -LsSf https://astral.sh/uv/install.sh | sh" - exit 1 -fi - -if ! command -v pipx >/dev/null 2>&1; then - echo "✓ pipx not installed, nothing to migrate" - exit 0 -fi - -# Get list of pipx tools -echo "📋 Scanning pipx tools..." -pipx_tools=$(pipx list --short 2>/dev/null || true) - -if [ -z "$pipx_tools" ]; then - echo "✓ No pipx tools found" - exit 0 -fi - -tool_count=$(echo "$pipx_tools" | wc -l) -echo "Found $tool_count pipx tools:" -echo "$pipx_tools" | sed 's/^/ - /' -echo "" - -if [ "$DRY_RUN" = "1" ]; then - echo "🔍 DRY-RUN MODE: Would migrate these tools" - exit 0 -fi - -# Ask for confirmation -read -p "Migrate all pipx tools to UV? (y/N): " -n 1 -r -echo -if [[ ! $REPLY =~ ^[Yy]$ ]]; then - echo "❌ Migration cancelled" - exit 0 -fi - -echo "" -echo "🔄 Migrating tools to UV..." -echo "" - -# Process each tool -migrated=0 -failed=0 - -while IFS= read -r tool_name; do - [ -z "$tool_name" ] && continue - - # Extract just the package name (first word) - package_name=$(echo "$tool_name" | awk '{print $1}') - - echo "→ Migrating: $package_name" - - # Install in UV - echo " Installing with UV..." - if uv tool install "$package_name" >/dev/null 2>&1; then - echo " ✓ Installed in UV" - - # Uninstall from pipx - echo " Removing from pipx..." 
- if pipx uninstall "$package_name" >/dev/null 2>&1; then - echo " ✓ Removed from pipx" - migrated=$((migrated + 1)) - else - echo " ⚠ Warning: Failed to remove from pipx (but UV tool installed)" - migrated=$((migrated + 1)) - fi - else - echo " ❌ Failed to install in UV" - failed=$((failed + 1)) - fi - echo "" -done <<< "$pipx_tools" - -echo "════════════════════════════════════════════════════════" -echo "Migration Summary:" -echo " ✓ Migrated: $migrated tools" -echo " ❌ Failed: $failed tools" -echo "════════════════════════════════════════════════════════" - -if [ "$migrated" -gt 0 ]; then - echo "" - echo "✓ Migration complete! UV is now managing your Python tools." - echo " Run 'uv tool list' to see installed tools" - echo "" - echo "Optional cleanup:" - echo " - Remove pipx itself: pip3 uninstall pipx" - echo " - Remove pipx directory: rm -rf ~/.local/pipx" -fi diff --git a/scripts/unpin_version.sh b/scripts/unpin_version.sh deleted file mode 100755 index 5dfb751..0000000 --- a/scripts/unpin_version.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash -# unpin_version.sh - Remove version pin from a tool -set -euo pipefail - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -ROOT="$(cd "$DIR/.." && pwd)" - -TOOL="${1:-}" - -if [ -z "$TOOL" ]; then - echo "Usage: $0 TOOL_NAME" >&2 - echo "" >&2 - echo "Remove version pin from a tool to resume upgrade prompts." >&2 - exit 1 -fi - -CATALOG_FILE="$ROOT/catalog/$TOOL.json" -if [ ! -f "$CATALOG_FILE" ]; then - echo "Error: Tool '$TOOL' not found in catalog" >&2 - exit 1 -fi - -# Check if pinned -PINNED_VERSION="$(jq -r '.pinned_version // empty' "$CATALOG_FILE")" -if [ -z "$PINNED_VERSION" ]; then - echo "Tool '$TOOL' is not pinned" >&2 - exit 0 -fi - -echo "Removing version pin for '$TOOL' (was pinned to $PINNED_VERSION)..." - -# Remove pinned_version field from catalog -TMP_FILE=$(mktemp) -jq 'del(.pinned_version)' "$CATALOG_FILE" > "$TMP_FILE" -mv "$TMP_FILE" "$CATALOG_FILE" - -echo "✓ Removed pin for '$TOOL'" -echo "" -echo "This tool will now appear in upgrade prompts when updates are available." diff --git a/scripts/upgrade_all.sh b/scripts/upgrade_all.sh deleted file mode 100755 index 773b39d..0000000 --- a/scripts/upgrade_all.sh +++ /dev/null @@ -1,744 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# upgrade_all.sh - Orchestrated full system upgrade -# 6-stage workflow: refresh data → upgrade managers → upgrade runtimes → upgrade user packages → upgrade tools → health checks - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PROJECT_ROOT="$(cd "$DIR/.." && pwd)" - -# Source common helpers -. 
"$DIR/lib/common.sh" - -DRY_RUN="${DRY_RUN:-0}" -LOG_DIR="$PROJECT_ROOT/logs" -LOG_FILE="$LOG_DIR/upgrade-$(date +%Y%m%d-%H%M%S).log" - -# Stats tracking -TOTAL_UPGRADED=0 -TOTAL_FAILED=0 -TOTAL_SKIPPED=0 -START_TIME=$(date +%s) - -# Colors (if terminal supports it) -if [ -t 1 ] && command -v tput >/dev/null 2>&1 && [ "$(tput colors)" -ge 8 ]; then - BOLD=$(tput bold) - GREEN=$(tput setaf 2) - YELLOW=$(tput setaf 3) - RED=$(tput setaf 1) - BLUE=$(tput setaf 4) - RESET=$(tput sgr0) -else - BOLD="" - GREEN="" - YELLOW="" - RED="" - BLUE="" - RESET="" -fi - -log() { - local msg="$*" - echo "$msg" | tee -a "$LOG_FILE" -} - -log_stage() { - local stage="$1" - local total="$2" - local desc="$3" - echo "" | tee -a "$LOG_FILE" - echo "${BOLD}${BLUE}[$stage/$total] $desc${RESET}" | tee -a "$LOG_FILE" -} - -log_success() { - echo " ${GREEN}✓${RESET} $*" | tee -a "$LOG_FILE" - TOTAL_UPGRADED=$((TOTAL_UPGRADED + 1)) -} - -log_success_with_info() { - local name="$1" - local version_cmd="$2" - local location - location="$(command -v "$name" 2>/dev/null || echo "unknown")" - local version - version="$(eval "$version_cmd" 2>/dev/null || echo "unknown")" - echo " ${GREEN}✓${RESET} $name ($version at $location)" | tee -a "$LOG_FILE" - TOTAL_UPGRADED=$((TOTAL_UPGRADED + 1)) -} - -log_skip() { - echo " ${YELLOW}⏭${RESET} $*" | tee -a "$LOG_FILE" - TOTAL_SKIPPED=$((TOTAL_SKIPPED + 1)) -} - -log_fail() { - echo " ${RED}❌${RESET} $*" | tee -a "$LOG_FILE" - TOTAL_FAILED=$((TOTAL_FAILED + 1)) -} - -log_info() { - echo " ${BLUE}→${RESET} $*" | tee -a "$LOG_FILE" -} - -log_reconcile() { - echo " ${YELLOW}⚙${RESET} $*" | tee -a "$LOG_FILE" - TOTAL_SKIPPED=$((TOTAL_SKIPPED + 1)) -} - -run_cmd() { - local desc="$1" - shift - - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: $desc" - log_info " Command: $*" - return 0 - fi - - if "$@" >> "$LOG_FILE" 2>&1; then - log_success "$desc" - return 0 - else - log_fail "$desc (see $LOG_FILE for details)" - return 1 - fi -} - -# ============================================================================ -# Stage 1: Refresh Version Data -# ============================================================================ -stage_1_refresh() { - log_stage 1 5 "Refreshing version data..." - - cd "$PROJECT_ROOT" - - # Check if version data is fresh (updated within last hour) - local versions_file="$PROJECT_ROOT/latest_versions.json" - local cache_ttl=3600 # 1 hour in seconds - - if [ -f "$versions_file" ]; then - local file_age=$(($(date +%s) - $(stat -c %Y "$versions_file" 2>/dev/null || stat -f %m "$versions_file" 2>/dev/null || echo 0))) - if [ "$file_age" -lt "$cache_ttl" ]; then - local age_minutes=$((file_age / 60)) - log_skip "Version data is fresh (updated ${age_minutes}m ago, cache: 60m)" - return 0 - fi - fi - - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: make update" - log_skip "Version data refresh (dry-run)" - else - local start=$(date +%s) - log_info "Fetching latest versions (this may take a minute)..." - - # Run make update with progress indication - ( - make update >> "$LOG_FILE" 2>&1 & - local pid=$! - local spinner='⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏' - local i=0 - while kill -0 $pid 2>/dev/null; do - printf "\r ${BLUE}→${RESET} Fetching... %s" "${spinner:i++%${#spinner}:1}" - sleep 0.1 - done - wait $pid - local exit_code=$? - printf "\r" - return $exit_code - ) - - if [ $? 
-eq 0 ]; then - local end=$(date +%s) - local duration=$((end - start)) - log_success "Fetched latest version data (${duration}s)" - else - log_fail "Failed to refresh version data" - return 1 - fi - fi -} - -# ============================================================================ -# Stage 2: Upgrade Package Managers -# ============================================================================ -stage_2_managers() { - log_stage 2 6 "Upgrading package managers..." - - cd "$PROJECT_ROOT" - - # System package managers - if command -v apt-get >/dev/null 2>&1; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: apt-get update && apt-get upgrade" - else - run_cmd "apt (system)" sudo sh -c 'apt-get update && apt-get upgrade -y' || log_skip "apt (not available or failed)" - fi - else - log_skip "apt (not installed)" - fi - - if command -v brew >/dev/null 2>&1; then - run_cmd "brew" sh -c 'brew update && brew upgrade' || log_skip "brew (failed)" - else - log_skip "brew (not installed)" - fi - - if command -v snap >/dev/null 2>&1; then - run_cmd "snap" sudo snap refresh || log_skip "snap (failed)" - else - log_skip "snap (not installed)" - fi - - if command -v flatpak >/dev/null 2>&1; then - run_cmd "flatpak" flatpak update -y || log_skip "flatpak (failed)" - else - log_skip "flatpak (not installed)" - fi - - # Language-specific package managers - if command -v pip3 >/dev/null 2>&1; then - # Skip pip if uv is managing Python packages, suggest migration - if command -v uv >/dev/null 2>&1; then - # Check if there are user-installed pip packages to migrate - local user_packages=$(python3 -m pip list --user --format=freeze 2>/dev/null | grep -v "^#" | wc -l) - if [ "$user_packages" -gt 0 ]; then - log_reconcile "pip ($user_packages user packages, run: make reconcile-pip-to-uv to migrate)" - else - log_skip "pip (uv is managing Python packages)" - fi - # Check if pip module is actually available - elif !
python3 -m pip --version >/dev/null 2>&1; then - log_skip "pip (python3 has no pip module)" - elif [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: pip upgrade" - else - local upgrade_success=0 - # Check if in virtualenv - skip --user flag if so - if [ -n "${VIRTUAL_ENV:-}" ]; then - python3 -m pip install --upgrade pip >> "$LOG_FILE" 2>&1 && upgrade_success=1 - else - python3 -m pip install --user --upgrade pip >> "$LOG_FILE" 2>&1 && upgrade_success=1 - fi - - if [ "$upgrade_success" = "1" ]; then - log_success_with_info "pip" "python3 -m pip --version | awk '{print \$2}'" - else - log_fail "pip (see $LOG_FILE for details)" - fi - fi - else - log_skip "pip (not installed)" - fi - - if command -v uv >/dev/null 2>&1; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: uv self update" - else - if uv self update >> "$LOG_FILE" 2>&1; then - log_success_with_info "uv" "uv --version | awk '{print \$2}'" - else - log_fail "uv (see $LOG_FILE for details)" - fi - fi - else - log_skip "uv (not installed)" - fi - - if command -v pipx >/dev/null 2>&1; then - # Skip pipx if uv is managing Python tools, suggest migration - if command -v uv >/dev/null 2>&1; then - # Check if there are pipx tools to migrate - local pipx_tools=$(pipx list --short 2>/dev/null | wc -l) - if [ "$pipx_tools" -gt 0 ]; then - log_reconcile "pipx ($pipx_tools tools installed, run: make reconcile-pipx-to-uv to migrate)" - else - log_skip "pipx (uv is managing Python tools)" - fi - elif [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: pip3 install --upgrade pipx" - else - # Check if in virtualenv - skip --user flag if so - local upgrade_success=0 - if [ -n "${VIRTUAL_ENV:-}" ]; then - pip3 install --upgrade pipx >> "$LOG_FILE" 2>&1 && upgrade_success=1 - else - pip3 install --user --upgrade pipx >> "$LOG_FILE" 2>&1 && upgrade_success=1 - fi - - if [ "$upgrade_success" = "1" ]; then - log_success_with_info "pipx" "pipx --version" - else - log_fail "pipx (see $LOG_FILE for details)" - fi - fi - else - log_skip "pipx (not installed)" - fi - - if command -v npm >/dev/null 2>&1; then - local npm_path - npm_path="$(command -v npm)" - # Check if npm is system-managed (not nvm) - if [[ "$npm_path" == /usr/bin/npm ]] || [[ "$npm_path" == /usr/local/bin/npm ]]; then - log_reconcile "npm (system-managed at $npm_path, run: ./scripts/install_node.sh reconcile)" - else - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: npm install -g npm@latest" - else - if npm install -g npm@latest >> "$LOG_FILE" 2>&1; then - log_success_with_info "npm" "npm --version" - else - log_fail "npm (see $LOG_FILE for details)" - fi - fi - fi - else - log_skip "npm (not installed)" - fi - - if command -v pnpm >/dev/null 2>&1; then - local pnpm_path - pnpm_path="$(command -v pnpm)" - # Check if pnpm is system-managed (not nvm/corepack) - if [[ "$pnpm_path" == /usr/bin/pnpm ]] || [[ "$pnpm_path" == /usr/local/bin/pnpm ]]; then - log_reconcile "pnpm (system-managed at $pnpm_path, run: ./scripts/install_node.sh reconcile)" - else - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: pnpm upgrade" - else - local upgrade_success=0 - if command -v corepack >/dev/null 2>&1; then - corepack prepare pnpm@latest --activate >> "$LOG_FILE" 2>&1 && upgrade_success=1 - else - npm install -g pnpm@latest >> "$LOG_FILE" 2>&1 && upgrade_success=1 - fi - - if [ "$upgrade_success" = "1" ]; then - log_success_with_info "pnpm" "pnpm --version" - else - log_fail "pnpm (see $LOG_FILE for details)" - fi - fi - fi - else - log_skip "pnpm (not installed)" - fi - - if command -v yarn 
>/dev/null 2>&1; then - local yarn_path - yarn_path="$(command -v yarn)" - # Check if yarn is system-managed (not nvm/corepack) - if [[ "$yarn_path" == /usr/bin/yarn ]] || [[ "$yarn_path" == /usr/local/bin/yarn ]]; then - log_reconcile "yarn (system-managed at $yarn_path, run: ./scripts/install_node.sh reconcile)" - else - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: yarn upgrade" - else - local upgrade_success=0 - if command -v corepack >/dev/null 2>&1; then - corepack prepare yarn@stable --activate >> "$LOG_FILE" 2>&1 && upgrade_success=1 - else - npm install -g yarn@latest >> "$LOG_FILE" 2>&1 && upgrade_success=1 - fi - - if [ "$upgrade_success" = "1" ]; then - log_success_with_info "yarn" "yarn --version" - else - log_fail "yarn (see $LOG_FILE for details)" - fi - fi - fi - else - log_skip "yarn (not installed)" - fi - - if command -v cargo >/dev/null 2>&1 && command -v rustup >/dev/null 2>&1; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: rustup update" - else - if rustup update >> "$LOG_FILE" 2>&1; then - log_success_with_info "rustup" "rustup --version | head -1 | awk '{print \$2}'" - else - log_fail "rustup (see $LOG_FILE for details)" - fi - fi - else - log_skip "rustup (not installed)" - fi - - if command -v gem >/dev/null 2>&1; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: gem update --system" - else - if gem update --system >> "$LOG_FILE" 2>&1; then - log_success_with_info "gem" "gem --version" - else - log_fail "gem (see $LOG_FILE for details)" - fi - fi - else - log_skip "gem (not installed)" - fi - - if command -v composer >/dev/null 2>&1; then - # Check if composer is system-installed (can't self-update) - if [ "$(which composer)" = "/usr/bin/composer" ] || [ "$(which composer)" = "/usr/local/bin/composer" ]; then - log_skip "composer (system-managed, use apt/brew to update)" - elif [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: composer self-update" - else - if composer self-update >> "$LOG_FILE" 2>&1; then - log_success_with_info "composer" "composer --version | head -1 | awk '{print \$3}'" - else - log_fail "composer (see $LOG_FILE for details)" - fi - fi - else - log_skip "composer (not installed)" - fi - - if command -v poetry >/dev/null 2>&1; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: poetry upgrade" - else - local upgrade_success=0 - # Try poetry self update first (Poetry 1.2+) - if poetry self update --help >/dev/null 2>&1; then - poetry self update >> "$LOG_FILE" 2>&1 && upgrade_success=1 - # Fallback to uv tool upgrade if poetry is managed by uv - elif command -v uv >/dev/null 2>&1 && uv tool list 2>/dev/null | grep -q "^poetry"; then - uv tool upgrade poetry >> "$LOG_FILE" 2>&1 && upgrade_success=1 - # Fallback to pipx upgrade if poetry is managed by pipx - elif command -v pipx >/dev/null 2>&1 && pipx list 2>/dev/null | grep -q "poetry"; then - pipx upgrade poetry >> "$LOG_FILE" 2>&1 && upgrade_success=1 - else - log_skip "poetry (no automatic update method)" - upgrade_success=-1 - fi - - if [ "$upgrade_success" = "1" ]; then - log_success_with_info "poetry" "poetry --version | awk '{print \$3}'" - elif [ "$upgrade_success" = "0" ]; then - log_fail "poetry (see $LOG_FILE for details)" - fi - fi - else - log_skip "poetry (not installed)" - fi -} - -# ============================================================================ -# Stage 3: Upgrade Language Runtimes -# ============================================================================ -stage_3_runtimes() { - log_stage 3 6 "Upgrading language runtimes..."
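# Sketch (assumption, not from the original script): the five runtime blocks
# below all share one shape and could be collapsed into a table-driven helper
# along these lines (DRY_RUN handling omitted for brevity):
upgrade_runtime() { # usage: upgrade_runtime NAME SCRIPT VERSION_CMD
  local name="$1" script="$2" version_cmd="$3"
  # Skip quietly when the per-runtime install script is absent
  [ -f "$script" ] || { log_skip "$name (install script not found)"; return 0; }
  if "$script" update >> "$LOG_FILE" 2>&1; then
    log_success_with_info "$name" "$version_cmd"
  else
    log_skip "$name (upgrade failed or not managed)"
  fi
}
# e.g. upgrade_runtime "Go" ./scripts/install_go.sh "go version | awk '{print \$3}' | sed 's/^go//'"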
- - cd "$PROJECT_ROOT" - - # Python - if [ -f "./scripts/install_python.sh" ]; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: ./scripts/install_python.sh update" - else - if ./scripts/install_python.sh update >> "$LOG_FILE" 2>&1; then - if command -v python3 >/dev/null 2>&1; then - log_success_with_info "Python" "python3 --version | awk '{print \$2}'" - else - log_success "Python runtime" - fi - else - log_skip "Python (upgrade failed or not managed)" - fi - fi - else - log_skip "Python (install script not found)" - fi - - # Node.js - if [ -f "./scripts/install_node.sh" ]; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: ./scripts/install_node.sh update" - else - if ./scripts/install_node.sh update >> "$LOG_FILE" 2>&1; then - if command -v node >/dev/null 2>&1; then - log_success_with_info "Node.js" "node --version | sed 's/^v//'" - else - log_success "Node.js runtime" - fi - else - log_skip "Node.js (upgrade failed or not managed)" - fi - fi - else - log_skip "Node.js (install script not found)" - fi - - # Go - if [ -f "./scripts/install_go.sh" ]; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: ./scripts/install_go.sh update" - else - if ./scripts/install_go.sh update >> "$LOG_FILE" 2>&1; then - if command -v go >/dev/null 2>&1; then - log_success_with_info "Go" "go version | awk '{print \$3}' | sed 's/^go//'" - else - log_success "Go runtime" - fi - else - log_skip "Go (upgrade failed or not managed)" - fi - fi - else - log_skip "Go (install script not found)" - fi - - # Ruby - if [ -f "./scripts/install_ruby.sh" ]; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: ./scripts/install_ruby.sh update" - else - if ./scripts/install_ruby.sh update >> "$LOG_FILE" 2>&1; then - if command -v ruby >/dev/null 2>&1; then - log_success_with_info "Ruby" "ruby --version | awk '{print \$2}'" - else - log_success "Ruby runtime" - fi - else - log_skip "Ruby (upgrade failed or not managed)" - fi - fi - else - log_skip "Ruby (install script not found)" - fi - - # Rust - if [ -f "./scripts/install_rust.sh" ]; then - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: ./scripts/install_rust.sh update" - else - if ./scripts/install_rust.sh update >> "$LOG_FILE" 2>&1; then - if command -v rustc >/dev/null 2>&1; then - log_success_with_info "Rust" "rustc --version | awk '{print \$2}'" - else - log_success "Rust runtime" - fi - else - log_skip "Rust (upgrade failed or not managed)" - fi - fi - else - log_skip "Rust (install script not found)" - fi -} - -# ============================================================================ -# Stage 4: Upgrade Packages Managed by Package Managers -# ============================================================================ -stage_4_user_packages() { - log_stage 4 5 "Upgrading packages managed by package managers..." - - cd "$PROJECT_ROOT" - - # UV tools - if command -v uv >/dev/null 2>&1; then - log_info "Upgrading uv tools..." 
- if [ "$DRY_RUN" = "0" ]; then - local tools - # Filter out binary lines (starting with dash) and keep only tool names - tools="$(uv tool list 2>/dev/null | grep -v '^-' | awk 'NF > 0 {print $1}' || true)" - if [ -n "$tools" ]; then - local count=$(echo "$tools" | wc -l) - log_info "Found $count uv tools to upgrade" - while IFS= read -r tool; do - [ -z "$tool" ] && continue - if uv tool upgrade "$tool" >> "$LOG_FILE" 2>&1; then - if command -v "$tool" >/dev/null 2>&1; then - log_success_with_info "$tool" "$tool --version 2>/dev/null | head -1 | awk '{print \$NF}' || echo 'installed'" - else - log_success "uv tool: $tool" - fi - else - log_skip "uv tool: $tool (failed)" - fi - done <<< "$tools" - else - log_skip "uv (no tools installed)" - fi - else - log_info "DRY-RUN: uv tool upgrade " - fi - else - log_skip "uv (not installed)" - fi - - # Pipx packages - if command -v pipx >/dev/null 2>&1; then - # Skip pipx packages if uv is managing Python tools - if command -v uv >/dev/null 2>&1; then - local pipx_tools=$(pipx list --short 2>/dev/null | wc -l) - if [ "$pipx_tools" -gt 0 ]; then - log_reconcile "pipx packages ($pipx_tools tools, run: make reconcile-pipx-to-uv to migrate)" - else - log_skip "pipx packages (uv tools handle this)" - fi - elif [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: pipx upgrade-all" - else - local temp_log=$(mktemp) - if pipx upgrade-all >> "$LOG_FILE" 2>&1; then - log_success "pipx packages" - else - # Check if only failure was missing metadata (known issue) - if grep -q "missing internal pipx metadata" "$LOG_FILE" 2>/dev/null; then - local broken_pkg=$(grep -oP "Not upgrading \K\w+" "$LOG_FILE" 2>/dev/null | tail -1) - log_reconcile "pipx packages (partial: $broken_pkg has missing metadata, run: pipx uninstall $broken_pkg && pipx install $broken_pkg)" - else - log_fail "pipx packages (see $LOG_FILE for details)" - fi - fi - rm -f "$temp_log" - fi - else - log_skip "pipx (not installed)" - fi - - # Cargo install-update - if command -v cargo >/dev/null 2>&1; then - if ! command -v cargo-install-update >/dev/null 2>&1; then - run_cmd "cargo-update tool" cargo install cargo-update || log_skip "cargo-update (install failed)" - fi - - if command -v cargo-install-update >/dev/null 2>&1; then - run_cmd "cargo packages" cargo install-update -a || log_skip "cargo packages (failed)" - fi - else - log_skip "cargo (not installed)" - fi - - # Gem packages - if command -v gem >/dev/null 2>&1; then - run_cmd "gem packages" gem update || log_skip "gem packages (failed)" - else - log_skip "gem (not installed)" - fi - - # Composer global packages - if command -v composer >/dev/null 2>&1; then - run_cmd "composer packages" composer global update || log_skip "composer packages (failed)" - else - log_skip "composer (not installed)" - fi -} - -# ============================================================================ -# Stage 5: Upgrade CLI Tools -# ============================================================================ -stage_5_tools() { - log_stage 5 6 "Upgrading CLI tools..." 
- - cd "$PROJECT_ROOT" - - log_info "Using upgrade-managed for comprehensive package upgrades" - - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: make upgrade-managed-user" - log_skip "CLI tools upgrade (dry-run)" - else - if make upgrade-managed-user >> "$LOG_FILE" 2>&1; then - log_success "Upgraded user-scoped packages via upgrade-managed" - else - log_skip "upgrade-managed (failed or not available)" - fi - fi - - log_info "For tool-specific upgrades, run: make upgrade-" - log_info "For interactive guide, run: make upgrade" -} - -# ============================================================================ -# Stage 6: Package Manager Health Checks -# ============================================================================ -stage_6_health_checks() { - log_stage 6 6 "Running package manager health checks..." - - cd "$PROJECT_ROOT" - - # Python package manager health check - log_info "Checking Python package managers..." - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: make check-python-managers" - else - if make check-python-managers >> "$LOG_FILE" 2>&1; then - log_success "Python package managers (no conflicts)" - else - log_reconcile "Python package managers (conflicts detected, see log for guidance)" - fi - fi - - # Node.js package manager health check - log_info "Checking Node.js package managers..." - if [ "$DRY_RUN" = "1" ]; then - log_info "DRY-RUN: make check-node-managers" - else - if make check-node-managers >> "$LOG_FILE" 2>&1; then - log_success "Node.js package managers (no conflicts)" - else - log_reconcile "Node.js package managers (conflicts detected, see log for guidance)" - fi - fi -} - -# ============================================================================ -# Main Execution -# ============================================================================ - -main() { - # Create log directory if it doesn't exist - mkdir -p "$LOG_DIR" - - # Header - echo "${BOLD}═══════════════════════════════════════════════════════════${RESET}" - echo "${BOLD} Full System Upgrade${RESET}" - echo "${BOLD}═══════════════════════════════════════════════════════════${RESET}" - - if [ "$DRY_RUN" = "1" ]; then - echo "${YELLOW}DRY-RUN MODE: No changes will be made${RESET}" - fi - - echo "" - echo "Log: $LOG_FILE" - echo "" - - # Execute stages - stage_1_refresh || true - stage_2_managers || true - stage_3_runtimes || true - stage_4_user_packages || true - stage_5_tools || true - stage_6_health_checks || true - - # Summary - local end_time=$(date +%s) - local total_time=$((end_time - START_TIME)) - local minutes=$((total_time / 60)) - local seconds=$((total_time % 60)) - - echo "" - echo "${BOLD}═══════════════════════════════════════════════════════════${RESET}" - echo "${BOLD}Upgrade Summary:${RESET}" - echo " ${GREEN}✓ Successful: $TOTAL_UPGRADED components${RESET}" - echo " ${YELLOW}⏭ Skipped: $TOTAL_SKIPPED components${RESET}" - echo " ${RED}❌ Failed: $TOTAL_FAILED components${RESET}" - echo "" - echo "Time: ${minutes}m ${seconds}s" - echo "Log: $LOG_FILE" - echo "${BOLD}═══════════════════════════════════════════════════════════${RESET}" - - # Exit with error if any failures - if [ "$TOTAL_FAILED" -gt 0 ]; then - exit 1 - fi -} - -main "$@" diff --git a/smart_column.py b/smart_column.py index 10d1013..2407667 100644 --- a/smart_column.py +++ b/smart_column.py @@ -217,19 +217,5 @@ def main(): pass os._exit(0) -def _sigint_handler(signum, frame): - """Handle SIGINT (Ctrl-C) with immediate clean exit.""" - # Suppress any partial output issues by forcing immediate exit - 
print("", file=sys.stderr) - os._exit(130) # Standard Unix exit code for SIGINT, immediate exit - - if __name__ == '__main__': - # Install signal handler for clean Ctrl-C behavior - signal.signal(signal.SIGINT, _sigint_handler) - try: - main() - except KeyboardInterrupt: - # Fallback: clean exit on Ctrl-C without stack trace - print("", file=sys.stderr) - os._exit(130) # Standard Unix exit code for SIGINT, immediate exit + main() diff --git a/tools_snapshot.json b/tools_snapshot.json deleted file mode 100644 index 8331cf0..0000000 --- a/tools_snapshot.json +++ /dev/null @@ -1,1061 +0,0 @@ -{ - "__meta__": { - "count": 70, - "created_at": "2025-10-23T13:46:16Z", - "offline": false, - "partial_failures": 0, - "schema_version": 1 - }, - "tools": [ - { - "category": "runtimes", - "classification_reason_selected": "no-match", - "installed": "1.25.3", - "installed_method": "unknown", - "installed_path_selected": "/usr/local/go/bin/go", - "installed_version": "1.25.3", - "latest_upstream": "1.25.3", - "latest_url": "https://github.com/golang/go/releases/tag/go1.25.3", - "latest_version": "1.25.3", - "status": "UP-TO-DATE", - "tool": "go", - "tool_url": "https://github.com/golang/go", - "upstream_method": "github" - }, - { - "category": "runtimes", - "classification_reason_selected": "official-installer", - "installed": "0.9.4", - "installed_method": "github binary", - "installed_path_selected": "/home/cybot/.local/bin/uv", - "installed_version": "0.9.4", - "latest_upstream": "0.9.4", - "latest_url": "https://github.com/astral-sh/uv/releases/tag/0.9.4", - "latest_version": "0.9.4", - "status": "UP-TO-DATE", - "tool": "uv", - "tool_url": "https://github.com/astral-sh/uv", - "upstream_method": "github" - }, - { - "category": "runtimes", - "classification_reason_selected": "path-contains-uv-python", - "installed": "3.14.0", - "installed_method": "uv python", - "installed_path_selected": "/home/cybot/.venvs/dev/bin/python", - "installed_version": "3.14.0", - "latest_upstream": "3.15.0", - "latest_url": "https://github.com/python/cpython/releases/tag/v3.15.0a1", - "latest_version": "3.15.0", - "status": "OUTDATED", - "tool": "python", - "tool_url": "https://github.com/python/cpython", - "upstream_method": "github" - }, - { - "category": "runtimes", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "25.2", - "latest_url": "https://pypi.org/project/pip/", - "latest_version": "25.2", - "status": "NOT INSTALLED", - "tool": "pip", - "tool_url": "https://pypi.org/project/pip/", - "upstream_method": "uv tool" - }, - { - "category": "runtimes", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "1.8.0", - "latest_url": "https://pypi.org/project/pipx/", - "latest_version": "1.8.0", - "status": "NOT INSTALLED", - "tool": "pipx", - "tool_url": "https://pypi.org/project/pipx/", - "upstream_method": "uv tool" - }, - { - "category": "runtimes", - "classification_reason_selected": "path-contains-uv", - "installed": "2.2.1", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/poetry", - "installed_version": "2.2.1", - "latest_upstream": "2.2.1", - "latest_url": "https://pypi.org/project/poetry/", - "latest_version": "2.2.1", - "status": "UP-TO-DATE", - "tool": "poetry", - "tool_url": "https://pypi.org/project/poetry/", - "upstream_method": "uv tool" - }, - { 
- "category": "runtimes", - "classification_reason_selected": "path-under-~/.cargo/bin", - "installed": "1.90.0", - "installed_method": "rustup/cargo", - "installed_path_selected": "/home/cybot/.cargo/bin/rustc", - "installed_version": "1.90.0", - "latest_upstream": "1.90.0", - "latest_url": "https://github.com/rust-lang/rust/releases/tag/1.90.0", - "latest_version": "1.90.0", - "status": "UP-TO-DATE", - "tool": "rust", - "tool_url": "https://github.com/rust-lang/rust", - "upstream_method": "github" - }, - { - "category": "runtimes", - "classification_reason_selected": "path-under-~/.nvm", - "installed": "24.10.0", - "installed_method": "nvm/npm", - "installed_path_selected": "/home/cybot/.nvm/versions/node/v24.10.0/bin/node", - "installed_version": "24.10.0", - "latest_upstream": "25.0.0", - "latest_url": "https://github.com/nodejs/node/releases/tag/v25.0.0", - "latest_version": "25.0.0", - "status": "OUTDATED", - "tool": "node", - "tool_url": "https://github.com/nodejs/node", - "upstream_method": "github" - }, - { - "category": "runtimes", - "classification_reason_selected": "path-under-~/.nvm", - "installed": "11.6.2", - "installed_method": "nvm/npm", - "installed_path_selected": "/home/cybot/.nvm/versions/node/v24.10.0/bin/npm", - "installed_version": "11.6.2", - "latest_upstream": "11.6.2", - "latest_url": "https://www.npmjs.com/package/npm", - "latest_version": "11.6.2", - "status": "UP-TO-DATE", - "tool": "npm", - "tool_url": "https://www.npmjs.com/package/npm", - "upstream_method": "npm (nvm)" - }, - { - "category": "runtimes", - "classification_reason_selected": "path-under-~/.nvm", - "installed": "10.18.3", - "installed_method": "nvm/npm", - "installed_path_selected": "/home/cybot/.nvm/versions/node/v24.10.0/bin/pnpm", - "installed_version": "10.18.3", - "latest_upstream": "10.18.3", - "latest_url": "https://www.npmjs.com/package/pnpm", - "latest_version": "10.18.3", - "status": "UP-TO-DATE", - "tool": "pnpm", - "tool_url": "https://www.npmjs.com/package/pnpm", - "upstream_method": "npm (nvm)" - }, - { - "category": "runtimes", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "4.10.3", - "latest_url": "https://www.npmjs.com/package/yarn", - "latest_version": "4.10.3", - "status": "NOT INSTALLED", - "tool": "yarn", - "tool_url": "https://www.npmjs.com/package/yarn", - "upstream_method": "yarn-tags" - }, - { - "category": "runtimes", - "classification_reason_selected": "path-under-/usr/local/bin", - "installed": "2.8.12", - "installed_method": "/usr/local/bin", - "installed_path_selected": "/usr/local/bin/composer", - "installed_version": "2.8.12", - "latest_upstream": "2.8.12", - "latest_url": "https://github.com/composer/composer/releases/tag/2.8.12", - "latest_version": "2.8.12", - "status": "UP-TO-DATE", - "tool": "composer", - "tool_url": "https://github.com/composer/composer", - "upstream_method": "github" - }, - { - "category": "other", - "classification_reason_selected": "rbenv-shim-or-version", - "installed": "3.4.7", - "installed_method": "rbenv", - "installed_path_selected": "/home/sme/.rbenv/shims/ruby", - "installed_version": "3.4.7", - "latest_upstream": "3.4.7", - "latest_url": "https://github.com/ruby/ruby/releases/tag/v3.4.7", - "latest_version": "3.4.7", - "status": "UP-TO-DATE", - "tool": "ruby", - "tool_url": "https://github.com/ruby/ruby", - "upstream_method": "github" - }, - { - "category": "runtimes", - "classification_reason_selected": "", - 
"installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "3.7.2", - "latest_url": "https://github.com/rubygems/rubygems/releases/tag/v3.7.2", - "latest_version": "3.7.2", - "status": "NOT INSTALLED", - "tool": "gem", - "tool_url": "https://github.com/rubygems/rubygems", - "upstream_method": "github" - }, - { - "category": "search", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "10.3.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/fd", - "installed_version": "10.3.0", - "latest_upstream": "10.3.0", - "latest_url": "https://github.com/sharkdp/fd/releases/tag/v10.3.0", - "latest_version": "10.3.0", - "status": "UP-TO-DATE", - "tool": "fd", - "tool_url": "https://github.com/sharkdp/fd", - "upstream_method": "github" - }, - { - "category": "search", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.66.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/fzf", - "installed_version": "0.66.0", - "latest_upstream": "0.66.0", - "latest_url": "https://github.com/junegunn/fzf/releases/tag/v0.66.0", - "latest_version": "0.66.0", - "status": "UP-TO-DATE", - "tool": "fzf", - "tool_url": "https://github.com/junegunn/fzf", - "upstream_method": "github" - }, - { - "category": "editors", - "classification_reason_selected": "dpkg-query", - "installed": "5.9.0", - "installed_method": "apt/dpkg", - "installed_path_selected": "/usr/bin/ctags", - "installed_version": "5.9.0", - "latest_upstream": "6.2.0", - "latest_url": "https://github.com/universal-ctags/ctags/releases/tag/v6.2.0", - "latest_version": "6.2.0", - "status": "OUTDATED", - "tool": "ctags", - "tool_url": "https://github.com/universal-ctags/ctags", - "upstream_method": "github" - }, - { - "category": "search", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.10.9", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/rga", - "installed_version": "0.10.9", - "latest_upstream": "0.10.9", - "latest_url": "https://github.com/phiresky/ripgrep-all/releases/tag/v0.10.9", - "latest_version": "0.10.9", - "status": "UP-TO-DATE", - "tool": "rga", - "tool_url": "https://github.com/phiresky/ripgrep-all", - "upstream_method": "github" - }, - { - "category": "json-yaml", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.8.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/jq", - "installed_version": "1.8.1", - "latest_upstream": "1.8.1", - "latest_url": "https://github.com/jqlang/jq/releases/tag/jq-1.8.1", - "latest_version": "1.8.1", - "status": "UP-TO-DATE", - "tool": "jq", - "tool_url": "https://github.com/jqlang/jq", - "upstream_method": "github" - }, - { - "category": "json-yaml", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "4.48.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/yq", - "installed_version": "4.48.1", - "latest_upstream": "4.48.1", - "latest_url": "https://github.com/mikefarah/yq/releases/tag/v4.48.1", - "latest_version": "4.48.1", - "status": "UP-TO-DATE", - "tool": "yq", - "tool_url": "https://github.com/mikefarah/yq", - "upstream_method": "github" - }, - { - "category": "json-yaml", - "classification_reason_selected": "path-under-~/.local/bin", - 
"installed": "2.8.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/dasel", - "installed_version": "2.8.1", - "latest_upstream": "2.8.1", - "latest_url": "https://github.com/TomWright/dasel/releases/tag/v2.8.1", - "latest_version": "2.8.1", - "status": "UP-TO-DATE", - "tool": "dasel", - "tool_url": "https://github.com/TomWright/dasel", - "upstream_method": "github" - }, - { - "category": "editors", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.0.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/sd", - "installed_version": "1.0.0", - "latest_upstream": "1.0.0", - "latest_url": "https://crates.io/crates/sd", - "latest_version": "1.0.0", - "status": "UP-TO-DATE", - "tool": "sd", - "tool_url": "https://crates.io/crates/sd", - "upstream_method": "cargo" - }, - { - "category": "editors", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "", - "latest_url": "", - "latest_version": "", - "status": "NOT INSTALLED", - "tool": "prename", - "tool_url": "", - "upstream_method": "" - }, - { - "category": "editors", - "classification_reason_selected": "dpkg-query", - "installed": "2.39.3", - "installed_method": "apt/dpkg", - "installed_path_selected": "/usr/bin/rename.ul", - "installed_version": "2.39.3", - "latest_upstream": "", - "latest_url": "", - "latest_version": "", - "status": "UNKNOWN", - "tool": "rename.ul", - "tool_url": "", - "upstream_method": "" - }, - { - "category": "editors", - "classification_reason_selected": "dpkg-query", - "installed": "0.69", - "installed_method": "apt/dpkg", - "installed_path_selected": "/usr/bin/sponge", - "installed_version": "0.69", - "latest_upstream": "", - "latest_url": "", - "latest_version": "", - "status": "UNKNOWN", - "tool": "sponge", - "tool_url": "", - "upstream_method": "" - }, - { - "category": "data", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.13.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/xsv", - "installed_version": "0.13.0", - "latest_upstream": "0.13.0", - "latest_url": "https://crates.io/crates/xsv", - "latest_version": "0.13.0", - "status": "UP-TO-DATE", - "tool": "xsv", - "tool_url": "https://crates.io/crates/xsv", - "upstream_method": "cargo" - }, - { - "category": "editors", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.26.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/bat", - "installed_version": "0.26.0", - "latest_upstream": "0.26.0", - "latest_url": "https://github.com/sharkdp/bat/releases/tag/v0.26.0", - "latest_version": "0.26.0", - "status": "UP-TO-DATE", - "tool": "bat", - "tool_url": "https://github.com/sharkdp/bat", - "upstream_method": "github" - }, - { - "category": "editors", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.18.2", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/delta", - "installed_version": "0.18.2", - "latest_upstream": "0.18.2", - "latest_url": "https://github.com/dandavison/delta/releases/tag/0.18.2", - "latest_version": "0.18.2", - "status": "UP-TO-DATE", - "tool": "delta", - "tool_url": "https://github.com/dandavison/delta", - "upstream_method": "github" - }, - { - "category": 
"automation", - "classification_reason_selected": "dpkg-query", - "installed": "5.5", - "installed_method": "apt/dpkg", - "installed_path_selected": "/usr/bin/entr", - "installed_version": "5.5", - "latest_upstream": "5.7", - "latest_url": "https://github.com/eradman/entr/releases/tag/5.7", - "latest_version": "5.7", - "status": "OUTDATED", - "tool": "entr", - "tool_url": "https://github.com/eradman/entr", - "upstream_method": "github" - }, - { - "category": "automation", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "2.3.2", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/watchexec", - "installed_version": "2.3.2", - "latest_upstream": "2.3.2", - "latest_url": "https://github.com/watchexec/watchexec/releases/tag/v2.3.2", - "latest_version": "2.3.2", - "status": "UP-TO-DATE", - "tool": "watchexec", - "tool_url": "https://github.com/watchexec/watchexec", - "upstream_method": "github" - }, - { - "category": "other", - "classification_reason_selected": "dpkg-query", - "installed": "20231122", - "installed_method": "apt/dpkg", - "installed_path_selected": "/usr/bin/parallel", - "installed_version": "20231122", - "latest_upstream": "20250922", - "latest_url": "https://ftp.gnu.org/gnu/parallel/", - "latest_version": "20250922", - "status": "OUTDATED", - "tool": "parallel", - "tool_url": "https://ftp.gnu.org/gnu/parallel/", - "upstream_method": "gnu-ftp" - }, - { - "category": "search", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "15.0.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/rg", - "installed_version": "15.0.0", - "latest_upstream": "15.0.0", - "latest_url": "https://github.com/BurntSushi/ripgrep/releases/tag/15.0.0", - "latest_version": "15.0.0", - "status": "UP-TO-DATE", - "tool": "ripgrep", - "tool_url": "https://github.com/BurntSushi/ripgrep", - "upstream_method": "github" - }, - { - "category": "search", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.39.6", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/ast-grep", - "installed_version": "0.39.6", - "latest_upstream": "0.39.6", - "latest_url": "https://github.com/ast-grep/ast-grep/releases/tag/0.39.6", - "latest_version": "0.39.6", - "status": "UP-TO-DATE", - "tool": "ast-grep", - "tool_url": "https://github.com/ast-grep/ast-grep", - "upstream_method": "github" - }, - { - "category": "http", - "classification_reason_selected": "path-contains-uv", - "installed": "3.2.4", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/http", - "installed_version": "3.2.4", - "latest_upstream": "3.2.4", - "latest_url": "https://pypi.org/project/httpie/", - "latest_version": "3.2.4", - "status": "UP-TO-DATE", - "tool": "httpie", - "tool_url": "https://pypi.org/project/httpie/", - "upstream_method": "uv tool" - }, - { - "category": "http", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.8.2", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/curlie", - "installed_version": "1.8.2", - "latest_upstream": "1.8.2", - "latest_url": "https://github.com/rs/curlie/releases/tag/v1.8.2", - "latest_version": "1.8.2", - "status": "UP-TO-DATE", - "tool": "curlie", - "tool_url": "https://github.com/rs/curlie", - "upstream_method": "github" - }, - { - "category": "automation", - 
"classification_reason_selected": "path-under-~/.local/bin", - "installed": "2.37.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/direnv", - "installed_version": "2.37.1", - "latest_upstream": "2.37.1", - "latest_url": "https://github.com/direnv/direnv/releases/tag/v2.37.1", - "latest_version": "2.37.1", - "status": "UP-TO-DATE", - "tool": "direnv", - "tool_url": "https://github.com/direnv/direnv", - "upstream_method": "github" - }, - { - "category": "cloud-infra", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.13.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/dive", - "installed_version": "0.13.1", - "latest_upstream": "0.13.1", - "latest_url": "https://github.com/wagoodman/dive/releases/tag/v0.13.1", - "latest_version": "0.13.1", - "status": "UP-TO-DATE", - "tool": "dive", - "tool_url": "https://github.com/wagoodman/dive", - "upstream_method": "github" - }, - { - "category": "security", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.67.2", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/trivy", - "installed_version": "0.67.2", - "latest_upstream": "0.67.2", - "latest_url": "https://github.com/aquasecurity/trivy/releases/tag/v0.67.2", - "latest_version": "0.67.2", - "status": "UP-TO-DATE", - "tool": "trivy", - "tool_url": "https://github.com/aquasecurity/trivy", - "upstream_method": "github" - }, - { - "category": "security", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "8.28.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/gitleaks", - "installed_version": "8.28.0", - "latest_upstream": "8.28.0", - "latest_url": "https://github.com/gitleaks/gitleaks/releases/tag/v8.28.0", - "latest_version": "8.28.0", - "status": "UP-TO-DATE", - "tool": "gitleaks", - "tool_url": "https://github.com/gitleaks/gitleaks", - "upstream_method": "github" - }, - { - "category": "other", - "classification_reason_selected": "path-contains-uv", - "installed": "4.3.0", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/pre-commit", - "installed_version": "4.3.0", - "latest_upstream": "4.3.0", - "latest_url": "https://pypi.org/project/pre-commit/", - "latest_version": "4.3.0", - "status": "UP-TO-DATE", - "tool": "pre-commit", - "tool_url": "https://pypi.org/project/pre-commit/", - "upstream_method": "uv tool" - }, - { - "category": "security", - "classification_reason_selected": "path-contains-uv", - "installed": "1.8.6", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/bandit", - "installed_version": "1.8.6", - "latest_upstream": "1.8.6", - "latest_url": "https://pypi.org/project/bandit/", - "latest_version": "1.8.6", - "status": "UP-TO-DATE", - "tool": "bandit", - "tool_url": "https://pypi.org/project/bandit/", - "upstream_method": "uv tool" - }, - { - "category": "security", - "classification_reason_selected": "path-contains-uv", - "installed": "1.140.0", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/semgrep", - "installed_version": "1.140.0", - "latest_upstream": "1.140.0", - "latest_url": "https://pypi.org/project/semgrep/", - "latest_version": "1.140.0", - "status": "UP-TO-DATE", - "tool": "semgrep", - "tool_url": "https://pypi.org/project/semgrep/", - "upstream_method": "uv tool" - 
}, - { - "category": "automation", - "classification_reason_selected": "path-contains-uv", - "installed": "12.1.0", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/ansible-community", - "installed_version": "12.1.0", - "latest_upstream": "12.1.0", - "latest_url": "https://pypi.org/project/ansible/", - "latest_version": "12.1.0", - "status": "UP-TO-DATE", - "tool": "ansible", - "tool_url": "https://pypi.org/project/ansible/", - "upstream_method": "uv tool" - }, - { - "category": "automation", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "2.19.3", - "latest_url": "https://pypi.org/project/ansible-core/", - "latest_version": "2.19.3", - "status": "NOT INSTALLED", - "tool": "ansible-core", - "tool_url": "https://pypi.org/project/ansible-core/", - "upstream_method": "uv tool" - }, - { - "category": "git-helpers", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.8.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/git-absorb", - "installed_version": "0.8.0", - "latest_upstream": "0.8.0", - "latest_url": "https://github.com/tummychow/git-absorb/releases/tag/0.8.0", - "latest_version": "0.8.0", - "status": "UP-TO-DATE", - "tool": "git-absorb", - "tool_url": "https://github.com/tummychow/git-absorb", - "upstream_method": "github" - }, - { - "category": "git-helpers", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.9.0", - "installed_method": "/home/sme/.local/bin", - "installed_path_selected": "/home/sme/.local/bin/git-branchless", - "installed_version": "0.9.0", - "latest_upstream": "0.10.0", - "latest_url": "https://github.com/arxanas/git-branchless/releases/tag/v0.10.0", - "latest_version": "0.10.0", - "status": "OUTDATED", - "tool": "git-branchless", - "tool_url": "https://github.com/arxanas/git-branchless", - "upstream_method": "github" - }, - { - "category": "other", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "3.7.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/git-lfs", - "installed_version": "3.7.1", - "latest_upstream": "3.7.1", - "latest_url": "https://github.com/git-lfs/git-lfs/releases/tag/v3.7.1", - "latest_version": "3.7.1", - "status": "UP-TO-DATE", - "tool": "git-lfs", - "tool_url": "https://github.com/git-lfs/git-lfs", - "upstream_method": "github" - }, - { - "category": "other", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.28.14", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/tfsec", - "installed_version": "1.28.14", - "latest_upstream": "1.28.14", - "latest_url": "https://github.com/aquasecurity/tfsec/releases/tag/v1.28.14", - "latest_version": "1.28.14", - "status": "UP-TO-DATE", - "tool": "tfsec", - "tool_url": "https://github.com/aquasecurity/tfsec", - "upstream_method": "github" - }, - { - "category": "formatters", - "classification_reason_selected": "path-contains-uv", - "installed": "25.9.0", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/black", - "installed_version": "25.9.0", - "latest_upstream": "25.9.0", - "latest_url": "https://pypi.org/project/black/", - "latest_version": "25.9.0", - "status": "UP-TO-DATE", - "tool": "black", - "tool_url": "https://pypi.org/project/black/", - 
"upstream_method": "uv tool" - }, - { - "category": "formatters", - "classification_reason_selected": "path-contains-uv", - "installed": "7.0.0", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/isort", - "installed_version": "7.0.0", - "latest_upstream": "7.0.0", - "latest_url": "https://pypi.org/project/isort/", - "latest_version": "7.0.0", - "status": "UP-TO-DATE", - "tool": "isort", - "tool_url": "https://pypi.org/project/isort/", - "upstream_method": "uv tool" - }, - { - "category": "formatters", - "classification_reason_selected": "path-contains-uv", - "installed": "7.3.0", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/flake8", - "installed_version": "7.3.0", - "latest_upstream": "7.3.0", - "latest_url": "https://pypi.org/project/flake8/", - "latest_version": "7.3.0", - "status": "UP-TO-DATE", - "tool": "flake8", - "tool_url": "https://pypi.org/project/flake8/", - "upstream_method": "uv tool" - }, - { - "category": "formatters", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "9.38.0", - "latest_url": "https://github.com/eslint/eslint/releases/tag/v9.38.0", - "latest_version": "9.38.0", - "status": "NOT INSTALLED", - "tool": "eslint", - "tool_url": "https://github.com/eslint/eslint", - "upstream_method": "github" - }, - { - "category": "formatters", - "classification_reason_selected": "path-under-~/.nvm", - "installed": "3.6.2", - "installed_method": "nvm/npm", - "installed_path_selected": "/home/cybot/.nvm/versions/node/v24.10.0/bin/prettier", - "installed_version": "3.6.2", - "latest_upstream": "3.6.2", - "latest_url": "https://github.com/prettier/prettier/releases/tag/3.6.2", - "latest_version": "3.6.2", - "status": "UP-TO-DATE", - "tool": "prettier", - "tool_url": "https://github.com/prettier/prettier", - "upstream_method": "github" - }, - { - "category": "formatters", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "3.12.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/shfmt", - "installed_version": "3.12.0", - "latest_upstream": "3.12.0", - "latest_url": "https://github.com/mvdan/sh/releases/tag/v3.12.0", - "latest_version": "3.12.0", - "status": "UP-TO-DATE", - "tool": "shfmt", - "tool_url": "https://github.com/mvdan/sh", - "upstream_method": "github" - }, - { - "category": "formatters", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "0.11.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/shellcheck", - "installed_version": "0.11.0", - "latest_upstream": "0.11.0", - "latest_url": "https://github.com/koalaman/shellcheck/releases/tag/v0.11.0", - "latest_version": "0.11.0", - "status": "UP-TO-DATE", - "tool": "shellcheck", - "tool_url": "https://github.com/koalaman/shellcheck", - "upstream_method": "github" - }, - { - "category": "formatters", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "2.5.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/golangci-lint", - "installed_version": "2.5.0", - "latest_upstream": "2.5.0", - "latest_url": "https://github.com/golangci/golangci-lint/releases/tag/v2.5.0", - "latest_version": "2.5.0", - "status": "UP-TO-DATE", - "tool": "golangci-lint", - "tool_url": 
"https://github.com/golangci/golangci-lint", - "upstream_method": "github" - }, - { - "category": "json-yaml", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "39.1.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/fx", - "installed_version": "39.1.0", - "latest_upstream": "39.1.0", - "latest_url": "https://github.com/antonmedv/fx/releases/tag/39.1.0", - "latest_version": "39.1.0", - "status": "UP-TO-DATE", - "tool": "fx", - "tool_url": "https://github.com/antonmedv/fx", - "upstream_method": "github" - }, - { - "category": "ai-assistants", - "classification_reason_selected": "", - "installed": "", - "installed_method": "", - "installed_path_selected": "", - "installed_version": "", - "latest_upstream": "0.47.0", - "latest_url": "https://www.npmjs.com/package/@openai/codex", - "latest_version": "0.47.0", - "status": "NOT INSTALLED", - "tool": "codex", - "tool_url": "https://www.npmjs.com/package/@openai/codex", - "upstream_method": "npm (nvm)" - }, - { - "category": "ai-assistants", - "classification_reason_selected": "no-match", - "installed": "2.0.24", - "installed_method": "unknown", - "installed_path_selected": "/home/cybot/.claude/local/claude", - "installed_version": "2.0.24", - "latest_upstream": "2.0.24", - "latest_url": "https://www.npmjs.com/package/@anthropic-ai/claude-code", - "latest_version": "2.0.24", - "status": "UP-TO-DATE", - "tool": "claude", - "tool_url": "https://www.npmjs.com/package/@anthropic-ai/claude-code", - "upstream_method": "npm (nvm)" - }, - { - "category": "vcs", - "classification_reason_selected": "dpkg-query", - "installed": "2.43.0", - "installed_method": "apt/dpkg", - "installed_path_selected": "/usr/bin/git", - "installed_version": "2.43.0", - "latest_upstream": "2.51.1", - "latest_url": "https://github.com/git/git/releases/tag/v2.51.1", - "latest_version": "2.51.1", - "status": "OUTDATED", - "tool": "git", - "tool_url": "https://github.com/git/git", - "upstream_method": "github" - }, - { - "category": "vcs", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "2.82.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/gh", - "installed_version": "2.82.0", - "latest_upstream": "2.82.0", - "latest_url": "https://github.com/cli/cli/releases/tag/v2.82.0", - "latest_version": "2.82.0", - "status": "UP-TO-DATE", - "tool": "gh", - "tool_url": "https://github.com/cli/cli", - "upstream_method": "github" - }, - { - "category": "vcs", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.22.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/glab", - "installed_version": "1.22.0", - "latest_upstream": "1.22.0", - "latest_url": "https://github.com/profclems/glab/releases/tag/v1.22.0", - "latest_version": "1.22.0", - "status": "UP-TO-DATE", - "tool": "glab", - "tool_url": "https://github.com/profclems/glab", - "upstream_method": "github" - }, - { - "category": "vcs", - "classification_reason_selected": "path-contains-uv", - "installed": "7.27.00", - "installed_method": "uv tool", - "installed_path_selected": "/home/cybot/.local/bin/gam", - "installed_version": "7.27.00", - "latest_upstream": "7.27.00", - "latest_url": "https://github.com/GAM-team/GAM/releases/tag/v7.27.00", - "latest_version": "7.27.00", - "status": "UP-TO-DATE", - "tool": "gam", - "tool_url": "https://github.com/GAM-team/GAM", - "upstream_method": "github" - }, 
- { - "category": "task-runners", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.43.0", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/just", - "installed_version": "1.43.0", - "latest_upstream": "1.43.0", - "latest_url": "https://github.com/casey/just/releases/tag/1.43.0", - "latest_version": "1.43.0", - "status": "UP-TO-DATE", - "tool": "just", - "tool_url": "https://github.com/casey/just", - "upstream_method": "github" - }, - { - "category": "other", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.13.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/ninja", - "installed_version": "1.13.1", - "latest_upstream": "1.13.1", - "latest_url": "https://github.com/ninja-build/ninja/releases/tag/v1.13.1", - "latest_version": "1.13.1", - "status": "UP-TO-DATE", - "tool": "ninja", - "tool_url": "https://github.com/ninja-build/ninja", - "upstream_method": "github" - }, - { - "category": "cloud-infra", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "2.31.18", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/aws", - "installed_version": "2.31.18", - "latest_upstream": "2.31.18", - "latest_url": "https://github.com/aws/aws-cli/releases/tag/2.31.18", - "latest_version": "2.31.18", - "status": "UP-TO-DATE", - "tool": "aws", - "tool_url": "https://github.com/aws/aws-cli", - "upstream_method": "github" - }, - { - "category": "cloud-infra", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.34.1", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/kubectl", - "installed_version": "1.34.1", - "latest_upstream": "1.34.1", - "latest_url": "https://github.com/kubernetes/kubernetes/releases/tag/v1.34.1", - "latest_version": "1.34.1", - "status": "UP-TO-DATE", - "tool": "kubectl", - "tool_url": "https://github.com/kubernetes/kubernetes", - "upstream_method": "github" - }, - { - "category": "cloud-infra", - "classification_reason_selected": "path-under-~/.local/bin", - "installed": "1.13.4", - "installed_method": "/home/cybot/.local/bin", - "installed_path_selected": "/home/cybot/.local/bin/terraform", - "installed_version": "1.13.4", - "latest_upstream": "1.13.4", - "latest_url": "https://github.com/hashicorp/terraform/releases/tag/v1.13.4", - "latest_version": "1.13.4", - "status": "UP-TO-DATE", - "tool": "terraform", - "tool_url": "https://github.com/hashicorp/terraform", - "upstream_method": "github" - }, - { - "category": "cloud-infra", - "classification_reason_selected": "no-match", - "installed": "28.5.1", - "installed_method": "unknown", - "installed_path_selected": "/usr/bin/docker", - "installed_version": "28.5.1", - "latest_upstream": "29.0.0", - "latest_url": "https://github.com/docker/cli/releases/tag/v29.0.0-rc.1", - "latest_version": "29.0.0", - "status": "OUTDATED", - "tool": "docker", - "tool_url": "https://github.com/docker/cli", - "upstream_method": "github" - }, - { - "category": "cloud-infra", - "classification_reason_selected": "docker-info-os", - "installed": "2.40.0", - "installed_method": "docker-desktop (WSL)", - "installed_path_selected": "/usr/bin/docker-compose", - "installed_version": "2.40.0", - "latest_upstream": "2.40.1", - "latest_url": "https://github.com/docker/compose/releases/tag/v2.40.1", - "latest_version": "2.40.1", - "status": "OUTDATED", - 
"tool": "docker-compose", - "tool_url": "https://github.com/docker/compose", - "upstream_method": "github" - } - ] -} \ No newline at end of file