diff --git a/.cargo/config.toml b/.cargo/config.toml index c00438ac..39ceb4d8 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,2 +1,9 @@ [alias] xtask = "run --locked --package xtask --manifest-path xtask/Cargo.toml --" + +# Build configuration for release +[profile.release] +lto = true +codegen-units = 1 +panic = "abort" +strip = true diff --git a/.github/workflows/canary-release.yml b/.github/workflows/canary-release.yml deleted file mode 100644 index f9c1c877..00000000 --- a/.github/workflows/canary-release.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Canary Release -on: - push: - # don't run on tags, run on commits - # https://github.com/orgs/community/discussions/25615 - tags-ignore: - - "**" - paths-ignore: - - '.github/**' - - '.cargo/**' - - '.direnv/**' - - '.vscode/**' - - 'docs/**' - - 'Cargo.*' - - 'crates/**/Cargo.*' - - '*.md' - branches: - - develop - workflow_dispatch: - -permissions: - contents: read - packages: write - -concurrency: - group: canary-${{ github.ref }} - cancel-in-progress: true - -jobs: - compute_canary_version: - runs-on: ubuntu-24.04 - outputs: - version: ${{ steps.canary_version.outputs.version }} - steps: - - name: Compute canary version - id: canary_version - run: | - SHORT_SHA=${GITHUB_SHA::7} - DATE=$(date -u +%Y%m%dT%H%M%SZ) - echo "version=canary-${DATE}-${SHORT_SHA}" >> "$GITHUB_OUTPUT" - - release_canary_container: - needs: compute_canary_version - permissions: - contents: read - packages: write - attestations: write - id-token: write - uses: ./.github/workflows/release-container.yml - with: - version: ${{ needs.compute_canary_version.outputs.version }} - secrets: inherit \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb42f641..49552e0a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,8 +1,7 @@ -name: Nix CI +name: CI + on: push: - # don't run on tags, run on commits - # https://github.com/orgs/community/discussions/25615 tags-ignore: 
- "**" branches: @@ -11,135 +10,71 @@ on: pull_request: workflow_dispatch: -env: - # We want the cache to be as full as possible, so we instruct nix to keep derivations - # and other related outputs around in its cache - nix_conf: | - keep-env-derivations = true - keep-outputs = true - jobs: - # Cache the nix store so that subsequent runs are almost instantaneous - # See https://github.com/marketplace/actions/restore-and-save-nix-store#inputs - cache: - name: Cache nix store - runs-on: ubuntu-24.04 - permissions: - actions: write - contents: read + test: + name: Test + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v4 + + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + + - name: Cache cargo registry + uses: actions/cache@v4 with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: nixbuild/nix-quick-install-action@v30 + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v4 with: - nix_conf: ${{ env.nix_conf }} - - name: Restore and save Nix store - uses: nix-community/cache-nix-action@v6 + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v4 with: - primary-key: build-${{ runner.os }}-${{ hashFiles('Cargo.lock', '**/Cargo.toml', 'flake.nix', 'flake.lock', 'rust-toolchain.toml') }} - restore-prefixes-first-match: build-${{ runner.os }}- - purge: true - purge-prefixes: build-${{ runner.os }}- - purge-created: 0 - purge-primary-key: never - gc-max-store-size: 5G - - name: Save flake attributes from garbage collection - run: nix profile install .#saveFromGC + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Run tests + run: cargo test --workspace check: - name: Run checks - runs-on: ubuntu-24.04 - needs: cache - permissions: - actions: write - contents: read + name: Check 
and Lint + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 - with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: nixbuild/nix-quick-install-action@v30 - with: - nix_conf: ${{ env.nix_conf }} - - name: Restore and save Nix store - uses: nix-community/cache-nix-action@v6 - with: - primary-key: build-${{ runner.os }}-${{ hashFiles('Cargo.lock', '**/Cargo.toml', 'flake.nix', 'flake.lock', 'rust-toolchain.toml') }} - purge: true - purge-prefixes: build-${{ runner.os }}- - purge-created: 0 - purge-primary-key: never - gc-max-store-size: 5G - - name: Run checks - run: nix flake check + - uses: actions/checkout@v4 - build: - name: Build - runs-on: ubuntu-24.04 - needs: cache - permissions: - actions: write - contents: read - steps: - - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: nixbuild/nix-quick-install-action@v30 - with: - nix_conf: ${{ env.nix_conf }} - - name: Restore and save Nix store - uses: nix-community/cache-nix-action@v6 - with: - primary-key: build-${{ runner.os }}-${{ hashFiles('Cargo.lock', '**/Cargo.toml', 'flake.nix', 'flake.lock', 'rust-toolchain.toml') }} - purge: true - purge-prefixes: build-${{ runner.os }}- - purge-created: 0 - purge-primary-key: never - gc-max-store-size: 5G - - name: Build - run: nix build .# + components: rustfmt, clippy - test: - name: Run Tests - runs-on: ubuntu-24.04 - needs: cache - permissions: - actions: write - contents: read - steps: - - uses: actions/checkout@v5 - with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: nixbuild/nix-quick-install-action@v30 - with: - nix_conf: ${{ env.nix_conf }} - - name: Restore and save Nix store - uses: nix-community/cache-nix-action@v6 + - name: Cache cargo registry + uses: actions/cache@v4 with: - primary-key: build-${{ runner.os }}-${{ hashFiles('Cargo.lock', '**/Cargo.toml', 'flake.nix', 'flake.lock', 'rust-toolchain.toml') }} - purge: true - 
purge-prefixes: build-${{ runner.os }}- - purge-created: 0 - purge-primary-key: never - gc-max-store-size: 5G - - name: Run Tests - run: 'nix develop --command bash -c "cargo test"' + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - coverage: - name: Run Coverage - runs-on: ubuntu-24.04 - permissions: - contents: read - steps: - - uses: actions/checkout@v5 + - name: Cache cargo index + uses: actions/cache@v4 with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: taiki-e/install-action@cargo-llvm-cov - - name: Generate code coverage - run: cargo llvm-cov --all-features --workspace --codecov --output-path codecov.json - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v5 + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v4 with: - token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos - files: codecov.json - fail_ci_if_error: true + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Check formatting + run: cargo fmt --all -- --check + + - name: Run clippy + run: cargo clippy --all-targets --all-features -- -D warnings + + - name: Build binary + run: cargo build --release --package dc-mcp-server diff --git a/.github/workflows/prep-release.yml b/.github/workflows/prep-release.yml deleted file mode 100644 index 04dc979c..00000000 --- a/.github/workflows/prep-release.yml +++ /dev/null @@ -1,256 +0,0 @@ -name: Prep release - -on: - workflow_dispatch: - inputs: - version_bump: - type: choice - description: "Type of version bump" - default: patch - required: true - options: - - major - - minor - - patch - - custom - custom_version: - type: string - required: false - description: "Custom version (ignored for other bump types)" - -permissions: - contents: write - pull-requests: write - -concurrency: - group: pre-release - cancel-in-progress: 
false - -jobs: - validate: - runs-on: ubuntu-latest - steps: - - name: Enforce custom_version when bump=custom - run: | - if [[ "${{ inputs.version_bump }}" == "custom" ]]; then - if [[ -z "${{ inputs.custom_version }}" ]]; then - echo "::error title=Missing input::Set 'custom_version' when version_bump=custom"; exit 1 - fi - if [[ ! "${{ inputs.custom_version }}" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$ ]]; then - echo "::error title=Invalid SemVer::Use x.y.z (can use optional pre-release/build identifiers)"; exit 1 - fi - fi - prep-release: - runs-on: ubuntu-latest - - env: - GH_TOKEN: ${{ secrets.GH_PAT }} - - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 - - - name: Configure git author - run: | - git config --local user.name "Apollo Bot" - git config --local user.email "svc-apollo-bot-2@apollographql.com" - - - name: Retrieve current version from Cargo.toml - id: meta - run: | - set -eu - VERSION=$(cargo metadata --no-deps --format-version=1 | jq -er --arg NAME "apollo-mcp-server" '.packages[] | select(.name == $NAME) | .version') - [ -n "$VERSION" ] || { echo "::error::Could not determine version"; exit 1; } - echo "current_version=$VERSION" >> "$GITHUB_OUTPUT" - - - name: Bump the version - id: bump - shell: bash - env: - CURR: ${{ steps.meta.outputs.current_version }} - CUSTOM: ${{ inputs.custom_version }} - BUMP: ${{ inputs.version_bump }} - run: | - set -euo pipefail - - if [[ -n "${CUSTOM:-}" ]]; then - echo "new_version=$CUSTOM" >> "$GITHUB_OUTPUT" - echo "Custom Bumped: $CURR -> $CUSTOM" - else - # strip any pre-release / build metadata for arithmetic (e.g., -rc.1, +build.5) - BASE="${CURR%%[-+]*}" - - IFS=. 
read -r MA MI PA <<< "$BASE" - - case "$BUMP" in - major) MA=$((MA+1)); MI=0; PA=0 ;; - minor) MI=$((MI+1)); PA=0 ;; - patch) PA=$((PA+1)) ;; - *) echo "::error::Unknown bump '$BUMP'"; exit 1 ;; - esac - - NEW_VERSION="$MA.$MI.$PA" - echo "new_version=$NEW_VERSION" >> "$GITHUB_OUTPUT" - echo "Bumped: $CURR -> $NEW_VERSION" - fi - - - name: Prepare release branch - id: prep_branch - run: | - set -e - git fetch origin develop - git switch -c "release/${{ steps.bump.outputs.new_version }}" "origin/develop" - echo "release_branch=release/${{ steps.bump.outputs.new_version }}" >> "$GITHUB_OUTPUT" - - - name: Update Cargo version - run: | - cargo install cargo-edit --locked - cargo set-version --workspace "${{ steps.bump.outputs.new_version }}" - - - name: Replace versions in scripts and docs - env: - CURR_VERSION: ${{ steps.meta.outputs.current_version }} - NEW_VERSION: ${{ steps.bump.outputs.new_version }} - run: | - python3 - <<'PY' - try: - import os, re, sys, glob, pathlib - - current_version = os.environ["CURR_VERSION"] - new_version = os.environ["NEW_VERSION"] - - print(f"current={current_version} new={new_version}") - - # negative lookbehind (word,., or -) + optional 'v' + the escaped current version + negative lookahead (word or .) - # e.g. current version of 1.0.1 will match 1.0.1, v1.0.1, v1.0.1-rc.1 - # e.g. current version of 1.0.1 will not match ver1.0.1, 1.0.1x, 1.0.11, 1.0.1.beta - pat = re.compile(rf'(? 
CHANGELOG_SECTION.md - try: - import os, re, sys, pathlib - new = os.environ["NEW"] - old = os.environ["OLD"] - - p = pathlib.Path("CHANGELOG.md") - if not p.exists(): - raise FileNotFoundError("CHANGELOG.md not found at repo root") - text = p.read_text(encoding="utf-8") - - # Find header for the new version - start = re.search(rf'(?m)^# \[{re.escape(new)}\]', text) - if not start: - print(f"::error::Could not find changelog entry for {new}", file=sys.stderr) - sys.exit(1) - - # Prefer the *specific* previous version header if present; otherwise, next '# ['; else, EOF - segment = text[start.start():] - end_old = re.search(rf'(?m)^# \[{re.escape(old)}\]', segment) - if end_old: - segment = segment[:end_old.start()] - else: - nxt = re.search(r'(?m)^# \[', segment[len('# [' + new + ']'):]) - if nxt: - # adjust to absolute end - segment = segment[: (len('# [' + new + ']') + nxt.start())] - - segment = segment.rstrip() + "\n" - print(segment) - except Exception: - import traceback - traceback.print_exc() - sys.exit(1) - PY - - { - echo 'body<> "$GITHUB_OUTPUT" - - - name: Commit and push changelog updates - shell: bash - run: | - set -euo pipefail - git add -A || true - git commit -m "chore(release): changelog for ${{ steps.bump.outputs.new_version }}" || echo "No changelog updates to commit" - git push origin HEAD - - - name: Open/Update draft PR to main - env: - HEAD: release/${{ steps.bump.outputs.new_version }} - TITLE: Releasing ${{ steps.bump.outputs.new_version }} - shell: bash - run: | - set -euo pipefail - # Try to create; if it already exists, update it - if ! 
gh pr create \ - --base main \ - --head "$HEAD" \ - --title "$TITLE" \ - --draft \ - --body-file CHANGELOG_SECTION.md \ - --label release - then - num=$(gh pr list --head "$HEAD" --base main --state open --json number -q '.[0].number' || true) - if [[ -n "$num" ]]; then - gh pr edit "$num" --title "$TITLE" --body-file CHANGELOG_SECTION.md --add-label release - else - echo "::error::Failed to create or find PR from $HEAD to main" - exit 1 - fi - fi \ No newline at end of file diff --git a/.github/workflows/release-bins.yml b/.github/workflows/release-bins.yml deleted file mode 100644 index c73063b1..00000000 --- a/.github/workflows/release-bins.yml +++ /dev/null @@ -1,214 +0,0 @@ -name: Build Release Binaries -on: - push: - tags: - - "v[0-9]+.[0-9]+.[0-9]+" - - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+" - workflow_dispatch: - inputs: - version: - description: Version to publish - required: true - type: string - -env: - VERSION: ${{ inputs.version || github.ref_name }} - -jobs: - build: - name: Release binaries - strategy: - matrix: - include: - # Linux compiles itself - - os: ubuntu-24.04 - bundle: linux - targets: cross-aarch64-unknown-linux-gnu cross-aarch64-unknown-linux-musl cross-x86_64-unknown-linux-gnu cross-x86_64-unknown-linux-musl - - # We can compile the windows target from linux - - os: ubuntu-24.04 - bundle: windows - targets: cross-aarch64-pc-windows-gnullvm cross-x86_64-pc-windows-gnullvm - - # Apple SDK does not allow us to cross compile from non-apple-branded - # machines, so we run that bundle on a macOS runner - - os: macos-latest - bundle: darwin - targets: cross-aarch64-apple-darwin cross-x86_64-apple-darwin - runs-on: ${{ matrix.os }} - permissions: - contents: write - packages: write - attestations: write - id-token: write - steps: - - uses: actions/checkout@v5 - with: - ref: ${{ github.ref }} - - uses: nixbuild/nix-quick-install-action@v30 - with: - nix_conf: ${{ env.nix_conf }} - - name: Restore and save Nix store - uses: 
nix-community/cache-nix-action@v6 - with: - primary-key: release-${{ matrix.bundle }}-${{ hashFiles('Cargo.lock', '**/Cargo.toml', 'flake.nix', 'flake.lock', 'rust-toolchain.toml') }} - restore-prefixes-first-match: | - release-${{ matrix.bundle }}- - build-${{ runner.os }}- - purge: true - purge-prefixes: release-${{ matrix.bundle }}- - purge-created: 0 - purge-primary-key: never - gc-max-store-size: 5G - - - name: Build binaries - run: | - mkdir release - for BUILD_TARGET in ${{ matrix.targets }}; do - TARGET=${BUILD_TARGET#"cross-"} - - echo "Scaffolding release for $TARGET..." - mkdir -p "release/$TARGET/dist" - cp README.md LICENSE "release/$TARGET/dist" - - echo "Building release for $TARGET..." - nix build .#$BUILD_TARGET - cp result/bin/* "release/$TARGET/dist/" - done - - - name: Sign Apple Binary - if: ${{ runner.os == 'macOS' }} - env: - MACOS_CERT_BUNDLE_PASSWORD: ${{ secrets.MACOS_CERT_BUNDLE_PASSWORD }} - MACOS_CERT_BUNDLE_BASE64: ${{ secrets.MACOS_CERT_BUNDLE_BASE64 }} - MACOS_KEYCHAIN_PASSWORD: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }} - - APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} - APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} - APPLE_USERNAME: ${{ secrets.APPLE_USERNAME }} - - KEYCHAIN_NAME: "apollo-mcp-server-keychain" - ENTITLEMENTS_PATH: "macos-entitlements.plist" - run: | - echo "Pre-check: Valid Codesigning Identify" - security find-identity -v -p codesigning - echo "Pre-check: Codesigning Identify" - security find-identity -p codesigning - echo "Pre-check: Any Identify" - security find-identity - - echo "|||||||||||||||||||||||||||||||||||||||||||||" - - # Create a temporary keychain - EPHEMERAL_KEYCHAIN=`mktemp -d` - - echo "Creating keychain..." - security create-keychain -p "${MACOS_KEYCHAIN_PASSWORD}" $KEYCHAIN_NAME - echo "Removing relock timeout on keychain..." - security set-keychain-settings $KEYCHAIN_NAME - - echo "Decoding certificate bundle..." 
- echo "${MACOS_CERT_BUNDLE_BASE64}" | base64 --decode > $EPHEMERAL_KEYCHAIN/certificate.p12 - - echo "Importing codesigning certificate to build keychain..." - security import $EPHEMERAL_KEYCHAIN/certificate.p12 -k $KEYCHAIN_NAME -P "${MACOS_CERT_BUNDLE_PASSWORD}" -T /usr/bin/codesign - - echo "Adding the codesign tool to the security partition-list..." - security set-key-partition-list -S "apple-tool:,apple:,codesign:" -s -k "${MACOS_KEYCHAIN_PASSWORD}" $KEYCHAIN_NAME - - echo "Setting default keychain..." - security default-keychain -d user -s $KEYCHAIN_NAME - - echo "Unlocking keychain..." - security unlock-keychain -p "${MACOS_KEYCHAIN_PASSWORD}" $KEYCHAIN_NAME - - echo "Verifying keychain is set up correctly..." - security find-identity -v -p codesigning - - echo "|||||||||||||||||||||||||||||||||||||||||||||" - - echo "Post-check: Valid Codesigning Identify" - security find-identity -v -p codesigning - echo "Post-check: Codesigning Identify" - security find-identity -p codesigning - echo "Post-check: Any Identify" - security find-identity - - echo "|||||||||||||||||||||||||||||||||||||||||||||" - # Sign each binary - for RELEASE in release/*/; do - RELEASE=${RELEASE%/} - RELEASE=${RELEASE#"release/"} - - BINARY_PATH="release/$RELEASE/dist/apollo-mcp-server" - echo "Starting code signing for $RELEASE..." - - echo "> Signing code (step 1)..." - codesign --sign "$APPLE_TEAM_ID" --options runtime --entitlements $ENTITLEMENTS_PATH --force --timestamp "$BINARY_PATH" -v - - echo "> Signing code (step 2)..." - codesign -vvv --deep --strict "$BINARY_PATH" - - echo "> Zipping dist..." - TMP_DIST=`mktemp -d` - mkdir $TMP_DIST/dist - cp "$BINARY_PATH" "$TMP_DIST/dist/" - zip -r "$TMP_DIST/apollo-mcp-server-$VERSION.zip" "$TMP_DIST/dist" - - echo "> Beginning notarization process (might take up to 20m)..." 
- xcrun notarytool submit "$TMP_DIST/apollo-mcp-server-$VERSION.zip" \ - --apple-id "$APPLE_USERNAME" \ - --password "$APPLE_NOTARIZATION_PASSWORD" \ - --team-id "$APPLE_TEAM_ID" \ - --wait \ - --timeout 20m - - echo "> Cleaning up release..." - rm -rf $TMP_DIST - done - - echo "Cleaning up ephemeral keychain..." - rm -rf $EPHEMERAL_KEYCHAIN/ - - - name: Create release bundles - run: | - mkdir artifacts - for RELEASE in release/*/; do - # Remove trailing slash and leading parent - RELEASE=${RELEASE%/} - RELEASE=${RELEASE#"release/"} - RENAMED=${RELEASE/x86_64-pc-windows-gnullvm/x86_64-pc-windows-msvc} - RENAMED=${RENAMED/aarch64-pc-windows-gnullvm/aarch64-pc-windows-msvc} - - echo "Creating an artifact for $RELEASE" - tar -C release/$RELEASE -cf - dist/ | gzip -9 > artifacts/apollo-mcp-server-$VERSION-$RENAMED.tar.gz - done - - # We only need to generate the config schema for a release once, so we do it - # on the linux host since it is the cheapest. - - name: Generate config schema - if: ${{ matrix.bundle == 'linux' }} - run: | - ./release/x86_64-unknown-linux-musl/dist/config-schema > artifacts/config.schema.json - - - name: Upload release artifacts - uses: softprops/action-gh-release@v2 - with: - files: artifacts/* - prerelease: ${{ contains(env.VERSION, '-rc.') }} - make_latest: false # this runs for each combination in the matrix - don't mark as latest until all are done - - - name: Generate artifact attestation - uses: actions/attest-build-provenance@v2 - with: - subject-path: "artifacts/*" - - publish: - name: Publish the release - needs: build - runs-on: ubuntu-24.04 - steps: - - name: Make latest - uses: softprops/action-gh-release@v2 - with: - prerelease: ${{ contains(env.VERSION, '-rc.') }} diff --git a/.github/workflows/release-container.yml b/.github/workflows/release-container.yml deleted file mode 100644 index 64bde965..00000000 --- a/.github/workflows/release-container.yml +++ /dev/null @@ -1,110 +0,0 @@ -name: Build Release Container -on: - push: - 
tags: - - "v[0-9]+.[0-9]+.[0-9]+" - - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+" - workflow_dispatch: - inputs: &release_inputs - version: - description: Version to publish - required: true - type: string - workflow_call: - inputs: *release_inputs - -env: - REGISTRY: ghcr.io - FQDN: ghcr.io/${{ github.repository }} - VERSION: ${{ inputs.version || github.ref_name }} - -jobs: - # Build a container for x86_64 and aarch64 linux - build: - name: Release Container - strategy: - matrix: - os: ["ubuntu-24.04", "ubuntu-24.04-arm"] - runs-on: ${{ matrix.os }} - permissions: - contents: read - packages: write - attestations: write - id-token: write - steps: - - uses: actions/checkout@v5 - with: - ref: ${{ github.ref }} - - - uses: nixbuild/nix-quick-install-action@v30 - with: - nix_conf: ${{ env.nix_conf }} - - name: Restore and save Nix store - uses: nix-community/cache-nix-action@v6 - with: - primary-key: build-${{ runner.os }}-${{ hashFiles('Cargo.lock', '**/Cargo.toml', 'flake.nix', 'flake.lock', 'rust-toolchain.toml') }} - restore-prefixes-first-match: build-${{ runner.os }}- - # We don't want to affect the cache when building the container - purge: false - save: false - - - name: Log in to the Container registry - uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - id: build - name: Build Container - shell: bash - run: | - nix run .#streamImage | docker image load - echo "id=`docker image ls -q | head -n1`" >> $GITHUB_OUTPUT - echo "arch=`docker image ls --format '{{ .Tag }}' | head -n1`" >> $GITHUB_OUTPUT - - - id: deploy - name: Tag and push the container - env: - TAG: ${{ env.VERSION }}-${{ steps.build.outputs.arch }} - run: | - docker image tag "${{ steps.build.outputs.id }}" "$FQDN:$TAG" - docker image push "$FQDN:$TAG" - echo "digest=`docker manifest inspect $FQDN:$TAG --verbose | nix run --inputs-from .# nixpkgs#jq -- -r 
.Descriptor.digest`" >> $GITHUB_OUTPUT - - - name: Generate artifact attestation - uses: actions/attest-build-provenance@v2 - with: - subject-name: ${{ env.FQDN }} - subject-digest: ${{ steps.deploy.outputs.digest }} - push-to-registry: true - - bundle: - name: Bundle into multiarch container - needs: build - runs-on: ubuntu-24.04 - steps: - - name: Log in to the Container registry - uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Create multiarch manifest - run: | - docker manifest create $FQDN:$VERSION $FQDN:$VERSION-amd64 $FQDN:$VERSION-arm64 - docker manifest annotate $FQDN:$VERSION $FQDN:$VERSION-amd64 --arch amd64 - docker manifest annotate $FQDN:$VERSION $FQDN:$VERSION-arm64 --arch arm64 - - docker manifest create $FQDN:latest $FQDN:$VERSION-amd64 $FQDN:$VERSION-arm64 - docker manifest annotate $FQDN:latest $FQDN:$VERSION-amd64 --arch amd64 - docker manifest annotate $FQDN:latest $FQDN:$VERSION-arm64 --arch arm64 - - name: Push the multiarch manifests - shell: bash - run: | - docker manifest push $FQDN:$VERSION - - # push :latest only if version DOES NOT start with canary OR end with -rc. - if [[ ! 
"$VERSION" =~ (^canary|-rc\.[0-9]+$) ]]; then - docker manifest push $FQDN:latest - fi diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..b7ac5f5d --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,122 @@ +name: Release + +on: + push: + tags: + - 'v*' + workflow_dispatch: + inputs: + tag: + description: 'Release tag (e.g., v1.0.0)' + required: true + type: string + +jobs: + build: + name: Build (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: ubuntu-latest + artifact_name: dc-mcp-server-linux-x86_64 + binary_path: target/release/dc-mcp-server + - os: macos-latest + artifact_name: dc-mcp-server-macos-aarch64 + binary_path: target/release/dc-mcp-server + - os: windows-latest + artifact_name: dc-mcp-server-windows-x86_64 + binary_path: target/release/dc-mcp-server.exe + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt, clippy + + - name: Cache cargo registry + uses: actions/cache@v4 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v4 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v4 + with: + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Build and Test + run: | + cargo build --release --package dc-mcp-server + cargo test --workspace + cargo clippy --all-targets --all-features -- -D warnings + cargo fmt --all -- --check + + - name: Create release package + shell: bash + run: | + RELEASE_DIR="release" + BIN_NAME="${{ matrix.binary_path }}" + ARTIFACT_NAME="${{ matrix.artifact_name }}" + VERSION=${{ github.event.inputs.tag || github.ref_name }} + + mkdir -p "$RELEASE_DIR/$ARTIFACT_NAME" + cp 
"$BIN_NAME" "$RELEASE_DIR/$ARTIFACT_NAME/" + cp README.md LICENSE "$RELEASE_DIR/$ARTIFACT_NAME/" 2>/dev/null || true + + # Create checksums + cd "$RELEASE_DIR/$ARTIFACT_NAME" + BINARY_FILE=$(basename "$BIN_NAME") + if command -v sha256sum &> /dev/null; then + sha256sum "$BINARY_FILE" > "${BINARY_FILE}.sha256" + else + shasum -a 256 "$BINARY_FILE" > "${BINARY_FILE}.sha256" + fi + cd - + + # Create tar.gz archive + tar -czvf "${ARTIFACT_NAME}.tar.gz" -C "$RELEASE_DIR" "$ARTIFACT_NAME" + + - name: Upload build artifact + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.artifact_name }} + path: ${{ matrix.artifact_name }}.tar.gz + + release: + name: Create Release + needs: build + runs-on: ubuntu-latest + steps: + - name: Download all artifacts + uses: actions/download-artifact@v4 + with: + path: artifacts + + - name: Display structure of downloaded files + run: ls -R artifacts + + - name: Move artifacts to root + run: | + mkdir -p release-assets + find artifacts -name "*.tar.gz" -exec cp {} release-assets/ \; + + - name: Upload release assets + uses: softprops/action-gh-release@v2 + with: + tag_name: ${{ github.event.inputs.tag || github.ref_name }} + name: Release ${{ github.event.inputs.tag || github.ref_name }} + files: release-assets/*.tar.gz + draft: false + prerelease: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/sync-develop.yml b/.github/workflows/sync-develop.yml deleted file mode 100644 index 79cbc72f..00000000 --- a/.github/workflows/sync-develop.yml +++ /dev/null @@ -1,197 +0,0 @@ -name: Sync main → develop - -on: - pull_request: - types: [closed] - branches: [main] - workflow_dispatch: - inputs: - head_branch: - description: "Branch to merge FROM (default: main)." - required: false - default: "main" - base_branch: - description: "Branch to merge INTO (default: develop)." - required: false - default: "develop" - source_pr_number: - description: "If testing, the PR number to comment on (optional)." 
- required: false - test_mode: - description: "Bypass PR/push guards for manual testing" - required: false - default: "true" - -permissions: - contents: write - pull-requests: write - issues: write - -concurrency: - group: sync-main-into-develop - cancel-in-progress: false - -jobs: - open-sync-pr: - if: | - github.actor != 'github-actions[bot]' && ( - ( - github.event_name == 'pull_request' && github.event.pull_request.merged == true - ) || ( - github.event_name == 'workflow_dispatch' && (inputs.test_mode == 'true') - ) - ) - runs-on: ubuntu-latest - - env: - # Use inputs for dispatch (testing), defaults for normal triggers - HEAD_BRANCH: ${{ (github.event_name == 'workflow_dispatch' && inputs.head_branch) || 'main' }} - BASE_BRANCH: ${{ (github.event_name == 'workflow_dispatch' && inputs.base_branch) || 'develop' }} - SOURCE_PR: ${{ (github.event_name == 'pull_request' && github.event.pull_request.number) || inputs.source_pr_number || '' }} - GH_TOKEN: ${{ secrets.GH_PAT }} - - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 - token: ${{ secrets.GH_PAT }} - - - name: Configure git author - run: | - git config --local user.name "Apollo Bot" - git config --local user.email "svc-apollo-bot-2@apollographql.com" - - # Generate branch name from PR# when available, otherwise use first 7 commit SHA characters - - name: Compute branch/name metadata - id: meta - run: | - pr=${{ github.event.pull_request.number }} - echo "sync_branch=sync/main-into-develop-pr-${pr}" >> $GITHUB_OUTPUT - echo "sync_title=Sync main → develop (PR #${pr})" >> $GITHUB_OUTPUT - echo "sync_body=Auto-opened after merging \`${{ github.event.pull_request.head.ref }}\` into \`main\`. Source PR: #${pr}." 
>> $GITHUB_OUTPUT - echo "conflict_branch=conflict/main-into-develop-pr-${pr}" >> $GITHUB_OUTPUT - echo "conflict_title=Sync main → develop (resolve conflicts)" >> $GITHUB_OUTPUT - echo "conflict_body=Opened from a copy of \`main\` so conflicts can be resolved without pushing to a protected branch." >> $GITHUB_OUTPUT - - # Short-lived sync branch from develop and merge main into it (do NOT rebase) - # use +e to stop errors from short-circuiting the script - - name: Prepare sync branch - id: prep - run: | - set -e - git fetch origin "${BASE_BRANCH}" "${HEAD_BRANCH}" - git switch -c "${{ steps.meta.outputs.sync_branch }}" "origin/${BASE_BRANCH}" - set +e - git merge --no-ff "origin/${HEAD_BRANCH}" - rc=$? - set -e - git add -A || true - git commit -m "WIP: merge ${HEAD_BRANCH} into ${BASE_BRANCH} via ${{ steps.meta.outputs.branch }}" || true - git push origin HEAD - - right=$(git rev-list --count --right-only "origin/${BASE_BRANCH}...HEAD") - - echo "merge_status=$rc" >> "$GITHUB_OUTPUT" - echo "sync_right=$right" >> "$GITHUB_OUTPUT" - echo "Merge exit=$rc, sync branch ahead-by=$right" - - # If no merge conflicts and there are changes, open the PR targeting develop - - name: Open clean PR to develop - id: sync_pr - if: ${{ steps.prep.outputs.merge_status == '0' && steps.prep.outputs.sync_right != '0' }} - run: | - # Avoid duplicate PRs - existing=$(gh pr list --base "${BASE_BRANCH}" --head "${{ steps.meta.outputs.sync_branch }}" --state open --json number --jq '.[0].number' || true) - if [ -n "$existing" ] && [ "$existing" != "null" ]; then - echo "pr_number=$existing" >> "$GITHUB_OUTPUT" - url=$(gh pr view "$existing" --json url --jq .url) - echo "pr_url=$url" >> "$GITHUB_OUTPUT" - exit 0 - fi - - gh pr create \ - --base "${BASE_BRANCH}" \ - --head "${{ steps.meta.outputs.sync_branch }}" \ - --title "${{ steps.meta.outputs.sync_title }}" \ - --body "${{ steps.meta.outputs.sync_body }} (created via gh CLI)" \ - --label back-merge \ - --label skip-changeset \ - 
--label automation - - # Fetch the newly created PR number, then its URL so that we display in a PR comment - num=$(gh pr list --base "${BASE_BRANCH}" --head "${{ steps.meta.outputs.sync_branch }}" --state open --json number --jq '.[0].number') - url=$(gh pr view "$num" --json url --jq .url) - echo "pr_number=$num" >> "$GITHUB_OUTPUT" - echo "pr_url=$url" >> "$GITHUB_OUTPUT" - - # If the merge hit conflicts, open a DIRECT PR: HEAD_BRANCH -> BASE_BRANCH so conflicts can be resolved prior to merge - - name: Open conflict PR - id: conflict_pr - if: ${{ steps.prep.outputs.merge_status != '0' }} - run: | - set -e - git fetch origin "${HEAD_BRANCH}" "${BASE_BRANCH}" - - git switch -c "${{ steps.meta.outputs.conflict_branch }}" "origin/${HEAD_BRANCH}" - git push -u origin HEAD - - # Skip if no diff between conflict branch and base (should be unlikely) - right=$(git rev-list --right-only --count "origin/${BASE_BRANCH}...origin/${{ steps.meta.outputs.conflict_branch }}") - if [ "$right" -eq 0 ]; then - echo "No diff between ${HEAD_BRANCH} and ${BASE_BRANCH}; nothing to open." 
- exit 0 - fi - - # Reuse existing open PR if present - existing=$(gh pr list --base "${BASE_BRANCH}" --head "${{ steps.meta.outputs.conflict_branch }}" --state open --json number --jq '.[0].number' || true) - if [ -n "$existing" ] && [ "$existing" != "null" ]; then - echo "pr_number=$existing" >> "$GITHUB_OUTPUT" - url=$(gh pr view "$existing" --json url --jq .url) - echo "pr_url=$url" >> "$GITHUB_OUTPUT" - exit 0 - fi - - gh pr create \ - --base "${BASE_BRANCH}" \ - --head "${{ steps.meta.outputs.conflict_branch }}" \ - --title "${{ steps.meta.outputs.conflict_title }}" \ - --body "${{ steps.meta.outputs.conflict_body }}" \ - --label back-merge \ - --label automation \ - --label skip-changeset \ - --label conflicts - - # Fetch the newly created conflict PR number, then its URL so that we display in a PR comment - num=$(gh pr list --base "${BASE_BRANCH}" --head "${{ steps.meta.outputs.conflict_branch }}" --state open --json number --jq '.[0].number') - url=$(gh pr view "$num" --json url --jq .url) - echo "pr_number=$num" >> "$GITHUB_OUTPUT" - echo "pr_url=$url" >> "$GITHUB_OUTPUT" - - # Comment back on the ORIGINAL merged PR with a link to the sync PR - - name: Comment on source PR with sync PR link - if: ${{ env.SOURCE_PR != '' && (steps.sync_pr.outputs.pr_number != '' || steps.conflict_pr.outputs.pr_number != '') }} - uses: actions/github-script@v7 - with: - script: | - const owner = context.repo.owner; - const repo = context.repo.repo; - const issue_number = Number(process.env.SOURCE_PR); - - const hadConflicts = '${{ steps.prep.outputs.merge_status }}' !== '0'; - const syncUrl = '${{ steps.sync_pr.outputs.pr_url || steps.conflict_pr.outputs.pr_url }}'; - const head = process.env.HEAD_BRANCH; - const base = process.env.BASE_BRANCH; - - const status = hadConflicts ? 'conflicts ❗' : 'clean ✅'; - const note = hadConflicts - ? 'Opened from a copy of main so conflicts can be resolved safely.' 
- : 'Opened from a sync branch created off develop.'; - - const body = [ - `Opened sync PR **${head} → ${base}**: ${syncUrl}`, - ``, - `Merge status: **${status}**`, - note - ].join('\n'); - - await github.rest.issues.createComment({ owner, repo, issue_number, body }); \ No newline at end of file diff --git a/.github/workflows/verify-changeset.yml b/.github/workflows/verify-changeset.yml deleted file mode 100644 index 2bac53f0..00000000 --- a/.github/workflows/verify-changeset.yml +++ /dev/null @@ -1,66 +0,0 @@ -name: Verify Changeset -on: - pull_request: - types: [opened, reopened, synchronize, ready_for_review] - branches-ignore: - - main - - release/** - - conflict/* - - sync/* - paths-ignore: - - '.github/**' - - '.cargo/**' - - '.direnv/**' - - '.vscode/**' - - 'docs/**' - - 'Cargo.*' - - 'crates/**/Cargo.*' - - '*.md' - workflow_dispatch: - -jobs: - verify-changeset: - if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changeset') && !startsWith(github.head_ref, 'sync/') && !startsWith(github.head_ref, 'conflict/') && !github.event.pull_request.draft }} - name: Verify - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - contents: read - steps: - - name: Verify changeset included - uses: actions/github-script@v7 - with: - script: | - const dir = '.changesets/'; - const pr = context.payload.pull_request; - const files = await github.paginate( - github.rest.pulls.listFiles, - { owner: context.repo.owner, repo: context.repo.repo, pull_number: pr.number, per_page: 100 } - ); - const ok = files.some(f => - f.filename.startsWith(dir) && - ['added','modified','renamed'].includes(f.status) - ); - if (!ok) { - core.setFailed(`No changeset added to ${dir}.`); - } else { - core.info(`Changeset found under ${dir}.`); - } - core.setOutput('ok', ok ? 
'true' : 'false'); - - name: Add changeset missing comment on failure - uses: actions/github-script@v7 - if: failure() - with: - script: | - const pr = context.payload.pull_request; - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: pr.number, - body: [ - "❌ **Changeset file missing for PR**", - "", - "All changes should include an associated changeset file.", - "Please refer to [README](https://github.com/apollographql/apollo-mcp-server/blob/main/.changesets/README.md) for more information on generating changesets." - ].join("\n") - }); diff --git a/Cargo.lock b/Cargo.lock index 5dbcd15e..d039baa4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -197,69 +197,6 @@ dependencies = [ "wiremock", ] -[[package]] -name = "apollo-mcp-server" -version = "1.0.0" -dependencies = [ - "anyhow", - "apollo-compiler", - "apollo-federation", - "apollo-mcp-registry", - "apollo-schema-index", - "async-trait", - "axum", - "axum-extra", - "axum-otel-metrics", - "axum-tracing-opentelemetry", - "bon", - "chrono", - "clap", - "cruet", - "figment", - "futures", - "headers", - "http", - "humantime-serde", - "insta", - "jsonschema", - "jsonwebtoken", - "jwks", - "lz-str", - "mockito", - "opentelemetry", - "opentelemetry-appender-log", - "opentelemetry-otlp", - "opentelemetry-resource-detectors", - "opentelemetry-semantic-conventions", - "opentelemetry-stdout", - "opentelemetry_sdk", - "prettyplease", - "quote", - "regex", - "reqwest", - "reqwest-middleware", - "reqwest-tracing", - "rmcp", - "rstest", - "schemars", - "serde", - "serde_json", - "syn 2.0.106", - "thiserror 2.0.17", - "tokio", - "tokio-util", - "toml", - "tower", - "tower-http", - "tracing", - "tracing-appender", - "tracing-core", - "tracing-opentelemetry", - "tracing-subscriber", - "tracing-test", - "url", -] - [[package]] name = "apollo-parser" version = "0.8.4" @@ -628,6 +565,12 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" version = "0.4.42" @@ -767,9 +710,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.4" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ "core-foundation-sys", "libc", @@ -909,6 +852,70 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "dc-mcp-server" +version = "1.0.0" +dependencies = [ + "anyhow", + "apollo-compiler", + "apollo-federation", + "apollo-mcp-registry", + "apollo-schema-index", + "async-trait", + "axum", + "axum-extra", + "axum-otel-metrics", + "axum-tracing-opentelemetry", + "bon", + "chrono", + "clap", + "cruet", + "figment", + "futures", + "headers", + "http", + "humantime-serde", + "insta", + "jsonschema", + "jsonwebtoken", + "jwks", + "lz-str", + "mockito", + "opentelemetry", + "opentelemetry-appender-log", + "opentelemetry-otlp", + "opentelemetry-resource-detectors", + "opentelemetry-semantic-conventions", + "opentelemetry-stdout", + "opentelemetry_sdk", + "prettyplease", + "quote", + "regex", + "reqwest", + "reqwest-middleware", + "reqwest-tracing", + "rmcp", + "rstest", + "schemars", + "serde", + "serde_json", + "syn 2.0.106", + "tempfile", + "thiserror 2.0.17", + "tokio", + "tokio-util", + "toml", + "tower", + "tower-http", + "tracing", + "tracing-appender", + "tracing-core", + "tracing-opentelemetry", + "tracing-subscriber", + "tracing-test", + "url", +] + [[package]] name = "deadpool" version = "0.12.3" @@ -1181,21 +1188,6 @@ version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "form_urlencoded" version = "1.2.2" @@ -1360,9 +1352,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", "wasi 0.14.7+wasi-0.2.4", + "wasm-bindgen", ] [[package]] @@ -1629,31 +1623,32 @@ dependencies = [ ] [[package]] -name = "hyper-timeout" -version = "0.5.2" +name = "hyper-rustls" +version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ + "http", "hyper", "hyper-util", - "pin-project-lite", + "rustls", + "rustls-native-certs", + "rustls-pki-types", "tokio", + "tokio-rustls", "tower-service", ] [[package]] -name = "hyper-tls" -version = "0.6.0" +name = "hyper-timeout" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "bytes", - "http-body-util", "hyper", "hyper-util", - "native-tls", + "pin-project-lite", "tokio", - "tokio-native-tls", "tower-service", ] @@ -2128,6 +2123,12 @@ dependencies = [ "hashbrown 
0.15.5", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lz-str" version = "0.2.1" @@ -2257,23 +2258,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2195bf6aa996a481483b29d62a7663eed3fe39600c460e323f8ff41e90bdd89b" -[[package]] -name = "native-tls" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "nom" version = "7.1.3" @@ -2469,60 +2453,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ce411919553d3f9fa53a0880544cda985a112117a0444d5ff1e870a893d6ea" -[[package]] -name = "openssl" -version = "0.10.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" -dependencies = [ - "bitflags 2.9.4", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" -[[package]] -name = "openssl-src" -version = "300.5.3+3.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dc6bad8cd0233b63971e232cc9c5e83039375b8586d2312f31fda85db8f888c2" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" -dependencies = [ - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - [[package]] name = "opentelemetry" version = "0.30.0" @@ -2903,6 +2839,61 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.1.1", + "rustls", + "socket2", + "thiserror 2.0.17", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +dependencies = [ + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash 2.1.1", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.17", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.52.0", +] + [[package]] name = "quote" version = "1.0.41" @@ -3101,20 +3092,22 @@ dependencies = [ "http-body", "http-body-util", "hyper", - "hyper-tls", + "hyper-rustls", "hyper-util", "js-sys", "log", - "native-tls", "percent-encoding", "pin-project-lite", + "quinn", + "rustls", + "rustls-native-certs", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", 
"sync_wrapper", "tokio", - "tokio-native-tls", + "tokio-rustls", "tokio-util", "tower", "tower-http", @@ -3321,15 +3314,53 @@ dependencies = [ "windows-sys 0.61.1", ] +[[package]] +name = "rustls" +version = "0.23.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework", +] + [[package]] name = "rustls-pki-types" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" dependencies = [ + "web-time", "zeroize", ] +[[package]] +name = "rustls-webpki" +version = "0.103.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.22" @@ -3405,9 +3436,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.11.1" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ "bitflags 2.9.4", "core-foundation", @@ -3697,6 +3728,12 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "syn" version = "1.0.109" @@ -4017,6 +4054,21 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.47.1" @@ -4049,12 +4101,12 @@ dependencies = [ ] [[package]] -name = "tokio-native-tls" -version = "0.3.1" +name = "tokio-rustls" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ - "native-tls", + "rustls", "tokio", ] @@ -4497,12 +4549,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - [[package]] name = "version_check" version = "0.9.5" diff --git a/Cargo.toml b/Cargo.toml index 12f4cdee..bc580719 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,16 +1,16 @@ [workspace] resolver = "2" members = [ - "crates/apollo-mcp-server", + "crates/dc-mcp-server", "crates/apollo-mcp-registry", "crates/apollo-schema-index", ] [workspace.package] -authors = ["Apollo "] +authors = ["DoControl "] edition = "2024" license-file = "LICENSE" -repository = "https://github.com/apollographql/apollo-mcp-server" +repository = 
"https://github.com/docontrol-io/dc-mcp-server" rust-version = "1.89.0" version = "1.0.0" @@ -27,7 +27,7 @@ insta = { version = "1.43.1", features = [ reqwest = { version = "0.12.15", default-features = false, features = [ "gzip", "json", - "native-tls-vendored", + "rustls-tls-native-roots", ] } rstest = "0.25.0" secrecy = { version = "0.10.3", features = ["serde"] } diff --git a/README.md b/README.md index 196e5561..5def9024 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,435 @@ -
-Apollo Client -
+# DoControl MCP Server -![version](https://img.shields.io/github/v/release/apollographql/apollo-mcp-server) -![ci workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/ci.yml) -![release binaries workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/release-bins.yml?label=release%20binaries) -![release container workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/release-container.yml?label=release%20container) -![license](https://img.shields.io/github/license/apollographql/apollo-mcp-server) -[![codecov](https://codecov.io/github/apollographql/apollo-mcp-server/graph/badge.svg?token=6NHuvZQ8ak)](https://codecov.io/github/apollographql/apollo-mcp-server) +This is a thin wrapper around [Apollo MCP Server](https://github.com/apollographql/apollo-mcp-server) configured specifically for DoControl's authentication flow. -# Apollo MCP Server +## What is Apollo MCP Server? -Apollo MCP Server is a [Model Context Protocol](https://modelcontextprotocol.io/) server that exposes GraphQL operations as MCP tools. It provides a standard way for AI models to access and orchestrate your APIs running with Apollo. +Apollo MCP Server is a [Model Context Protocol](https://modelcontextprotocol.io/) server that exposes GraphQL operations as MCP tools. It provides a standard way for AI models to access and orchestrate GraphQL APIs. -## Documentation +For full documentation about Apollo MCP Server capabilities, see the [official documentation](https://www.apollographql.com/docs/apollo-mcp-server/). -See [the documentation](https://www.apollographql.com/docs/apollo-mcp-server/) for full details. This README shows the basics of getting this MCP server running. More details are available on the documentation site. 
+## DoControl Authentication Flow + +This wrapper handles DoControl's OAuth token refresh flow automatically: + +### How It Works + +1. **On-Demand Refresh**: Tokens are refreshed automatically before each request when needed +2. **Smart Detection**: Refreshes if token has less than 2 minutes remaining (out of 5-minute lifetime) +3. **Config Update**: Fresh tokens are written back to the config file's auth section +4. **Shared Headers**: Tokens are updated in both config file and in-memory headers atomically +5. **No Background Tasks**: Refresh happens synchronously when needed, not in background +6. **GraphQL Requests**: All operations use the current valid access token + +This approach ensures: +- ✅ **No wasted refreshes** - Only refresh when token is actually needed +- ✅ **No startup delay** - Server starts instantly without initial token verification +- ✅ **Thread-safe** - Global token manager accessible from all request handlers +- ✅ **Reliable** - Synchronous refresh ensures token is valid before each request + +**Note**: DoControl tokens have a 5-minute lifetime. The server refreshes tokens on-demand before executing requests to ensure they're always valid. 
+ +### Environment Variables + +The server requires these environment variables: + +```bash +# DoControl Token Refresh +DC_TOKEN_REFRESH_ENABLED="true" +DC_REFRESH_TOKEN="your-refresh-token-from-docontrol" +DC_REFRESH_URL="https://auth.prod.docontrol.io/refresh" +DC_GRAPHQL_ENDPOINT="https://apollo-gateway-v4-api.prod.docontrol.io/graphql" + +# Apollo GraphOS API Key +DC_API_KEY="service:docontrol-api:your-apollo-key" + +# Optional: Override the hardcoded graph ref (defaults to "docontrol-api@current") +# DC_GRAPH_REF="docontrol-api@current" +``` + +### Configuration File + +Create a YAML configuration file (e.g., `config.yaml`): + +```yaml +# GraphQL endpoint URL +endpoint: https://apollo-gateway-v4-api.prod.docontrol.io/graphql + +# Transport configuration (stdio for MCP) +transport: + type: stdio + +# Authentication headers (automatically managed by token refresh) +headers: + Authorization: Bearer + +# Apollo GraphOS integration +graphos: + apollo-graph-ref: docontrol-api@current + apollo-key: service:docontrol-api:your-apollo-key + +# Mutation mode: "none" (read-only), "all" (full access) +allow-mutations: none + +# Operation source: use introspection to discover operations +operations: + - introspect + +# Logging configuration +logging: + level: error + format: plain + color: false + +# Introspection tools configuration +introspection: + execute: + enabled: true # Enable execute tool to run queries + introspect: + enabled: true # Enable introspect tool for schema discovery + minify: true # Minify schema output + search: + enabled: true # Enable search tool for finding types + minify: true # Minify search results + index_memory_bytes: 50000000 # Memory limit for search index + leaf_depth: 1 # Depth for leaf type expansion + validate: + enabled: true # Enable validate tool for query validation +``` + +#### Configuration Options Explained + +**Core Settings:** +- `endpoint`: The DoControl GraphQL API endpoint +- `transport.type`: `stdio` for MCP communication 
(required for MCP clients) + +**Authentication:** +- `headers.Authorization`: Automatically updated by token refresh system +- The token in this section is managed by the server - it will be overwritten on startup and during refresh + +**GraphOS Integration:** +- `apollo-graph-ref`: Your graph reference in Apollo Studio (e.g., `docontrol-api@current`) +- `apollo-key`: Your Apollo Studio API key for schema registry access + +**Security:** +- `allow-mutations`: Set to `none` for read-only access, `all` to allow mutations + +**Operations:** +- `introspect`: Use introspection to discover all queries and mutations automatically +- Alternative: `uplink` to use Apollo Studio operation collections + +**Introspection Tools:** +The server provides 4 MCP tools when introspection is enabled: + +1. **`execute`**: Run GraphQL queries and mutations + - Validates operation syntax + - Executes against the live endpoint + - Returns JSON results + +2. **`introspect`**: Explore the GraphQL schema + - Get type information with hierarchy + - Discover fields, arguments, and descriptions + - Navigate relationships between types + +3. **`search`**: Find types in the schema by name + - Fuzzy search across all types + - Returns matching type definitions + - Useful for discovery + +4. **`validate`**: Validate GraphQL operations before execution + - Syntax checking + - Schema validation + - Helpful for debugging + +**Note**: The `apollo_key` can reference environment variables using `${DC_API_KEY}` syntax. + +**Note**: The `Authorization` header is automatically managed by the token refresh system. You don't need to manually update it. 
+ +## Quick Start Setup Guide + +### Step 1: Install the Server + +**Option A: Download from Releases** +```bash +# macOS (Apple Silicon) +curl -L https://github.com/docontrol-io/dc-mcp-server/releases/latest/download/dc-mcp-server-macos-aarch64.tar.gz | tar xz +chmod +x dc-mcp-server + +# Linux +curl -L https://github.com/docontrol-io/dc-mcp-server/releases/latest/download/dc-mcp-server-linux-x86_64.tar.gz | tar xz +chmod +x dc-mcp-server + +# Move to a permanent location +sudo mv dc-mcp-server /usr/local/bin/ +``` + +**Option B: Build from Source** +```bash +git clone https://github.com/docontrol-io/dc-mcp-server.git +cd dc-mcp-server +cargo build --release +sudo cp target/release/dc-mcp-server /usr/local/bin/ +``` + +### Step 2: Create Configuration File + +Create a file named `docontrol-config.yaml`: + +```yaml +endpoint: https://apollo-gateway-v4-api.prod.docontrol.io/graphql +transport: + type: stdio +headers: + Authorization: Bearer placeholder # Will be auto-updated +graphos: + apollo-graph-ref: docontrol-api@current + apollo-key: service:docontrol-api:YOUR_APOLLO_KEY_HERE +allow-mutations: none +operations: + - introspect +logging: + level: error + format: plain + color: false +introspection: + execute: + enabled: true + introspect: + enabled: true + minify: true + search: + enabled: true + minify: true + validate: + enabled: true +``` + +**Replace `YOUR_APOLLO_KEY_HERE`** with your actual Apollo Studio API key. + +### Step 3: Get Your Credentials + +You'll need two secrets from DoControl: + +1. **Refresh Token** (`DC_REFRESH_TOKEN`): + - Obtain from DoControl OAuth authentication flow + - This is a long-lived token used to get fresh access tokens + - Keep this secret secure! + +2. 
**Apollo API Key** (`DC_API_KEY`): + - Format: `service:docontrol-api:xxxxx` + - Used to access Apollo Studio for schema registry + +### Step 4: Configure Your MCP Client + +**For Cursor:** + +Edit `~/.cursor/mcp.json`: + +```json +{ + "mcpServers": { + "dc-mcp-server": { + "command": "/usr/local/bin/dc-mcp-server", + "args": ["/absolute/path/to/docontrol-config.yaml"], + "env": { + "DC_TOKEN_REFRESH_ENABLED": "true", + "DC_REFRESH_TOKEN": "YOUR_REFRESH_TOKEN_HERE", + "DC_REFRESH_URL": "https://auth.prod.docontrol.io/refresh", + "DC_GRAPHQL_ENDPOINT": "https://apollo-gateway-v4-api.prod.docontrol.io/graphql", + "DC_API_KEY": "service:docontrol-api:YOUR_KEY_HERE", + "RUST_LOG": "info", + "RUSTLS_SYSTEM_CERT_ROOT": "1" + } + } + } +} +``` + +**For Claude Desktop (macOS):** + +Edit `~/Library/Application Support/Claude/claude_desktop_config.json`: + +```json +{ + "mcpServers": { + "dc-mcp-server": { + "command": "/usr/local/bin/dc-mcp-server", + "args": ["/absolute/path/to/docontrol-config.yaml"], + "env": { + "DC_TOKEN_REFRESH_ENABLED": "true", + "DC_REFRESH_TOKEN": "YOUR_REFRESH_TOKEN_HERE", + "DC_REFRESH_URL": "https://auth.prod.docontrol.io/refresh", + "DC_GRAPHQL_ENDPOINT": "https://apollo-gateway-v4-api.prod.docontrol.io/graphql", + "DC_API_KEY": "service:docontrol-api:YOUR_KEY_HERE", + "RUST_LOG": "info", + "RUSTLS_SYSTEM_CERT_ROOT": "1" + } + } + } +} +``` + +**Important Notes:** +- Use **absolute paths** for both the command and config file +- Replace `YOUR_REFRESH_TOKEN_HERE` and `YOUR_KEY_HERE` with actual values +- The `RUSTLS_SYSTEM_CERT_ROOT=1` is required for SSL certificate validation + +### Step 5: Restart Your MCP Client + +- **Cursor**: Restart the application or reload the window +- **Claude Desktop**: Quit and restart the application + +### Step 6: Verify Setup + +In your MCP client, you should see 4 new tools available: +- ✅ `execute` - Run GraphQL queries +- ✅ `introspect` - Explore the schema +- ✅ `search` - Search for types +- ✅ `validate` - 
Validate queries + +Try asking: "What is the company information?" or "Show me the company details" + +## MCP Client Configuration Reference + +### Using with MCP Inspector + +For testing and debugging: + +```bash +export DC_TOKEN_REFRESH_ENABLED="true" +export DC_REFRESH_TOKEN="your-refresh-token" +export DC_REFRESH_URL="https://auth.prod.docontrol.io/refresh" +export DC_GRAPHQL_ENDPOINT="https://apollo-gateway-v4-api.prod.docontrol.io/graphql" +export DC_GRAPH_REF="docontrol-api@current" +export DC_API_KEY="service:docontrol-api:your-key" + +npx @modelcontextprotocol/inspector dc-mcp-server config.yaml +``` + +## How Introspection Works + +The server automatically discovers all available GraphQL operations by introspecting the schema: + +1. **Schema Discovery**: Introspects the GraphQL endpoint to discover all types and fields +2. **Tool Generation**: Each query and mutation becomes an MCP tool +3. **Dynamic**: Changes to the GraphQL schema are automatically reflected +4. **No Manual Configuration**: No need to maintain operation files + +All queries and mutations from the DoControl GraphQL API are automatically available as tools to AI models. ## Installation -You can either build this server from source, if you have Rust installed on your workstation, or you can follow the [installation guide](https://www.apollographql.com/docs/apollo-mcp-server/run). To build from source, run `cargo build` from the root of this repository and the server will be built in the `target/debug` directory. +### From Release + +Download the latest release for your platform from the [releases page](https://github.com/docontrol-io/dc-mcp-server/releases): +- **Linux**: `dc-mcp-server-linux-x86_64.tar.gz` +- **macOS**: `dc-mcp-server-macos-aarch64.tar.gz` +- **Windows**: `dc-mcp-server-windows-x86_64.tar.gz` + +### From Source + +```bash +cargo build --release --package dc-mcp-server +cp target/release/dc-mcp-server /usr/local/bin/ +``` + +## Example Setup + +1. 
**Create configuration file** (`config.yaml`): +```yaml +endpoint: "https://apollo-gateway-v4-api.prod.docontrol.io/graphql" +operations: introspect +introspection: + query: true + mutation: true +``` + +2. **Get your credentials**: + - **Refresh Token**: From DoControl OAuth flow (secret) + - **Apollo Graph Ref**: Your graph identifier, e.g., `docontrol-api@current` (internal) + - **Apollo Key**: API key from Apollo Studio (secret) + +3. **Configure your MCP client** with environment variables (see configuration examples above) + +4. **Start your MCP client** - the server handles all authentication and operation discovery automatically! + +The AI assistant will have access to all GraphQL queries and mutations from the DoControl API. + +## Security Best Practices + +**All credentials are secrets and should be protected:** + +- ✅ **Never commit credentials** to version control +- ✅ **Use environment variables** instead of hardcoding in config files +- ✅ **Store tokens securely** - use secret management systems (e.g., 1Password, AWS Secrets Manager) +- ✅ **Rotate tokens regularly** - follow DoControl security best practices +- ✅ **Limit permissions** - use read-only tokens when possible + +**Secrets to protect:** +- `DC_REFRESH_TOKEN` - DoControl OAuth refresh token +- `DC_API_KEY` - Apollo Studio API key +- Config files containing tokens + +## How Token Refresh Works + +The server uses an intelligent on-demand token refresh strategy: + +### Startup +1. **Server Startup**: Reads `DC_REFRESH_TOKEN` from environment +2. **No Initial Refresh**: Server starts immediately without fetching tokens +3. **Global Token Manager**: TokenManager is initialized and stored globally +4. **Fast Startup**: No blocking network calls during initialization + +### During Operation +1. **Before Each Request**: Token manager checks if current token is valid +2. **Token Expiry Check**: Refreshes if less than 2 minutes remaining (out of 5-minute lifetime) +3. 
**Synchronous Refresh**: If needed, refreshes token before executing the request +4. **Atomic Updates**: Updates both config file and in-memory headers together +5. **Error Handling**: If refresh fails, request proceeds with current token + +### Token Lifetime +- **DoControl tokens expire after 5 minutes** +- **Refresh threshold: 2 minutes remaining** - ensures token won't expire during request +- First request after startup will always refresh (no initial token) +- Token is reused across multiple requests within the 3-minute window (5min - 2min threshold) +- Proactive refresh prevents mid-request token expiry + +### Benefits +- **Efficient**: Tokens are reused across multiple requests +- **Reliable**: Token is always validated before use +- **Fast Startup**: Server is ready instantly +- **Thread-Safe**: Global static ensures safe concurrent access +- **No Background Tasks**: Simpler architecture, easier to debug -## Getting started +## Development -Follow the [quickstart tutorial](https://www.apollographql.com/docs/apollo-mcp-server/quickstart) to get started with this server. +### Running Tests -## Usage +```bash +cargo test --workspace +``` -Full usage of Apollo MCP Server is documented on the [user guide](https://www.apollographql.com/docs/apollo-mcp-server/run). There are a few items that are necessary for this server to function. Specifically, the following things must be configured: +### Building -1. A graph for the MCP server to sit in front of. -2. Definitions for the GraphQL operations that should be exposed as MCP tools. -3. A configuration file describing how the MCP server should run. -4. A connection to an MCP client, such as an LLM or [MCP inspector](https://modelcontextprotocol.io/legacy/tools/inspector). +```bash +cargo build --release +``` -These are all described on the user guide. 
Specific configuration options for the configuration file are documented in the [config file reference](https://www.apollographql.com/docs/apollo-mcp-server/config-file). +### Debugging -## Contributions +Enable debug logging: +```bash +RUST_LOG=debug dc-mcp-server config.yaml +``` -Checkout the [contributor guidelines](https://github.com/apollographql/apollo-mcp-server/blob/main/CONTRIBUTING.md) for more information. +## Upstream -## Licensing +This project is based on [Apollo MCP Server](https://github.com/apollographql/apollo-mcp-server). For general MCP server features and documentation, refer to the upstream project. -This project is licensed under the MIT License. See the [LICENSE](./LICENSE) file for the full license text. +## License -# Security +This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. -Refer to our [security policy](https://github.com/apollographql/.github/blob/main/SECURITY.md). +## Contributing -> [!IMPORTANT] -> **Do not open up a GitHub issue if a found bug is a security vulnerability**, and instead to refer to our [security policy](https://github.com/apollographql/.github/blob/main/SECURITY.md). +Contributions are welcome! Please feel free to submit a Pull Request. 
diff --git a/crates/apollo-mcp-server/Cargo.toml b/crates/dc-mcp-server/Cargo.toml similarity index 95% rename from crates/apollo-mcp-server/Cargo.toml rename to crates/dc-mcp-server/Cargo.toml index b2b0fcbc..0cd2bbc5 100644 --- a/crates/apollo-mcp-server/Cargo.toml +++ b/crates/dc-mcp-server/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "apollo-mcp-server" +name = "dc-mcp-server" authors.workspace = true edition.workspace = true license-file.workspace = true @@ -8,7 +8,7 @@ rust-version.workspace = true version.workspace = true build = "build.rs" -default-run = "apollo-mcp-server" +default-run = "dc-mcp-server" [dependencies] anyhow = "1.0.98" @@ -70,14 +70,15 @@ tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } tracing.workspace = true url.workspace = true async-trait = "0.1.89" +chrono = { version = "0.4.41", default-features = false, features = ["serde"] } [dev-dependencies] -chrono = { version = "0.4.41", default-features = false, features = ["now"] } figment = { version = "0.10.19", features = ["test"] } insta.workspace = true mockito = "1.7.0" opentelemetry_sdk = { version = "0.30.0", features = ["testing"] } rstest.workspace = true +tempfile = "3.8.1" tokio.workspace = true tower = "0.5.2" tracing-test = "0.2.5" @@ -94,7 +95,7 @@ toml = "0.9.5" workspace = true [[bin]] -name = "apollo-mcp-server" +name = "dc-mcp-server" path = "src/main.rs" [[bin]] diff --git a/crates/apollo-mcp-server/build.rs b/crates/dc-mcp-server/build.rs similarity index 100% rename from crates/apollo-mcp-server/build.rs rename to crates/dc-mcp-server/build.rs diff --git a/crates/apollo-mcp-server/src/auth.rs b/crates/dc-mcp-server/src/auth.rs similarity index 100% rename from crates/apollo-mcp-server/src/auth.rs rename to crates/dc-mcp-server/src/auth.rs diff --git a/crates/apollo-mcp-server/src/auth/networked_token_validator.rs b/crates/dc-mcp-server/src/auth/networked_token_validator.rs similarity index 100% rename from 
crates/apollo-mcp-server/src/auth/networked_token_validator.rs rename to crates/dc-mcp-server/src/auth/networked_token_validator.rs diff --git a/crates/apollo-mcp-server/src/auth/protected_resource.rs b/crates/dc-mcp-server/src/auth/protected_resource.rs similarity index 100% rename from crates/apollo-mcp-server/src/auth/protected_resource.rs rename to crates/dc-mcp-server/src/auth/protected_resource.rs diff --git a/crates/apollo-mcp-server/src/auth/valid_token.rs b/crates/dc-mcp-server/src/auth/valid_token.rs similarity index 100% rename from crates/apollo-mcp-server/src/auth/valid_token.rs rename to crates/dc-mcp-server/src/auth/valid_token.rs diff --git a/crates/apollo-mcp-server/src/auth/www_authenticate.rs b/crates/dc-mcp-server/src/auth/www_authenticate.rs similarity index 100% rename from crates/apollo-mcp-server/src/auth/www_authenticate.rs rename to crates/dc-mcp-server/src/auth/www_authenticate.rs diff --git a/crates/dc-mcp-server/src/config_manager.rs b/crates/dc-mcp-server/src/config_manager.rs new file mode 100644 index 00000000..7a6a96a0 --- /dev/null +++ b/crates/dc-mcp-server/src/config_manager.rs @@ -0,0 +1,220 @@ +//! 
Configuration file management for Apollo MCP Server + +use crate::errors::McpError; +use rmcp::model::ErrorCode; +use std::fs; +use std::path::Path; +use tracing::{debug, error, info, warn}; + +pub struct ConfigManager { + config_path: String, +} + +impl ConfigManager { + pub fn new(config_path: String) -> Self { + Self { config_path } + } + + /// Update the authorization token in the config file + pub fn update_auth_token(&self, new_token: &str) -> Result<(), McpError> { + info!("🔧 Updating config file with new token..."); + + // Create backup + let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S"); + let backup_path = format!("{}.backup.{}", self.config_path, timestamp); + + if let Err(e) = fs::copy(&self.config_path, &backup_path) { + warn!("Failed to create backup: {}", e); + } else { + info!("💾 Backup created: {}", backup_path); + } + + // Read current config + let config_content = fs::read_to_string(&self.config_path).map_err(|e| { + error!("Failed to read config file: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to read config file: {}", e), + None, + ) + })?; + + // Update authorization header + let updated_content = config_content + .lines() + .map(|line| { + if line.contains("Authorization: Bearer") { + // Preserve leading whitespace (indentation) + let indent = line + .chars() + .take_while(|c| c.is_whitespace()) + .collect::<String>(); + format!("{}Authorization: Bearer {}", indent, new_token) + } else { + line.to_string() + } + }) + .collect::<Vec<_>>() + .join("\n"); + + // Write updated config + fs::write(&self.config_path, updated_content).map_err(|e| { + error!("Failed to write updated config file: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to write updated config file: {}", e), + None, + ) + })?; + + info!("✅ Config file updated with new token"); + Ok(()) + } + + /// Read the current authorization token from config file + pub fn get_current_token(&self) -> Result<Option<String>, McpError> { + let config_content =
fs::read_to_string(&self.config_path).map_err(|e| { + error!("Failed to read config file: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to read config file: {}", e), + None, + ) + })?; + + for line in config_content.lines() { + if line.contains("Authorization: Bearer") + && let Some(token) = line.split("Bearer ").nth(1) + { + return Ok(Some(token.trim().to_string())); + } + } + + Ok(None) + } + + /// Verify config file exists and is readable + pub fn verify_config(&self) -> Result<(), McpError> { + if !Path::new(&self.config_path).exists() { + return Err(McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Config file does not exist: {}", self.config_path), + None, + )); + } + + fs::read_to_string(&self.config_path).map_err(|e| { + error!("Config file is not readable: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Config file is not readable: {}", e), + None, + ) + })?; + + debug!("Config file verified: {}", self.config_path); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + /// Test config file creation and reading + #[test] + fn test_config_file_operations() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("test_config.yaml"); + + // Create initial config + let initial_config = r#" +endpoint: "https://api.example.com/graphql" +headers: + Authorization: Bearer initial_token + Content-Type: "application/json" +"#; + fs::write(&config_path, initial_config).unwrap(); + + let config_manager = ConfigManager::new(config_path.to_string_lossy().to_string()); + + // Test reading current token + let token = config_manager.get_current_token().unwrap(); + assert_eq!(token, Some("initial_token".to_string())); + + // Test updating token + config_manager.update_auth_token("new_token").unwrap(); + + // Verify token was updated + let updated_token = config_manager.get_current_token().unwrap(); + assert_eq!(updated_token, 
Some("new_token".to_string())); + + // Verify config file content + let config_content = fs::read_to_string(&config_path).unwrap(); + assert!(config_content.contains("Authorization: Bearer new_token")); + } + + /// Test config file backup creation + #[test] + fn test_config_backup_creation() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("test_config.yaml"); + + let initial_config = r#" +endpoint: "https://api.example.com/graphql" +headers: + Authorization: Bearer old_token +"#; + fs::write(&config_path, initial_config).unwrap(); + + let config_manager = ConfigManager::new(config_path.to_string_lossy().to_string()); + + // Count files before update + let files_before: Vec<_> = fs::read_dir(temp_dir.path()).unwrap().collect(); + let count_before = files_before.len(); + + // Update token + config_manager.update_auth_token("new_token").unwrap(); + + // Count files after update + let files_after: Vec<_> = fs::read_dir(temp_dir.path()).unwrap().collect(); + let count_after = files_after.len(); + + // Should have one more file (backup) + assert_eq!(count_after, count_before + 1); + + // Verify backup file exists + let backup_exists = fs::read_dir(temp_dir.path()).unwrap().any(|entry| { + let entry = entry.unwrap(); + entry.path().to_string_lossy().contains(".backup.") + }); + assert!(backup_exists, "Backup file should exist"); + } + + /// Test config file verification + #[test] + fn test_config_verification() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("test_config.yaml"); + + let config_manager = ConfigManager::new(config_path.to_string_lossy().to_string()); + + // Test with non-existent file + let result = config_manager.verify_config(); + assert!(result.is_err()); + + // Create valid config file + let valid_config = r#" +endpoint: "https://api.example.com/graphql" +headers: + Authorization: Bearer test_token +"#; + fs::write(&config_path, valid_config).unwrap(); + + // Should now pass 
verification + let result = config_manager.verify_config(); + assert!(result.is_ok()); + } +} diff --git a/crates/apollo-mcp-server/src/config_schema.rs b/crates/dc-mcp-server/src/config_schema.rs similarity index 100% rename from crates/apollo-mcp-server/src/config_schema.rs rename to crates/dc-mcp-server/src/config_schema.rs diff --git a/crates/apollo-mcp-server/src/cors.rs b/crates/dc-mcp-server/src/cors.rs similarity index 100% rename from crates/apollo-mcp-server/src/cors.rs rename to crates/dc-mcp-server/src/cors.rs diff --git a/crates/apollo-mcp-server/src/custom_scalar_map.rs b/crates/dc-mcp-server/src/custom_scalar_map.rs similarity index 100% rename from crates/apollo-mcp-server/src/custom_scalar_map.rs rename to crates/dc-mcp-server/src/custom_scalar_map.rs diff --git a/crates/apollo-mcp-server/src/errors.rs b/crates/dc-mcp-server/src/errors.rs similarity index 100% rename from crates/apollo-mcp-server/src/errors.rs rename to crates/dc-mcp-server/src/errors.rs diff --git a/crates/apollo-mcp-server/src/event.rs b/crates/dc-mcp-server/src/event.rs similarity index 100% rename from crates/apollo-mcp-server/src/event.rs rename to crates/dc-mcp-server/src/event.rs diff --git a/crates/apollo-mcp-server/src/explorer.rs b/crates/dc-mcp-server/src/explorer.rs similarity index 100% rename from crates/apollo-mcp-server/src/explorer.rs rename to crates/dc-mcp-server/src/explorer.rs diff --git a/crates/apollo-mcp-server/src/graphql.rs b/crates/dc-mcp-server/src/graphql.rs similarity index 85% rename from crates/apollo-mcp-server/src/graphql.rs rename to crates/dc-mcp-server/src/graphql.rs index 8ed86941..bda98d88 100644 --- a/crates/apollo-mcp-server/src/graphql.rs +++ b/crates/dc-mcp-server/src/graphql.rs @@ -5,10 +5,9 @@ use crate::generated::telemetry::{TelemetryAttribute, TelemetryMetric}; use crate::meter; use opentelemetry::KeyValue; use reqwest::header::{HeaderMap, HeaderValue}; -use reqwest_middleware::{ClientBuilder, Extension}; -use 
reqwest_tracing::{OtelName, TracingMiddleware}; use rmcp::model::{CallToolResult, Content, ErrorCode}; use serde_json::{Map, Value}; +use std::time::Duration; use url::Url; #[derive(Debug)] @@ -39,7 +38,6 @@ pub trait Executable { fn headers(&self, default_headers: &HeaderMap) -> HeaderMap; /// Execute as a GraphQL operation using the endpoint and headers - #[tracing::instrument(skip(self, request))] async fn execute(&self, request: Request<'_>) -> Result { let meter = &meter::METER; let start = std::time::Instant::now(); @@ -86,14 +84,47 @@ pub trait Executable { } } - let client = ClientBuilder::new(reqwest::Client::new()) - .with_init(Extension(OtelName("mcp-graphql-client".into()))) - .with(TracingMiddleware::default()) - .build(); + let client = reqwest::Client::builder() + .timeout(Duration::from_secs( + std::env::var("REQWEST_TIMEOUT") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(30), + )) + .connect_timeout(Duration::from_secs( + std::env::var("REQWEST_CONNECT_TIMEOUT") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(10), + )) + .user_agent( + std::env::var("REQWEST_USER_AGENT").unwrap_or_else(|_| "curl/8.4.0".to_string()), + ) + .danger_accept_invalid_certs( + std::env::var("REQWEST_SSL_VERIFY") + .ok() + .map(|s| s == "false") + .unwrap_or(false), + ) + .danger_accept_invalid_hostnames( + std::env::var("REQWEST_SSL_VERIFY_HOSTNAME") + .ok() + .map(|s| s == "false") + .unwrap_or(false), + ) + .build() + .map_err(|e| { + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to create HTTP client: {e}"), + None, + ) + })?; - let result = client + let response = client .post(request.endpoint.as_str()) .headers(self.headers(&request.headers)) + .header("Content-Type", "application/json") .body(Value::Object(request_body).to_string()) .send() .await @@ -103,30 +134,42 @@ pub trait Executable { format!("Failed to send GraphQL request: {reqwest_error}"), None, ) - })? 
- .json::<Value>() - .await - .map_err(|reqwest_error| { - McpError::new( - ErrorCode::INTERNAL_ERROR, - format!("Failed to read GraphQL response body: {reqwest_error}"), - None, - ) - }) - .map(|json| CallToolResult { - content: vec![Content::json(&json).unwrap_or(Content::text(json.to_string()))], - is_error: Some( - json.get("errors") - .filter(|value| !matches!(value, Value::Null)) - .is_some() - && json - .get("data") - .filter(|value| !matches!(value, Value::Null)) - .is_none(), + })?; + + let status = response.status(); + let response_text = response.text().await.map_err(|reqwest_error| { + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to read response text: {reqwest_error}"), + None, + ) + })?; + + let json: Value = serde_json::from_str(&response_text).map_err(|reqwest_error| { + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!( + "Failed to parse JSON response (status: {}, body: {}): {reqwest_error}", + status, response_text ), - meta: None, - structured_content: Some(json), - }); + None, + ) + })?; + + let result = Ok(json).map(|json| CallToolResult { + content: vec![Content::json(&json).unwrap_or(Content::text(json.to_string()))], + is_error: Some( + json.get("errors") + .filter(|value| !matches!(value, Value::Null)) + .is_some() + && json + .get("data") + .filter(|value| !matches!(value, Value::Null)) + .is_none(), + ), + meta: None, + structured_content: Some(json), + }); // Record response metrics let attributes = vec![ @@ -372,7 +415,7 @@ mod test { assert!( e.message .to_string() - .starts_with("Failed to read GraphQL response body") + .starts_with("Failed to parse JSON response") ); } _ => { diff --git a/crates/apollo-mcp-server/src/health.rs b/crates/dc-mcp-server/src/health.rs similarity index 100% rename from crates/apollo-mcp-server/src/health.rs rename to crates/dc-mcp-server/src/health.rs diff --git a/crates/apollo-mcp-server/src/introspection.rs b/crates/dc-mcp-server/src/introspection.rs similarity index 100% rename from
crates/apollo-mcp-server/src/introspection.rs rename to crates/dc-mcp-server/src/introspection.rs diff --git a/crates/apollo-mcp-server/src/introspection/minify.rs b/crates/dc-mcp-server/src/introspection/minify.rs similarity index 100% rename from crates/apollo-mcp-server/src/introspection/minify.rs rename to crates/dc-mcp-server/src/introspection/minify.rs diff --git a/crates/apollo-mcp-server/src/introspection/snapshots/apollo_mcp_server__introspection__minify__tests__minify_schema.snap b/crates/dc-mcp-server/src/introspection/snapshots/dc_mcp_server__introspection__minify__tests__minify_schema.snap similarity index 100% rename from crates/apollo-mcp-server/src/introspection/snapshots/apollo_mcp_server__introspection__minify__tests__minify_schema.snap rename to crates/dc-mcp-server/src/introspection/snapshots/dc_mcp_server__introspection__minify__tests__minify_schema.snap diff --git a/crates/apollo-mcp-server/src/introspection/tools.rs b/crates/dc-mcp-server/src/introspection/tools.rs similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools.rs rename to crates/dc-mcp-server/src/introspection/tools.rs diff --git a/crates/apollo-mcp-server/src/introspection/tools/execute.rs b/crates/dc-mcp-server/src/introspection/tools/execute.rs similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools/execute.rs rename to crates/dc-mcp-server/src/introspection/tools/execute.rs diff --git a/crates/apollo-mcp-server/src/introspection/tools/introspect.rs b/crates/dc-mcp-server/src/introspection/tools/introspect.rs similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools/introspect.rs rename to crates/dc-mcp-server/src/introspection/tools/introspect.rs diff --git a/crates/apollo-mcp-server/src/introspection/tools/search.rs b/crates/dc-mcp-server/src/introspection/tools/search.rs similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools/search.rs rename to 
crates/dc-mcp-server/src/introspection/tools/search.rs diff --git a/crates/apollo-mcp-server/src/introspection/tools/snapshots/apollo_mcp_server__introspection__tools__search__tests__search_tool.snap b/crates/dc-mcp-server/src/introspection/tools/snapshots/dc_mcp_server__introspection__tools__search__tests__search_tool.snap similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools/snapshots/apollo_mcp_server__introspection__tools__search__tests__search_tool.snap rename to crates/dc-mcp-server/src/introspection/tools/snapshots/dc_mcp_server__introspection__tools__search__tests__search_tool.snap diff --git a/crates/apollo-mcp-server/src/introspection/tools/testdata/schema.graphql b/crates/dc-mcp-server/src/introspection/tools/testdata/schema.graphql similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools/testdata/schema.graphql rename to crates/dc-mcp-server/src/introspection/tools/testdata/schema.graphql diff --git a/crates/apollo-mcp-server/src/introspection/tools/validate.rs b/crates/dc-mcp-server/src/introspection/tools/validate.rs similarity index 100% rename from crates/apollo-mcp-server/src/introspection/tools/validate.rs rename to crates/dc-mcp-server/src/introspection/tools/validate.rs diff --git a/crates/apollo-mcp-server/src/json_schema.rs b/crates/dc-mcp-server/src/json_schema.rs similarity index 100% rename from crates/apollo-mcp-server/src/json_schema.rs rename to crates/dc-mcp-server/src/json_schema.rs diff --git a/crates/apollo-mcp-server/src/lib.rs b/crates/dc-mcp-server/src/lib.rs similarity index 90% rename from crates/apollo-mcp-server/src/lib.rs rename to crates/dc-mcp-server/src/lib.rs index 1737b4e1..154cc184 100644 --- a/crates/apollo-mcp-server/src/lib.rs +++ b/crates/dc-mcp-server/src/lib.rs @@ -1,6 +1,7 @@ #![cfg_attr(coverage_nightly, feature(coverage_attribute))] pub mod auth; +pub mod config_manager; pub mod cors; pub mod custom_scalar_map; pub mod errors; @@ -15,7 +16,9 @@ pub mod 
operations; pub mod sanitize; pub(crate) mod schema_tree_shake; pub mod server; +pub mod startup; pub mod telemetry_attributes; +pub mod token_manager; /// These values are generated at build time by build.rs using telemetry.toml as input. pub mod generated { diff --git a/crates/apollo-mcp-server/src/main.rs b/crates/dc-mcp-server/src/main.rs similarity index 69% rename from crates/apollo-mcp-server/src/main.rs rename to crates/dc-mcp-server/src/main.rs index 0d80e937..b2ec025d 100644 --- a/crates/apollo-mcp-server/src/main.rs +++ b/crates/dc-mcp-server/src/main.rs @@ -1,16 +1,19 @@ use std::path::PathBuf; +use std::sync::Arc; use apollo_mcp_registry::platform_api::operation_collections::collection_poller::CollectionSource; use apollo_mcp_registry::uplink::persisted_queries::ManifestSource; use apollo_mcp_registry::uplink::schema::SchemaSource; -use apollo_mcp_server::custom_scalar_map::CustomScalarMap; -use apollo_mcp_server::errors::ServerError; -use apollo_mcp_server::operations::OperationSource; -use apollo_mcp_server::server::Server; use clap::Parser; use clap::builder::Styles; use clap::builder::styling::{AnsiColor, Effects}; +use dc_mcp_server::custom_scalar_map::CustomScalarMap; +use dc_mcp_server::errors::ServerError; +use dc_mcp_server::operations::OperationSource; +use dc_mcp_server::server::Server; +use dc_mcp_server::startup; use runtime::IdOrDefault; +use tokio::sync::{Mutex, RwLock}; use tracing::{info, warn}; mod runtime; @@ -36,8 +39,12 @@ struct Args { #[tokio::main] async fn main() -> anyhow::Result<()> { - let config: runtime::Config = match Args::parse().config { - Some(config_path) => runtime::read_config(config_path)?, + let args = Args::parse(); + let config_path = args.config.clone(); + + // Read config for initial setup (telemetry) + let config: runtime::Config = match config_path.clone() { + Some(ref path) => runtime::read_config(path.clone())?, None => runtime::read_config_from_env().unwrap_or_default(), }; @@ -48,6 +55,42 @@ async fn 
main() -> anyhow::Result<()> { env!("CARGO_PKG_VERSION") ); + // Create shared headers that can be updated by token refresh + let shared_headers = Arc::new(RwLock::new(config.headers.clone())); + + // Initialize token manager if token refresh is enabled + let token_manager = if startup::is_token_refresh_enabled() { + if let (Some(refresh_token), Some(refresh_url), Some(graphql_endpoint), Some(config_file)) = ( + startup::get_refresh_token(), + startup::get_refresh_url(), + startup::get_graphql_endpoint(), + config_path.as_ref(), + ) { + info!("Token refresh enabled, initializing..."); + match startup::create_token_manager( + config_file.to_string_lossy().to_string(), + refresh_token, + refresh_url, + graphql_endpoint, + Arc::clone(&shared_headers), + ) { + Ok(tm) => { + info!("✅ Token manager ready - will refresh tokens on-demand before requests"); + Some(Arc::new(Mutex::new(tm))) + } + Err(e) => { + warn!("Token manager initialization failed: {}", e); + None + } + } + } else { + warn!("Token refresh enabled but missing required environment variables"); + None + } + } else { + None + }; + let schema_source = match config.schema { runtime::SchemaSource::Local { path } => SchemaSource::File { path, watch: true }, runtime::SchemaSource::Uplink => SchemaSource::Registry(config.graphos.uplink_config()?), @@ -108,13 +151,17 @@ async fn main() -> anyhow::Result<()> { let transport = config.transport.clone(); + // Read current headers from shared state + let current_headers = shared_headers.read().await.clone(); + Ok(Server::builder() .transport(config.transport) .schema_source(schema_source) .operation_source(operation_source) .endpoint(config.endpoint.into_inner()) .maybe_explorer_graph_ref(explorer_graph_ref) - .headers(config.headers) + .headers(current_headers) + .maybe_shared_headers(Some(shared_headers)) .execute_introspection(config.introspection.execute.enabled) .validate_introspection(config.introspection.validate.enabled) 
.introspect_introspection(config.introspection.introspect.enabled) @@ -125,11 +172,11 @@ async fn main() -> anyhow::Result<()> { .disable_type_description(config.overrides.disable_type_description) .disable_schema_description(config.overrides.disable_schema_description) .disable_auth_token_passthrough(match transport { - apollo_mcp_server::server::Transport::Stdio => false, - apollo_mcp_server::server::Transport::SSE { auth, .. } => auth + dc_mcp_server::server::Transport::Stdio => false, + dc_mcp_server::server::Transport::SSE { auth, .. } => auth .map(|a| a.disable_auth_token_passthrough) .unwrap_or(false), - apollo_mcp_server::server::Transport::StreamableHttp { auth, .. } => auth + dc_mcp_server::server::Transport::StreamableHttp { auth, .. } => auth .map(|a| a.disable_auth_token_passthrough) .unwrap_or(false), }) @@ -143,6 +190,7 @@ async fn main() -> anyhow::Result<()> { .index_memory_bytes(config.introspection.search.index_memory_bytes) .health_check(config.health_check) .cors(config.cors) + .maybe_token_manager(token_manager) .build() .start() .await?) 
diff --git a/crates/apollo-mcp-server/src/meter.rs b/crates/dc-mcp-server/src/meter.rs similarity index 100% rename from crates/apollo-mcp-server/src/meter.rs rename to crates/dc-mcp-server/src/meter.rs diff --git a/crates/apollo-mcp-server/src/operations.rs b/crates/dc-mcp-server/src/operations.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations.rs rename to crates/dc-mcp-server/src/operations.rs diff --git a/crates/apollo-mcp-server/src/operations/mutation_mode.rs b/crates/dc-mcp-server/src/operations/mutation_mode.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations/mutation_mode.rs rename to crates/dc-mcp-server/src/operations/mutation_mode.rs diff --git a/crates/apollo-mcp-server/src/operations/operation.rs b/crates/dc-mcp-server/src/operations/operation.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations/operation.rs rename to crates/dc-mcp-server/src/operations/operation.rs diff --git a/crates/apollo-mcp-server/src/operations/operation_source.rs b/crates/dc-mcp-server/src/operations/operation_source.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations/operation_source.rs rename to crates/dc-mcp-server/src/operations/operation_source.rs diff --git a/crates/apollo-mcp-server/src/operations/raw_operation.rs b/crates/dc-mcp-server/src/operations/raw_operation.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations/raw_operation.rs rename to crates/dc-mcp-server/src/operations/raw_operation.rs diff --git a/crates/apollo-mcp-server/src/operations/schema_walker.rs b/crates/dc-mcp-server/src/operations/schema_walker.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations/schema_walker.rs rename to crates/dc-mcp-server/src/operations/schema_walker.rs diff --git a/crates/apollo-mcp-server/src/operations/schema_walker/name.rs b/crates/dc-mcp-server/src/operations/schema_walker/name.rs similarity index 100% rename from 
crates/apollo-mcp-server/src/operations/schema_walker/name.rs rename to crates/dc-mcp-server/src/operations/schema_walker/name.rs diff --git a/crates/apollo-mcp-server/src/operations/schema_walker/type.rs b/crates/dc-mcp-server/src/operations/schema_walker/type.rs similarity index 100% rename from crates/apollo-mcp-server/src/operations/schema_walker/type.rs rename to crates/dc-mcp-server/src/operations/schema_walker/type.rs diff --git a/crates/apollo-mcp-server/src/runtime.rs b/crates/dc-mcp-server/src/runtime.rs similarity index 97% rename from crates/apollo-mcp-server/src/runtime.rs rename to crates/dc-mcp-server/src/runtime.rs index 71a39684..796b9346 100644 --- a/crates/apollo-mcp-server/src/runtime.rs +++ b/crates/dc-mcp-server/src/runtime.rs @@ -47,14 +47,14 @@ pub fn read_config(yaml_path: impl AsRef) -> Result Env { - Env::prefixed("APOLLO_") - .only(&["graph_ref", "key", "uplink_endpoints"]) + Env::prefixed("DC_") + .only(&["graph_ref", "api_key", "uplink_endpoints"]) .map(|key| match key.to_string().to_lowercase().as_str() { "graph_ref" => "GRAPHOS:APOLLO_GRAPH_REF".into(), - "key" => "GRAPHOS:APOLLO_KEY".into(), + "api_key" => "GRAPHOS:APOLLO_KEY".into(), "uplink_endpoints" => "GRAPHOS:APOLLO_UPLINK_ENDPOINTS".into(), // This case should never happen, so we just pass through this case as is @@ -143,7 +143,7 @@ mod test { jail.create_file(path, config)?; jail.set_env( - "APOLLO_UPLINK_ENDPOINTS", + "DC_UPLINK_ENDPOINTS", "http://from_env:4000/,http://from_env2:4000/", ); diff --git a/crates/apollo-mcp-server/src/runtime/config.rs b/crates/dc-mcp-server/src/runtime/config.rs similarity index 97% rename from crates/apollo-mcp-server/src/runtime/config.rs rename to crates/dc-mcp-server/src/runtime/config.rs index 598462bd..94666ede 100644 --- a/crates/apollo-mcp-server/src/runtime/config.rs +++ b/crates/dc-mcp-server/src/runtime/config.rs @@ -1,6 +1,6 @@ use std::path::PathBuf; -use apollo_mcp_server::{cors::CorsConfig, health::HealthCheckConfig, 
server::Transport}; +use dc_mcp_server::{cors::CorsConfig, health::HealthCheckConfig, server::Transport}; use reqwest::header::HeaderMap; use schemars::JsonSchema; use serde::Deserialize; diff --git a/crates/apollo-mcp-server/src/runtime/endpoint.rs b/crates/dc-mcp-server/src/runtime/endpoint.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/endpoint.rs rename to crates/dc-mcp-server/src/runtime/endpoint.rs diff --git a/crates/apollo-mcp-server/src/runtime/filtering_exporter.rs b/crates/dc-mcp-server/src/runtime/filtering_exporter.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/filtering_exporter.rs rename to crates/dc-mcp-server/src/runtime/filtering_exporter.rs diff --git a/crates/apollo-mcp-server/src/runtime/graphos.rs b/crates/dc-mcp-server/src/runtime/graphos.rs similarity index 85% rename from crates/apollo-mcp-server/src/runtime/graphos.rs rename to crates/dc-mcp-server/src/runtime/graphos.rs index 0723b557..dee2edb4 100644 --- a/crates/apollo-mcp-server/src/runtime/graphos.rs +++ b/crates/dc-mcp-server/src/runtime/graphos.rs @@ -4,7 +4,7 @@ use apollo_mcp_registry::{ platform_api::PlatformApiConfig, uplink::{Endpoints, SecretString, UplinkConfig}, }; -use apollo_mcp_server::errors::ServerError; +use dc_mcp_server::errors::ServerError; use schemars::JsonSchema; use serde::de::Error; use serde::{Deserialize, Deserializer}; @@ -13,8 +13,8 @@ use url::Url; #[cfg(test)] use serde::Serialize; -const APOLLO_GRAPH_REF_ENV: &str = "APOLLO_GRAPH_REF"; -const APOLLO_KEY_ENV: &str = "APOLLO_KEY"; +const DC_API_KEY_ENV: &str = "DC_API_KEY"; +const DEFAULT_GRAPH_REF: &str = "docontrol-api@current"; fn apollo_uplink_endpoints_deserializer<'de, D>(deserializer: D) -> Result, D::Error> where @@ -62,12 +62,13 @@ pub struct GraphOSConfig { } impl GraphOSConfig { - /// Extract the apollo graph reference from the config or from the current env + /// Extract the apollo graph reference from the config, env, or use hardcoded 
default #[allow(clippy::result_large_err)] pub fn graph_ref(&self) -> Result { - self.apollo_graph_ref + Ok(self + .apollo_graph_ref .clone() - .ok_or_else(|| ServerError::EnvironmentVariable(APOLLO_GRAPH_REF_ENV.to_string())) + .unwrap_or_else(|| DEFAULT_GRAPH_REF.to_string())) } /// Extract the apollo key from the config or from the current env @@ -75,7 +76,7 @@ impl GraphOSConfig { fn key(&self) -> Result { self.apollo_key .clone() - .ok_or_else(|| ServerError::EnvironmentVariable(APOLLO_GRAPH_REF_ENV.to_string())) + .ok_or_else(|| ServerError::EnvironmentVariable(DC_API_KEY_ENV.to_string())) } /// Generate an uplink config based on configuration params @@ -103,7 +104,7 @@ impl GraphOSConfig { let config = PlatformApiConfig::new( self.apollo_key .clone() - .ok_or(ServerError::EnvironmentVariable(APOLLO_KEY_ENV.to_string()))?, + .ok_or(ServerError::EnvironmentVariable(DC_API_KEY_ENV.to_string()))?, Duration::from_secs(30), Duration::from_secs(30), self.apollo_registry_url.clone(), diff --git a/crates/apollo-mcp-server/src/runtime/introspection.rs b/crates/dc-mcp-server/src/runtime/introspection.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/introspection.rs rename to crates/dc-mcp-server/src/runtime/introspection.rs diff --git a/crates/apollo-mcp-server/src/runtime/logging.rs b/crates/dc-mcp-server/src/runtime/logging.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/logging.rs rename to crates/dc-mcp-server/src/runtime/logging.rs diff --git a/crates/apollo-mcp-server/src/runtime/logging/defaults.rs b/crates/dc-mcp-server/src/runtime/logging/defaults.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/logging/defaults.rs rename to crates/dc-mcp-server/src/runtime/logging/defaults.rs diff --git a/crates/apollo-mcp-server/src/runtime/logging/log_rotation_kind.rs b/crates/dc-mcp-server/src/runtime/logging/log_rotation_kind.rs similarity index 100% rename from 
crates/apollo-mcp-server/src/runtime/logging/log_rotation_kind.rs rename to crates/dc-mcp-server/src/runtime/logging/log_rotation_kind.rs diff --git a/crates/apollo-mcp-server/src/runtime/logging/parsers.rs b/crates/dc-mcp-server/src/runtime/logging/parsers.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/logging/parsers.rs rename to crates/dc-mcp-server/src/runtime/logging/parsers.rs diff --git a/crates/apollo-mcp-server/src/runtime/operation_source.rs b/crates/dc-mcp-server/src/runtime/operation_source.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/operation_source.rs rename to crates/dc-mcp-server/src/runtime/operation_source.rs diff --git a/crates/apollo-mcp-server/src/runtime/overrides.rs b/crates/dc-mcp-server/src/runtime/overrides.rs similarity index 92% rename from crates/apollo-mcp-server/src/runtime/overrides.rs rename to crates/dc-mcp-server/src/runtime/overrides.rs index 5fcbe66b..f61422f7 100644 --- a/crates/apollo-mcp-server/src/runtime/overrides.rs +++ b/crates/dc-mcp-server/src/runtime/overrides.rs @@ -1,4 +1,4 @@ -use apollo_mcp_server::operations::MutationMode; +use dc_mcp_server::operations::MutationMode; use schemars::JsonSchema; use serde::Deserialize; diff --git a/crates/apollo-mcp-server/src/runtime/schema_source.rs b/crates/dc-mcp-server/src/runtime/schema_source.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/schema_source.rs rename to crates/dc-mcp-server/src/runtime/schema_source.rs diff --git a/crates/apollo-mcp-server/src/runtime/schemas.rs b/crates/dc-mcp-server/src/runtime/schemas.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/schemas.rs rename to crates/dc-mcp-server/src/runtime/schemas.rs diff --git a/crates/apollo-mcp-server/src/runtime/telemetry.rs b/crates/dc-mcp-server/src/runtime/telemetry.rs similarity index 99% rename from crates/apollo-mcp-server/src/runtime/telemetry.rs rename to 
crates/dc-mcp-server/src/runtime/telemetry.rs index d5d0688b..1c71c44f 100644 --- a/crates/apollo-mcp-server/src/runtime/telemetry.rs +++ b/crates/dc-mcp-server/src/runtime/telemetry.rs @@ -4,7 +4,7 @@ use crate::runtime::Config; use crate::runtime::filtering_exporter::FilteringExporter; use crate::runtime::logging::Logging; use crate::runtime::telemetry::sampler::SamplerOption; -use apollo_mcp_server::generated::telemetry::TelemetryAttribute; +use dc_mcp_server::generated::telemetry::TelemetryAttribute; use opentelemetry::{Key, KeyValue, global, trace::TracerProvider as _}; use opentelemetry_otlp::WithExportConfig; use opentelemetry_sdk::metrics::{Instrument, Stream}; diff --git a/crates/apollo-mcp-server/src/runtime/telemetry/sampler.rs b/crates/dc-mcp-server/src/runtime/telemetry/sampler.rs similarity index 100% rename from crates/apollo-mcp-server/src/runtime/telemetry/sampler.rs rename to crates/dc-mcp-server/src/runtime/telemetry/sampler.rs diff --git a/crates/apollo-mcp-server/src/sanitize.rs b/crates/dc-mcp-server/src/sanitize.rs similarity index 100% rename from crates/apollo-mcp-server/src/sanitize.rs rename to crates/dc-mcp-server/src/sanitize.rs diff --git a/crates/apollo-mcp-server/src/schema_tree_shake.rs b/crates/dc-mcp-server/src/schema_tree_shake.rs similarity index 100% rename from crates/apollo-mcp-server/src/schema_tree_shake.rs rename to crates/dc-mcp-server/src/schema_tree_shake.rs diff --git a/crates/apollo-mcp-server/src/server.rs b/crates/dc-mcp-server/src/server.rs similarity index 92% rename from crates/apollo-mcp-server/src/server.rs rename to crates/dc-mcp-server/src/server.rs index cdbd72e3..089f5f82 100644 --- a/crates/apollo-mcp-server/src/server.rs +++ b/crates/dc-mcp-server/src/server.rs @@ -1,10 +1,12 @@ use std::net::{IpAddr, Ipv4Addr}; +use std::sync::Arc; use apollo_mcp_registry::uplink::schema::SchemaSource; use bon::bon; use reqwest::header::{CONTENT_TYPE, HeaderMap, HeaderValue}; use schemars::JsonSchema; use 
serde::Deserialize; +use tokio::sync::{Mutex, RwLock}; use url::Url; use crate::auth; @@ -14,6 +16,7 @@ use crate::errors::ServerError; use crate::event::Event as ServerEvent; use crate::health::HealthCheckConfig; use crate::operations::{MutationMode, OperationSource}; +use crate::token_manager::TokenManager; mod states; @@ -26,6 +29,7 @@ pub struct Server { operation_source: OperationSource, endpoint: Url, headers: HeaderMap, + shared_headers: Option>>, execute_introspection: bool, validate_introspection: bool, introspect_introspection: bool, @@ -42,6 +46,7 @@ pub struct Server { index_memory_bytes: usize, health_check: HealthCheckConfig, cors: CorsConfig, + token_manager: Option>>, } #[derive(Debug, Clone, Deserialize, Default, JsonSchema)] @@ -111,6 +116,7 @@ impl Server { operation_source: OperationSource, endpoint: Url, headers: HeaderMap, + #[builder(into)] shared_headers: Option>>, execute_introspection: bool, validate_introspection: bool, introspect_introspection: bool, @@ -127,6 +133,7 @@ impl Server { index_memory_bytes: usize, health_check: HealthCheckConfig, cors: CorsConfig, + token_manager: Option>>, ) -> Self { let headers = { let mut headers = headers.clone(); @@ -139,6 +146,7 @@ impl Server { operation_source, endpoint, headers, + shared_headers, execute_introspection, validate_introspection, introspect_introspection, @@ -155,6 +163,7 @@ impl Server { index_memory_bytes, health_check, cors, + token_manager, } } diff --git a/crates/apollo-mcp-server/src/server/states.rs b/crates/dc-mcp-server/src/server/states.rs similarity index 97% rename from crates/apollo-mcp-server/src/server/states.rs rename to crates/dc-mcp-server/src/server/states.rs index c89f3a63..0a874cfd 100644 --- a/crates/apollo-mcp-server/src/server/states.rs +++ b/crates/dc-mcp-server/src/server/states.rs @@ -1,8 +1,11 @@ +use std::sync::Arc; + use apollo_compiler::{Schema, validation::Valid}; use apollo_federation::{ApiSchemaOptions, Supergraph}; use 
apollo_mcp_registry::uplink::schema::{SchemaState, event::Event as SchemaEvent}; use futures::{FutureExt as _, Stream, StreamExt as _, stream}; use reqwest::header::HeaderMap; +use tokio::sync::{Mutex, RwLock}; use url::Url; use crate::{ @@ -11,6 +14,7 @@ use crate::{ errors::{OperationError, ServerError}, health::HealthCheckConfig, operations::MutationMode, + token_manager::TokenManager, }; use super::{Server, ServerEvent, Transport}; @@ -34,6 +38,7 @@ struct Config { transport: Transport, endpoint: Url, headers: HeaderMap, + shared_headers: Option>>, execute_introspection: bool, validate_introspection: bool, introspect_introspection: bool, @@ -50,6 +55,7 @@ struct Config { index_memory_bytes: usize, health_check: HealthCheckConfig, cors: CorsConfig, + token_manager: Option>>, } impl StateMachine { @@ -68,6 +74,7 @@ impl StateMachine { transport: server.transport, endpoint: server.endpoint, headers: server.headers, + shared_headers: server.shared_headers, execute_introspection: server.execute_introspection, validate_introspection: server.validate_introspection, introspect_introspection: server.introspect_introspection, @@ -84,6 +91,7 @@ impl StateMachine { index_memory_bytes: server.index_memory_bytes, health_check: server.health_check, cors: server.cors, + token_manager: server.token_manager, }, }); diff --git a/crates/apollo-mcp-server/src/server/states/configuring.rs b/crates/dc-mcp-server/src/server/states/configuring.rs similarity index 100% rename from crates/apollo-mcp-server/src/server/states/configuring.rs rename to crates/dc-mcp-server/src/server/states/configuring.rs diff --git a/crates/apollo-mcp-server/src/server/states/operations_configured.rs b/crates/dc-mcp-server/src/server/states/operations_configured.rs similarity index 100% rename from crates/apollo-mcp-server/src/server/states/operations_configured.rs rename to crates/dc-mcp-server/src/server/states/operations_configured.rs diff --git a/crates/apollo-mcp-server/src/server/states/running.rs 
b/crates/dc-mcp-server/src/server/states/running.rs similarity index 95% rename from crates/apollo-mcp-server/src/server/states/running.rs rename to crates/dc-mcp-server/src/server/states/running.rs index 6111e0ce..fc8f6abb 100644 --- a/crates/apollo-mcp-server/src/server/states/running.rs +++ b/crates/dc-mcp-server/src/server/states/running.rs @@ -37,13 +37,14 @@ use crate::{ validate::{VALIDATE_TOOL_NAME, Validate}, }, operations::{MutationMode, Operation, RawOperation}, + token_manager::TokenManager, }; #[derive(Clone)] pub(super) struct Running { pub(super) schema: Arc>>, pub(super) operations: Arc>>, - pub(super) headers: HeaderMap, + pub(super) headers: Arc>, pub(super) endpoint: Url, pub(super) execute_tool: Option, pub(super) introspect_tool: Option, @@ -58,6 +59,7 @@ pub(super) struct Running { pub(super) disable_schema_description: bool, pub(super) disable_auth_token_passthrough: bool, pub(super) health_check: Option, + pub(super) token_manager: Option>>, } impl Running { @@ -209,6 +211,15 @@ impl ServerHandler for Running { request: CallToolRequestParam, context: RequestContext, ) -> Result { + // Proactively refresh token if needed before executing any tool + if let Some(token_manager) = &self.token_manager { + let mut tm = token_manager.lock().await; + if let Err(e) = tm.get_valid_token().await { + error!("Failed to refresh token before request: {}", e); + // Don't fail the request, let it try with the current token + } + } + let meter = &meter::METER; let start = std::time::Instant::now(); let tool_name = request.name.clone(); @@ -235,7 +246,7 @@ impl ServerHandler for Running { .await } EXECUTE_TOOL_NAME => { - let mut headers = self.headers.clone(); + let mut headers = self.headers.read().await.clone(); if let Some(axum_parts) = context.extensions.get::() { // Optionally extract the validated token and propagate it to upstream servers if present if !self.disable_auth_token_passthrough @@ -268,7 +279,7 @@ impl ServerHandler for Running { .await } _ 
=> { - let mut headers = self.headers.clone(); + let mut headers = self.headers.read().await.clone(); if let Some(axum_parts) = context.extensions.get::() { // Optionally extract the validated token and propagate it to upstream servers if present if !self.disable_auth_token_passthrough @@ -407,7 +418,7 @@ mod tests { let running = Running { schema: Arc::new(Mutex::new(schema)), operations: Arc::new(Mutex::new(vec![])), - headers: HeaderMap::new(), + headers: Arc::new(RwLock::new(HeaderMap::new())), endpoint: "http://localhost:4000".parse().unwrap(), execute_tool: None, introspect_tool: None, @@ -422,6 +433,7 @@ mod tests { disable_schema_description: false, disable_auth_token_passthrough: false, health_check: None, + token_manager: None, }; let operations = vec![ diff --git a/crates/apollo-mcp-server/src/server/states/schema_configured.rs b/crates/dc-mcp-server/src/server/states/schema_configured.rs similarity index 100% rename from crates/apollo-mcp-server/src/server/states/schema_configured.rs rename to crates/dc-mcp-server/src/server/states/schema_configured.rs diff --git a/crates/apollo-mcp-server/src/server/states/starting.rs b/crates/dc-mcp-server/src/server/states/starting.rs similarity index 97% rename from crates/apollo-mcp-server/src/server/states/starting.rs rename to crates/dc-mcp-server/src/server/states/starting.rs index c377da5a..9fc4570d 100644 --- a/crates/apollo-mcp-server/src/server/states/starting.rs +++ b/crates/dc-mcp-server/src/server/states/starting.rs @@ -139,7 +139,10 @@ impl Starting { let running = Running { schema, operations: Arc::new(Mutex::new(operations)), - headers: self.config.headers, + headers: self + .config + .shared_headers + .unwrap_or_else(|| Arc::new(RwLock::new(self.config.headers))), endpoint: self.config.endpoint, execute_tool, introspect_tool, @@ -154,6 +157,7 @@ impl Starting { disable_schema_description: self.config.disable_schema_description, disable_auth_token_passthrough: 
self.config.disable_auth_token_passthrough, health_check: health_check.clone(), + token_manager: self.config.token_manager.clone(), }; // Helper to enable auth @@ -355,6 +359,7 @@ mod tests { mutation_mode: MutationMode::All, execute_introspection: true, headers: HeaderMap::new(), + shared_headers: None, validate_introspection: true, introspect_introspection: true, search_introspection: true, @@ -372,6 +377,7 @@ mod tests { ..Default::default() }, cors: Default::default(), + token_manager: None, }, schema: Schema::parse_and_validate("type Query { hello: String }", "test.graphql") .expect("Valid schema"), diff --git a/crates/dc-mcp-server/src/startup.rs b/crates/dc-mcp-server/src/startup.rs new file mode 100644 index 00000000..e3ab8668 --- /dev/null +++ b/crates/dc-mcp-server/src/startup.rs @@ -0,0 +1,165 @@ +//! Startup and initialization functions for Apollo MCP Server + +use crate::config_manager::ConfigManager; +use crate::errors::McpError; +use crate::token_manager::TokenManager; +use reqwest::header::HeaderMap; +use std::env; +use std::sync::Arc; +use tokio::sync::RwLock; +use tracing::{info, warn}; + +/// Create and configure a TokenManager for on-demand token refresh +/// Returns the TokenManager which will refresh tokens when needed before requests +pub fn create_token_manager( + config_path: String, + refresh_token: String, + refresh_url: String, + _graphql_endpoint: String, + shared_headers: Arc>, +) -> Result { + info!("🎯 Apollo MCP Server initializing with token refresh..."); + info!("📝 Config path: {}", config_path); + info!("🔗 Refresh URL: {}", refresh_url); + + // Step 1: Create shared config manager + info!("Step 1: Creating config manager..."); + let config_manager = Arc::new(ConfigManager::new(config_path.clone())); + + info!("Step 1a: Verifying config..."); + config_manager.verify_config().map_err(|e| { + warn!("Config verification failed: {}", e); + e + })?; + info!("✅ Config verified"); + + // Step 2: Initialize token manager with injected 
config manager and headers + info!("Step 2: Creating token manager..."); + let mut token_manager = TokenManager::new(refresh_token, refresh_url)?; + info!("✅ Token manager created"); + + info!("Step 2a: Setting config manager..."); + token_manager.set_config_manager(Arc::clone(&config_manager)); + info!("✅ Config manager set"); + + info!("Step 2b: Setting headers..."); + token_manager.set_headers(Arc::clone(&shared_headers)); + info!("✅ Headers set"); + + info!("✅ Apollo MCP Server token manager ready for on-demand refresh"); + Ok(token_manager) +} + +/// Check if token refresh is enabled via environment variables +pub fn is_token_refresh_enabled() -> bool { + env::var("DC_TOKEN_REFRESH_ENABLED") + .ok() + .map(|s| s == "true") + .unwrap_or(false) +} + +/// Get refresh token from environment +pub fn get_refresh_token() -> Option { + env::var("DC_REFRESH_TOKEN").ok() +} + +/// Get refresh URL from environment +pub fn get_refresh_url() -> Option { + env::var("DC_REFRESH_URL").ok() +} + +/// Get GraphQL endpoint from environment +pub fn get_graphql_endpoint() -> Option { + env::var("DC_GRAPHQL_ENDPOINT").ok() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config_manager::ConfigManager; + use crate::token_manager::TokenManager; + use std::fs; + use tempfile::TempDir; + + /// Test complete initialization flow + #[tokio::test] + async fn test_complete_initialization_flow() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("test_config.yaml"); + + // Create initial config + let initial_config = r#" +endpoint: "https://api.example.com/graphql" +headers: + Authorization: "Bearer old_token" +"#; + fs::write(&config_path, initial_config).unwrap(); + + let _config_manager = ConfigManager::new(config_path.to_string_lossy().to_string()); + + // Test environment variable setup + unsafe { + std::env::set_var("DC_REFRESH_TOKEN", "test_refresh_token"); + std::env::set_var("DC_REFRESH_URL", "https://api.example.com/refresh"); + 
std::env::set_var("DC_GRAPHQL_ENDPOINT", "https://api.example.com/graphql"); + } + + // Test getting refresh token from environment + let refresh_token = get_refresh_token(); + assert_eq!(refresh_token, Some("test_refresh_token".to_string())); + + // Test getting refresh URL from environment + let refresh_url = get_refresh_url(); + assert_eq!( + refresh_url, + Some("https://api.example.com/refresh".to_string()) + ); + + // Test getting GraphQL endpoint from environment + let endpoint = get_graphql_endpoint(); + assert_eq!( + endpoint, + Some("https://api.example.com/graphql".to_string()) + ); + + // Clean up environment variables + unsafe { + std::env::remove_var("DC_REFRESH_TOKEN"); + std::env::remove_var("DC_REFRESH_URL"); + std::env::remove_var("DC_GRAPHQL_ENDPOINT"); + } + } + + /// Test initialization with missing environment variables + #[tokio::test] + async fn test_initialization_missing_env_vars() { + // Ensure environment variables are not set + unsafe { + std::env::remove_var("DC_REFRESH_TOKEN"); + std::env::remove_var("DC_REFRESH_URL"); + } + + // Test getting missing refresh token + let refresh_token = get_refresh_token(); + assert_eq!(refresh_token, None); + + // Test getting missing refresh URL + let refresh_url = get_refresh_url(); + assert_eq!(refresh_url, None); + } + + /// Test token manager integration + #[tokio::test] + async fn test_token_manager_integration() { + let refresh_token = "test_refresh_token"; + let refresh_url = "https://api.example.com/refresh"; + + // Test creating token manager + let token_manager = TokenManager::new(refresh_token.to_string(), refresh_url.to_string()); + assert!(token_manager.is_ok()); + + let token_manager = token_manager.unwrap(); + assert_eq!(token_manager.refresh_token(), refresh_token); + assert_eq!(token_manager.refresh_url(), refresh_url); + } +} diff --git a/crates/apollo-mcp-server/src/telemetry_attributes.rs b/crates/dc-mcp-server/src/telemetry_attributes.rs similarity index 100% rename from 
crates/apollo-mcp-server/src/telemetry_attributes.rs rename to crates/dc-mcp-server/src/telemetry_attributes.rs diff --git a/crates/dc-mcp-server/src/token_manager.rs b/crates/dc-mcp-server/src/token_manager.rs new file mode 100644 index 00000000..113289ed --- /dev/null +++ b/crates/dc-mcp-server/src/token_manager.rs @@ -0,0 +1,439 @@ +//! Token refresh functionality for Apollo MCP Server + +use crate::config_manager::ConfigManager; +use crate::errors::McpError; +use reqwest::Client; +use reqwest::header::{AUTHORIZATION, HeaderMap, HeaderValue}; +use rmcp::model::ErrorCode; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use std::time::{Duration, Instant}; +use tokio::sync::RwLock; +use tokio::time::sleep; +use tracing::{debug, error, info, warn}; + +#[derive(Debug, Serialize)] +struct RefreshTokenRequest { + #[serde(rename = "refreshToken")] + refresh_token: String, +} + +#[derive(Debug, Deserialize)] +struct RefreshTokenResponse { + #[serde(rename = "accessToken")] + access_token: String, + #[serde(rename = "expiresIn")] + expires_in: Option, +} + +pub struct TokenManager { + refresh_token: String, + refresh_url: String, + access_token: Option, + token_expires_at: Option, + client: Client, + config_manager: Option>, + headers: Option>>, +} + +impl TokenManager { + pub fn new(refresh_token: String, refresh_url: String) -> Result { + // Validate input parameters + if refresh_token.trim().is_empty() { + return Err(McpError::new( + ErrorCode::INVALID_PARAMS, + "Refresh token cannot be empty".to_string(), + None, + )); + } + + if refresh_url.trim().is_empty() { + return Err(McpError::new( + ErrorCode::INVALID_PARAMS, + "Refresh URL cannot be empty".to_string(), + None, + )); + } + + let client = Client::builder() + .timeout(Duration::from_secs(30)) + .connect_timeout(Duration::from_secs(10)) + .user_agent("curl/8.4.0") + .danger_accept_invalid_certs(false) + .danger_accept_invalid_hostnames(false) + .build() + .map_err(|e| { + McpError::new( + 
ErrorCode::INTERNAL_ERROR, + format!("Failed to create HTTP client: {}", e), + None, + ) + })?; + + Ok(Self { + refresh_token, + refresh_url, + access_token: None, + token_expires_at: None, + client, + config_manager: None, + headers: None, + }) + } + + /// Inject the config manager for automatic token persistence + pub fn set_config_manager(&mut self, config_manager: Arc) { + self.config_manager = Some(config_manager); + } + + /// Inject the shared headers for automatic token updates + pub fn set_headers(&mut self, headers: Arc>) { + self.headers = Some(headers); + } + + /// Get a valid access token, refreshing if necessary + pub async fn get_valid_token(&mut self) -> Result { + // Check if we have a valid token + if let Some(token) = &self.access_token + && let Some(expires_at) = self.token_expires_at + { + // Check how much time remains until expiry + let remaining = expires_at.saturating_duration_since(Instant::now()); + + // Refresh token if less than 2 minutes remaining (token lifetime is 5 minutes) + if remaining > Duration::from_secs(120) { + debug!( + "Using existing valid token (expires in {}s)", + remaining.as_secs() + ); + return Ok(token.clone()); + } + + info!( + "⏰ Token approaching expiry ({}s remaining), refreshing proactively", + remaining.as_secs() + ); + } + + // Need to refresh token + info!("🔄 Refreshing access token..."); + self.refresh_access_token().await + } + + /// Refresh the access token + async fn refresh_access_token(&mut self) -> Result { + let request_body = RefreshTokenRequest { + refresh_token: self.refresh_token.clone(), + }; + + debug!("Making token refresh request to: {}", self.refresh_url); + + let response = self + .client + .post(&self.refresh_url) + .header("Content-Type", "application/json") + .json(&request_body) + .send() + .await + .map_err(|e| { + error!("Failed to send token refresh request: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to refresh token: {}", e), + None, + ) + })?; + + let 
status = response.status(); + let response_text = response.text().await.map_err(|e| { + error!("Failed to read token refresh response: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to read token refresh response: {}", e), + None, + ) + })?; + + debug!( + "Token refresh response (status: {}): {}", + status, response_text + ); + + let token_response: RefreshTokenResponse = + serde_json::from_str(&response_text).map_err(|e| { + error!("Failed to parse token refresh response: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!( + "Failed to parse token refresh response (status: {}, body: {}): {}", + status, response_text, e + ), + None, + ) + })?; + + // Update token and expiry + self.access_token = Some(token_response.access_token.clone()); + if let Some(expires_in) = token_response.expires_in { + self.token_expires_at = Some(Instant::now() + Duration::from_secs(expires_in)); + info!( + "✅ Successfully refreshed access token (expires in {}s)", + expires_in + ); + } else { + // Default to 1 hour if no expiry provided + self.token_expires_at = Some(Instant::now() + Duration::from_secs(3600)); + info!("✅ Successfully refreshed access token (expires in 1h)"); + } + + // Create the header value first to ensure it's valid + let header_value = + HeaderValue::from_str(&format!("Bearer {}", token_response.access_token)).map_err( + |e| { + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to create header value from token: {}", e), + None, + ) + }, + )?; + + // Write the token to config file if config manager is set + if let Some(config_manager) = &self.config_manager { + config_manager + .update_auth_token(&token_response.access_token) + .map_err(|e| { + error!("Failed to write refreshed token to config file: {}", e); + e + })?; + info!("✅ Refreshed token written to config file"); + } + + // Update the shared headers if available + if let Some(headers) = &self.headers { + let mut headers_guard = headers.write().await; + 
headers_guard.insert(AUTHORIZATION, header_value); + info!("✅ Refreshed token updated in shared headers"); + } + + Ok(token_response.access_token) + } + + /// Verify token by making a test API call + pub async fn verify_token( + &self, + token: &str, + graphql_endpoint: &str, + ) -> Result { + debug!("🧪 Verifying token with API test..."); + + let test_query = serde_json::json!({ + "query": "query { company { name } }" + }); + + let response = self + .client + .post(graphql_endpoint) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", token)) + .json(&test_query) + .send() + .await + .map_err(|e| { + error!("Failed to verify token: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to verify token: {}", e), + None, + ) + })?; + + let response_text = response.text().await.map_err(|e| { + error!("Failed to read token verification response: {}", e); + McpError::new( + ErrorCode::INTERNAL_ERROR, + format!("Failed to read token verification response: {}", e), + None, + ) + })?; + + let is_valid = response_text.contains("\"name\""); + + if is_valid { + info!("✅ Token verification successful - API is accessible"); + } else { + warn!( + "❌ Token verification failed. 
API response: {}", + response_text + ); + } + + Ok(is_valid) + } + + /// Start background token refresh task + pub async fn start_refresh_task(&mut self, graphql_endpoint: String) { + let mut token_manager = self.clone(); + + tokio::spawn(async move { + loop { + // Wait 50 minutes (refresh every 50 minutes to be safe) + sleep(Duration::from_secs(3000)).await; + + match token_manager.get_valid_token().await { + Ok(token) => { + if let Err(e) = token_manager.verify_token(&token, &graphql_endpoint).await + { + error!("Token verification failed in background task: {}", e); + } else { + info!("✅ Background task: token refreshed and verified"); + } + } + Err(e) => { + error!("Background token refresh failed: {}", e); + } + } + } + }); + } +} + +impl Clone for TokenManager { + fn clone(&self) -> Self { + Self { + refresh_token: self.refresh_token.clone(), + refresh_url: self.refresh_url.clone(), + access_token: self.access_token.clone(), + token_expires_at: self.token_expires_at, + client: self.client.clone(), + config_manager: self.config_manager.clone(), + headers: self.headers.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config_manager::ConfigManager; + use std::fs; + use std::time::Instant; + use tempfile::TempDir; + use tokio::time::Duration; + + /// Test that token refresh stores token in memory + #[tokio::test] + async fn test_token_refresh_stores_in_memory() { + let temp_dir = TempDir::new().unwrap(); + let config_path = temp_dir.path().join("test_config.yaml"); + + // Create initial config + let initial_config = r#" +endpoint: "https://api.example.com/graphql" +headers: + Authorization: "Bearer old_token" +"#; + fs::write(&config_path, initial_config).unwrap(); + + let _config_manager = ConfigManager::new(config_path.to_string_lossy().to_string()); + + // Mock refresh URL (this would normally be a real endpoint) + let refresh_url = "https://api.example.com/refresh"; + let refresh_token = "refresh_token_123"; + + let token_manager 
= + TokenManager::new(refresh_token.to_string(), refresh_url.to_string()).unwrap(); + + // Initially no token in memory + assert!(token_manager.access_token.is_none()); + assert!(token_manager.token_expires_at.is_none()); + + // Note: This test would need a mock server to actually test token refresh + // For now, we test the structure and that it can be created + assert_eq!(token_manager.refresh_token, refresh_token); + assert_eq!(token_manager.refresh_url, refresh_url); + } + + /// Test token manager creation with invalid parameters + #[test] + fn test_token_manager_creation_error() { + // Test with empty refresh token + let result = TokenManager::new( + "".to_string(), + "https://api.example.com/refresh".to_string(), + ); + assert!(result.is_err()); + + // Test with empty refresh URL + let result = TokenManager::new("refresh_token".to_string(), "".to_string()); + assert!(result.is_err()); + } + + /// Test token expiry logic + #[tokio::test] + async fn test_token_expiry_logic() { + let refresh_url = "https://api.example.com/refresh"; + let refresh_token = "refresh_token_123"; + + let mut token_manager = + TokenManager::new(refresh_token.to_string(), refresh_url.to_string()).unwrap(); + + // Set a token that expires in the past + token_manager.access_token = Some("test_token".to_string()); + token_manager.token_expires_at = Some(Instant::now() - Duration::from_secs(3600)); + + // Token should be considered expired + let now = Instant::now(); + if let Some(expires_at) = token_manager.token_expires_at { + assert!(expires_at < now); + } + } + + /// Test token manager clone + #[test] + fn test_token_manager_clone() { + let refresh_url = "https://api.example.com/refresh"; + let refresh_token = "refresh_token_123"; + + let mut token_manager = + TokenManager::new(refresh_token.to_string(), refresh_url.to_string()).unwrap(); + token_manager.access_token = Some("test_token".to_string()); + token_manager.token_expires_at = Some(Instant::now() + Duration::from_secs(3600)); + 
+ let cloned_manager = token_manager.clone(); + + assert_eq!( + cloned_manager.refresh_token(), + token_manager.refresh_token() + ); + assert_eq!(cloned_manager.refresh_url(), token_manager.refresh_url()); + assert_eq!(cloned_manager.access_token(), token_manager.access_token()); + assert_eq!( + cloned_manager.token_expires_at(), + token_manager.token_expires_at() + ); + } + + // Test helper methods for TokenManager + impl TokenManager { + /// Get the refresh token (for testing) + pub fn refresh_token(&self) -> &str { + &self.refresh_token + } + + /// Get the refresh URL (for testing) + pub fn refresh_url(&self) -> &str { + &self.refresh_url + } + + /// Get the current access token (for testing) + pub fn access_token(&self) -> &Option { + &self.access_token + } + + /// Get the token expiry time (for testing) + pub fn token_expires_at(&self) -> &Option { + &self.token_expires_at + } + } +} diff --git a/crates/apollo-mcp-server/telemetry.toml b/crates/dc-mcp-server/telemetry.toml similarity index 100% rename from crates/apollo-mcp-server/telemetry.toml rename to crates/dc-mcp-server/telemetry.toml diff --git a/flake.nix b/flake.nix index 48a84835..da22e10e 100644 --- a/flake.nix +++ b/flake.nix @@ -101,15 +101,12 @@ # Cross targets for supported architectures cross = let # Note: x86_64-apple-darwin doesn't yet work with zig due to an upstream bug + # Keeping only macOS and Ubuntu (Linux GNU) targets for now supportedTargets = [ "aarch64-apple-darwin" - "aarch64-pc-windows-gnullvm" - "aarch64-unknown-linux-gnu" - "aarch64-unknown-linux-musl" "x86_64-apple-darwin" - "x86_64-pc-windows-gnullvm" + "aarch64-unknown-linux-gnu" "x86_64-unknown-linux-gnu" - "x86_64-unknown-linux-musl" ]; crossBuild = target: let diff --git a/nix/apollo-mcp.nix b/nix/apollo-mcp.nix index 0b63cf9a..ef44d0c1 100644 --- a/nix/apollo-mcp.nix +++ b/nix/apollo-mcp.nix @@ -15,7 +15,9 @@ graphqlFilter = path: _type: builtins.match ".*graphql$" path != null; testFilter = path: _type: builtins.match 
".*snap$" path != null; srcFilter = path: type: - (graphqlFilter path type) || (testFilter path type) || (craneLib.filterCargoSources path type); + (graphqlFilter path type) + || (testFilter path type) + || (craneLib.filterCargoSources path type); # Crane options src = pkgs.lib.cleanSourceWith { @@ -24,7 +26,8 @@ name = "source"; # Be reproducible, regardless of the directory name }; - craneLib = (crane.mkLib pkgs).overrideToolchain toolchain; + # Use default system toolchain to avoid compatibility issues + craneLib = crane.mkLib pkgs; craneCommonArgs = { inherit src; pname = "apollo-mcp"; @@ -33,6 +36,10 @@ nativeBuildInputs = [perl pkg-config]; buildInputs = []; + # Force native builds only to prevent cross-compilation + CARGO_BUILD_TARGET = pkgs.stdenv.hostPlatform.config; + CARGO_TARGET_DIR = "target"; + # Meta information about the packages meta = { description = "Apollo MCP Server"; @@ -46,32 +53,45 @@ # Generate a derivation for just the dependencies of the project so that they # can be cached across all of the various checks and builders. 
- cargoArtifacts = craneLib.buildDepsOnly craneCommonArgs; + # Use buildDepsOnly with target override to avoid cross-compilation + cargoArtifacts = craneLib.buildDepsOnly ( + craneCommonArgs + // { + # Force native target only + CARGO_BUILD_TARGET = pkgs.stdenv.hostPlatform.config; + # Override cargo check to specify target (crane already adds --release --locked) + cargoCheckExtraArgs = "--target ${pkgs.stdenv.hostPlatform.config}"; + # Override linker to use native gcc instead of cross-compilation linker + CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER = "gcc"; + CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_CC = "gcc"; + CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_CXX = "g++"; + } + ); in { # Expose the list of build dependencies for inheriting in dev shells nativeDependencies = craneCommonArgs.nativeBuildInputs; dependencies = craneCommonArgs.buildInputs; # Expose derivations that should be cached in CI - cache = [ - cargoArtifacts - ]; + cache = [cargoArtifacts]; # Expose checks for the project used by the root nix flake checks = { - clippy = craneLib.cargoClippy (craneCommonArgs + clippy = craneLib.cargoClippy ( + craneCommonArgs // { inherit cargoArtifacts; - cargoClippyExtraArgs = "--all-targets -- --deny warnings"; - }); - docs = craneLib.cargoDoc (craneCommonArgs + cargoClippyExtraArgs = "-- --deny warnings"; + } + ); + docs = craneLib.cargoDoc ( + craneCommonArgs // { inherit cargoArtifacts; - }); + } + ); - rustfmt = craneLib.cargoFmt { - inherit src; - }; + rustfmt = craneLib.cargoFmt {inherit src;}; toml-fmt = craneLib.taploFmt { src = pkgs.lib.sources.sourceFilesBySuffices src [".toml"]; }; @@ -98,16 +118,19 @@ in { # Helper for generating a command using cargo-zigbuild and other shell-expanded # env vars. 
mkCmd = cmd: - builtins.concatStringsSep " " ((lib.optionals stdenv.isDarwin ["SDKROOT=${apple-sdk.sdkroot}"]) + builtins.concatStringsSep " " ( + (lib.optionals stdenv.isDarwin ["SDKROOT=${apple-sdk.sdkroot}"]) ++ [ "CARGO_ZIGBUILD_CACHE_DIR=$TMP/.cache/cargo-zigbuild" "ZIG_LOCAL_CACHE_DIR=$TMP/.cache/zig-local" "ZIG_GLOBAL_CACHE_DIR=$TMP/.cache/zig-global" "${cargo-zigbuild-patched}/bin/cargo-zigbuild ${cmd}" - ]); + ] + ); in - craneLib.buildPackage (craneCommonArgs + craneLib.buildPackage ( + craneCommonArgs // { pname = craneCommonArgs.pname + "-${target}"; nativeBuildInputs = [ @@ -123,19 +146,20 @@ in { # Use zig for both CC and linker since it actually supports cross-compilation # nicely. - cargoExtraArgs = lib.strings.concatStringsSep " " ([ - "--target ${zig-target}" - ] + cargoExtraArgs = lib.strings.concatStringsSep " " ( + ["--target ${zig-target}"] # x86_64-apple-darwin compilation has a bug that causes release builds to # fail with "bad relocation", so we build debug targets for it instead. # See: https://github.com/rust-cross/cargo-zigbuild/issues/338 - ++ (lib.optionals (target != "x86_64-apple-darwin") ["--release"])); + ++ (lib.optionals (target != "x86_64-apple-darwin") ["--release"]) + ); cargoCheckCommand = mkCmd "check"; cargoBuildCommand = mkCmd "zigbuild"; # Make sure to compile it for the specified target CARGO_BUILD_TARGET = target; - }); + } + ); }; }