From ba1dba78049e97ce22099a2420ebcbc82c9f1f1f Mon Sep 17 00:00:00 2001
From: Samuel Monson
Date: Wed, 30 Jul 2025 16:24:33 -0400
Subject: [PATCH 1/3] Drop entrypoint script

---
 deploy/Containerfile |  2 +-
 deploy/entrypoint.sh | 43 -------------------------------------------
 2 files changed, 1 insertion(+), 44 deletions(-)
 delete mode 100755 deploy/entrypoint.sh

diff --git a/deploy/Containerfile b/deploy/Containerfile
index 2702e24d..2f5c492e 100644
--- a/deploy/Containerfile
+++ b/deploy/Containerfile
@@ -45,4 +45,4 @@ ENV GUIDELLM_TARGET="http://localhost:8000" \
     GUIDELLM_MAX_SECONDS="" \
     GUIDELLM_OUTPUT_PATH="/results/results.json"
 
-ENTRYPOINT [ "/opt/guidellm/bin/entrypoint.sh" ]
+ENTRYPOINT [ "/opt/guidellm/bin/guidellm" ]
diff --git a/deploy/entrypoint.sh b/deploy/entrypoint.sh
deleted file mode 100755
index d6ff4ea0..00000000
--- a/deploy/entrypoint.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# Path to the guidellm binary
-guidellm_bin="/opt/guidellm/bin/guidellm"
-
-# If we receive any arguments switch to guidellm command
-if [ $# -gt 0 ]; then
-  echo "Running command: guidellm $*"
-  exec $guidellm_bin "$@"
-fi
-
-# Get a list of environment variables that start with GUIDELLM_
-args="$(printenv | cut -d= -f1 | grep -E '^GUIDELLM_')"
-
-# NOTE: Bash array + exec prevent shell escape issues
-CMD=("${guidellm_bin}" "benchmark")
-
-# Parse environment variables for the benchmark command
-for var in $args; do
-  # Remove GUIDELLM_ prefix
-  arg_name="${var#GUIDELLM_}"
-
-  # If there is an extra underscore at the
-  # start than this is a config variable
-  if [ "${arg_name:0:1}" == "_" ]; then
-    continue
-  fi
-
-  # Convert to lowercase
-  arg_name="${arg_name,,}"
-  # Replace underscores with dashes
-  arg_name="${arg_name//_/-}"
-
-  # Add the argument to the command array if set
-  if [ -n "${!var}" ]; then
-    CMD+=("--${arg_name}" "${!var}")
-  fi
-done
-
-# Execute the command
-echo "Running command: ${CMD[*]}"
-exec "${CMD[@]}"

From cceadb534e0ee812d0671219e5af9580a04beb62 Mon Sep 17 00:00:00 2001
From: Samuel Monson
Date: Wed, 30 Jul 2025 16:29:23 -0400
Subject: [PATCH 2/3] Integrate changes from #213

---
 deploy/Containerfile | 38 ++++++++++++++++----------------------
 1 file changed, 16 insertions(+), 22 deletions(-)

diff --git a/deploy/Containerfile b/deploy/Containerfile
index 2f5c492e..7715de93 100644
--- a/deploy/Containerfile
+++ b/deploy/Containerfile
@@ -1,26 +1,26 @@
-ARG PYTHON=3.13
+ARG BASE_IMAGE=docker.io/python:3.13-slim
 
 # Use a multi-stage build to create a lightweight production image
-FROM docker.io/python:${PYTHON}-slim as builder
+FROM $BASE_IMAGE as builder
+
+# Ensure files are installed as root
+USER root
 
 # Copy repository files
-COPY / /src
+COPY / /opt/app-root/src
 
 # Create a venv and install guidellm
-RUN python3 -m venv /opt/guidellm \
-    && /opt/guidellm/bin/pip install --no-cache-dir /src
-
-# Copy entrypoint script into the venv bin directory
-RUN install -m0755 /src/deploy/entrypoint.sh /opt/guidellm/bin/entrypoint.sh
+RUN python3 -m venv /opt/app-root/guidellm \
+    && /opt/app-root/guidellm/bin/pip install --no-cache-dir /opt/app-root/src
 
 # Prod image
-FROM docker.io/python:${PYTHON}-slim
+FROM $BASE_IMAGE
 
 # Copy the virtual environment from the builder stage
-COPY --from=builder /opt/guidellm /opt/guidellm
+COPY --from=builder /opt/app-root/guidellm /opt/app-root/guidellm
 
 # Add guidellm bin to PATH
-ENV PATH="/opt/guidellm/bin:$PATH"
+ENV PATH="/opt/app-root/guidellm/bin:$PATH"
 
 # Create a non-root user
 RUN useradd -md /results guidellm
@@ -35,14 +35,8 @@ WORKDIR /results
 LABEL org.opencontainers.image.source="https://github.com/vllm-project/guidellm" \
       org.opencontainers.image.description="GuideLLM Performance Benchmarking Container"
 
-# Set the environment variable for the benchmark script
-# TODO: Replace with scenario environment variables
-ENV GUIDELLM_TARGET="http://localhost:8000" \
-    GUIDELLM_MODEL="neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16" \
-    GUIDELLM_RATE_TYPE="sweep" \
-    GUIDELLM_DATA="prompt_tokens=256,output_tokens=128" \
-    GUIDELLM_MAX_REQUESTS="100" \
-    GUIDELLM_MAX_SECONDS="" \
-    GUIDELLM_OUTPUT_PATH="/results/results.json"
-
-ENTRYPOINT [ "/opt/guidellm/bin/guidellm" ]
+# Argument defaults can be set with GUIDELLM_
+ENV GUIDELLM_OUTPUT_PATH="/results/benchmarks.json"
+
+ENTRYPOINT [ "/opt/app-root/guidellm/bin/guidellm" ]
+CMD [ "benchmark", "run" ]

From 81755f969ef5b64bf3a330364b837a5f300a77ba Mon Sep 17 00:00:00 2001
From: Samuel Monson
Date: Wed, 30 Jul 2025 16:32:02 -0400
Subject: [PATCH 3/3] Drop default scenario from image ENV

---
 deploy/Containerfile | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/deploy/Containerfile b/deploy/Containerfile
index 7715de93..cee8b958 100644
--- a/deploy/Containerfile
+++ b/deploy/Containerfile
@@ -35,8 +35,15 @@ WORKDIR /results
 LABEL org.opencontainers.image.source="https://github.com/vllm-project/guidellm" \
       org.opencontainers.image.description="GuideLLM Performance Benchmarking Container"
 
-# Argument defaults can be set with GUIDELLM_
-ENV GUIDELLM_OUTPUT_PATH="/results/benchmarks.json"
+# Set the environment variable for the benchmark script
+# TODO: Replace with scenario environment variables
+ENV GUIDELLM_TARGET="http://localhost:8000" \
+    GUIDELLM_MODEL="neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16" \
+    GUIDELLM_RATE_TYPE="sweep" \
+    GUIDELLM_DATA="prompt_tokens=256,output_tokens=128" \
+    GUIDELLM_MAX_REQUESTS="100" \
+    GUIDELLM_MAX_SECONDS="" \
+    GUIDELLM_OUTPUT_PATH="/results/results.json"
 
 ENTRYPOINT [ "/opt/app-root/guidellm/bin/guidellm" ]
 CMD [ "benchmark", "run" ]