diff --git a/build_artifacts/v3/v3.3/v3.3.4/Dockerfile b/build_artifacts/v3/v3.3/v3.3.4/Dockerfile
new file mode 100644
index 000000000..66349557a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/Dockerfile
@@ -0,0 +1,218 @@
+ARG TAG_FOR_BASE_MICROMAMBA_IMAGE
+FROM mambaorg/micromamba:$TAG_FOR_BASE_MICROMAMBA_IMAGE
+
+ARG CUDA_MAJOR_MINOR_VERSION=''
+ARG ENV_IN_FILENAME
+ARG PINNED_ENV_IN_FILENAME
+ARG ARG_BASED_ENV_IN_FILENAME
+ARG IMAGE_VERSION
+LABEL "org.amazon.sagemaker-distribution.image.version"=$IMAGE_VERSION
+
+ARG AMZN_BASE="/opt/amazon/sagemaker"
+ARG DB_ROOT_DIR="/opt/db"
+ARG DIRECTORY_TREE_STAGE_DIR="${AMZN_BASE}/dir-staging"
+
+ARG NB_USER="sagemaker-user"
+ARG NB_UID=1000
+ARG NB_GID=100
+
+# https://www.openssl.org/source/
+ARG FIPS_VALIDATED_SSL=3.0.8
+ARG MIN_REQUIRED_MICROMAMBA_VERSION=1.5.11
+
+ENV SAGEMAKER_LOGGING_DIR="/var/log/sagemaker/"
+ENV STUDIO_LOGGING_DIR="/var/log/studio/"
+ENV EDITOR="nano"
+ENV IMAGE_VERSION=$IMAGE_VERSION
+ENV PINNED_MICROMAMBA_MINOR_VERSION="1.5.*"
+ENV SAGEMAKER_RECOVERY_MODE_HOME=/tmp/sagemaker-recovery-mode-home
+
+USER root
+# Upgrade micromamba to the latest patch version in the pinned minor version range, if applicable
+RUN CURRENT_MICROMAMBA_VERSION=$(micromamba --version) && \
+ echo "Current micromamba version: $CURRENT_MICROMAMBA_VERSION" && \
+ if [[ "$CURRENT_MICROMAMBA_VERSION" == $PINNED_MICROMAMBA_MINOR_VERSION ]]; then \
+ echo "Upgrading micromamba to the latest $PINNED_MICROMAMBA_MINOR_VERSION version..." && \
+ micromamba self-update -c conda-forge --version "$MIN_REQUIRED_MICROMAMBA_VERSION" && \
+ micromamba clean --all --yes --force-pkgs-dirs; \
+ else \
+ echo "Micromamba is already at version $CURRENT_MICROMAMBA_VERSION (outside $PINNED_MICROMAMBA_MINOR_VERSION). No upgrade performed."; \
+ fi
+
+RUN usermod "--login=${NB_USER}" "--home=/home/${NB_USER}" --move-home "-u ${NB_UID}" "${MAMBA_USER}" && \
+ groupmod "--new-name=${NB_USER}" --non-unique "-g ${NB_GID}" "${MAMBA_USER}" && \
+ # Update the expected value of MAMBA_USER for the
+ # _entrypoint.sh consistency check.
+ echo "${NB_USER}" > "/etc/arg_mamba_user" && \
+ :
+ENV MAMBA_USER=$NB_USER
+ENV USER=$NB_USER
+
+RUN apt-get update && apt-get upgrade -y && \
+ apt-get install -y --no-install-recommends sudo gettext-base wget curl unzip git rsync build-essential openssh-client nano cron less mandoc jq ca-certificates gnupg && \
+ # We just install tzdata below but leave default time zone as UTC. This helps packages like Pandas to function correctly.
+ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata krb5-user libkrb5-dev libsasl2-dev libsasl2-modules && \
+ chmod g+w /etc/passwd && \
+ echo "ALL ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers && \
+ touch /etc/krb5.conf.lock && chown ${NB_USER}:${MAMBA_USER} /etc/krb5.conf* && \
+ # Note that we do NOT run `rm -rf /var/lib/apt/lists/*` here. If we did, anyone building on top of our images would
+ # not be able to run any `apt-get install` commands, and that would hamper customizability of the images.
+ curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
+ unzip awscliv2.zip && \
+ sudo ./aws/install && \
+ rm -rf aws awscliv2.zip && \
+ : && \
+ echo "source /usr/local/bin/_activate_current_env.sh" | tee --append /etc/profile && \
+# CodeEditor - create server, user data dirs
+ mkdir -p /opt/amazon/sagemaker/sagemaker-code-editor-server-data /opt/amazon/sagemaker/sagemaker-code-editor-user-data \
+ && chown $MAMBA_USER:$MAMBA_USER /opt/amazon/sagemaker/sagemaker-code-editor-server-data /opt/amazon/sagemaker/sagemaker-code-editor-user-data && \
+# create dir to store user data files
+ mkdir -p /opt/amazon/sagemaker/user-data \
+ && chown $MAMBA_USER:$MAMBA_USER /opt/amazon/sagemaker/user-data && \
+# Merge in OS directory tree contents.
+ mkdir -p ${DIRECTORY_TREE_STAGE_DIR}
+COPY dirs/ ${DIRECTORY_TREE_STAGE_DIR}/
+RUN rsync -a ${DIRECTORY_TREE_STAGE_DIR}/ / && \
+ rm -rf ${DIRECTORY_TREE_STAGE_DIR} && \
+# CodeEditor - download the extensions
+ mkdir -p /etc/code-editor/extensions && \
+ while IFS= read -r url || [ -n "$url" ]; do \
+ echo "Downloading extension from ${url}..." && \
+ wget --no-check-certificate -P /etc/code-editor/extensions "${url}"; \
+ done < /etc/code-editor/extensions.txt
+
+USER $MAMBA_USER
+COPY --chown=$MAMBA_USER:$MAMBA_USER $ENV_IN_FILENAME *.in /tmp/
+COPY --chown=$MAMBA_USER:$MAMBA_USER $PINNED_ENV_IN_FILENAME *.in /tmp/
+
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+ARG CONDA_OVERRIDE_CUDA=$CUDA_MAJOR_MINOR_VERSION
+
+# Make sure that $ENV_IN_FILENAME and $PINNED_ENV_IN_FILENAME have a newline at the end before the `tee` command runs.
+# Otherwise, the appended content gets merged onto the last package spec and corrupts it.
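+# envsubst below substitutes build-arg-backed variables in $ARG_BASED_ENV_IN_FILENAME (for example
+# $CUDA_MAJOR_MINOR_VERSION, if that file references it) before the result is appended to $ENV_IN_FILENAME.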
+RUN if [[ -z $ARG_BASED_ENV_IN_FILENAME ]] ; \
+ then echo 'No ARG_BASED_ENV_IN_FILENAME passed' ; \
+ else envsubst < /tmp/$ARG_BASED_ENV_IN_FILENAME | tee --append /tmp/$ENV_IN_FILENAME ; \
+ fi && \
+ # Enforce that all dependencies are installed from conda-forge
+ micromamba install -y --name base --file /tmp/$ENV_IN_FILENAME --file /tmp/$PINNED_ENV_IN_FILENAME && \
+ mkdir -p $SAGEMAKER_RECOVERY_MODE_HOME && \
+ chown $MAMBA_USER:$MAMBA_USER $SAGEMAKER_RECOVERY_MODE_HOME && \
+ SUPERVISOR_VERSION="supervisor==4.2.5" && \
+ JUPYTERLAB_VERSION="jupyterlab==4.4.3" && \
+ SAGEMAKER_JUPYTERLAB_VERSION="sagemaker-jupyterlab-extension==0.4.2" && \
+ echo "Installing in sagemaker-recovery-mode micromamba environment: $JUPYTERLAB_VERSION $SAGEMAKER_JUPYTERLAB_VERSION" && \
+ micromamba create --prefix /opt/conda/envs/sagemaker-recovery-mode && \
+ micromamba install -c conda-forge --prefix /opt/conda/envs/sagemaker-recovery-mode -y $JUPYTERLAB_VERSION $SAGEMAKER_JUPYTERLAB_VERSION $SUPERVISOR_VERSION && \
+ micromamba clean --all --yes --force-pkgs-dirs && \
+ rm -rf /tmp/*.in && \
+ sudo ln -s $(which python3) /usr/bin/python && \
+ # Update npm version
+ npm i -g npm && \
+ # Enforce `conda-forge` as the only channel by removing `defaults`
+ conda config --remove channels defaults && \
+ micromamba config append channels conda-forge --env && \
+ # Configure CodeEditor - Install extensions and set preferences
+ extensionloc=/opt/amazon/sagemaker/sagemaker-code-editor-server-data/extensions && mkdir -p "${extensionloc}" \
+ # Loop through all vsix files in /etc/code-editor/extensions and install them
+ && for ext in /etc/code-editor/extensions/*.vsix; do \
+ echo "Installing extension ${ext}..."; \
+ sagemaker-code-editor --install-extension "${ext}" --extensions-dir "${extensionloc}" --server-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-server-data --user-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-user-data; \
+ done \
+ # Copy the settings
+ && cp /etc/code-editor/code_editor_machine_settings.json /opt/amazon/sagemaker/sagemaker-code-editor-server-data/data/Machine/settings.json && \
+ cp /etc/code-editor/code_editor_user_settings.json /opt/amazon/sagemaker/sagemaker-code-editor-server-data/data/User/settings.json && \
+ # Install Glue kernels and move them to the shared directory.
+ # Also patch the base kernel so Studio background code doesn't silently start a session.
+ install-glue-kernels && \
+ SITE_PACKAGES=$(pip show aws-glue-sessions | grep Location | awk '{print $2}') && \
+ jupyter-kernelspec install $SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_pyspark --user && \
+ jupyter-kernelspec install $SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_spark --user && \
+ mv /home/sagemaker-user/.local/share/jupyter/kernels/glue_pyspark /opt/conda/share/jupyter/kernels && \
+ mv /home/sagemaker-user/.local/share/jupyter/kernels/glue_spark /opt/conda/share/jupyter/kernels && \
+ sed -i '/if not store_history and (/i\ if "sm_analytics_runtime_check" in code:\n return await self._complete_cell()\n' \
+ "$SITE_PACKAGES/aws_glue_interactive_sessions_kernel/glue_kernel_base/BaseKernel.py" && \
+ # Install the FIPS provider for OpenSSL on top of the existing OpenSSL installation.
+ # v3.0.8 is the latest FIPS-validated provider, so that is the one we install,
+ # but we need to run tests against the installed version.
+ # See https://github.com/openssl/openssl/blob/master/README-FIPS.md and https://www.openssl.org/source/
+ INSTALLED_SSL=$(micromamba list | grep openssl | tr -s ' ' | cut -d ' ' -f 3 | head -n 1) && \
+ # download source code for installed, and FIPS validated openssl versions
+ curl -L https://github.com/openssl/openssl/releases/download/openssl-$FIPS_VALIDATED_SSL/openssl-$FIPS_VALIDATED_SSL.tar.gz > openssl-$FIPS_VALIDATED_SSL.tar.gz && \
+ curl -L https://github.com/openssl/openssl/releases/download/openssl-$INSTALLED_SSL/openssl-$INSTALLED_SSL.tar.gz > openssl-$INSTALLED_SSL.tar.gz && \
+ tar -xf openssl-$FIPS_VALIDATED_SSL.tar.gz && tar -xf openssl-$INSTALLED_SSL.tar.gz && cd openssl-$FIPS_VALIDATED_SSL && \
+ # Configure both versions to enable FIPS and build
+ ./Configure enable-fips --prefix=/opt/conda --openssldir=/opt/conda/ssl && make && \
+ cd ../openssl-$INSTALLED_SSL && \
+ ./Configure enable-fips --prefix=/opt/conda --openssldir=/opt/conda/ssl && make && \
+ # Copy validated provider to installed version for testing
+ cp ../openssl-$FIPS_VALIDATED_SSL/providers/fips.so providers/. && \
+ cp ../openssl-$FIPS_VALIDATED_SSL/providers/fipsmodule.cnf providers/. && \
+ make tests && cd ../openssl-$FIPS_VALIDATED_SSL && \
+ # After tests pass, install FIPS provider and remove source code
+ make install_fips && cd .. && rm -rf ./openssl-* && \
+# Create new config file with fips-enabled. Then user can override OPENSSL_CONF to enable FIPS
+# e.g. export OPENSSL_CONF=/opt/conda/ssl/openssl-fips.cnf
+ cp /opt/conda/ssl/openssl.cnf /opt/conda/ssl/openssl-fips.cnf && \
+ sed -i "s:# .include fipsmodule.cnf:.include /opt/conda/ssl/fipsmodule.cnf:" /opt/conda/ssl/openssl-fips.cnf && \
+ sed -i 's:# fips = fips_sect:fips = fips_sect:' /opt/conda/ssl/openssl-fips.cnf && \
+# Install Kerberos.
+# Make sure no dependency is added/updated
+ pip install "krb5>=0.5.1,<0.6" && \
+ pip show krb5 | grep Require | xargs -i sh -c '[ $(echo {} | cut -d: -f2 | wc -w) -eq 0 ] ' && \
+# https://stackoverflow.com/questions/122327
+ SYSTEM_PYTHON_PATH=$(python3 -c "from __future__ import print_function;import sysconfig; print(sysconfig.get_paths().get('purelib'))") && \
+ # Remove SparkRKernel as it's not supported \
+ jupyter-kernelspec remove -f -y sparkrkernel && \
+ # Patch Sparkmagic lib to support Custom Certificates \
+ # https://github.com/jupyter-incubator/sparkmagic/pull/435/files \
+ cp -a ${SYSTEM_PYTHON_PATH}/sagemaker_studio_analytics_extension/patches/configuration.py ${SYSTEM_PYTHON_PATH}/sparkmagic/utils/ && \
+ cp -a ${SYSTEM_PYTHON_PATH}/sagemaker_studio_analytics_extension/patches/reliablehttpclient.py ${SYSTEM_PYTHON_PATH}/sparkmagic/livyclientlib/reliablehttpclient.py && \
+ sed -i 's= "python"= "/opt/conda/bin/python"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
+ sed -i 's="Spark"="SparkMagic Spark"=g' /opt/conda/share/jupyter/kernels/sparkkernel/kernel.json && \
+ sed -i 's="PySpark"="SparkMagic PySpark"=g' /opt/conda/share/jupyter/kernels/pysparkkernel/kernel.json && \
+ # Configure RTC - disable jupyter_collaboration by default
+ jupyter labextension disable @jupyter/collaboration-extension && \
+ # Disable docprovider-extension for v3 and above images
+ jupyter labextension disable @jupyter/docprovider-extension
+
+# Patch glue kernels to use kernel wrapper
+COPY patch_glue_pyspark.json /opt/conda/share/jupyter/kernels/glue_pyspark/kernel.json
+COPY patch_glue_spark.json /opt/conda/share/jupyter/kernels/glue_spark/kernel.json
+
+USER root
+
+# Create logging directories for supervisor
+RUN mkdir -p $SAGEMAKER_LOGGING_DIR && \
+ chmod a+rw $SAGEMAKER_LOGGING_DIR && \
+ mkdir -p ${STUDIO_LOGGING_DIR} && \
+ chown ${NB_USER}:${MAMBA_USER} ${STUDIO_LOGGING_DIR} && \
+ # Create sagemaker pysdk admin default config directory
+ mkdir -p /etc/xdg/sagemaker && \
+ chmod a+rw /etc/xdg/sagemaker && \
+ # Clean up CodeEditor artifacts
+ rm -rf /etc/code-editor && \
+ # Create supervisord runtime directory
+ mkdir -p /var/run/supervisord && \
+ chmod a+rw /var/run/supervisord && \
+ # Create root directory for DB
+ mkdir -p $DB_ROOT_DIR && \
+ chmod a+rw $DB_ROOT_DIR && \
+ HOME_DIR="/home/${NB_USER}/licenses" \
+ && mkdir -p ${HOME_DIR} \
+ && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
+ && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
+ && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
+ && chmod +x /usr/local/bin/testOSSCompliance \
+ && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
+ && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
+ && rm -rf ${HOME_DIR}/oss_compliance*
+
+# Explicitly disable BuildKit for SM Studio Docker functionality
+ENV DOCKER_BUILDKIT=0
+ENV PATH="/etc/sagemaker-inference-server:/opt/conda/bin:/opt/conda/condabin:$PATH"
+WORKDIR "/home/${NB_USER}"
+ENV SHELL=/bin/bash
+ENV OPENSSL_MODULES=/opt/conda/lib64/ossl-modules/
+USER $MAMBA_USER
diff --git a/build_artifacts/v3/v3.3/v3.3.4/aws-cli-public-key.asc b/build_artifacts/v3/v3.3/v3.3.4/aws-cli-public-key.asc
new file mode 100644
index 000000000..b415d17d9
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/aws-cli-public-key.asc
@@ -0,0 +1,29 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBF2Cr7UBEADJZHcgusOJl7ENSyumXh85z0TRV0xJorM2B/JL0kHOyigQluUG
+ZMLhENaG0bYatdrKP+3H91lvK050pXwnO/R7fB/FSTouki4ciIx5OuLlnJZIxSzx
+PqGl0mkxImLNbGWoi6Lto0LYxqHN2iQtzlwTVmq9733zd3XfcXrZ3+LblHAgEt5G
+TfNxEKJ8soPLyWmwDH6HWCnjZ/aIQRBTIQ05uVeEoYxSh6wOai7ss/KveoSNBbYz
+gbdzoqI2Y8cgH2nbfgp3DSasaLZEdCSsIsK1u05CinE7k2qZ7KgKAUIcT/cR/grk
+C6VwsnDU0OUCideXcQ8WeHutqvgZH1JgKDbznoIzeQHJD238GEu+eKhRHcz8/jeG
+94zkcgJOz3KbZGYMiTh277Fvj9zzvZsbMBCedV1BTg3TqgvdX4bdkhf5cH+7NtWO
+lrFj6UwAsGukBTAOxC0l/dnSmZhJ7Z1KmEWilro/gOrjtOxqRQutlIqG22TaqoPG
+fYVN+en3Zwbt97kcgZDwqbuykNt64oZWc4XKCa3mprEGC3IbJTBFqglXmZ7l9ywG
+EEUJYOlb2XrSuPWml39beWdKM8kzr1OjnlOm6+lpTRCBfo0wa9F8YZRhHPAkwKkX
+XDeOGpWRj4ohOx0d2GWkyV5xyN14p2tQOCdOODmz80yUTgRpPVQUtOEhXQARAQAB
+tCFBV1MgQ0xJIFRlYW0gPGF3cy1jbGlAYW1hem9uLmNvbT6JAlQEEwEIAD4CGwMF
+CwkIBwIGFQoJCAsCBBYCAwECHgECF4AWIQT7Xbd/1cEYuAURraimMQrMRnJHXAUC
+ZqFYbwUJCv/cOgAKCRCmMQrMRnJHXKYuEAC+wtZ611qQtOl0t5spM9SWZuszbcyA
+0xBAJq2pncnp6wdCOkuAPu4/R3UCIoD2C49MkLj9Y0Yvue8CCF6OIJ8L+fKBv2DI
+yWZGmHL0p9wa/X8NCKQrKxK1gq5PuCzi3f3SqwfbZuZGeK/ubnmtttWXpUtuU/Iz
+VR0u/0sAy3j4uTGKh2cX7XnZbSqgJhUk9H324mIJiSwzvw1Ker6xtH/LwdBeJCck
+bVBdh3LZis4zuD4IZeBO1vRvjot3Oq4xadUv5RSPATg7T1kivrtLCnwvqc6L4LnF
+0OkNysk94L3LQSHyQW2kQS1cVwr+yGUSiSp+VvMbAobAapmMJWP6e/dKyAUGIX6+
+2waLdbBs2U7MXznx/2ayCLPH7qCY9cenbdj5JhG9ibVvFWqqhSo22B/URQE/CMrG
++3xXwtHEBoMyWEATr1tWwn2yyQGbkUGANneSDFiTFeoQvKNyyCFTFO1F2XKCcuDs
+19nj34PE2TJilTG2QRlMr4D0NgwLLAMg2Los1CK6nXWnImYHKuaKS9LVaCoC8vu7
+IRBik1NX6SjrQnftk0M9dY+s0ZbAN1gbdjZ8H3qlbl/4TxMdr87m8LP4FZIIo261
+Eycv34pVkCePZiP+dgamEiQJ7IL4ZArio9mv6HbDGV6mLY45+l6/0EzCwkI5IyIf
+BfWC9s/USgxchg==
+=ptgS
+-----END PGP PUBLIC KEY BLOCK-----
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/cpu.env.in b/build_artifacts/v3/v3.3/v3.3.4/cpu.env.in
new file mode 100644
index 000000000..242b8a490
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/cpu.env.in
@@ -0,0 +1,66 @@
+# This file is auto-generated.
+conda-forge::s3fs[version='>=2024.12.0,<2024.13.0']
+conda-forge::seaborn[version='>=0.13.2,<0.14.0']
+conda-forge::jupyter-activity-monitor-extension[version='>=0.3.2,<0.4.0']
+conda-forge::mlflow[version='>=2.22.0,<2.23.0']
+conda-forge::sagemaker-mlflow[version='>=0.1.0,<0.2.0']
+conda-forge::langchain-aws[version='>=0.2.19,<0.3.0']
+conda-forge::jupyter-collaboration[version='>=3.1.0,<3.2.0']
+conda-forge::sagemaker-code-editor[version='>=1.6.3,<1.7.0']
+conda-forge::amazon_sagemaker_sql_editor[version='>=0.1.18,<0.2.0']
+conda-forge::amazon-sagemaker-sql-magic[version='>=0.1.4,<0.2.0']
+conda-forge::amazon-sagemaker-jupyter-ai-q-developer[version='>=1.2.7,<1.3.0']
+conda-forge::amazon-q-developer-jupyterlab-ext[version='>=3.4.8,<3.5.0']
+conda-forge::langchain[version='>=0.3.27,<0.4.0']
+conda-forge::fastapi[version='>=0.115.14,<0.116.0']
+conda-forge::uvicorn[version='>=0.35.0,<0.36.0']
+conda-forge::pytorch[version='>=2.6.0,<2.7.0']
+conda-forge::tensorflow[version='>=2.18.0,<2.19.0']
+conda-forge::python[version='>=3.12.9,<3.13.0']
+conda-forge::pip[version='>=25.1.1,<25.2.0']
+conda-forge::torchvision[version='>=0.21.0,<0.22.0']
+conda-forge::numpy[version='>=1.26.4,<1.27.0']
+conda-forge::pandas[version='>=2.2.3,<2.3.0']
+conda-forge::scikit-learn[version='>=1.6.1,<1.7.0']
+conda-forge::jinja2[version='>=3.1.6,<3.2.0']
+conda-forge::matplotlib-base[version='>=3.10.6,<3.11.0']
+conda-forge::sagemaker-headless-execution-driver[version='>=0.0.13,<0.1.0']
+conda-forge::ipython[version='>=8.37.0,<8.38.0']
+conda-forge::scipy[version='>=1.15.2,<1.16.0']
+conda-forge::keras[version='>=3.10.0,<3.11.0']
+conda-forge::py-xgboost-cpu[version='>=2.1.4,<2.2.0']
+conda-forge::jupyterlab[version='>=4.4.7,<4.5.0']
+conda-forge::ipywidgets[version='>=8.1.7,<8.2.0']
+conda-forge::conda[version='>=25.5.1,<25.6.0']
+conda-forge::boto3[version='>=1.37.1,<1.38.0']
+conda-forge::sagemaker-python-sdk[version='>=2.245.0,<2.246.0']
+conda-forge::supervisor[version='>=4.2.5,<4.3.0']
+conda-forge::autogluon[version='>=1.3.1,<1.4.0']
+conda-forge::aws-glue-sessions[version='>=1.0.9,<1.1.0']
+conda-forge::sagemaker-kernel-wrapper[version='>=0.0.6,<0.1.0']
+conda-forge::jupyter-ai[version='>=2.31.6,<2.32.0']
+conda-forge::jupyter-scheduler[version='>=2.11.0,<2.12.0']
+conda-forge::jupyter-lsp[version='>=2.2.6,<2.3.0']
+conda-forge::jupyterlab-lsp[version='>=5.0.3,<5.1.0']
+conda-forge::python-lsp-server[version='>=1.13.1,<1.14.0']
+conda-forge::jupyterlab-git[version='>=0.51.2,<0.52.0']
+conda-forge::notebook[version='>=7.4.5,<7.5.0']
+conda-forge::altair[version='>=5.5.0,<5.6.0']
+conda-forge::sagemaker-studio-analytics-extension[version='>=0.2.2,<0.3.0']
+conda-forge::jupyter-dash[version='>=0.4.2,<0.5.0']
+conda-forge::sagemaker-jupyterlab-extension[version='>=0.5.1,<0.6.0']
+conda-forge::sagemaker-jupyterlab-emr-extension[version='>=0.4.3,<0.5.0']
+conda-forge::amazon-sagemaker-jupyter-scheduler[version='>=3.1.15,<3.2.0']
+conda-forge::jupyter-server-proxy[version='>=4.4.0,<4.5.0']
+conda-forge::pyhive[version='>=0.7.0,<0.8.0']
+conda-forge::python-gssapi[version='>=1.9.0,<1.10.0']
+conda-forge::tf-keras[version='>=2.18.0,<2.19.0']
+conda-forge::git-remote-codecommit[version='>=1.16,<1.17.0']
+conda-forge::docker-cli[version='>=27.5.1,<27.6.0']
+conda-forge::aioboto3[version='>=14.1.0,<14.2.0']
+conda-forge::sagemaker-studio-cli[version='>=1.0.7,<1.1.0']
+conda-forge::sagemaker-studio[version='>=1.0.21,<1.1.0']
+conda-forge::sagemaker-studio-dataengineering-sessions[version='>=1.1.7,<1.2.0']
+conda-forge::sagemaker-studio-dataengineering-extensions[version='>=1.1.4,<1.2.0']
+conda-forge::amzn-sagemaker-aiops-jupyterlab-extension[version='>=1.0.4,<1.1.0']
+conda-forge::aws-s3-access-grants-boto3-plugin[version='>=1.2.0,<1.3.0']
diff --git a/build_artifacts/v3/v3.3/v3.3.4/cpu.pinned_env.in b/build_artifacts/v3/v3.3/v3.3.4/cpu.pinned_env.in
new file mode 100644
index 000000000..2c088f01b
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/cpu.pinned_env.in
@@ -0,0 +1,7 @@
+conda-forge::dash[version='<=2.18.1']
+conda-forge::evaluate[version='<0.4.2']
+conda-forge::catboost[version='>=1.1.1,<1.3.0',build='*cpu*']
+conda-forge::libsqlite[version='<3.49.0']
+conda-forge::urllib3[version='<2']
+conda-forge::papermill[version='>=2.6.0']
+conda-forge::plotly[version='<6.1.0']
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/code_editor_machine_settings.json b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/code_editor_machine_settings.json
new file mode 100644
index 000000000..44fb8ef7f
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/code_editor_machine_settings.json
@@ -0,0 +1,4 @@
+{
+ "python.terminal.activateEnvironment": false,
+ "python.defaultInterpreterPath": "/opt/conda/bin/python"
+}
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/code_editor_user_settings.json b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/code_editor_user_settings.json
new file mode 100644
index 000000000..ed3ef56a4
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/code_editor_user_settings.json
@@ -0,0 +1,3 @@
+{
+ "extensions.autoUpdate": false
+}
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/extensions.txt b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/extensions.txt
new file mode 100644
index 000000000..7672e9df9
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/code-editor/extensions.txt
@@ -0,0 +1,4 @@
+https://open-vsx.org/api/ms-toolsai/jupyter/2024.5.0/file/ms-toolsai.jupyter-2024.5.0.vsix
+https://open-vsx.org/api/ms-python/python/2023.20.0/file/ms-python.python-2023.20.0.vsix
+https://open-vsx.org/api/amazonwebservices/aws-toolkit-vscode/3.30.0/file/amazonwebservices.aws-toolkit-vscode-3.30.0.vsix
+https://open-vsx.org/api/amazonwebservices/amazon-q-vscode/1.58.0/file/amazonwebservices.amazon-q-vscode-1.58.0.vsix
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/conda/.condarc b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/conda/.condarc
new file mode 100644
index 000000000..c3616df50
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/conda/.condarc
@@ -0,0 +1,6 @@
+envs_dirs:
+ - ~/.conda/envs
+ - /opt/conda/envs
+pkgs_dirs:
+ - ~/.conda/pkgs
+ - /opt/conda/pkgs
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/jupyter/jupyter_server_config.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/jupyter/jupyter_server_config.py
new file mode 100644
index 000000000..0182cc238
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/jupyter/jupyter_server_config.py
@@ -0,0 +1,28 @@
+# Default Jupyter server config
+# Note: these configs can be overridden by user-level configs.
+
+c.ServerApp.terminado_settings = {"shell_command": ["/bin/bash"]}
+c.ServerApp.tornado_settings = {"compress_response": True}
+
+# Do not delete files to trash. Instead, permanently delete files.
+c.FileContentsManager.delete_to_trash = False
+
+# Allow deleting non-empty directory via file browser. Related documentation:
+# https://github.com/jupyter-server/jupyter_server/blob/main/jupyter_server/services/contents/filemanager.py#L125-L129
+c.FileContentsManager.always_delete_dir = True
+
+# Enable `allow_hidden` by default, so hidden files are accessible via Jupyter server
+# Related documentation: https://jupyterlab.readthedocs.io/en/stable/user/files.html#displaying-hidden-files
+c.ContentsManager.allow_hidden = True
+
+# This will set the LanguageServerManager.extra_node_roots setting if amazon_sagemaker_sql_editor exists in the
+# environment. Otherwise, ignore the error and don't fail the JupyterLab server start.
+# Related documentation: https://jupyterlab-lsp.readthedocs.io/en/v3.4.0/Configuring.html
+try:
+ import os
+
+ module = __import__("amazon_sagemaker_sql_editor")
+ module_location = os.path.dirname(module.__file__)
+ c.LanguageServerManager.extra_node_roots = [f"{module_location}/sql-language-server"]
+except:
+ pass
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/apply_patches.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/apply_patches.sh
new file mode 100755
index 000000000..c4ec7604c
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/apply_patches.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+set -eux
+
+# Check if parameter is provided
+if [ $# -ne 1 ]; then
+ echo "Usage: $0 [smus|studio-ai]"
+ exit 1
+fi
+
+# Validate parameter
+case "$1" in
+ "smus")
+ PATCH_DIR="/etc/patches/smus"
+ ;;
+ "studio-ai")
+ PATCH_DIR="/etc/patches/studio-ai"
+ ;;
+ *)
+ echo "Error: Parameter must be either 'smus' or 'studio-ai'"
+ exit 1
+ ;;
+esac
+
+# Check if patch directory exists
+if [ ! -d "$PATCH_DIR" ]; then
+ echo "Error: Patch directory $PATCH_DIR does not exist"
+ exit 1
+fi
+
+# Patch files can be generated via "diff -u /path/to/original_file /path/to/new_file > XXX_bad_package.patch"
+# See https://www.thegeekstuff.com/2014/12/patch-command-examples/
+for PATCHFILE in "$PATCH_DIR"/*.patch; do
+ [ -f "$PATCHFILE" ] || continue
+ echo "Applying $PATCHFILE"
+ (cd "/opt/conda" && patch --strip=3 < "$PATCHFILE")
+done
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0001-sparkmagic.patch b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0001-sparkmagic.patch
new file mode 100644
index 000000000..25d380b9d
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0001-sparkmagic.patch
@@ -0,0 +1,10 @@
+--- /opt/conda/lib/python3.12/site-packages/sparkmagic/livyclientlib/command.py
++++ /opt/conda/lib/python3.12/site-packages/sparkmagic/livyclientlib/command.py
+@@ -54,6 +54,7 @@
+ try:
+ session.wait_for_idle()
+ data = {"code": self.code}
++ data["kind"] = session.kind
+ response = session.http_client.post_statement(session.id, data)
+ statement_id = response["id"]
+ output = self._get_statement_output(session, statement_id)
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0002-sparkmagic-override-info-table.patch b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0002-sparkmagic-override-info-table.patch
new file mode 100644
index 000000000..0e6959fc3
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0002-sparkmagic-override-info-table.patch
@@ -0,0 +1,24 @@
+--- /opt/conda/lib/python3.12/site-packages/sparkmagic/utils/utils.py
++++ /opt/conda/lib/python3.12/site-packages/sparkmagic/utils/utils.py
+@@ -94,15 +94,12 @@
+
+
+ def get_sessions_info_html(info_sessions, current_session_id):
+- html = (
+- """<table>
+-<tr><th>ID</th><th>YARN Application ID</th><th>Kind</th><th>State</th><th>Spark UI</th><th>Driver log</th><th>User</th><th>Current session?</th></tr>"""
+- + "".join(
+- [session.get_row_html(current_session_id) for session in info_sessions]
+- )
+- + "</table>"
+- )
+-
++ current_session = next((session for session in info_sessions if session.id == current_session_id), None)
++ spark_ui_url = current_session.get_spark_ui_url()
++ driver_log_url = current_session.get_driver_log_url()
++
++ from sagemaker_studio_dataengineering_sessions.sagemaker_base_session_manager.common.debugging_utils import get_sessions_info_html
++ html = get_sessions_info_html(current_session.get_app_id(), spark_ui_url, driver_log_url)
+ return html
+
+
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0004-fix-boto3-endpoints.patch b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0004-fix-boto3-endpoints.patch
new file mode 100644
index 000000000..cadcc70b3
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/patches/smus/0004-fix-boto3-endpoints.patch
@@ -0,0 +1,12 @@
+--- /opt/conda/lib/python3.12/site-packages/botocore/data/endpoints.json
++++ /opt/conda/lib/python3.12/site-packages/botocore/data/endpoints.json
+@@ -5404,6 +5404,9 @@
+ "ap-northeast-3" : {
+ "hostname" : "datazone.ap-northeast-3.api.aws"
+ },
++ "ap-south-1" : {
++ "hostname" : "datazone.ap-south-1.api.aws"
++ },
+ "ap-south-2" : {
+ "hostname" : "datazone.ap-south-2.api.aws"
+ },
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/__init__.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/__init__.py
new file mode 100644
index 000000000..0427e383a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/__init__.py
@@ -0,0 +1,3 @@
+from __future__ import absolute_import
+
+import utils.logger
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/serve b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/serve
new file mode 100755
index 000000000..bd604df37
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/serve
@@ -0,0 +1,2 @@
+#!/bin/bash
+python /etc/sagemaker-inference-server/serve.py
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/serve.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/serve.py
new file mode 100644
index 000000000..0d23ada86
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/serve.py
@@ -0,0 +1,25 @@
+from __future__ import absolute_import
+
+"""
+TODO: when adding support for more serving frameworks, move the logic below into a conditional statement.
+We also need to define the right environment variable to signify which serving framework to use.
+
+Ex.
+
+inference_server = None
+serving_framework = os.getenv("SAGEMAKER_INFERENCE_FRAMEWORK", None)
+
+if serving_framework == "FastAPI":
+ inference_server = FastApiServer()
+elif serving_framework == "Flask":
+ inference_server = FlaskServer()
+else:
+ inference_server = TornadoServer()
+
+inference_server.serve()
+
+"""
+from tornado_server.server import TornadoServer
+
+inference_server = TornadoServer()
+inference_server.serve()
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/__init__.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/__init__.py
new file mode 100644
index 000000000..28b0e2cc2
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import
+
+import pathlib
+import sys
+
+# make the utils modules accessible to modules from within the tornado_server folder
+utils_path = pathlib.Path(__file__).parent.parent / "utils"
+sys.path.insert(0, str(utils_path.resolve()))
+
+# make the tornado_server modules accessible to each other
+tornado_module_path = pathlib.Path(__file__).parent
+sys.path.insert(0, str(tornado_module_path.resolve()))
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/async_handler.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/async_handler.py
new file mode 100644
index 000000000..9eb946086
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/async_handler.py
@@ -0,0 +1,76 @@
+from __future__ import absolute_import
+
+import asyncio
+import logging
+from typing import AsyncIterator, Iterator
+
+import tornado.web
+from stream_handler import StreamHandler
+
+from utils.environment import Environment
+from utils.exception import AsyncInvocationsException
+from utils.logger import SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER
+
+logger = logging.getLogger(SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER)
+
+
+class InvocationsHandler(tornado.web.RequestHandler, StreamHandler):
+ """Handler mapped to the /invocations POST route.
+
+ This handler wraps the async handler retrieved from the inference script
+ and encapsulates it behind the post() method. The post() method is done
+ asynchronously.
+ """
+
+ def initialize(self, handler: callable, environment: Environment):
+ """Initializes the handler function and the serving environment."""
+
+ self._handler = handler
+ self._environment = environment
+
+ async def post(self):
+ """POST method used to encapsulate and invoke the async handle method asynchronously"""
+
+ try:
+ response = await self._handler(self.request)
+
+ if isinstance(response, Iterator):
+ await self.stream(response)
+ elif isinstance(response, AsyncIterator):
+ await self.astream(response)
+ else:
+ self.write(response)
+ except Exception as e:
+ raise AsyncInvocationsException(e)
+
+
+class PingHandler(tornado.web.RequestHandler):
+ """Handler mapped to the /ping GET route.
+
+ Ping handler to monitor the health of the Tornado server.
+ """
+
+ def get(self):
+ """Simple GET method to assess the health of the server."""
+
+ self.write("")
+
+
+async def handle(handler: callable, environment: Environment):
+ """Serves the async handler function using Tornado.
+
+ Opens the /invocations and /ping routes used by a SageMaker Endpoint
+ for inference serving capabilities.
+ """
+
+ logger.info("Starting inference server in asynchronous mode...")
+
+ app = tornado.web.Application(
+ [
+ (r"/invocations", InvocationsHandler, dict(handler=handler, environment=environment)),
+ (r"/ping", PingHandler),
+ ]
+ )
+ app.listen(environment.port)
+ logger.debug(f"Asynchronous inference server listening on port: `{environment.port}`")
+ await asyncio.Event().wait()
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/server.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/server.py
new file mode 100644
index 000000000..496bfac89
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/server.py
@@ -0,0 +1,127 @@
+from __future__ import absolute_import
+
+import asyncio
+import importlib
+import logging
+import subprocess
+import sys
+from pathlib import Path
+
+from utils.environment import Environment
+from utils.exception import (
+ InferenceCodeLoadException,
+ RequirementsInstallException,
+ ServerStartException,
+)
+from utils.logger import SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER
+
+logger = logging.getLogger(SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER)
+
+
+class TornadoServer:
+ """Holds serving logic using the Tornado framework.
+
+ The serve.py script will invoke TornadoServer.serve() to start the serving process.
+ The TornadoServer will install the runtime requirements specified through a requirements file.
+ It will then load a handler function from an inference script and front it with an /invocations
+ route using the Tornado framework.
+ """
+
+ def __init__(self):
+ """Initialize the serving behaviors.
+
+ Define the serving behavior through Environment() and locate where
+ the inference code is contained.
+ """
+
+ self._environment = Environment()
+ logger.setLevel(int(self._environment.logging_level))
+ logger.debug(f"Environment: {str(self._environment)}")
+
+ self._path_to_inference_code = (
+ Path(self._environment.base_directory).joinpath(self._environment.code_directory)
+ if self._environment.code_directory
+ else Path(self._environment.base_directory)
+ )
+ logger.debug(f"Path to inference code: `{str(self._path_to_inference_code)}`")
+
+ def initialize(self):
+ """Initialize the serving artifacts and dependencies.
+
+ Install the runtime requirements and then locate the handler function from
+ the inference script.
+ """
+
+ logger.info("Initializing inference server...")
+ self._install_runtime_requirements()
+ self._handler = self._load_inference_handler()
+
+ def serve(self):
+ """Orchestrate the initialization and server startup behavior.
+
+ Call the initialize() method, determine the right Tornado serving behavior (async or sync),
+ and then start the Tornado server through asyncio.
+ """
+
+ logger.info("Serving inference requests using Tornado...")
+ self.initialize()
+
+ if asyncio.iscoroutinefunction(self._handler):
+ import async_handler as inference_handler
+ else:
+ import sync_handler as inference_handler
+
+ try:
+ asyncio.run(inference_handler.handle(self._handler, self._environment))
+ except Exception as e:
+ raise ServerStartException(e)
+
+ def _install_runtime_requirements(self):
+ """Install the runtime requirements."""
+
+ logger.info("Installing runtime requirements...")
+ requirements_txt = self._path_to_inference_code.joinpath(self._environment.requirements)
+ if requirements_txt.is_file():
+ try:
+ subprocess.check_call(["micromamba", "install", "--yes", "--file", str(requirements_txt)])
+ except Exception as e:
+ logger.error(
+ "Failed to install requirements using `micromamba install`. Falling back to `pip install`..."
+ )
+ try:
+ subprocess.check_call(["pip", "install", "-r", str(requirements_txt)])
+ except Exception as e:
+ raise RequirementsInstallException(e)
+ else:
+ logger.debug(f"No requirements file was found at `{str(requirements_txt)}`")
+
+ def _load_inference_handler(self) -> callable:
+ """Load the handler function from the inference script."""
+
+ logger.info("Loading inference handler...")
+ inference_module_name, handle_name = self._environment.code.split(".")
+ if inference_module_name and handle_name:
+ inference_module_file = f"{inference_module_name}.py"
+ module_spec = importlib.util.spec_from_file_location(
+ inference_module_file, str(self._path_to_inference_code.joinpath(inference_module_file))
+ )
+ if module_spec:
+ sys.path.insert(0, str(self._path_to_inference_code.resolve()))
+ module = importlib.util.module_from_spec(module_spec)
+ module_spec.loader.exec_module(module)
+
+ if hasattr(module, handle_name):
+ handler = getattr(module, handle_name)
+ else:
+ raise InferenceCodeLoadException(
+ f"Handler `{handle_name}` could not be found in module `{inference_module_file}`"
+ )
+ logger.debug(f"Loaded handler `{handle_name}` from module `{inference_module_name}`")
+ return handler
+ else:
+ raise InferenceCodeLoadException(
+ f"Inference code could not be found at `{str(self._path_to_inference_code.joinpath(inference_module_file))}`"
+ )
+ raise InferenceCodeLoadException(
+ f"Inference code expected in the format of `.` but was provided as {self._environment.code}"
+ )
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/stream_handler.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/stream_handler.py
new file mode 100644
index 000000000..ab01f5878
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/stream_handler.py
@@ -0,0 +1,59 @@
+from __future__ import absolute_import
+
+import logging
+from typing import AsyncIterator, Iterator
+
+from tornado.ioloop import IOLoop
+
+from utils.logger import SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER
+
+logger = logging.getLogger(SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER)
+
+
+class StreamHandler:
+ """Mixin that enables async and sync streaming capabilities to the async and sync handlers
+
+ stream() runs a provided iterator/generator fn in an async manner.
+ astream() runs a provided async iterator/generator fn in an async manner.
+ """
+
+ async def stream(self, iterator: Iterator):
+ """Streams the response from a sync response iterator
+
+ A sync iterator must be manually iterated through asynchronously.
+ In a loop, iterate through each next(iterator) call in an async execution.
+ """
+
+ self._set_stream_headers()
+
+ while True:
+ try:
+ chunk = await IOLoop.current().run_in_executor(None, next, iterator)
+ # Some iterators do not throw a StopIteration upon exhaustion.
+ # Instead, they return an empty response. Account for this case.
+ if not chunk:
+ raise StopIteration()
+
+ self.write(chunk)
+ await self.flush()
+ except StopIteration:
+ break
+ except Exception as e:
+ logger.error("Unexpected exception occurred when streaming response...")
+ break
+
+ async def astream(self, aiterator: AsyncIterator):
+ """Streams the response from an async response iterator"""
+
+ self._set_stream_headers()
+
+ async for chunk in aiterator:
+ self.write(chunk)
+ await self.flush()
+
+ def _set_stream_headers(self):
+ """Set the headers in preparation for the streamed response"""
+
+ self.set_header("Content-Type", "text/event-stream")
+ self.set_header("Cache-Control", "no-cache")
+ self.set_header("Connection", "keep-alive")
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/sync_handler.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/sync_handler.py
new file mode 100644
index 000000000..919f803ac
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/tornado_server/sync_handler.py
@@ -0,0 +1,77 @@
+from __future__ import absolute_import
+
+import asyncio
+import logging
+from typing import AsyncIterator, Iterator
+
+import tornado.web
+from stream_handler import StreamHandler
+from tornado.ioloop import IOLoop
+
+from utils.environment import Environment
+from utils.exception import SyncInvocationsException
+from utils.logger import SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER
+
+logger = logging.getLogger(SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER)
+
+
+class InvocationsHandler(tornado.web.RequestHandler, StreamHandler):
+ """Handler mapped to the /invocations POST route.
+
+ This handler wraps the sync handler retrieved from the inference script
+ and encapsulates it behind the post() method. The post() method runs
+ asynchronously.
+ """
+
+ def initialize(self, handler: callable, environment: Environment):
+ """Initializes the handler function and the serving environment."""
+
+ self._handler = handler
+ self._environment = environment
+
+ async def post(self):
+ """POST method used to encapsulate and invoke the sync handle method asynchronously"""
+
+ try:
+ response = await IOLoop.current().run_in_executor(None, self._handler, self.request)
+
+ if isinstance(response, Iterator):
+ await self.stream(response)
+ elif isinstance(response, AsyncIterator):
+ await self.astream(response)
+ else:
+ self.write(response)
+ except Exception as e:
+ raise SyncInvocationsException(e)
+
+
+class PingHandler(tornado.web.RequestHandler):
+ """Handler mapped to the /ping GET route.
+
+ Ping handler to monitor the health of the Tornado server.
+ """
+
+ def get(self):
+ """Simple GET method to assess the health of the server."""
+
+ self.write("")
+
+
+async def handle(handler: callable, environment: Environment):
+ """Serves the sync handler function using Tornado.
+
+ Opens the /invocations and /ping routes used by a SageMaker Endpoint
+ for inference serving capabilities.
+ """
+
+ logger.info("Starting inference server in synchronous mode...")
+
+ app = tornado.web.Application(
+ [
+ (r"/invocations", InvocationsHandler, dict(handler=handler, environment=environment)),
+ (r"/ping", PingHandler),
+ ]
+ )
+ app.listen(environment.port)
+ logger.debug(f"Synchronous inference server listening on port: `{environment.port}`")
+ await asyncio.Event().wait()
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/__init__.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/__init__.py
new file mode 100644
index 000000000..c3961685a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/__init__.py
@@ -0,0 +1 @@
+from __future__ import absolute_import
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/environment.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/environment.py
new file mode 100644
index 000000000..18870de27
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/environment.py
@@ -0,0 +1,59 @@
+from __future__ import absolute_import
+
+import json
+import os
+from enum import Enum
+
+
+class SageMakerInference(str, Enum):
+ """Simple enum to define the mapping between dictionary key and environement variable."""
+
+ BASE_DIRECTORY = "SAGEMAKER_INFERENCE_BASE_DIRECTORY"
+ REQUIREMENTS = "SAGEMAKER_INFERENCE_REQUIREMENTS"
+ CODE_DIRECTORY = "SAGEMAKER_INFERENCE_CODE_DIRECTORY"
+ CODE = "SAGEMAKER_INFERENCE_CODE"
+ LOG_LEVEL = "SAGEMAKER_INFERENCE_LOG_LEVEL"
+ PORT = "SAGEMAKER_INFERENCE_PORT"
+
+
+class Environment:
+ """Retrieves and encapsulates SAGEMAKER_INFERENCE prefixed environment variables."""
+
+ def __init__(self):
+ """Initialize the environment variable mapping"""
+
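+ # BASE_DIRECTORY, REQUIREMENTS and PORT are fixed for SageMaker Endpoint containers;
+ # CODE_DIRECTORY, CODE and LOG_LEVEL can be overridden via their SAGEMAKER_INFERENCE_* variables.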
+ self._environment_variables = {
+ SageMakerInference.BASE_DIRECTORY: "/opt/ml/model",
+ SageMakerInference.REQUIREMENTS: "requirements.txt",
+ SageMakerInference.CODE_DIRECTORY: os.getenv(SageMakerInference.CODE_DIRECTORY, None),
+ SageMakerInference.CODE: os.getenv(SageMakerInference.CODE, "inference.handler"),
+ SageMakerInference.LOG_LEVEL: os.getenv(SageMakerInference.LOG_LEVEL, 10),
+ SageMakerInference.PORT: 8080,
+ }
+
+ def __str__(self):
+ return json.dumps(self._environment_variables)
+
+ @property
+ def base_directory(self):
+ return self._environment_variables.get(SageMakerInference.BASE_DIRECTORY)
+
+ @property
+ def requirements(self):
+ return self._environment_variables.get(SageMakerInference.REQUIREMENTS)
+
+ @property
+ def code_directory(self):
+ return self._environment_variables.get(SageMakerInference.CODE_DIRECTORY)
+
+ @property
+ def code(self):
+ return self._environment_variables.get(SageMakerInference.CODE)
+
+ @property
+ def logging_level(self):
+ return self._environment_variables.get(SageMakerInference.LOG_LEVEL)
+
+ @property
+ def port(self):
+ return self._environment_variables.get(SageMakerInference.PORT)
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/exception.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/exception.py
new file mode 100644
index 000000000..eb961889d
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/exception.py
@@ -0,0 +1,21 @@
+from __future__ import absolute_import
+
+
+class RequirementsInstallException(Exception):
+ pass
+
+
+class InferenceCodeLoadException(Exception):
+ pass
+
+
+class ServerStartException(Exception):
+ pass
+
+
+class SyncInvocationsException(Exception):
+ pass
+
+
+class AsyncInvocationsException(Exception):
+ pass
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/logger.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/logger.py
new file mode 100644
index 000000000..c88008689
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-inference-server/utils/logger.py
@@ -0,0 +1,43 @@
+from __future__ import absolute_import
+
+import logging.config
+
+SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER = "sagemaker_distribution.inference_server"
+LOGGING_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": True,
+ "formatters": {
+ "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"},
+ },
+ "handlers": {
+ "default": {
+ "level": "DEBUG",
+ "formatter": "standard",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
+ },
+ },
+ "loggers": {
+ SAGEMAKER_DISTRIBUTION_INFERENCE_LOGGER: {
+ "level": "DEBUG",
+ "handlers": ["default"],
+ "propagate": True,
+ },
+ "tornado.application": {
+ "level": "DEBUG",
+ "handlers": ["default"],
+ "propagate": True,
+ },
+ "tornado.general": {
+ "level": "DEBUG",
+ "handlers": ["default"],
+ "propagate": True,
+ },
+ "tornado.access": {
+ "level": "DEBUG",
+ "handlers": ["default"],
+ "propagate": True,
+ },
+ },
+}
+logging.config.dictConfig(LOGGING_CONFIG)
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/git_clone.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/git_clone.sh
new file mode 100755
index 000000000..714d687e2
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/git_clone.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+set -eux
+
+sourceMetaData=/opt/ml/metadata/resource-metadata.json
+
+# Extract the required fields from the metadata stored in /opt/ml/metadata.
+dataZoneDomainId=$(jq -r '.AdditionalMetadata.DataZoneDomainId' < $sourceMetaData)
+dataZoneUserId=$(jq -r '.AdditionalMetadata.DataZoneUserId' < $sourceMetaData)
+dataZoneEndPoint=$(jq -r '.AdditionalMetadata.DataZoneEndpoint' < $sourceMetaData)
+dataZoneProjectId=$(jq -r '.AdditionalMetadata.DataZoneProjectId' < $sourceMetaData)
+
+DEFAULT_DESTINATION_PATH=$HOME/src
+DESTINATION_PATH=${1:-$DEFAULT_DESTINATION_PATH}
+
+echo "Cloning to ${DESTINATION_PATH}"
+
+# Function to clone the CodeCommit repository
+clone_code_commit_repo() {
+ if [ -d "${DESTINATION_PATH}/.git" ]; then
+ echo "Repository already exists at ${DESTINATION_PATH}"
+ else
+ rm -rf "${DESTINATION_PATH}"
+ local repoName=$1
+ # If the clone fails, print guidance and delete the partially cloned repo
+ # (set -e would otherwise abort the script before the cleanup could run).
+ if ! git clone codecommit::$AWS_REGION://$repoName $DESTINATION_PATH; then
+ echo "Git clone of the Project repository has failed. Please refer to the documentation to understand how to fix this."
+ if [ -d $DESTINATION_PATH ]; then
+ rm -rf $DESTINATION_PATH
+ fi
+ fi
+ fi
+}
+
+# Get the clone URL for the project
+response=$(sagemaker-studio git get-clone-url --domain-id "$dataZoneDomainId" --project-id "$dataZoneProjectId" --profile DomainExecutionRoleCreds)
+cloneUrl=$(echo "$response" | jq -r '.cloneUrl')
+# Get the project default environment and extract the gitConnectionArn and gitBranchName
+getProjectDefaultEnvResponse=$(sagemaker-studio project get-project-default-environment --domain-id "$dataZoneDomainId" --project-id "$dataZoneProjectId" --profile DomainExecutionRoleCreds)
+gitConnectionArn=$(echo "$getProjectDefaultEnvResponse" | jq -r '.provisionedResources[] | select(.name=="gitConnectionArn") | .value')
+gitBranchName=$(echo "$getProjectDefaultEnvResponse" | jq -r '.provisionedResources[] | select(.name=="gitBranchName") | .value')
+dataZoneProjectRepositoryName=$(echo "$getProjectDefaultEnvResponse" | jq -r '.provisionedResources[] | select(.name=="codeRepositoryName") | .value')
+
+ # Check if the cloneUrl is available
+if [[ -n "$cloneUrl" ]]; then
+ # Check if the cloneUrl contains "codeconnections" or "codestar-connections" (for customers who created connections before Jun 7th)
+ if [[ "$cloneUrl" == *"codeconnections"* ]] || [[ "$cloneUrl" == *"codestar-connections"* ]]; then
+ # Check if the DomainExecutionRoleCreds profile exists in the AWS config file
+ if grep -q 'DomainExecutionRoleCreds' /home/sagemaker-user/.aws/config; then
+ /bin/bash /etc/sagemaker-ui/git_config.sh
+ # Clone the repository using the cloneUrl and gitBranchName
+ git clone "$cloneUrl" $DESTINATION_PATH -b "$gitBranchName"
+ fi
+ else
+ # Clone the codeCommit repository
+ clone_code_commit_repo "$dataZoneProjectRepositoryName"
+ fi
+else
+ # If the cloneUrl is not available, check if the gitConnectionArn is available
+ # If not available, clone codeCommit repository.
+ [[ -z "$gitConnectionArn" ]] && clone_code_commit_repo "$dataZoneProjectRepositoryName"
+fi
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/git_config.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/git_config.sh
new file mode 100644
index 000000000..5f4d61e5e
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/git_config.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+set -eux
+
+sourceMetaData=/opt/ml/metadata/resource-metadata.json
+dataZoneDomainRegion=$(jq -r '.AdditionalMetadata.DataZoneDomainRegion' < $sourceMetaData)
+
+# Configure Git to use the AWS CodeCommit credential helper with profile DomainExecutionRoleCreds
+git config --global credential.helper "!aws --profile DomainExecutionRoleCreds --region $dataZoneDomainRegion codecommit credential-helper --ignore-host-check $@"
+git config --global credential.UseHttpPath true
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/lab/settings/overrides.json b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/lab/settings/overrides.json
new file mode 100644
index 000000000..4c84693d5
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/lab/settings/overrides.json
@@ -0,0 +1,5 @@
+{
+ "@jupyterlab/apputils-extension:themes": {
+ "theme": "Amazon SageMaker Unified Studio Dark"
+ }
+}
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/lab/settings/page_config.json b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/lab/settings/page_config.json
new file mode 100644
index 000000000..fe64b8de5
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/lab/settings/page_config.json
@@ -0,0 +1,14 @@
+{
+ "disabledExtensions": {
+ "@jupyterlab/apputils-extension:announcements": true,
+ "@amzn/sagemaker-jupyterlab-emr-extension": true,
+ "@amzn/sagemaker-jupyter-scheduler": true,
+ "@amzn/sagemaker-jupyterlab-extension-common:panorama": true,
+ "@amzn/sagemaker-jupyterlab-extensions:sessionmanagement": true,
+ "@amzn/sagemaker-jupyterlab-extensions:spacemenu": true,
+ "@amzn/amazon_sagemaker_sql_editor": true,
+ "@sagemaker-studio:EmrCluster": true,
+ "@jupyterlab/scheduler": true,
+ "@jupyter/collaboration-extension": true
+ }
+}
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/server/jupyter_server_config.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/server/jupyter_server_config.py
new file mode 100644
index 000000000..7ce6e079e
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/jupyter/server/jupyter_server_config.py
@@ -0,0 +1,30 @@
+import os
+
+c.ServerApp.terminado_settings = {"shell_command": ["/bin/bash"]}
+region = os.environ.get("AWS_REGION")
+csp_rule = os.environ.get("JUPYTERSERVER_CSP_RULE")
+
+c.ServerApp.tornado_settings = {"compress_response": True, "headers": {"Content-Security-Policy": csp_rule}}
+
+# Do not delete files to trash. Instead, permanently delete files.
+c.FileContentsManager.delete_to_trash = False
+
+# Allow deleting non-empty directory via file browser. Related documentation:
+# https://github.com/jupyter-server/jupyter_server/blob/main/jupyter_server/services/contents/filemanager.py#L125-L129
+c.FileContentsManager.always_delete_dir = True
+
+# Enable `allow_hidden` by default, so hidden files are accessible via Jupyter server
+# Related documentation: https://jupyterlab.readthedocs.io/en/stable/user/files.html#displaying-hidden-files
+c.ContentsManager.allow_hidden = True
+
+# This will set the LanguageServerManager.extra_node_roots setting if amazon_sagemaker_sql_editor exists in the
+# environment. Otherwise, ignore the error and don't fail the JupyterLab server start.
+# Related documentation: https://jupyterlab-lsp.readthedocs.io/en/v3.4.0/Configuring.html
+try:
+ import os
+
+ module = __import__("amazon_sagemaker_sql_editor")
+ module_location = os.path.dirname(module.__file__)
+ c.LanguageServerManager.extra_node_roots = [f"{module_location}/sql-language-server"]
+except:
+ pass
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/kernels/kernel_launchers/python3_kernel_launcher.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/kernels/kernel_launchers/python3_kernel_launcher.sh
new file mode 100755
index 000000000..ca1a8fb75
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/kernels/kernel_launchers/python3_kernel_launcher.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
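+# Invoked via kernel.json argv as: python3_kernel_launcher.sh -m <kernel_module> -f <connection_file>,
+# so $2 is the kernel module (e.g. ipykernel_launcher) and $4 is the Jupyter connection file.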
+kernel_type=$2
+connection_file=$4
+
+if [ ! -e "/opt/ml/metadata/resource-metadata.json" ] && [ -z "$SPARKMAGIC_CONF_DIR" ]; then
+ export SPARKMAGIC_CONF_DIR="$SM_EXECUTION_INPUT_PATH"
+fi
+
+if [ -n "$SPARKMAGIC_CONF_DIR" ]; then
+ mkdir -p $SPARKMAGIC_CONF_DIR
+ config_file_path=${SPARKMAGIC_CONF_DIR}/config.json
+else
+ sparkmagicHomeDir=${HOME}/.sparkmagic
+ mkdir -p $sparkmagicHomeDir
+ config_file_path=${sparkmagicHomeDir}/config.json
+fi
+
+if [ ! -f "$config_file_path" ]; then
+ cat << EOT > "$config_file_path"
+{
+ "livy_session_startup_timeout_seconds": 180,
+ "logging_config": {
+ "version": 1,
+ "formatters": {
+ "magicsFormatter": {
+ "format": "%(asctime)s\t%(levelname)s\t%(message)s",
+ "datefmt": ""
+ }
+ },
+ "handlers": {
+ "magicsHandler": {
+ "class": "sagemaker_studio_dataengineering_sessions.sagemaker_base_session_manager.common.logger_utils.SessionManagerFileHandler",
+ "formatter": "magicsFormatter",
+ "file_name": "spark_magic"
+ }
+ },
+ "loggers": {
+ "magicsLogger": {
+ "handlers": ["magicsHandler"],
+ "level": "INFO",
+ "propagate": 0
+ }
+ }
+ }
+}
+EOT
+else
+ sed -i 's/\"sagemaker_base_session_manager/\"sagemaker_studio_dataengineering_sessions.sagemaker_base_session_manager/g' $config_file_path
+fi
+exec /opt/conda/bin/python -m ${kernel_type} -f ${connection_file}
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/kernels/python3/kernel.json b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/kernels/python3/kernel.json
new file mode 100644
index 000000000..7db91492d
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/kernels/python3/kernel.json
@@ -0,0 +1,14 @@
+{
+ "argv": [
+ "/opt/conda/share/jupyter/kernels/kernel_launchers/python3_kernel_launcher.sh",
+ "-m",
+ "ipykernel_launcher",
+ "-f",
+ "{connection_file}"
+ ],
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "metadata": {
+ "debugger": true
+ }
+}
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/libmgmt/install-lib.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/libmgmt/install-lib.sh
new file mode 100755
index 000000000..84999cf14
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/libmgmt/install-lib.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -eux
+PROJECT_DIR=${SMUS_PROJECT_DIR:-"$HOME/src"}
+# Check if the .libs.json file exists
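+# Expected shape of .libs.json (illustrative example; keys match the jq paths used below):
+# {
+#   "ApplyChangeToSpace": "true",
+#   "Python": {
+#     "CondaPackages": {
+#       "Channels": ["conda-forge"],
+#       "PackageSpecs": ["numpy=1.26.4"]
+#     }
+#   }
+# }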
+if [ -e "$PROJECT_DIR/.libs.json" ]; then
+ lib_config_json=`cat $PROJECT_DIR/.libs.json`
+
+ apply_change_to_space=`echo $lib_config_json | jq -r '.ApplyChangeToSpace'`
+ # Extract conda channels from the config, add `-c ` before each channel and join the strings
+ conda_channels=`echo $lib_config_json | jq -r '.Python.CondaPackages.Channels | .[]' | sed 's/^/-c /g'`
+ # Extract conda package spec from the config and join the strings
+ conda_package=`echo $lib_config_json | jq -r '.Python.CondaPackages.PackageSpecs | .[]'`
+
+ if [ "${apply_change_to_space}" = "true" ] && [ -n "$conda_package" ]; then
+ # if conda package spec exists, install the packages
+ micromamba install --freeze-installed -y $conda_channels $conda_package
+ fi
+fi
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/network_validation.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/network_validation.sh
new file mode 100644
index 000000000..aac0ff96f
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/network_validation.sh
@@ -0,0 +1,181 @@
+#!/bin/bash
+set -eux
+
+# Input parameters with defaults:
+# Default to 1 (Git storage) if no parameter is passed.
+is_s3_storage=${1:-"1"}
+# Output file path for unreachable services JSON
+network_validation_file=${2:-"/tmp/.network_validation.json"}
+
+# Function to write unreachable services to a JSON file
+write_unreachable_services_to_file() {
+ local value="$1"
+ local file="$network_validation_file"
+
+ # Create the file if it doesn't exist
+ if [ ! -f "$file" ]; then
+ touch "$file" || {
+ echo "Failed to create $file" >&2
+ return 0
+ }
+ fi
+
+ # Check file is writable
+ if [ ! -w "$file" ]; then
+ echo "Error: $file is not writable" >&2
+ return 0
+ fi
+
+ # Write JSON object with UnreachableServices key and the comma-separated list value
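+ # e.g. {"UnreachableServices": "Redshift Clusters, Athena"}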
+ jq -n --arg value "$value" '{"UnreachableServices": $value}' > "$file"
+}
+
+# Configure AWS CLI region using environment variable REGION_NAME
+aws configure set region "${REGION_NAME}"
+echo "Successfully configured region to ${REGION_NAME}"
+
+# Metadata file location containing DataZone info
+sourceMetaData=/opt/ml/metadata/resource-metadata.json
+
+# Extract necessary DataZone metadata fields via jq
+dataZoneDomainId=$(jq -r '.AdditionalMetadata.DataZoneDomainId' < "$sourceMetaData")
+dataZoneProjectId=$(jq -r '.AdditionalMetadata.DataZoneProjectId' < "$sourceMetaData")
+dataZoneEndPoint=$(jq -r '.AdditionalMetadata.DataZoneEndpoint' < "$sourceMetaData")
+dataZoneDomainRegion=$(jq -r '.AdditionalMetadata.DataZoneDomainRegion' < "$sourceMetaData")
+s3Path=$(jq -r '.AdditionalMetadata.ProjectS3Path' < "$sourceMetaData")
+
+# Extract bucket name, fallback to empty string if not found
+s3ValidationBucket=$(echo "${s3Path:-}" | sed -E 's#s3://([^/]+).*#\1#')
+
+# Call AWS CLI list-connections, including endpoint if specified
+if [ -n "$dataZoneEndPoint" ]; then
+ response=$(aws datazone list-connections \
+ --endpoint-url "$dataZoneEndPoint" \
+ --domain-identifier "$dataZoneDomainId" \
+ --project-identifier "$dataZoneProjectId" \
+ --region "$dataZoneDomainRegion")
+else
+ response=$(aws datazone list-connections \
+ --domain-identifier "$dataZoneDomainId" \
+ --project-identifier "$dataZoneProjectId" \
+ --region "$dataZoneDomainRegion")
+fi
+
+# Extract each connection item as a compact JSON string
+connection_items=$(echo "$response" | jq -c '.items[]')
+
+# Required AWS Services for Compute connections and Git
+# Initialize SERVICE_COMMANDS with always-needed STS and S3 checks
+declare -A SERVICE_COMMANDS=(
+ ["STS"]="aws sts get-caller-identity"
+ ["S3"]="aws s3api list-objects --bucket \"$s3ValidationBucket\" --max-items 1"
+)
+
+# Track connection types found for conditional checks
+declare -A seen_types=()
+
+# Iterate over each connection to populate service commands conditionally
+while IFS= read -r item; do
+ # Extract connection type
+ type=$(echo "$item" | jq -r '.type')
+ seen_types["$type"]=1
+
+ # For SPARK connections, check for Glue and EMR properties
+ if [[ "$type" == "SPARK" ]]; then
+ # If sparkGlueProperties present, add Glue check
+ if echo "$item" | jq -e '.props.sparkGlueProperties' > /dev/null; then
+ SERVICE_COMMANDS["Glue"]="aws glue get-databases --max-items 1"
+ fi
+
+ # Check for emr-serverless in sparkEmrProperties.computeArn for EMR Serverless check
+ emr_arn=$(echo "$item" | jq -r '.props.sparkEmrProperties.computeArn // empty')
+ if [[ "$emr_arn" == *"emr-serverless"* && "$emr_arn" == *"/applications/"* ]]; then
+ # Extract the application ID from the ARN
+ emr_app_id=$(echo "$emr_arn" | sed -E 's#.*/applications/([^/]+)#\1#')
+
+ # Only set the service command if the application ID is valid
+ if [[ -n "$emr_app_id" ]]; then
+ SERVICE_COMMANDS["EMR Serverless"]="aws emr-serverless get-application --application-id \"$emr_app_id\""
+ fi
+ fi
+ fi
+done <<< "$connection_items"
+
+# Add Athena if ATHENA connection found
+[[ -n "${seen_types[ATHENA]:-}" ]] && SERVICE_COMMANDS["Athena"]="aws athena list-data-catalogs --max-items 1"
+
+# Add Redshift checks if REDSHIFT connection found
+if [[ -n "${seen_types[REDSHIFT]:-}" ]]; then
+ SERVICE_COMMANDS["Redshift Clusters"]="aws redshift describe-clusters --max-records 20"
+ SERVICE_COMMANDS["Redshift Serverless"]="aws redshift-serverless list-namespaces --max-results 1"
+fi
+
+# If using Git Storage (S3 storage flag == 1), check CodeConnections connectivity
+# Domain Execution role contains permissions for CodeConnections
+if [[ "$is_s3_storage" == "1" ]]; then
+ SERVICE_COMMANDS["CodeConnections"]="aws codeconnections list-connections --max-results 1 --profile DomainExecutionRoleCreds"
+fi
+
+# Timeout (seconds) for each API call
+api_time_out_limit=10
+# Array to accumulate unreachable services
+unreachable_services=()
+# Create a temporary directory to store individual service results
+temp_dir=$(mktemp -d)
+
+# Launch all service API checks in parallel background jobs
+for service in "${!SERVICE_COMMANDS[@]}"; do
+ {
+ # Run command with timeout, discard stdout/stderr
+ if timeout "${api_time_out_limit}s" bash -c "${SERVICE_COMMANDS[$service]}" > /dev/null 2>&1; then
+ # Success: write OK to temp file
+ echo "OK" > "$temp_dir/$service"
+ else
+ # Get exit code to differentiate timeout or other errors
+ exit_code=$?
+ if [ "$exit_code" -eq 124 ]; then
+ # Timeout exit code
+ echo "TIMEOUT" > "$temp_dir/$service"
+ else
+ # Other errors (e.g., permission denied)
+ echo "ERROR" > "$temp_dir/$service"
+ fi
+ fi
+ } &
+done
+
+# Wait for all background jobs to complete before continuing
+wait
+
+# Process each service's result file to identify unreachable ones
+for service in "${!SERVICE_COMMANDS[@]}"; do
+ result_file="$temp_dir/$service"
+ if [ -f "$result_file" ]; then
+ result=$(<"$result_file")
+ if [[ "$result" == "TIMEOUT" ]]; then
+ echo "$service API did NOT resolve within ${api_time_out_limit}s. Marking as unreachable."
+ unreachable_services+=("$service")
+ elif [[ "$result" == "OK" ]]; then
+ echo "$service API is reachable."
+ else
+ echo "$service API returned an error (but not a timeout). Ignored for network check."
+ fi
+ else
+ echo "$service check did not produce a result file. Skipping."
+ fi
+done
+
+# Cleanup temporary directory
+rm -rf "$temp_dir"
+
+# Write unreachable services to file if any, else write empty string
+if (( ${#unreachable_services[@]} > 0 )); then
+ joined_services=$(IFS=','; echo "${unreachable_services[*]}")
+ # Add spaces after commas for readability
+ joined_services_with_spaces=${joined_services//,/,\ }
+ write_unreachable_services_to_file "$joined_services_with_spaces"
+ echo "Unreachable AWS Services: ${joined_services_with_spaces}"
+else
+ write_unreachable_services_to_file ""
+ echo "All required AWS services reachable within ${api_time_out_limit}s"
+fi
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/sagemaker_ui_post_startup.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/sagemaker_ui_post_startup.sh
new file mode 100755
index 000000000..db7da9f55
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/sagemaker_ui_post_startup.sh
@@ -0,0 +1,253 @@
+#!/bin/bash
+set -eux
+
+# Writes script status to file. This file is read by an IDE extension responsible for dispatching UI post-startup-status to the user.
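+# The file holds a single JSON object, e.g. {"status": "in-progress", "message": "IDE configuration in progress."}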
+write_status_to_file() {
+ local status="$1"
+ local message="$2"
+ local file="/tmp/.post-startup-status.json"
+
+ # Check if the file exists, if not, create it
+ if [ ! -f "$file" ]; then
+ touch "$file" || {
+ echo "Failed to create $file" >&2
+ return 0
+ }
+ fi
+
+ # Ensure the file is writable
+ if [ ! -w "$file" ]; then
+ echo "Error: $file is not writable" >&2
+ return 0
+ fi
+
+ # Create the JSON object and write to file
+ jq -n --arg status "$status" --arg message "$message" '{"status":$status,"message":$message}' > "$file"
+
+}
+
+# Checks whether the script status is still "in-progress". If so, no errors were detected and the run can be marked successful.
+write_status_to_file_on_script_complete() {
+ local file="/tmp/.post-startup-status.json"
+ local check_key="status"
+ local check_value="in-progress"
+
+
+ if jq -e --arg key "$check_key" --arg value "$check_value" '.[$key] == $value' "$file" > /dev/null; then
+ write_status_to_file "success" "IDE configured successfully."
+ echo "Post-startup script completed successfully. Success status written to $file"
+ else
+ echo "Skipping writing post-startup script 'success' status. An error was detected during execution and written to $file."
+ fi
+}
+
+write_status_to_file "in-progress" "IDE configuration in progress."
+
+sourceMetaData=/opt/ml/metadata/resource-metadata.json
+
+# Extract the required fields from the metadata stored in /opt/ml/metadata.
+dataZoneDomainId=$(jq -r '.AdditionalMetadata.DataZoneDomainId' < $sourceMetaData)
+dataZoneUserId=$(jq -r '.AdditionalMetadata.DataZoneUserId' < $sourceMetaData)
+dataZoneProjectRepositoryName=$(jq -r '.AdditionalMetadata.DataZoneProjectRepositoryName' < $sourceMetaData)
+dataZoneEndPoint=$(jq -r '.AdditionalMetadata.DataZoneEndpoint' < $sourceMetaData)
+dataZoneProjectId=$(jq -r '.AdditionalMetadata.DataZoneProjectId' < $sourceMetaData)
+dataZoneDomainRegion=$(jq -r '.AdditionalMetadata.DataZoneDomainRegion' < $sourceMetaData)
+
+set +e
+
+# Remove the ~/.aws/config file to start clean when the space restarts
+rm -f /home/sagemaker-user/.aws/config
+echo "Successfully removed the ~/.aws/config file"
+
+aws configure set credential_source EcsContainer
+echo "Successfully configured default profile"
+
+# Add region configuration using REGION_NAME environment variable
+aws configure set region "${REGION_NAME}"
+echo "Successfully configured region to ${REGION_NAME}"
+
+# add SparkMonitor and Connection Magic entrypoint
+NB_USER=sagemaker-user
+
+config_path=/home/${NB_USER}/.ipython/profile_default/ipython_config.py
+
+if [ ! -f "$config_path" ] || ! grep -q "sagemaker_studio_dataengineering_sessions" "$config_path"; then
+ ipython profile create && echo "c.InteractiveShellApp.extensions.extend(['sagemaker_sparkmonitor.kernelextension','sagemaker_studio_dataengineering_sessions.sagemaker_connection_magic'])" >> $config_path
+ cat << EOT >> "$config_path"
+c.Application.logging_config = {
+ "loggers": {
+ "": {
+ "level": "INFO",
+ # console handler is required to keep the default behavior of jupyter logging.
+ # https://jupyter-server.readthedocs.io/en/latest/operators/configuring-logging.html
+ "handlers": ["console"],
+ },
+ },
+}
+EOT
+fi
+
+# Disable command tracing (set +x) so the credentials returned by the fetch below are not logged.
+set +x
+
+# Note: The $? check immediately follows the sagemaker-studio command to ensure we're checking its exit status.
+# Adding commands between these lines could lead to incorrect error handling.
+response=$(timeout 30 sagemaker-studio credentials get-domain-execution-role-credential-in-space --domain-id "$dataZoneDomainId" --profile default)
+responseStatus=$?
+
+set -x
+
+if [ $responseStatus -ne 0 ]; then
+ echo "Failed to fetch domain execution role credentials. Will skip adding new credentials profile: DomainExecutionRoleCreds."
+ write_status_to_file "error" "Network issue detected. Your domain may be using a public subnet, which affects IDE functionality. Please contact your admin."
+else
+ aws configure set credential_process "sagemaker-studio credentials get-domain-execution-role-credential-in-space --domain-id $dataZoneDomainId --profile default" --profile DomainExecutionRoleCreds
+ echo "Successfully configured DomainExecutionRoleCreds profile"
+fi
+
+# Run AWS CLI command to get the username from DataZone User Profile.
+if [ ! -z "$dataZoneEndPoint" ]; then
+ response=$( aws datazone get-user-profile --endpoint-url "$dataZoneEndPoint" --domain-identifier "$dataZoneDomainId" --user-identifier "$dataZoneUserId" --region "$dataZoneDomainRegion" )
+else
+ response=$( aws datazone get-user-profile --domain-identifier "$dataZoneDomainId" --user-identifier "$dataZoneUserId" --region "$dataZoneDomainRegion" )
+fi
+
+# Extract the Auth Mode from the response. Unified Studio currently supports IAM, SSO and SAML.
+auth_mode=$(echo "$response" | jq -r '.type')
+
+case "$auth_mode" in
+ "IAM")
+ # For IAM users - extract IAM ARN from response. Response does not contain username or email.
+ arn=$(echo "$response" | jq -r '.details.iam.arn')
+ # Split ARN by / and return the last field
+ username=$(echo "$arn" | awk -F'/' '{print $NF}')
+ email="$arn"
+ ;;
+ "SSO"|"SAML")
+ # For SSO and SAML user, extract username and email if present in response.
+ username=$(echo "$response" | jq -r '.details.sso.username')
+ email=$(echo "$response" | jq -r '.details.sso.email')
+ # Fall back to the username if no email is present in the response
+ if [ -z "$email" ] || [ "$email" = "null" ]; then
+ email="$username"
+ fi
+ ;;
+ *)
+ echo "Unknown authentication mode: $auth_mode"
+ exit 1
+ ;;
+esac
+
+# Checks whether the project uses S3 or Git storage.
+# Returns 0 when no Git resources are provisioned (S3 storage), 1 when a Git connection or repository exists.
+is_s3_storage() {
+ getProjectDefaultEnvResponse=$(sagemaker-studio project get-project-default-environment --domain-id "$dataZoneDomainId" --project-id "$dataZoneProjectId" --profile DomainExecutionRoleCreds)
+ gitConnectionArn=$(echo "$getProjectDefaultEnvResponse" | jq -r '.provisionedResources[] | select(.name=="gitConnectionArn") | .value')
+ codeRepositoryName=$(echo "$getProjectDefaultEnvResponse" | jq -r '.provisionedResources[] | select(.name=="codeRepositoryName") | .value')
+
+ if [ -z "$gitConnectionArn" ] && [ -z "$codeRepositoryName" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+echo "Checking Project Storage Type"
+
+# Execute once to store the result
+is_s3_storage
+is_s3_storage_flag=$? # 0 if S3 storage, 1 if Git
+
+if [ "$is_s3_storage_flag" -eq 0 ]; then
+ export IS_GIT_PROJECT=false
+ export SMUS_PROJECT_DIR="$HOME/shared"
+ echo "Project is using S3 storage, project directory set to: $SMUS_PROJECT_DIR"
+else
+ export IS_GIT_PROJECT=true
+ export SMUS_PROJECT_DIR="$HOME/src"
+ echo "Project is using Git storage, project directory set to: $SMUS_PROJECT_DIR"
+fi
+
+if grep -q "^SMUS_PROJECT_DIR=" ~/.bashrc; then
+ echo "SMUS_PROJECT_DIR is defined in the env"
+else
+ echo SMUS_PROJECT_DIR="$SMUS_PROJECT_DIR" >> ~/.bashrc
+ echo readonly SMUS_PROJECT_DIR >> ~/.bashrc
+fi
+
+# Write SMUS_PROJECT_DIR to a JSON file to be accessed by JupyterLab Extensions
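+# e.g. {"smusProjectDirectory": "/home/sagemaker-user/src", "isGitProject": true}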
+mkdir -p "$HOME/.config" # Create config directory if it doesn't exist
+jq -n \
+ --arg smusProjectDirectory "$SMUS_PROJECT_DIR" \
+ --arg isGitProject "$IS_GIT_PROJECT" \
+ '{
+ smusProjectDirectory: $smusProjectDirectory,
+ isGitProject: ($isGitProject == "true")
+ }' > "$HOME/.config/smus-storage-metadata.json"
+
+if [ $is_s3_storage_flag -ne 0 ]; then
+ # Creating a directory where the repository will be cloned
+ mkdir -p "$HOME/src"
+
+ echo "Starting execution of Git Cloning script"
+ bash /etc/sagemaker-ui/git_clone.sh
+
+ # Set up the Git identity for the user.
+ git config --global user.email "$email"
+ git config --global user.name "$username"
+else
+ echo "Project is using Non-Git storage, skipping git repository setup and ~/src dir creation"
+fi
+
+# MLFlow tracking server uses the LOGNAME environment variable to track identity. Set the LOGNAME to the username of the user associated with the space
+export LOGNAME=$username
+if grep -q "^LOGNAME=" ~/.bashrc; then
+ echo "LOGNAME is defined in the env"
+else
+ echo LOGNAME=$username >> ~/.bashrc
+ echo readonly LOGNAME >> ~/.bashrc
+fi
+
+# Generate sagemaker pysdk intelligent default config
+nohup python /etc/sagemaker/sm_pysdk_default_config.py &
+# Only run the following commands if SAGEMAKER_APP_TYPE_LOWERCASE is jupyterlab
+if [ "${SAGEMAKER_APP_TYPE_LOWERCASE}" = "jupyterlab" ]; then
+ # do not fail immediately for non-zero exit code returned
+ # by start-workflows-container. An expected non-zero exit
+ # code will be returned if there is not a minimum of 2
+ # CPU cores available.
+ # Start workflows local runner
+ bash /etc/sagemaker-ui/workflows/start-workflows-container.sh
+
+ # ensure functions inherit traps and fail immediately
+ set -eE
+
+ # write unexpected error to file if any of the remaining scripts fail.
+ trap 'write_status_to_file "error" "An unexpected error occurred. Please stop and restart your space to retry."' ERR
+
+ # Install conda and pip dependencies if lib mgmt config existing
+ bash /etc/sagemaker-ui/libmgmt/install-lib.sh
+
+ # Install sm-spark-cli
+ bash /etc/sagemaker-ui/workflows/sm-spark-cli-install.sh
+fi
+
+# Execute the network validation script to check whether any required AWS services are unreachable
+echo "Starting network validation script..."
+
+network_validation_file="/tmp/.network_validation.json"
+
+# Run the validation script; only if it succeeds, check unreachable services
+if bash /etc/sagemaker-ui/network_validation.sh "$is_s3_storage_flag" "$network_validation_file"; then
+ # Read unreachable services from JSON file
+ failed_services=$(jq -r '.UnreachableServices // empty' "$network_validation_file" || echo "")
+ if [[ -n "$failed_services" ]]; then
+ error_message="$failed_services are unreachable. Please contact your admin."
+ # Example error message: Redshift Clusters, Athena, STS, Glue are unreachable. Please contact your admin.
+ write_status_to_file "error" "$error_message"
+ echo "$error_message"
+ fi
+else
+ echo "Warning: network_validation.sh failed, skipping unreachable services check."
+fi
+
+write_status_to_file_on_script_complete
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/set_code_editor_theme.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/set_code_editor_theme.sh
new file mode 100755
index 000000000..f295052c5
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/set_code_editor_theme.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+USER_SETTINGS_FILE="/home/sagemaker-user/sagemaker-code-editor-server-data/data/User/settings.json"
+COLOR_THEME_KEY="workbench.colorTheme"
+COLOR_THEME_VALUE="Default Dark Modern"
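+# After this script runs, settings.json contains e.g. {"workbench.colorTheme": "Default Dark Modern"}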
+
+# Check if the settings file exists
+if [ ! -f "$USER_SETTINGS_FILE" ]; then
+ echo "Code Editor user settings file not found. Creating..."
+ mkdir -p "$(dirname "$USER_SETTINGS_FILE")"
+ echo "{}" > "$USER_SETTINGS_FILE"
+fi
+
+EXISTING_COLOR_THEME_VALUE=$(jq -r --arg key "$COLOR_THEME_KEY" '.[$key] // empty' "$USER_SETTINGS_FILE")
+
+if [[ -n "$EXISTING_COLOR_THEME_VALUE" ]]; then
+ echo "Theme is already set in user settings as '$EXISTING_COLOR_THEME_VALUE'. No changes made."
+else
+ # Set theme
+ jq --arg key "$COLOR_THEME_KEY" --arg value "$COLOR_THEME_VALUE" '.[$key] = $value' "$USER_SETTINGS_FILE" > "${USER_SETTINGS_FILE}.tmp" && mv "${USER_SETTINGS_FILE}.tmp" "$USER_SETTINGS_FILE"
+ echo "Successfully set Code Editor theme to '$COLOR_THEME_VALUE'."
+fi
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/docker-compose.yaml b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/docker-compose.yaml
new file mode 100644
index 000000000..d04f00995
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/docker-compose.yaml
@@ -0,0 +1,77 @@
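+# Shared Airflow container definition; reused below by the webserver and scheduler services via "<<: *airflow-common".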
+x-airflow-common: &airflow-common
+ image: 058264401727.dkr.ecr.${AWS_REGION}.amazonaws.com/mwaa_image:latest
+ network_mode: sagemaker
+ environment:
+ # AWS credentials
+ AWS_REGION: ${AWS_REGION}
+ AWS_DEFAULT_REGION: ${AWS_REGION}
+ AWS_CONTAINER_CREDENTIALS_RELATIVE_URI: ${AWS_CONTAINER_CREDENTIALS_RELATIVE_URI}
+
+ # Logging configuration.
+ MWAA__LOGGING__AIRFLOW_DAGPROCESSOR_LOGS_ENABLED: "true"
+ MWAA__LOGGING__AIRFLOW_DAGPROCESSOR_LOG_LEVEL: "INFO"
+ MWAA__LOGGING__AIRFLOW_SCHEDULER_LOGS_ENABLED: "true"
+ MWAA__LOGGING__AIRFLOW_SCHEDULER_LOG_LEVEL: "INFO"
+ MWAA__LOGGING__AIRFLOW_TRIGGERER_LOGS_ENABLED: "true"
+ MWAA__LOGGING__AIRFLOW_TRIGGERER_LOG_LEVEL: "INFO"
+ MWAA__LOGGING__AIRFLOW_WEBSERVER_LOGS_ENABLED: "true"
+ MWAA__LOGGING__AIRFLOW_WEBSERVER_LOG_LEVEL: "INFO"
+ MWAA__LOGGING__AIRFLOW_WORKER_LOGS_ENABLED: "true"
+ MWAA__LOGGING__AIRFLOW_WORKER_LOG_LEVEL: "INFO"
+ MWAA__LOGGING__AIRFLOW_TASK_LOGS_ENABLED: "true"
+ MWAA__LOGGING__AIRFLOW_TASK_LOG_LEVEL: "INFO"
+
+ MWAA__LOGGING__AIRFLOW_DAGPROCESSOR_LOG_GROUP_ARN: "arn:aws:logs:${AWS_REGION}:${ACCOUNT_ID}:log-group:/aws/sagemaker/studio"
+ MWAA__LOGGING__AIRFLOW_SCHEDULER_LOG_GROUP_ARN: "arn:aws:logs:${AWS_REGION}:${ACCOUNT_ID}:log-group:/aws/sagemaker/studio"
+ MWAA__LOGGING__AIRFLOW_TRIGGERER_LOG_GROUP_ARN: "arn:aws:logs:${AWS_REGION}:${ACCOUNT_ID}:log-group:/aws/sagemaker/studio"
+ MWAA__LOGGING__AIRFLOW_WEBSERVER_LOG_GROUP_ARN: "arn:aws:logs:${AWS_REGION}:${ACCOUNT_ID}:log-group:/aws/sagemaker/studio"
+ MWAA__LOGGING__AIRFLOW_WORKER_LOG_GROUP_ARN: "arn:aws:logs:${AWS_REGION}:${ACCOUNT_ID}:log-group:/aws/sagemaker/studio"
+ MWAA__LOGGING__AIRFLOW_TASK_LOG_GROUP_ARN: "arn:aws:logs:${AWS_REGION}:${ACCOUNT_ID}:log-group:/aws/sagemaker/studio"
+
+ # Airflow URL configuration
+ AIRFLOW__CLI__ENDPOINT_URL: "/jupyterlab/default/proxy/absolute/8080"
+ AIRFLOW__WEBSERVER__BASE_URL: "/jupyterlab/default/proxy/absolute/8080"
+ MWAA__CORE__REQUIREMENTS_PATH: "/usr/local/airflow/requirements/requirements.txt"
+ # Project configuration
+ DataZoneDomainId: ${DZ_DOMAIN_ID}
+ DataZoneProjectId: ${DZ_PROJECT_ID}
+ DataZoneEnvironmentId: ${DZ_ENV_ID}
+ DataZoneDomainRegion: ${DZ_DOMAIN_REGION}
+ ProjectS3Path: ${DZ_PROJECT_S3PATH}
+ volumes:
+ - /home/sagemaker-user/${PROJECT_DIR}/workflows/dags:/usr/local/airflow/dags
+ - /home/sagemaker-user/.workflows_setup/plugins:/usr/local/airflow/plugins
+ - /home/sagemaker-user/.workflows_setup/requirements:/usr/local/airflow/requirements
+ - /home/sagemaker-user/.workflows_setup/startup:/usr/local/airflow/startup
+ - ${MOUNT_DIR}:/home/sagemaker-user/src:rw
+ - /home/sagemaker-user/jobs:/home/sagemaker-user/jobs:rw
+ depends_on: &airflow-common-depends-on
+ postgres:
+ condition: service_healthy
+
+services:
+ postgres:
+ image: 058264401727.dkr.ecr.${AWS_REGION}.amazonaws.com/postgres:13
+ network_mode: sagemaker
+ container_name: mwaa-292-db
+ environment:
+ POSTGRES_USER: airflow
+ POSTGRES_PASSWORD: airflow
+ POSTGRES_DB: airflow
+ volumes:
+ - /home/sagemaker-user/.workflows_setup/db-data:/var/lib/postgresql/data
+ healthcheck:
+ test: ["CMD", "pg_isready", "-U", "airflow"]
+ interval: 10s
+ retries: 5
+ start_period: 5s
+
+ webserver:
+ <<: *airflow-common
+ command: webserver
+ container_name: mwaa-292-webserver
+
+ scheduler:
+ <<: *airflow-common
+ command: scheduler
+ container_name: mwaa-292-scheduler
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/healthcheck.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/healthcheck.sh
new file mode 100755
index 000000000..a9581b549
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/healthcheck.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+set -e
+
+POLLING_INTERVAL=1 # seconds
+LOCAL_RUNNER_HEALTH_ENDPOINT="http://default:8888/jupyterlab/default/proxy/absolute/8080/api/v1/health"
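+# The health endpoint returns per-component statuses that are inspected below, e.g. (illustrative shape):
+#   {"metadatabase": {"status": "healthy"}, "scheduler": {"status": "healthy"}, ...}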
+
+while true; do
+ # Capture curl's exit code without letting a failure trip `set -e`
+ curl_exit_code=0
+ response=$(curl -s -w "%{http_code}" "$LOCAL_RUNNER_HEALTH_ENDPOINT") || curl_exit_code=$?
+
+ if [[ $curl_exit_code -ne 0 ]]; then
+ python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'unhealthy' --detailed-status 'Local runner health endpoint is unreachable'
+ else
+
+ http_code=${response: -3}
+ body=${response:0:${#response}-3}
+
+ if [[ $http_code -ne 200 ]]; then
+ python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'unhealthy' --detailed-status 'Local runner health endpoint is unreachable'
+ elif [[ -z "$body" ]]; then
+ python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'unhealthy' --detailed-status 'Local runner health endpoint did not return a response'
+ else
+ status=$(echo "$body" | jq -r '.metadatabase.status, .scheduler.status, .triggerer.status, .dag_processor.status')
+ if [[ "$status" == *"unhealthy"* ]]; then
+ python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'unhealthy' --detailed-status 'Local runner is unhealthy'
+ else
+ python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'healthy' --detailed-status 'Local runner is healthy'
+ POLLING_INTERVAL=10 # raise to 10 seconds after startup
+ fi
+ fi
+ fi
+
+ sleep $POLLING_INTERVAL
+done
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/plugins/amzn_SagemakerWorkflowsOperator-1.0-py3-none-any.whl b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/plugins/amzn_SagemakerWorkflowsOperator-1.0-py3-none-any.whl
new file mode 100644
index 000000000..3f14c1ec5
Binary files /dev/null and b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/plugins/amzn_SagemakerWorkflowsOperator-1.0-py3-none-any.whl differ
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/plugins/amzn_sagemaker_studio-1.0.15-py3-none-any.whl b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/plugins/amzn_sagemaker_studio-1.0.15-py3-none-any.whl
new file mode 100644
index 000000000..2a140acfa
Binary files /dev/null and b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/plugins/amzn_sagemaker_studio-1.0.15-py3-none-any.whl differ
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/requirements/requirements.txt b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/requirements/requirements.txt
new file mode 100644
index 000000000..1ce361735
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/requirements/requirements.txt
@@ -0,0 +1,5 @@
+--find-links /usr/local/airflow/plugins
+--no-index
+
+amzn_sagemaker_studio
+amzn_SagemakerWorkflowsOperator
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sample_dag.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sample_dag.py
new file mode 100644
index 000000000..aa3fc2f14
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sample_dag.py
@@ -0,0 +1,18 @@
+from airflow.decorators import dag
+from airflow.operators.bash import BashOperator
+
+default_args = {
+ "owner": "airflow",
+}
+
+
+@dag(default_args=default_args, tags=["sample"])
+def sample_dag():
+ def sample_task():
+ _task = BashOperator(task_id="hello_world", bash_command="echo 'hello world!'")
+ return _task
+
+ task = sample_task()
+
+
+sample_dag = sample_dag()
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sm-spark-cli-install.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sm-spark-cli-install.sh
new file mode 100755
index 000000000..6a0d50e33
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sm-spark-cli-install.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# install sm-spark-cli
+sudo curl -LO https://github.com/aws-samples/amazon-sagemaker-spark-ui/releases/download/v0.9.1/amazon-sagemaker-spark-ui.tar.gz && \
+sudo tar -xvzf amazon-sagemaker-spark-ui.tar.gz && \
+sudo chmod +x amazon-sagemaker-spark-ui/install-scripts/studio/install-history-server.sh && \
+sudo amazon-sagemaker-spark-ui/install-scripts/studio/install-history-server.sh && \
+rm -rf ~/.m2 && \
+sudo rm -rf amazon-sagemaker-spark-ui*
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sm_init_script.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sm_init_script.sh
new file mode 100755
index 000000000..57a4616fa
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/sm_init_script.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# Create a default aws profile in SM training container
+aws configure set credential_source EcsContainer
+
+# Create a default ipython profile and load extensions in SM processing container
+NB_USER=sagemaker-user
+config_path=/home/${NB_USER}/.ipython/profile_default/ipython_config.py
+# SparkMonitor Widget and Connection Magic - create entrypoint
+if [ ! -f "$config_path" ] || ! grep -q "sagemaker_studio_dataengineering_sessions" "$config_path"; then
+ ipython profile create && echo "c.InteractiveShellApp.extensions.extend(['sagemaker_sparkmonitor.kernelextension','sagemaker_studio_dataengineering_sessions.sagemaker_connection_magic'])" >> $config_path
+ cat << EOT >> "$config_path"
+c.Application.logging_config = {
+ "loggers": {
+ "": {
+ "level": "INFO",
+ # console handler is required to keep the default behavior of jupyter logging.
+ # https://jupyter-server.readthedocs.io/en/latest/operators/configuring-logging.html
+ "handlers": ["console"],
+ },
+ },
+}
+EOT
+fi
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/start-workflows-container.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/start-workflows-container.sh
new file mode 100755
index 000000000..42aae3824
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/start-workflows-container.sh
@@ -0,0 +1,202 @@
+#!/bin/bash
+set -eu
+
+# Get project directory based on storage type
+PROJECT_DIR=${SMUS_PROJECT_DIR:-"$HOME/src"}
+if [ -z "${SMUS_PROJECT_DIR:-}" ]; then
+ MOUNT_DIR=$PROJECT_DIR
+else
+ MOUNT_DIR=$(readlink -f "$PROJECT_DIR") # resolve the symlink to its real target
+fi
+
+# Datazone project metadata
+RESOURCE_METADATA_FILE=/opt/ml/metadata/resource-metadata.json
+SM_DOMAIN_ID=$(jq -r ".DomainId" < $RESOURCE_METADATA_FILE)
+AWS_ACCOUNT_ID=$(jq -r '.ExecutionRoleArn | split(":")[4]' < $RESOURCE_METADATA_FILE)
+ECR_ACCOUNT_ID=058264401727
+DZ_DOMAIN_ID=$(jq -r '.AdditionalMetadata.DataZoneDomainId' < $RESOURCE_METADATA_FILE)
+DZ_PROJECT_ID=$(jq -r '.AdditionalMetadata.DataZoneProjectId' < $RESOURCE_METADATA_FILE)
+DZ_ENV_ID=$(jq -r '.AdditionalMetadata.DataZoneEnvironmentId' < $RESOURCE_METADATA_FILE)
+DZ_DOMAIN_REGION=$(jq -r '.AdditionalMetadata.DataZoneDomainRegion' < $RESOURCE_METADATA_FILE)
+DZ_ENDPOINT=$(jq -r '.AdditionalMetadata.DataZoneEndpoint' < $RESOURCE_METADATA_FILE)
+DZ_PROJECT_S3PATH=$(jq -r '.AdditionalMetadata.ProjectS3Path' < $RESOURCE_METADATA_FILE)
+
+# Workflows paths in JL
+WORKFLOW_DAG_PATH="${PROJECT_DIR}/workflows/dags"
+WORKFLOW_CONFIG_PATH="${PROJECT_DIR}/workflows/config"
+WORKFLOW_DB_DATA_PATH="$HOME/.workflows_setup/db-data"
+WORKFLOW_REQUIREMENTS_PATH="$HOME/.workflows_setup/requirements/"
+WORKFLOW_PLUGINS_PATH="$HOME/.workflows_setup/plugins"
+WORKFLOW_STARTUP_PATH="$HOME/.workflows_setup/startup/"
+WORKFLOW_ARTIFACTS_SOURCE_DIR="/etc/sagemaker-ui/workflows"
+WORKFLOW_PLUGINS_SOURCE_PATH="${WORKFLOW_ARTIFACTS_SOURCE_DIR}/plugins/*.whl"
+WORKFLOW_REQUIREMENTS_SOURCE_PATH="${WORKFLOW_ARTIFACTS_SOURCE_DIR}/requirements/requirements.txt"
+WORKFLOW_AIRFLOW_REQUIREMENTS_SOURCE_PATH="/etc/sagemaker-ui/workflows/requirements/requirements.txt"
+WORKFLOW_OUTPUT_PATH="$HOME/jobs"
+USER_REQUIREMENTS_FILE="${WORKFLOW_CONFIG_PATH}/requirements.txt"
+USER_PLUGINS_FOLDER="${WORKFLOW_CONFIG_PATH}/plugins"
+USER_STARTUP_FILE="${WORKFLOW_CONFIG_PATH}/startup.sh"
+
+
+handle_workflows_startup_error() {
+ local step=$1
+ local detailed_status=""
+ case $step in
+ 0)
+ detailed_status="Workflows blueprint not enabled"
+ ;;
+ 1)
+ detailed_status="Not enough memory"
+ ;;
+ 2)
+ detailed_status="Error creating directories"
+ ;;
+ 3)
+ detailed_status="Error installing docker"
+ ;;
+ 4)
+ detailed_status="Error copying prerequisite files"
+ ;;
+ 5)
+ detailed_status="Error starting workflows image"
+ # Kill any orphans that may have started
+ python /etc/sagemaker-ui/workflows/workflow_client.py stop-local-runner
+ ;;
+ *)
+ detailed_status="Unknown error"
+ ;;
+ esac
+ python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'unhealthy' --detailed-status "$detailed_status"
+ exit 1
+}
+
+# Create status log file if it doesn't exist
+WORKFLOW_HEALTH_PATH="$HOME/.workflows_setup/health"
+mkdir -p $WORKFLOW_HEALTH_PATH
+if [ ! -f "${WORKFLOW_HEALTH_PATH}/status.json" ]; then
+ echo "Creating status file"
+ echo "[]" > "${WORKFLOW_HEALTH_PATH}/status.json"
+fi
+
+# Only start local runner if Workflows blueprint is enabled
+if [ "$(python /etc/sagemaker-ui/workflows/workflow_client.py check-blueprint --region "$DZ_DOMAIN_REGION" --domain-id "$DZ_DOMAIN_ID" --endpoint "$DZ_ENDPOINT")" = "False" ]; then
+ echo "Workflows blueprint is not enabled. Workflows will not start."
+ handle_workflows_startup_error 0
+fi
+
+# Minimum system requirements check: at least 4GB of available RAM and more than 2 CPU cores
+free_mem=$(free -m | awk '/^Mem:/ {print $7}')
+cpu_cores=$(nproc)
+if [[ $free_mem -lt 4096 ]] || [[ $cpu_cores -le 2 ]]; then
+ echo "There is less than 4GB of available RAM or <=2 CPU cores. Workflows will not start. Free mem: $free_mem MB, CPU cores: $cpu_cores"
+ handle_workflows_startup_error 1
+fi
+
+(
+python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'starting' --detailed-status 'Creating directories'
+
+# Create necessary directories
+mkdir -p $WORKFLOW_DAG_PATH
+mkdir -p $WORKFLOW_CONFIG_PATH
+mkdir -p $WORKFLOW_DB_DATA_PATH
+mkdir -p $WORKFLOW_REQUIREMENTS_PATH
+mkdir -p $WORKFLOW_PLUGINS_PATH
+mkdir -p $WORKFLOW_STARTUP_PATH
+mkdir -p $WORKFLOW_OUTPUT_PATH
+) || handle_workflows_startup_error 2
+
+(
+# Set the status of the status file to 'starting'
+python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'starting' --detailed-status 'Installing prerequisites'
+
+# Workflows execution environment install
+sudo apt-get update
+sudo install -m 0755 -d /etc/apt/keyrings
+sudo rm -f /etc/apt/keyrings/docker.gpg
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
+sudo chmod a+r /etc/apt/keyrings/docker.gpg
+echo \
+"deb [arch="$(dpkg --print-architecture)" signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
+"$(. /etc/os-release && echo "$VERSION_CODENAME")" stable" | \
+sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+sudo apt-get update
+VERSION_ID=$(cat /etc/os-release | grep -oP 'VERSION_ID=".*"' | cut -d'"' -f2)
+VERSION_STRING=$(sudo apt-cache madison docker-ce | awk '{ print $3 }' | grep -i $VERSION_ID | head -n 1)
+sudo apt-get install docker-ce-cli=$VERSION_STRING docker-compose-plugin=2.29.2-1~ubuntu.22.04~jammy -y --allow-downgrades
+) || handle_workflows_startup_error 3
+
+(
+# Set status to copying files
+python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'starting' --detailed-status 'Copying files'
+
+# Create .airflowignore file
+cat >>"$WORKFLOW_DAG_PATH/.airflowignore" <<'END'
+.ipynb_checkpoints
+END
+
+# Copy plugins from conda
+cp $WORKFLOW_PLUGINS_SOURCE_PATH $WORKFLOW_PLUGINS_PATH
+# Copy requirements from conda
+cp $WORKFLOW_REQUIREMENTS_SOURCE_PATH $WORKFLOW_REQUIREMENTS_PATH
+
+# Copy system startup
+cp /etc/sagemaker-ui/workflows/startup/startup.sh $WORKFLOW_STARTUP_PATH
+
+# Append user's custom startup script, if exists
+if [ -f $USER_STARTUP_FILE ]; then
+ tail -n +2 $USER_STARTUP_FILE >> "${WORKFLOW_STARTUP_PATH}startup.sh"
+else
+ # Give the user a template startup script
+ echo "#!/bin/bash" > "${USER_STARTUP_FILE}"
+ echo "# Place any special instructions you'd like run during your workflows environment startup here" >> "${USER_STARTUP_FILE}"
+ echo "# Note that you will need to restart your space for changes to take effect." >> "${USER_STARTUP_FILE}"
+ echo "# For example:" >> "${USER_STARTUP_FILE}"
+ echo "# pip install dbt-core" >> "${USER_STARTUP_FILE}"
+fi
+
+# Append user's custom requirements, if exists
+if [ -f $USER_REQUIREMENTS_FILE ]; then
+ cat $USER_REQUIREMENTS_FILE >> "${WORKFLOW_REQUIREMENTS_PATH}requirements.txt"
+else
+ # Give the user a template requirements.txt file
+ echo "# Place any requirements you'd like included in your workflows environment here" > "${USER_REQUIREMENTS_FILE}"
+ echo "# Note that you will need to restart your space for changes to take effect." >> "${USER_REQUIREMENTS_FILE}"
+ echo "# For example:" >> "${USER_REQUIREMENTS_FILE}"
+ echo "# numpy==1.26.4" >> "${USER_REQUIREMENTS_FILE}"
+fi
+
+# Copy over any user-specified plugins, if they exist
+if [ -d $USER_PLUGINS_FOLDER ]; then
+ cp -r $USER_PLUGINS_FOLDER/* $WORKFLOW_PLUGINS_PATH
+fi
+
+) || handle_workflows_startup_error 4
+
+(
+# Set status to installing workflows image
+python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'starting' --detailed-status 'Installing workflows image'
+
+# Copy sample dag if it does not exist
+cp -n "/etc/sagemaker-ui/workflows/sample_dag.py" "${WORKFLOW_DAG_PATH}/"
+
+# Log into ECR repository
+aws ecr get-login-password --region ${AWS_REGION} | docker login --username AWS --password-stdin ${ECR_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com
+
+PROJECT_DIR=$(basename $PROJECT_DIR) \
+MOUNT_DIR=$MOUNT_DIR \
+ECR_ACCOUNT_ID=$ECR_ACCOUNT_ID \
+ACCOUNT_ID=$AWS_ACCOUNT_ID \
+DZ_DOMAIN_ID=$DZ_DOMAIN_ID \
+DZ_PROJECT_ID=$DZ_PROJECT_ID \
+DZ_ENV_ID=$DZ_ENV_ID \
+DZ_DOMAIN_REGION=$DZ_DOMAIN_REGION \
+DZ_PROJECT_S3PATH=$DZ_PROJECT_S3PATH \
+ docker compose -f /etc/sagemaker-ui/workflows/docker-compose.yaml up -d --quiet-pull
+) || handle_workflows_startup_error 5
+
+# Set status to waiting for image to start
+python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'starting' --detailed-status 'Waiting for workflows image to start'
+
+# Start healthchecker
+sleep 30 # give the container some time to start
+supervisorctl -s unix:///var/run/supervisord/supervisor.sock start workflows_healthcheck
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/startup/startup.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/startup/startup.sh
new file mode 100755
index 000000000..1a2485251
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/startup/startup.sh
@@ -0,0 +1 @@
+#!/bin/sh
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/stop-workflows-container.sh b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/stop-workflows-container.sh
new file mode 100755
index 000000000..0ce31e2d5
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/stop-workflows-container.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+DOCKER_EXECUTABLE=$(which docker)
+
+# Stop healthchecker
+supervisorctl -s unix:///var/run/supervisord/supervisor.sock stop workflows_healthcheck
+
+# Stop the containers
+$DOCKER_EXECUTABLE compose -f /etc/sagemaker-ui/workflows/docker-compose.yaml down
+
+# Update status to stopped
+python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'stopped' --detailed-status 'Shutdown completed'
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/workflow_client.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/workflow_client.py
new file mode 100644
index 000000000..4ccec3872
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker-ui/workflows/workflow_client.py
@@ -0,0 +1,114 @@
+import argparse
+from datetime import datetime, timezone
+from typing import Optional
+
+import boto3
+import requests
+
+JUPYTERLAB_URL = "http://default:8888/jupyterlab/default/"
+WORKFLOWS_API_ENDPOINT = "api/sagemaker/workflows"
+TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f%z"
+
+
+def _validate_response(function_name: str, response: requests.Response):
+ if response.status_code == 200:
+ return response
+ else:
+ raise RuntimeError(f"{function_name} returned {response.status_code}: {str(response.content)}")
+
+
+def update_local_runner_status(session: requests.Session, status: str, detailed_status: Optional[str] = None, **kwargs):
+ response = session.post(
+ url=JUPYTERLAB_URL + WORKFLOWS_API_ENDPOINT + "/update-local-runner-status",
+ headers={"X-Xsrftoken": session.cookies.get_dict()["_xsrf"]},
+ json={
+ "timestamp": datetime.now(timezone.utc).strftime(TIMESTAMP_FORMAT),
+ "status": status,
+ "detailed_status": detailed_status,
+ },
+ )
+ return _validate_response("UpdateLocalRunner", response)
+
+
+def start_local_runner(session: requests.Session, **kwargs):
+ response = session.post(
+ url=JUPYTERLAB_URL + WORKFLOWS_API_ENDPOINT + "/start-local-runner",
+ headers={"X-Xsrftoken": session.cookies.get_dict()["_xsrf"]},
+ json={},
+ )
+ return _validate_response("StartLocalRunner", response)
+
+
+def stop_local_runner(session: requests.Session, **kwargs):
+ response = session.post(
+ url=JUPYTERLAB_URL + WORKFLOWS_API_ENDPOINT + "/stop-local-runner",
+ headers={"X-Xsrftoken": session.cookies.get_dict()["_xsrf"]},
+ json={},
+ )
+ return _validate_response("StopLocalRunner", response)
+
+
+def check_blueprint(region: str, domain_id: str, endpoint: str, **kwargs):
+ DZ_CLIENT = boto3.client("datazone")
+ # Use the provided endpoint when one is specified (e.g. non-production environments)
+ if endpoint != "":
+ DZ_CLIENT = boto3.client("datazone", endpoint_url=endpoint)
+ try:
+ blueprint_id = DZ_CLIENT.list_environment_blueprints(
+ managed=True, domainIdentifier=domain_id, name="Workflows"
+ )["items"][0]["id"]
+ blueprint_config = DZ_CLIENT.get_environment_blueprint_configuration(
+ domainIdentifier=domain_id, environmentBlueprintIdentifier=blueprint_id
+ )
+ enabled_regions = blueprint_config["enabledRegions"]
+ print(str(region in enabled_regions))
+ except Exception:
+ print("False")
+
+
+COMMAND_REGISTRY = {
+ "update-local-runner-status": update_local_runner_status,
+ "start-local-runner": start_local_runner,
+ "stop-local-runner": stop_local_runner,
+ "check-blueprint": check_blueprint,
+}
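+
+# Example invocations, as used by the workflows shell scripts in this image (arguments are illustrative):
+#   python /etc/sagemaker-ui/workflows/workflow_client.py update-local-runner-status --status 'starting' --detailed-status 'Copying files'
+#   python /etc/sagemaker-ui/workflows/workflow_client.py check-blueprint --region <region> --domain-id <domain-id> --endpoint <endpoint>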
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Workflow local runner client")
+ subparsers = parser.add_subparsers(dest="command", help="Available commands")
+
+ update_status_parser = subparsers.add_parser("update-local-runner-status", help="Update status of local runner")
+ update_status_parser.add_argument("--status", type=str, required=True, help="Status to update")
+ update_status_parser.add_argument("--detailed-status", type=str, required=False, help="Detailed status text")
+
+ start_parser = subparsers.add_parser("start-local-runner", help="Start local runner")
+
+ stop_parser = subparsers.add_parser("stop-local-runner", help="Stop local runner")
+
+ check_blueprint_parser = subparsers.add_parser("check-blueprint", help="Check Workflows blueprint")
+ check_blueprint_parser.add_argument(
+ "--domain-id", type=str, required=True, help="Datazone Domain ID for blueprint check"
+ )
+ check_blueprint_parser.add_argument("--region", type=str, required=True, help="Datazone Domain region")
+ check_blueprint_parser.add_argument(
+ "--endpoint", type=str, required=True, help="Datazone endpoint for blueprint check"
+ )
+
+ args = parser.parse_args()
+
+ # create the request session
+ session = requests.Session()
+ # populate XSRF cookie
+ session.get(JUPYTERLAB_URL)
+
+ kwargs = vars(args) | {"session": session}
+
+ if args.command in COMMAND_REGISTRY:
+ COMMAND_REGISTRY[args.command](**kwargs)
+ else:
+ parser.print_help()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker/sm_pysdk_default_config.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker/sm_pysdk_default_config.py
new file mode 100644
index 000000000..1a8457b4c
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/sagemaker/sm_pysdk_default_config.py
@@ -0,0 +1,122 @@
+import json
+import os
+import re
+
+import yaml
+
+
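+# Builds a SageMaker Python SDK defaults config from the space's resource metadata
+# (/opt/ml/metadata/resource-metadata.json) and writes it to /etc/xdg/sagemaker/config.yaml.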
+def generate_intelligent_default_config(metadata: dict) -> dict:
+ config = {
+ "SchemaVersion": "1.0",
+ "SageMaker": {
+ "PythonSDK": {
+ "Modules": {
+ "Session": {
+ "DefaultS3Bucket": metadata["S3Bucket"],
+ "DefaultS3ObjectKeyPrefix": metadata["S3ObjectKeyPrefix"],
+ },
+ "RemoteFunction": {
+ "IncludeLocalWorkDir": True,
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]},
+ },
+ "NotebookJob": {
+ "RoleArn": metadata["UserRoleArn"],
+ "S3RootUri": f"s3://{metadata['S3Bucket']}/{metadata['S3ObjectKeyPrefix']}",
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]},
+ },
+ "Serve": {"S3ModelDataUri": f"s3://{metadata['S3Bucket']}/{metadata['S3ObjectKeyPrefix']}"},
+ }
+ },
+ "MonitoringSchedule": {
+ "MonitoringScheduleConfig": {
+ "MonitoringJobDefinition": {
+ "NetworkConfig": {
+ "VpcConfig": {
+ "SecurityGroupIds": metadata["SecurityGroupIds"],
+ "Subnets": metadata["Subnets"],
+ }
+ }
+ }
+ }
+ },
+ "AutoMLJob": {
+ "AutoMLJobConfig": {
+ "SecurityConfig": {
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]}
+ }
+ }
+ },
+ "AutoMLJobV2": {
+ "SecurityConfig": {
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]}
+ }
+ },
+ "CompilationJob": {
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]}
+ },
+ "Pipeline": {"RoleArn": metadata["UserRoleArn"]},
+ "Model": {
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]},
+ "ExecutionRoleArn": metadata["UserRoleArn"],
+ },
+ "ModelPackage": {"ValidationSpecification": {"ValidationRole": metadata["UserRoleArn"]}},
+ "ProcessingJob": {
+ "NetworkConfig": {
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]}
+ },
+ "RoleArn": metadata["UserRoleArn"],
+ },
+ "TrainingJob": {
+ "RoleArn": metadata["UserRoleArn"],
+ "VpcConfig": {"SecurityGroupIds": metadata["SecurityGroupIds"], "Subnets": metadata["Subnets"]},
+ },
+ },
+ }
+ return config
+
+
+if __name__ == "__main__":
+ try:
+ config = {}
+ resource_metadata = "/opt/ml/metadata/resource-metadata.json"
+
+ PROJECT_S3_PATH = "ProjectS3Path"
+ SECURITY_GROUP = "SecurityGroup"
+ PRIVATE_SUBNETS = "PrivateSubnets"
+ META_DATA = "AdditionalMetadata"
+ EXECUTION_ROLE_ARN = "ExecutionRoleArn"
+ CONFIG_FILE_NAME = "config.yaml"
+ CONFIG_DIR = "/etc/xdg/sagemaker/"
+
+ if os.path.exists(resource_metadata):
+ with open(resource_metadata, "r") as file:
+ data = json.load(file)
+
+ s3_path = data[META_DATA].get(PROJECT_S3_PATH, "")
+ metadata = {
+ # user provided bucket
+ "S3Bucket": re.search(r"s3://([^/]+)/", s3_path).group(1),
+ # ${datazoneEnvironmentDomainId}/${datazoneEnvironmentProjectId}/${datazoneScopeName}/
+ "S3ObjectKeyPrefix": s3_path.split("//")[1].split("/", 1)[1],
+ # TODO: Is this a billing concern if set by default?
+ # 'InstanceType': 'ml.m5.xlarge',
+ "SecurityGroupIds": data[META_DATA].get(SECURITY_GROUP, "").split(","),
+ "Subnets": data[META_DATA].get(PRIVATE_SUBNETS, "").split(","),
+ "UserRoleArn": data[EXECUTION_ROLE_ARN],
+ }
+
+ # Do not create the config file when the metadata contains empty values
+ empty_values = [key for key, value in metadata.items() if value == "" or value == [""]]
+ if empty_values:
+ raise AttributeError(f"There are empty values in the metadata: {empty_values}")
+
+ config = generate_intelligent_default_config(metadata)
+ else:
+ raise FileNotFoundError("No resource-metadata.json exists on host!")
+
+ # Write the config YAML file to default location of the admin config file
+ with open(os.path.join(CONFIG_DIR, CONFIG_FILE_NAME), "w") as f:
+ yaml.dump(config, f, default_flow_style=False, sort_keys=False)
+
+ except Exception as e:
+ print(f"Error: {e}, SageMaker PySDK intelligent config file is not valid!")
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-code-editor.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-code-editor.conf
new file mode 100644
index 000000000..58e187d02
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-code-editor.conf
@@ -0,0 +1,12 @@
+[include]
+files = supervisord-common.conf
+
+[program:codeeditorserver]
+directory=%(ENV_HOME)s
+command=start-code-editor
+autostart=true
+autorestart=true
+stdout_logfile=/dev/fd/1 ; Redirect web server logs to stdout
+stderr_logfile=/dev/fd/1
+stdout_logfile_maxbytes = 0 ; Fix: https://github.com/Supervisor/supervisor/issues/935
+stderr_logfile_maxbytes = 0 ; Fix: https://github.com/Supervisor/supervisor/issues/935
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-common.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-common.conf
new file mode 100644
index 000000000..27820d4c4
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-common.conf
@@ -0,0 +1,18 @@
+[supervisord]
+nodaemon=true
+
+pidfile=/var/run/supervisord/supervisord.pid
+logfile=%(ENV_STUDIO_LOGGING_DIR)s/%(ENV_SAGEMAKER_APP_TYPE_LOWERCASE)s/supervisord/supervisord.log
+logfile_maxbytes=5MB
+logfile_backups=10
+redirect_stderr=true
+
+[unix_http_server]
+file=/var/run/supervisord/supervisor.sock
+chmod=0700
+
+[supervisorctl]
+serverurl=unix:///var/run/supervisord/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-jupyter-lab.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-jupyter-lab.conf
new file mode 100644
index 000000000..5694ac116
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-jupyter-lab.conf
@@ -0,0 +1,11 @@
+[include]
+files = supervisord-common.conf
+
+[program:jupyterlabserver]
+directory=%(ENV_HOME)s
+command=start-jupyter-server
+stopasgroup=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui-code-editor.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui-code-editor.conf
new file mode 100644
index 000000000..41dec7fe1
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui-code-editor.conf
@@ -0,0 +1,28 @@
+[supervisord]
+nodaemon=true
+
+pidfile=/var/run/supervisord/supervisord.pid
+logfile=%(ENV_STUDIO_LOGGING_DIR)s/%(ENV_SAGEMAKER_APP_TYPE_LOWERCASE)s/supervisord/supervisord.log
+logfile_maxbytes=5MB
+logfile_backups=10
+redirect_stderr=true
+
+[unix_http_server]
+file=/var/run/supervisord/supervisor.sock
+chmod=0700
+
+[supervisorctl]
+serverurl=unix:///var/run/supervisord/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[program:codeeditorserver]
+directory=%(ENV_HOME)s
+command=start-sagemaker-ui-code-editor
+autostart=true
+autorestart=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui-default.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui-default.conf
new file mode 100644
index 000000000..13ffc89f3
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui-default.conf
@@ -0,0 +1,27 @@
+[supervisord]
+nodaemon=true
+
+pidfile=/var/run/supervisord/supervisord.pid
+logfile=%(ENV_STUDIO_LOGGING_DIR)s/%(ENV_SAGEMAKER_APP_TYPE_LOWERCASE)s/supervisord/supervisord.log
+logfile_maxbytes=5MB
+logfile_backups=10
+redirect_stderr=true
+
+[unix_http_server]
+file=/var/run/supervisord/supervisor.sock
+chmod=0700
+
+[supervisorctl]
+serverurl=unix:///var/run/supervisord/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[program:jupyterlabserver]
+directory=%(ENV_HOME)s
+command=start-sagemaker-ui-default-jupyter-server
+stopasgroup=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui.conf
new file mode 100644
index 000000000..ed00f4d5b
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord-sagemaker-ui.conf
@@ -0,0 +1,35 @@
+[supervisord]
+nodaemon=true
+
+pidfile=/var/run/supervisord/supervisord.pid
+logfile=%(ENV_STUDIO_LOGGING_DIR)s/%(ENV_SAGEMAKER_APP_TYPE_LOWERCASE)s/supervisord/supervisord.log
+logfile_maxbytes=5MB
+logfile_backups=10
+redirect_stderr=true
+
+[unix_http_server]
+file=/var/run/supervisord/supervisor.sock
+chmod=0700
+
+[supervisorctl]
+serverurl=unix:///var/run/supervisord/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[program:jupyterlabserver]
+directory=%(ENV_HOME)s
+command=start-sagemaker-ui-jupyter-server
+stopasgroup=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+
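+; Started on demand (autostart=false) by start-workflows-container.sh via supervisorctl once the workflows containers are up.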
+[program:workflows_healthcheck]
+command=bash -c "/etc/sagemaker-ui/workflows/healthcheck.sh"
+autostart=false
+redirect_stderr=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile_maxbytes=0
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord.conf b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord.conf
new file mode 100644
index 000000000..686f4a5c4
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/etc/supervisor/conf.d/supervisord.conf
@@ -0,0 +1,27 @@
+[supervisord]
+nodaemon=true
+
+pidfile=/var/run/supervisord/supervisord.pid
+logfile=%(ENV_STUDIO_LOGGING_DIR)s/%(ENV_SAGEMAKER_APP_TYPE_LOWERCASE)s/supervisord/supervisord.log
+logfile_maxbytes=5MB
+logfile_backups=10
+redirect_stderr=true
+
+[unix_http_server]
+file=/var/run/supervisord/supervisor.sock
+chmod=0700
+
+[supervisorctl]
+serverurl=unix:///var/run/supervisord/supervisor.sock
+
+[rpcinterface:supervisor]
+supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[program:jupyterlabserver]
+directory=%(ENV_HOME)s
+command=start-jupyter-server
+stopasgroup=true
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-code-editor b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-code-editor
new file mode 100755
index 000000000..bf55a3713
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-code-editor
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -e
+
+# Generate and execute the shell code that modifies shell variables to include
+# micromamba commands (e.g. using `micromamba activate` to activate environments)
+eval "$(micromamba shell hook --shell=bash)"
+
+# Activate conda environment 'base', where supervisord is installed
+micromamba activate base
+
+# Set up SAGEMAKER_APP_TYPE_LOWERCASE based on SAGEMAKER_APP_TYPE
+export SAGEMAKER_APP_TYPE_LOWERCASE=$(echo $SAGEMAKER_APP_TYPE | tr '[:upper:]' '[:lower:]')
+
+mkdir -p $STUDIO_LOGGING_DIR/$SAGEMAKER_APP_TYPE_LOWERCASE/supervisord
+exec supervisord -c /etc/supervisor/conf.d/supervisord-code-editor.conf -n
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-jupyter-server b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-jupyter-server
new file mode 100755
index 000000000..f71e97f79
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-jupyter-server
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+set -e
+
+# Generate and execute the shell code that modifies shell variables so that micromamba
+# commands (e.g. `micromamba activate` to activate environments) work in this script
+eval "$(micromamba shell hook --shell=bash)"
+
+if [ -n "$SAGEMAKER_RECOVERY_MODE" ]; then
+ export HOME=$SAGEMAKER_RECOVERY_MODE_HOME
+ # Activate conda environment `sagemaker-recovery-mode`
+ micromamba activate /opt/conda/envs/sagemaker-recovery-mode
+else
+ # Activate conda environment 'base'
+ micromamba activate base
+fi
+
+# Set up SAGEMAKER_APP_TYPE_LOWERCASE based on SAGEMAKER_APP_TYPE
+export SAGEMAKER_APP_TYPE_LOWERCASE=$(echo $SAGEMAKER_APP_TYPE | tr '[:upper:]' '[:lower:]')
+
+# Start supervisord with supervisord configuration
+# Since program 'jupyterlabserver' autostarts by default, it will be started
+# automatically along with supervisord
+mkdir -p $STUDIO_LOGGING_DIR/$SAGEMAKER_APP_TYPE_LOWERCASE/supervisord
+exec supervisord -c /etc/supervisor/conf.d/supervisord.conf -n
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-sagemaker-ui-code-editor b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-sagemaker-ui-code-editor
new file mode 100755
index 000000000..82ed47ffe
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-sagemaker-ui-code-editor
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -e
+
+# Generate and execute the shell code that modifies shell variables so that micromamba
+# commands (e.g. `micromamba activate` to activate environments) work in this script
+eval "$(micromamba shell hook --shell=bash)"
+
+# Activate conda environment 'base', where supervisord is installed
+micromamba activate base
+
+export SAGEMAKER_APP_TYPE_LOWERCASE=$(echo $SAGEMAKER_APP_TYPE | tr '[:upper:]' '[:lower:]')
+export SERVICE_NAME='SageMakerUnifiedStudio'
+
+mkdir -p $STUDIO_LOGGING_DIR/$SAGEMAKER_APP_TYPE_LOWERCASE/supervisord
+exec supervisord -c /etc/supervisor/conf.d/supervisord-sagemaker-ui-code-editor.conf -n
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-sagemaker-ui-jupyter-server b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-sagemaker-ui-jupyter-server
new file mode 100755
index 000000000..49c34623a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/entrypoint-sagemaker-ui-jupyter-server
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+set -e
+
+# Generate and execute the shell code that modifies shell variables so that micromamba
+# commands (e.g. `micromamba activate` to activate environments) work in this script
+eval "$(micromamba shell hook --shell=bash)"
+
+# apply patches for SMUS
+/etc/patches/apply_patches.sh smus && sudo rm -rf /etc/patches
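+# The patch directory is deleted after a successful run, presumably so patches are applied at most once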
+
+# Activate the conda environment depending on whether we are in Recovery or Standard mode.
+if [ -n "$SAGEMAKER_RECOVERY_MODE" ]; then
+ export HOME=$SAGEMAKER_RECOVERY_MODE_HOME
+ # Activate conda environment `sagemaker-recovery-mode`
+ micromamba activate sagemaker-recovery-mode
+else
+ # Activate conda environment 'base', where supervisord is installed
+ micromamba activate base
+fi
+
+export SAGEMAKER_APP_TYPE_LOWERCASE=$(echo $SAGEMAKER_APP_TYPE | tr '[:upper:]' '[:lower:]')
+export SERVICE_NAME='SageMakerUnifiedStudio'
+
+mkdir -p $STUDIO_LOGGING_DIR/$SAGEMAKER_APP_TYPE_LOWERCASE/supervisord
+exec supervisord -c /etc/supervisor/conf.d/supervisord-sagemaker-ui.conf -n
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/merge-settings-util.py b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/merge-settings-util.py
new file mode 100644
index 000000000..8b93cae1c
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/merge-settings-util.py
@@ -0,0 +1,22 @@
+import json
+from sys import argv
+
+
+# Merges the JSON files file1 and file2; top-level keys in file2 overwrite any keys that
+# already exist in file1, and the merged result is written back to file1.
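+# Usage: python3 merge-settings-util.py <file1> <file2>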
+def main():
+ file1, file2 = argv[1], argv[2]
+ # Read JSON data from files
+ with open(file1, "r") as f1, open(file2, "r") as f2:
+ data1 = json.load(f1)
+ data2 = json.load(f2)
+
+ # Merge the data (simple update)
+ merged_data = {**data1, **data2}
+
+ # Write the merged data to a new file
+ with open(file1, "w") as f:
+ json.dump(merged_data, f)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/restart-jupyter-server b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/restart-jupyter-server
new file mode 100755
index 000000000..6f2af98d3
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/restart-jupyter-server
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+echo "Restarting the Jupyter server. This page should refresh in a few seconds. Note that any terminals will be closed."
+echo "If this page doesn't refresh after a few seconds, try reloading your browser window."
+echo "Restarting now..."
+nohup supervisorctl -c /etc/supervisor/conf.d/supervisord.conf restart jupyterlabserver > /dev/null 2>&1 &
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/restart-sagemaker-ui-jupyter-server b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/restart-sagemaker-ui-jupyter-server
new file mode 100755
index 000000000..8f5207b8a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/restart-sagemaker-ui-jupyter-server
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+echo "Restarting the Jupyter server. This page should refresh in a few seconds. Note that any terminals will be closed."
+echo "If this page doesn't refresh after a few seconds, try reloading your browser window."
+echo "Restarting now..."
+nohup supervisorctl -c /etc/supervisor/conf.d/supervisord-sagemaker-ui.conf restart jupyterlabserver > /dev/null 2>&1 &
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-code-editor b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-code-editor
new file mode 100755
index 000000000..bdd97c16a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-code-editor
@@ -0,0 +1,84 @@
+#!/bin/bash
+set -e
+
+EFS_MOUNT_POINT="/opt/amazon/sagemaker"
+EBS_MOUNT_POINT="/home/sagemaker-user"
+
+persistent_settings_folder="${EBS_MOUNT_POINT}/sagemaker-code-editor-server-data"
+default_settings_folder="${EFS_MOUNT_POINT}/sagemaker-code-editor-server-data"
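+# Default settings ship with the image under the EFS mount point; the functions below copy or
+# merge them into the EBS-backed home directory, which holds the persistent per-user state.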
+
+override_machine_settings() {
+ # create a new settings file with preset defaults or merge the defaults into the existing settings file
+ local settings_relative_path="data/Machine"
+ local settings_file_path_suffix="${settings_relative_path}/settings.json"
+ local persistent_machine_settings_file="${persistent_settings_folder}/${settings_file_path_suffix}"
+ local default_machine_settings_file="${default_settings_folder}/${settings_file_path_suffix}"
+
+ if [ ! -f "$persistent_machine_settings_file" ]; then
+ # copy settings file to EBS if it doesn't exist in EBS
+ mkdir -p "${persistent_settings_folder}/${settings_relative_path}"
+ cp "$default_machine_settings_file" "$persistent_machine_settings_file"
+ echo "Created persistent settings file with default settings at $persistent_machine_settings_file"
+ else
+ # if it does exist then merge settings
+ echo "File already exists: ${persistent_machine_settings_file}. Merging default settings with existing settings."
+ python3 /usr/local/bin/merge-settings-util.py "$persistent_machine_settings_file" "$default_machine_settings_file"
+ fi
+}
+
+copy_user_settings() {
+ local settings_relative_path="data/User"
+ local settings_file_path_suffix="${settings_relative_path}/settings.json"
+ local persistent_user_settings_file="${persistent_settings_folder}/${settings_file_path_suffix}"
+ local default_user_settings_file="${default_settings_folder}/${settings_file_path_suffix}"
+ if [ ! -f "$persistent_user_settings_file" ]; then
+ # copy user settings file to EBS if it doesn't exist in EBS
+ mkdir -p "${persistent_settings_folder}/${settings_relative_path}"
+ cp "$default_user_settings_file" "$persistent_user_settings_file"
+ echo "Created persistent settings file with default settings at $persistent_user_settings_file"
+ fi
+}
+
+install_prepackaged_extensions() {
+ local prepackaged_extensions_dir="${default_settings_folder}/extensions"
+ local persistent_extensions_dir="${persistent_settings_folder}/extensions"
+
+    # if the extensions directory doesn't exist, this is the first time the app is being opened
+ if [ ! -d "${persistent_extensions_dir}" ]; then
+ mkdir -p $persistent_extensions_dir
+
+ # find all pre-packaged extensions folders, symlink them to the persistent volume
+ find "$prepackaged_extensions_dir" -mindepth 1 -maxdepth 1 -type d | while read -r dir; do
+ dir_name=$(basename "$dir")
+
+ ln -s "$dir" "${persistent_extensions_dir}/${dir_name}"
+ echo "Added extension: $dir"
+ done
+ fi
+}
+
+eval "$(micromamba shell hook --shell=bash)"
+
+# Activate conda environment 'base', which is the default environment for sagemaker-distribution
+micromamba activate base
+
+# Start code-editor server
+if [ -n "$SAGEMAKER_APP_TYPE_LOWERCASE" ]; then
+ # SAGEMAKER_APP_TYPE is set, indicating the server is running within a SageMaker app.
+ override_machine_settings
+ copy_user_settings
+ install_prepackaged_extensions
+ # Configure the base url to be `//default`.
+ sagemaker-code-editor --host 0.0.0.0 --port 8888 \
+ --without-connection-token \
+ --base-path "/$SAGEMAKER_APP_TYPE_LOWERCASE/default" \
+ --server-data-dir $persistent_settings_folder \
+ --extensions-dir ${persistent_settings_folder}/extensions \
+ --user-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-user-data
+else
+ sagemaker-code-editor --host 0.0.0.0 --port 8888 \
+ --without-connection-token \
+ --server-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-server-data \
+        --extensions-dir /opt/amazon/sagemaker/sagemaker-code-editor-server-data/extensions \
+ --user-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-user-data
+fi
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-jupyter-server b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-jupyter-server
new file mode 100755
index 000000000..112df1018
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-jupyter-server
@@ -0,0 +1,47 @@
+#!/bin/bash
+set -e
+
+eval "$(micromamba shell hook --shell=bash)"
+
+if [ -n "$SAGEMAKER_RECOVERY_MODE" ]; then
+ export HOME=$SAGEMAKER_RECOVERY_MODE_HOME
+ # Activate conda environment `sagemaker-recovery-mode`
+ micromamba activate /opt/conda/envs/sagemaker-recovery-mode
+else
+ # Activate conda environment 'base'
+ micromamba activate base
+ # Uninstall SMUS-specific extensions
+ micromamba remove -y sagemaker-studio-dataengineering-sessions sagemaker-studio-dataengineering-extensions
+ # Disable SMUS-specific extensions
+ jupyter labextension disable sagemaker-data-explorer:plugin
+fi
+
+# Start Jupyter server in rtc mode for shared spaces
+if [ -n "$SAGEMAKER_APP_TYPE_LOWERCASE" ] && [ "$SAGEMAKER_SPACE_TYPE_LOWERCASE" == "shared" ]; then
+ jupyter labextension enable @jupyter/collaboration-extension
+ jupyter labextension enable @jupyter/docprovider-extension
+ # SAGEMAKER_APP_TYPE is set, indicating the server is running within a SageMaker
+ # app. Configure the base url to be `//default`.
+ # SAGEMAKER_SPACE_TYPE_LOWERCASE flag is used to determine if the server should start
+ # in real-time-collaboration mode for a given space.
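+    # Token auth is disabled and all origins are allowed; access control is expected to be
+    # handled upstream by the SageMaker app proxy.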
+ jupyter lab --ip 0.0.0.0 --port 8888 \
+ --ServerApp.base_url="/$SAGEMAKER_APP_TYPE_LOWERCASE/default" \
+ --ServerApp.token='' \
+ --ServerApp.allow_origin='*' \
+ --collaborative \
+ --ServerApp.identity_provider_class=sagemaker_jupyterlab_extension_common.identity.SagemakerIdentityProvider \
+ --YDocExtension.ystore_class=sagemaker_jupyterlab_extension_common.ydoc_override.ydoc.MySQLiteYStore
+
+# Start Jupyter server
+elif [ -n "$SAGEMAKER_APP_TYPE_LOWERCASE" ]; then
+ # SAGEMAKER_APP_TYPE is set, indicating the server is running within a SageMaker
+ # app. Configure the base url to be `//default`.
+ jupyter lab --ip 0.0.0.0 --port 8888 \
+ --ServerApp.base_url="/$SAGEMAKER_APP_TYPE_LOWERCASE/default" \
+ --ServerApp.token='' \
+ --ServerApp.allow_origin='*'
+else
+ jupyter lab --ip 0.0.0.0 --port 8888 \
+ --ServerApp.token='' \
+ --ServerApp.allow_origin='*'
+fi
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-sagemaker-ui-code-editor b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-sagemaker-ui-code-editor
new file mode 100755
index 000000000..9183f4858
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-sagemaker-ui-code-editor
@@ -0,0 +1,107 @@
+#!/bin/bash
+set -e
+
+EFS_MOUNT_POINT="/opt/amazon/sagemaker"
+EBS_MOUNT_POINT="/home/sagemaker-user"
+
+persistent_settings_folder="${EBS_MOUNT_POINT}/sagemaker-code-editor-server-data"
+default_settings_folder="${EFS_MOUNT_POINT}/sagemaker-code-editor-server-data"
+
+override_machine_settings() {
+ # create a new settings file with preset defaults or merge the defaults into the existing settings file
+ local settings_relative_path="data/Machine"
+ local settings_file_path_suffix="${settings_relative_path}/settings.json"
+ local persistent_machine_settings_file="${persistent_settings_folder}/${settings_file_path_suffix}"
+ local default_machine_settings_file="${default_settings_folder}/${settings_file_path_suffix}"
+
+ if [ ! -f "$persistent_machine_settings_file" ]; then
+ # copy settings file to EBS if it doesn't exist in EBS
+ mkdir -p "${persistent_settings_folder}/${settings_relative_path}"
+ cp "$default_machine_settings_file" "$persistent_machine_settings_file"
+ echo "Created persistent settings file with default settings at $persistent_machine_settings_file"
+ else
+ # if it does exist then merge settings
+ echo "File already exists: ${persistent_machine_settings_file}. Merging default settings with existing settings."
+ python3 /usr/local/bin/merge-settings-util.py "$persistent_machine_settings_file" "$default_machine_settings_file"
+ fi
+}
+
+copy_user_settings() {
+ local settings_relative_path="data/User"
+ local settings_file_path_suffix="${settings_relative_path}/settings.json"
+ local persistent_user_settings_file="${persistent_settings_folder}/${settings_file_path_suffix}"
+ local default_user_settings_file="${default_settings_folder}/${settings_file_path_suffix}"
+ if [ ! -f "$persistent_user_settings_file" ]; then
+ # copy user settings file to EBS if it doesn't exist in EBS
+ mkdir -p "${persistent_settings_folder}/${settings_relative_path}"
+ cp "$default_user_settings_file" "$persistent_user_settings_file"
+ echo "Created persistent settings file with default settings at $persistent_user_settings_file"
+ fi
+}
+
+install_prepackaged_extensions() {
+ local prepackaged_extensions_dir="${default_settings_folder}/extensions"
+ local persistent_extensions_dir="${persistent_settings_folder}/extensions"
+
+    # if the extensions directory doesn't exist, this is the first time the app is being opened
+ if [ ! -d "${persistent_extensions_dir}" ]; then
+ mkdir -p $persistent_extensions_dir
+
+ # find all pre-packaged extensions folders, symlink them to the persistent volume
+ find "$prepackaged_extensions_dir" -mindepth 1 -maxdepth 1 -type d | while read -r dir; do
+ dir_name=$(basename "$dir")
+
+ ln -s "$dir" "${persistent_extensions_dir}/${dir_name}"
+ echo "Added extension: $dir"
+ done
+ fi
+}
+
+set_code_editor_theme() {
+ echo "Setting code editor theme"
+ bash /etc/sagemaker-ui/set_code_editor_theme.sh
+}
+
+disableJupyterKernels() {
+ local kernel_list
+ kernel_list=$(jupyter kernelspec list)
+
+ for kernel in "$@"; do
+ if echo "$kernel_list" | grep -q "$kernel"; then
+ echo "Removing kernel: $kernel"
+ jupyter kernelspec remove -f -y "$kernel"
+ else
+ echo "Kernel not found: $kernel"
+ fi
+ done
+}
+
+eval "$(micromamba shell hook --shell=bash)"
+
+# Activate conda environment 'base', which is the default environment for sagemaker-distribution
+micromamba activate base
+
+# Disable Jupyter Kernels
+disableJupyterKernels pysparkkernel sparkkernel glue_spark glue_pyspark
+
+# Start code-editor server
+if [ -n "$SAGEMAKER_APP_TYPE_LOWERCASE" ]; then
+ # SAGEMAKER_APP_TYPE is set, indicating the server is running within a SageMaker app.
+ override_machine_settings
+ copy_user_settings
+ install_prepackaged_extensions
+ set_code_editor_theme
+ # Configure the base url to be `//default`.
+ sagemaker-code-editor --host 0.0.0.0 --port 8888 \
+ --without-connection-token \
+ --base-path "/$SAGEMAKER_APP_TYPE_LOWERCASE/default" \
+ --server-data-dir $persistent_settings_folder \
+ --extensions-dir ${persistent_settings_folder}/extensions \
+ --user-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-user-data
+else
+ sagemaker-code-editor --host 0.0.0.0 --port 8888 \
+ --without-connection-token \
+ --server-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-server-data \
+        --extensions-dir /opt/amazon/sagemaker/sagemaker-code-editor-server-data/extensions \
+ --user-data-dir /opt/amazon/sagemaker/sagemaker-code-editor-user-data
+fi
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-sagemaker-ui-jupyter-server b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-sagemaker-ui-jupyter-server
new file mode 100755
index 000000000..dfe780ef2
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/dirs/usr/local/bin/start-sagemaker-ui-jupyter-server
@@ -0,0 +1,52 @@
+#!/bin/bash
+set -e
+
+eval "$(micromamba shell hook --shell=bash)"
+
+# Activate the conda environment depending on whether we are in Recovery or Standard mode.
+if [ -n "$SAGEMAKER_RECOVERY_MODE" ]; then
+ # Activate conda environment `sagemaker-recovery-mode`
+ micromamba activate sagemaker-recovery-mode
+else
+ # Activate conda environment 'base' which is the default for Cosmos
+ micromamba activate base
+fi
+
+sudo cp -r /etc/sagemaker-ui/kernels/. /opt/conda/share/jupyter/kernels/
+sudo cp /etc/sagemaker-ui/jupyter/server/jupyter_server_config.py /opt/conda/etc/jupyter/
+
+mkdir -p /opt/conda/share/jupyter/lab/settings
+cp -r /etc/sagemaker-ui/jupyter/lab/settings/. /opt/conda/share/jupyter/lab/settings
+
+# Remove the Glue/Spark kernelspecs if they are present
+for kernel in pysparkkernel sparkkernel glue_spark glue_pyspark; do
+    if jupyter kernelspec list | grep -q "$kernel"; then
+        jupyter-kernelspec remove -f -y "$kernel"
+    fi
+done
+
+if [ -n "$SAGEMAKER_RECOVERY_MODE" ]; then
+ # Disabling collaboration and identity_provider_class flags for recovery mode
+ jupyter lab --ip 0.0.0.0 --port 8888 \
+ --ServerApp.base_url="/$SAGEMAKER_APP_TYPE_LOWERCASE/default" \
+ --ServerApp.token='' \
+ --ServerApp.allow_origin='*' \
+ --SchedulerApp.db_url='sqlite:////tmp/.jupyter_scheduler_do_not_delete.sqlite'
+else
+ jupyter lab --ip 0.0.0.0 --port 8888 \
+ --ServerApp.base_url="/$SAGEMAKER_APP_TYPE_LOWERCASE/default" \
+ --ServerApp.token='' \
+ --ServerApp.allow_origin='*' \
+ --SchedulerApp.db_url='sqlite:////tmp/.jupyter_scheduler_do_not_delete.sqlite' \
+ --collaborative \
+ --ServerApp.identity_provider_class='sagemaker_jupyter_server_extension.identity.SageMakerIdentityProvider'
+fi
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/gpu.arg_based_env.in b/build_artifacts/v3/v3.3/v3.3.4/gpu.arg_based_env.in
new file mode 100644
index 000000000..577d9abbe
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/gpu.arg_based_env.in
@@ -0,0 +1,6 @@
+conda-forge::cuda-compiler=$CUDA_MAJOR_MINOR_VERSION
+conda-forge::cuda-libraries=$CUDA_MAJOR_MINOR_VERSION
+conda-forge::cuda-libraries-dev=$CUDA_MAJOR_MINOR_VERSION
+conda-forge::cuda-nvml-dev=12.6.77.*
+conda-forge::cuda-command-line-tools=$CUDA_MAJOR_MINOR_VERSION
+conda-forge::gds-tools=1.11.1.6.*
\ No newline at end of file
diff --git a/build_artifacts/v3/v3.3/v3.3.4/gpu.env.in b/build_artifacts/v3/v3.3/v3.3.4/gpu.env.in
new file mode 100644
index 000000000..4ed99289c
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/gpu.env.in
@@ -0,0 +1,66 @@
+# This file is auto-generated.
+conda-forge::s3fs[version='>=2024.12.0,<2024.13.0']
+conda-forge::seaborn[version='>=0.13.2,<0.14.0']
+conda-forge::jupyter-activity-monitor-extension[version='>=0.3.2,<0.4.0']
+conda-forge::mlflow[version='>=2.22.0,<2.23.0']
+conda-forge::sagemaker-mlflow[version='>=0.1.0,<0.2.0']
+conda-forge::langchain-aws[version='>=0.2.19,<0.3.0']
+conda-forge::jupyter-collaboration[version='>=3.1.0,<3.2.0']
+conda-forge::sagemaker-code-editor[version='>=1.6.3,<1.7.0']
+conda-forge::amazon-sagemaker-jupyter-ai-q-developer[version='>=1.2.7,<1.3.0']
+conda-forge::amazon-q-developer-jupyterlab-ext[version='>=3.4.8,<3.5.0']
+conda-forge::amazon_sagemaker_sql_editor[version='>=0.1.18,<0.2.0']
+conda-forge::amazon-sagemaker-sql-magic[version='>=0.1.4,<0.2.0']
+conda-forge::langchain[version='>=0.3.27,<0.4.0']
+conda-forge::fastapi[version='>=0.115.14,<0.116.0']
+conda-forge::uvicorn[version='>=0.35.0,<0.36.0']
+conda-forge::pytorch[version='>=2.6.0,<2.7.0']
+conda-forge::tensorflow[version='>=2.18.0,<2.19.0']
+conda-forge::python[version='>=3.12.9,<3.13.0']
+conda-forge::pip[version='>=25.1.1,<25.2.0']
+conda-forge::torchvision[version='>=0.21.0,<0.22.0']
+conda-forge::numpy[version='>=1.26.4,<1.27.0']
+conda-forge::pandas[version='>=2.2.3,<2.3.0']
+conda-forge::scikit-learn[version='>=1.6.1,<1.7.0']
+conda-forge::jinja2[version='>=3.1.6,<3.2.0']
+conda-forge::matplotlib-base[version='>=3.10.6,<3.11.0']
+conda-forge::sagemaker-headless-execution-driver[version='>=0.0.13,<0.1.0']
+conda-forge::ipython[version='>=8.37.0,<8.38.0']
+conda-forge::scipy[version='>=1.15.2,<1.16.0']
+conda-forge::keras[version='>=3.10.0,<3.11.0']
+conda-forge::py-xgboost-gpu[version='>=2.1.4,<2.2.0']
+conda-forge::jupyterlab[version='>=4.4.7,<4.5.0']
+conda-forge::ipywidgets[version='>=8.1.7,<8.2.0']
+conda-forge::conda[version='>=25.5.1,<25.6.0']
+conda-forge::boto3[version='>=1.37.1,<1.38.0']
+conda-forge::sagemaker-python-sdk[version='>=2.245.0,<2.246.0']
+conda-forge::supervisor[version='>=4.2.5,<4.3.0']
+conda-forge::autogluon[version='>=1.3.1,<1.4.0']
+conda-forge::aws-glue-sessions[version='>=1.0.9,<1.1.0']
+conda-forge::sagemaker-kernel-wrapper[version='>=0.0.6,<0.1.0']
+conda-forge::jupyter-ai[version='>=2.31.6,<2.32.0']
+conda-forge::jupyter-scheduler[version='>=2.11.0,<2.12.0']
+conda-forge::jupyter-lsp[version='>=2.2.6,<2.3.0']
+conda-forge::jupyterlab-lsp[version='>=5.0.3,<5.1.0']
+conda-forge::jupyterlab-git[version='>=0.51.2,<0.52.0']
+conda-forge::python-lsp-server[version='>=1.12.2,<1.13.0']
+conda-forge::notebook[version='>=7.4.5,<7.5.0']
+conda-forge::altair[version='>=5.5.0,<5.6.0']
+conda-forge::sagemaker-studio-analytics-extension[version='>=0.2.2,<0.3.0']
+conda-forge::jupyter-dash[version='>=0.4.2,<0.5.0']
+conda-forge::sagemaker-jupyterlab-extension[version='>=0.5.1,<0.6.0']
+conda-forge::sagemaker-jupyterlab-emr-extension[version='>=0.4.3,<0.5.0']
+conda-forge::amazon-sagemaker-jupyter-scheduler[version='>=3.1.15,<3.2.0']
+conda-forge::jupyter-server-proxy[version='>=4.4.0,<4.5.0']
+conda-forge::pyhive[version='>=0.7.0,<0.8.0']
+conda-forge::python-gssapi[version='>=1.9.0,<1.10.0']
+conda-forge::tf-keras[version='>=2.18.0,<2.19.0']
+conda-forge::git-remote-codecommit[version='>=1.16,<1.17.0']
+conda-forge::docker-cli[version='>=27.5.1,<27.6.0']
+conda-forge::aioboto3[version='>=14.1.0,<14.2.0']
+conda-forge::sagemaker-studio-cli[version='>=1.0.7,<1.1.0']
+conda-forge::sagemaker-studio[version='>=1.0.21,<1.1.0']
+conda-forge::sagemaker-studio-dataengineering-sessions[version='>=1.1.7,<1.2.0']
+conda-forge::sagemaker-studio-dataengineering-extensions[version='>=1.1.4,<1.2.0']
+conda-forge::amzn-sagemaker-aiops-jupyterlab-extension[version='>=1.0.4,<1.1.0']
+conda-forge::aws-s3-access-grants-boto3-plugin[version='>=1.2.0,<1.3.0']
diff --git a/build_artifacts/v3/v3.3/v3.3.4/gpu.pinned_env.in b/build_artifacts/v3/v3.3/v3.3.4/gpu.pinned_env.in
new file mode 100644
index 000000000..f066ccf6a
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/gpu.pinned_env.in
@@ -0,0 +1,6 @@
+conda-forge::dash[version='<=2.18.1']
+conda-forge::evaluate[version='<0.4.2']
+conda-forge::libsqlite[version='<3.49.0']
+conda-forge::urllib3[version='<2']
+conda-forge::papermill[version='>=2.6.0']
+conda-forge::plotly[version='<6.1.0']
diff --git a/build_artifacts/v3/v3.3/v3.3.4/patch_glue_pyspark.json b/build_artifacts/v3/v3.3/v3.3.4/patch_glue_pyspark.json
new file mode 100644
index 000000000..f92eee896
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/patch_glue_pyspark.json
@@ -0,0 +1,15 @@
+{
+ "argv": [
+ "/opt/conda/bin/python",
+ "-m",
+ "sagemaker_kernel_wrapper.sm_gis_wrapper",
+ "-m",
+ "aws_glue_interactive_sessions_kernel.glue_pyspark.GlueKernel",
+ "-f",
+ "{connection_file}"
+ ],
+ "display_name": "Glue PySpark",
+ "env": {"request_origin": "SageMakerStudioPySparkNotebook", "glue_version": "3.0"},
+ "language": "python"
+}
+
diff --git a/build_artifacts/v3/v3.3/v3.3.4/patch_glue_spark.json b/build_artifacts/v3/v3.3/v3.3.4/patch_glue_spark.json
new file mode 100644
index 000000000..1bd168e75
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/patch_glue_spark.json
@@ -0,0 +1,15 @@
+{
+ "argv": [
+ "/opt/conda/bin/python",
+ "-m",
+ "sagemaker_kernel_wrapper.sm_gis_wrapper",
+ "-m",
+ "aws_glue_interactive_sessions_kernel.glue_spark.GlueKernel",
+ "-f",
+ "{connection_file}"
+ ],
+ "display_name": "Glue Spark",
+ "env": {"request_origin": "SageMakerStudioSparkNotebook", "glue_version": "3.0"},
+ "language": "python"
+}
+
diff --git a/build_artifacts/v3/v3.3/v3.3.4/source-version.txt b/build_artifacts/v3/v3.3/v3.3.4/source-version.txt
new file mode 100644
index 000000000..3f09e9109
--- /dev/null
+++ b/build_artifacts/v3/v3.3/v3.3.4/source-version.txt
@@ -0,0 +1 @@
+3.3.3
\ No newline at end of file