Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,12 @@

# TODO: once the pyenv Chef resource supports installing packages from a path (e.g. `pip install .`), convert the
# bash block to a recipe that uses the pyenv resource.
# Pick the pip invocation for installing the node package later in this recipe.
# In isolated (us-iso*) partitions PyPI is unreachable, so build isolation is
# disabled and the pre-staged local dependencies are used instead.
command =
  if aws_region.start_with?("us-iso")
    "pip install . --no-build-isolation"
  else
    "pip install ."
  end

if aws_region.start_with?("us-iso") && platform?('amazon') && node['platform_version'] == "2"
remote_file "#{node['cluster']['base_dir']}/node-dependencies.tgz" do
source "#{node['cluster']['artifacts_s3_url']}/dependencies/PyPi/#{node['kernel']['machine']}/node-dependencies.tgz"
Expand Down Expand Up @@ -59,7 +65,7 @@
mkdir aws-parallelcluster-custom-node
tar -xzf aws-parallelcluster-node.tgz --directory aws-parallelcluster-custom-node
cd aws-parallelcluster-custom-node/*aws-parallelcluster-node*
pip install .
#{command}
deactivate
NODE
end
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,14 @@
end

if aws_region.start_with?("us-iso")
dependency_package_name = "pypi-cfn-dependencies-#{node['cluster']['python-major-minor-version']}-#{node['kernel']['machine']}"
dependency_folder_name = dependency_package_name
if platform?('amazon') && node['platform_version'] == "2"
dependency_package_name = "cfn-dependencies"
dependency_folder_name = "cfn"
end
remote_file "#{node['cluster']['base_dir']}/cfn-dependencies.tgz" do
source "#{node['cluster']['artifacts_s3_url']}/dependencies/PyPi/#{node['kernel']['machine']}/cfn-dependencies.tgz"
source "#{node['cluster']['artifacts_s3_url']}/dependencies/PyPi/#{node['kernel']['machine']}/#{dependency_package_name}.tgz"
mode '0644'
retries 3
retry_delay 5
Expand All @@ -49,7 +55,7 @@
code <<-REQ
set -e
tar xzf cfn-dependencies.tgz
cd cfn
cd #{dependency_folder_name}
#{virtualenv_path}/bin/pip install * -f ./ --no-index
REQ
end
Expand All @@ -73,11 +79,16 @@
retry_delay 5
end

if aws_region.start_with?("us-iso")
command = "#{virtualenv_path}/bin/pip install #{cfnbootstrap_package} --no-build-isolation"
else
command = "#{virtualenv_path}/bin/pip install #{cfnbootstrap_package}"
end
bash "Install CloudFormation helpers from #{cfnbootstrap_package}" do
user 'root'
group 'root'
cwd '/tmp'
code "#{virtualenv_path}/bin/pip install #{cfnbootstrap_package}"
code command
creates "#{virtualenv_path}/bin/cfn-hup"
end

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,24 @@

use 'partial/_get_package_version_rpm'
use 'partial/_common'
use 'partial/_redhat_based'
# use 'partial/_redhat_based'
use 'partial/_install_from_tar'
use 'partial/_mount_umount'

# Build the shell snippet that compiles amazon-efs-utils from a source tarball
# and installs the resulting RPM, using a pre-staged cargo dependency bundle so
# the Rust efs-proxy builds offline.
#
# @param efs_utils_tarball [String] filename of the efs-utils source tarball
# @param efs_utils_package [String] name prefix of the RPM produced by `make rpm`
# @param efs_utils_version [String] efs-utils version (names the source dir and deps bundle)
# @return [String] newline-terminated multi-line shell script
def install_script_code(efs_utils_tarball, efs_utils_package, efs_utils_version)
  source_dir = "efs-utils-#{efs_utils_version}"
  proxy_deps = "efs-proxy-dependencies-#{efs_utils_version}.tar.gz"
  steps = [
    "set -e",
    "tar xf #{efs_utils_tarball}",
    "mv #{proxy_deps} #{source_dir}/src/proxy/",
    "cd #{source_dir}/src/proxy/",
    "tar -xf #{proxy_deps}",
    "cargo build --offline",
    "cd ../..",
    "make rpm",
    "yum -y install ./build/#{efs_utils_package}*rpm",
  ]
  steps.join("\n") + "\n"
end

# Build-time packages required to compile efs-utils (including its Rust
# efs-proxy component) into an RPM on RPM-based platforms.
def prerequisites
  ['rpm-build', 'make', 'rust', 'cargo', 'openssl-devel']
end
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,20 @@
action :create_if_missing
end

# In isolated (us-iso*) partitions crates.io is unreachable, so the efs-proxy
# cargo dependencies must be pre-staged from the cluster's S3 artifacts bucket.
# Restricted to RHEL 8 here; other platforms presumably obtain dependencies
# elsewhere -- TODO confirm coverage for the remaining us-iso platforms.
if aws_region.start_with?("us-iso") && platform?('redhat') && node['platform_version'] == "8"
efs_proxy_deps = "efs-proxy-dependencies-#{package_version}.tar.gz"
efs_proxy_deps_tarball = "#{node['cluster']['sources_dir']}/#{efs_proxy_deps}"
efs_proxy_deps_url = "#{node['cluster']['artifacts_s3_url']}/dependencies/efs/#{efs_proxy_deps}"
# Download the dependency bundle next to the efs-utils sources; create_if_missing
# keeps repeated converges idempotent.
remote_file efs_proxy_deps_tarball do
source efs_proxy_deps_url
mode '0644'
retries 3
retry_delay 5
# NOTE(review): this reuses the efs-utils tarball checksum for the proxy
# dependency tarball -- these are different artifacts, so verify this is the
# intended checksum (or omit it) before relying on integrity verification.
checksum new_resource.efs_utils_checksum
action :create_if_missing
end
end

# Install EFS Utils following https://docs.aws.amazon.com/efs/latest/ug/installing-amazon-efs-utils.html
bash "install efs utils" do
cwd node['cluster']['sources_dir']
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,10 @@
# limitations under the License.

virtualenv_path = cookbook_virtualenv_path
pypi_s3_uri = "#{node['cluster']['artifacts_s3_url']}/dependencies/PyPi/pypi-dependencies-#{node['cluster']['python-major-minor-version']}-#{node['kernel']['machine']}.tgz"
dependency_package_name = "pypi-cookbook-dependencies-#{node['cluster']['python-major-minor-version']}-#{node['kernel']['machine']}"
pypi_s3_uri = "#{node['cluster']['artifacts_s3_url']}/dependencies/PyPi/#{dependency_package_name}.tgz"
if platform?('amazon') && node['platform_version'] == "2"
dependency_package_name = "dependencies"
pypi_s3_uri = "#{node['cluster']['artifacts_s3_url']}/dependencies/PyPi/#{node['kernel']['machine']}/cookbook-dependencies.tgz"
end

Expand Down Expand Up @@ -46,7 +48,7 @@
code <<-REQ
set -e
tar xzf cookbook-dependencies.tgz
cd dependencies
cd #{dependency_package_name}
#{virtualenv_path}/bin/pip install * -f ./ --no-index
REQ
end
13 changes: 7 additions & 6 deletions util/upload-cookbook.sh
Original file line number Diff line number Diff line change
Expand Up @@ -93,21 +93,22 @@ main() {
# Create archive and md5
_cwd=$(pwd)
pushd "${_srcdir}" > /dev/null || exit
GIT_REF=$(git rev-parse HEAD)
_stashName=$(git stash create)
git archive --format tar --prefix="aws-parallelcluster-cookbook-${_version}/" "${_stashName:-HEAD}" | gzip > "${_cwd}/aws-parallelcluster-cookbook-${_version}.tgz"
git archive --format tar --prefix="aws-parallelcluster-cookbook-${_version}/" "${_stashName:-HEAD}" | gzip > "${_cwd}/aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz"
#tar zcvf "${_cwd}/aws-parallelcluster-cookbook-${_version}.tgz" --transform "s,^aws-parallelcluster-cookbook/,aws-parallelcluster-cookbook-${_version}/," ../aws-parallelcluster-cookbook
popd > /dev/null || exit
md5sum aws-parallelcluster-cookbook-${_version}.tgz > aws-parallelcluster-cookbook-${_version}.md5
md5sum aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz > aws-parallelcluster-cookbook-${_version}-${GIT_REF}.md5

# upload packages
_key_path="parallelcluster/${_version}/cookbooks"
if [ -n "${_scope}" ]; then
_key_path="${_key_path}/${_scope}"
fi
aws ${_profile} --region "${_region}" s3 cp aws-parallelcluster-cookbook-${_version}.tgz s3://${_bucket}/${_key_path}/aws-parallelcluster-cookbook-${_version}.tgz || _error_exit 'Failed to push cookbook to S3'
aws ${_profile} --region "${_region}" s3 cp aws-parallelcluster-cookbook-${_version}.md5 s3://${_bucket}/${_key_path}/aws-parallelcluster-cookbook-${_version}.md5 || _error_exit 'Failed to push cookbook md5 to S3'
aws ${_profile} --region "${_region}" s3api head-object --bucket ${_bucket} --key ${_key_path}/aws-parallelcluster-cookbook-${_version}.tgz --output text --query LastModified > aws-parallelcluster-cookbook-${_version}.tgz.date || _error_exit 'Failed to fetch LastModified date'
aws ${_profile} --region "${_region}" s3 cp aws-parallelcluster-cookbook-${_version}.tgz.date s3://${_bucket}/${_key_path}/aws-parallelcluster-cookbook-${_version}.tgz.date || _error_exit 'Failed to push cookbook date'
aws ${_profile} --region "${_region}" s3 cp aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz s3://${_bucket}/${_key_path}/aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz || _error_exit 'Failed to push cookbook to S3'
aws ${_profile} --region "${_region}" s3 cp aws-parallelcluster-cookbook-${_version}-${GIT_REF}.md5 s3://${_bucket}/${_key_path}/aws-parallelcluster-cookbook-${_version}-${GIT_REF}.md5 || _error_exit 'Failed to push cookbook md5 to S3'
# Fetch the LastModified date of the uploaded cookbook archive. The key must
# include ${GIT_REF}: the archive is uploaded under the GIT_REF-suffixed name,
# so querying the un-suffixed key would 404 on a fresh upload.
aws ${_profile} --region "${_region}" s3api head-object --bucket ${_bucket} --key ${_key_path}/aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz --output text --query LastModified > aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz.date || _error_exit 'Failed to fetch LastModified date'
aws ${_profile} --region "${_region}" s3 cp aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz.date s3://${_bucket}/${_key_path}/aws-parallelcluster-cookbook-${_version}-${GIT_REF}.tgz.date || _error_exit 'Failed to push cookbook date'

_bucket_region=$(aws ${_profile} s3api get-bucket-location --bucket ${_bucket} --output text)
if [ ${_bucket_region} = "None" ]; then
Expand Down
Loading